Dataset schema (column name, type, observed value/length range):

    query      string    length 5 – 1.23k
    positive   string    length 53 – 15.2k
    id_        int64     0 – 252k
    task_name  string    length 87 – 242
    negative   sequence  length 20 – 553
Like _add_value_to_dict_bf, but will not add v if another element under key k has the same value.
def _add_uniq_value_to_dict_bf(d, k, v):
    prev = d.get(k)
    if prev is None:
        d[k] = v
    elif isinstance(prev, list):
        if not isinstance(v, list):
            v = [v]
        for sel in v:
            found = False
            for el in prev:
                if el == sel:
                    found = True
                    break
            if not found:
                prev.append(sel)
    else:
        if isinstance(v, list):
            prev = [prev]
            for sel in v:
                found = False
                for el in prev:
                    if el == sel:
                        found = True
                        break
                if not found:
                    prev.append(sel)
            if len(prev) > 1:
                d[k] = prev
        elif prev != v:
            d[k] = [prev, v]
400
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/nexson_syntax/helper.py#L136-L168
[ "def", "createExternalTable", "(", "self", ",", "tableName", ",", "path", "=", "None", ",", "source", "=", "None", ",", "schema", "=", "None", ",", "*", "*", "options", ")", ":", "return", "self", ".", "sparkSession", ".", "catalog", ".", "createExternalTable", "(", "tableName", ",", "path", ",", "source", ",", "schema", ",", "*", "*", "options", ")" ]
Debugging helper. Prints out el contents.
def _debug_dump_dom(el):
    import xml.dom.minidom
    s = [el.nodeName]
    att_container = el.attributes
    for i in range(att_container.length):
        attr = att_container.item(i)
        s.append(' @{a}="{v}"'.format(a=attr.name, v=attr.value))
    for c in el.childNodes:
        if c.nodeType == xml.dom.minidom.Node.TEXT_NODE:
            s.append(' {a} type="TEXT" data="{d}"'.format(a=c.nodeName, d=c.data))
        else:
            s.append(' {a} child'.format(a=c.nodeName))
    return '\n'.join(s)
401
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/nexson_syntax/helper.py#L177-L190
[ "def", "merge_entities", "(", "self", ",", "from_entity_ids", ",", "to_entity_id", ",", "force", "=", "False", ",", "mount_point", "=", "DEFAULT_MOUNT_POINT", ")", ":", "params", "=", "{", "'from_entity_ids'", ":", "from_entity_ids", ",", "'to_entity_id'", ":", "to_entity_id", ",", "'force'", ":", "force", ",", "}", "api_path", "=", "'/v1/{mount_point}/entity/merge'", ".", "format", "(", "mount_point", "=", "mount_point", ")", "return", "self", ".", "_adapter", ".", "post", "(", "url", "=", "api_path", ",", "json", "=", "params", ",", ")" ]
Convert to a BadgerFish-style dict suitable for addition to an XML tree, or for v1.0 to v0.0 conversion.
def _convert_hbf_meta_val_for_xml(key, val):
    if isinstance(val, list):
        return [_convert_hbf_meta_val_for_xml(key, i) for i in val]
    is_literal = True
    content = None
    if isinstance(val, dict):
        ret = val
        if '@href' in val:
            is_literal = False
        else:
            content = val.get('$')
            if isinstance(content, dict) and _contains_hbf_meta_keys(val):
                is_literal = False
    else:
        ret = {}
        content = val
    if is_literal:
        ret.setdefault('@xsi:type', 'nex:LiteralMeta')
        ret.setdefault('@property', key)
        if content is not None:
            ret.setdefault('@datatype', _python_instance_to_nexml_meta_datatype(content))
        if ret is not val:
            ret['$'] = content
    else:
        ret.setdefault('@xsi:type', 'nex:ResourceMeta')
        ret.setdefault('@rel', key)
    return ret
402
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/nexson_syntax/helper.py#L249-L277
[ "def", "sleep", "(", "self", ",", "seconds", ")", ":", "start", "=", "self", ".", "time", "(", ")", "while", "(", "self", ".", "time", "(", ")", "-", "start", "<", "seconds", "and", "not", "self", ".", "need_to_stop", ".", "is_set", "(", ")", ")", ":", "self", ".", "need_to_stop", ".", "wait", "(", "self", ".", "sim_time", ")" ]
Returns obj for badgerfish and val for hbf. Appropriate for nested literals.
def find_nested_meta_first(d, prop_name, version):
    if _is_badgerfish_version(version):
        return find_nested_meta_first_bf(d, prop_name)
    p = '^' + prop_name
    return d.get(p)
403
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/nexson_syntax/helper.py#L361-L366
[ "def", "blocking_reader", "(", "reader", ",", "input", ",", "buffer_size", "=", "_DEFAULT_BUFFER_SIZE", ")", ":", "ion_event", "=", "None", "while", "True", ":", "read_event", "=", "(", "yield", "ion_event", ")", "ion_event", "=", "reader", ".", "send", "(", "read_event", ")", "while", "ion_event", "is", "not", "None", "and", "ion_event", ".", "event_type", ".", "is_stream_signal", ":", "data", "=", "input", ".", "read", "(", "buffer_size", ")", "if", "len", "(", "data", ")", "==", "0", ":", "# End of file.", "if", "ion_event", ".", "event_type", "is", "IonEventType", ".", "INCOMPLETE", ":", "ion_event", "=", "reader", ".", "send", "(", "NEXT_EVENT", ")", "continue", "else", ":", "yield", "ION_STREAM_END_EVENT", "return", "ion_event", "=", "reader", ".", "send", "(", "read_data_event", "(", "data", ")", ")" ]
Decode encoded credential attribute value.
def decode(value: str) -> Union[str, None, bool, int, float]:
    assert value.isdigit() or value[0] == '-' and value[1:].isdigit()

    if -I32_BOUND <= int(value) < I32_BOUND:  # it's an i32: it is its own encoding
        return int(value)
    elif int(value) == I32_BOUND:
        return None

    (prefix, value) = (int(value[0]), int(value[1:]))
    ival = int(value) - I32_BOUND
    if ival == 0:
        return ''  # special case: empty string encodes as 2**31
    elif ival == 1:
        return False  # sentinel for bool False
    elif ival == 2:
        return True  # sentinel for bool True

    blen = ceil(log(ival, 16) / 2)
    ibytes = unhexlify(ival.to_bytes(blen, 'big'))
    return DECODE_PREFIX.get(prefix, str)(ibytes.decode())
404
https://github.com/PSPC-SPAC-buyandsell/von_agent/blob/0b1c17cca3bd178b6e6974af84dbac1dfce5cf45/von_agent/codec.py#L70-L96
[ "def", "adapt", "(", "obj", ",", "to_cls", ")", ":", "if", "obj", "is", "None", ":", "return", "obj", "elif", "isinstance", "(", "obj", ",", "to_cls", ")", ":", "return", "obj", "errors", "=", "[", "]", "if", "hasattr", "(", "obj", ",", "'__adapt__'", ")", "and", "obj", ".", "__adapt__", ":", "try", ":", "return", "obj", ".", "__adapt__", "(", "to_cls", ")", "except", "(", "AdaptError", ",", "TypeError", ")", "as", "e", ":", "ex_type", ",", "ex", ",", "tb", "=", "sys", ".", "exc_info", "(", ")", "errors", ".", "append", "(", "(", "obj", ".", "__adapt__", ",", "ex_type", ",", "ex", ",", "tb", ")", ")", "if", "hasattr", "(", "to_cls", ",", "'__adapt__'", ")", "and", "to_cls", ".", "__adapt__", ":", "try", ":", "return", "to_cls", ".", "__adapt__", "(", "obj", ")", "except", "(", "AdaptError", ",", "TypeError", ")", "as", "e", ":", "ex_type", ",", "ex", ",", "tb", "=", "sys", ".", "exc_info", "(", ")", "errors", ".", "append", "(", "(", "to_cls", ".", "__adapt__", ",", "ex_type", ",", "ex", ",", "tb", ")", ")", "for", "k", "in", "get_adapter_path", "(", "obj", ",", "to_cls", ")", ":", "if", "k", "in", "__adapters__", ":", "try", ":", "return", "__adapters__", "[", "k", "]", "(", "obj", ",", "to_cls", ")", "except", "(", "AdaptError", ",", "TypeError", ")", "as", "e", ":", "ex_type", ",", "ex", ",", "tb", "=", "sys", ".", "exc_info", "(", ")", "errors", ".", "append", "(", "(", "__adapters__", "[", "k", "]", ",", "ex_type", ",", "ex", ",", "tb", ")", ")", "break", "raise", "AdaptErrors", "(", "'Could not adapt %r to %r'", "%", "(", "obj", ",", "to_cls", ")", ",", "errors", "=", "errors", ")" ]
Validates that the given parameters are exactly the method's declared parameters.
def validate_params_match(method, parameters):
    argspec = inspect.getargspec(method)  # pylint: disable=deprecated-method
    default_length = len(argspec.defaults) if argspec.defaults is not None else 0

    if isinstance(parameters, list):
        if len(parameters) > len(argspec.args) and argspec.varargs is None:
            raise InvalidParamsError("Too many parameters")

        remaining_parameters = len(argspec.args) - len(parameters)
        if remaining_parameters > default_length:
            raise InvalidParamsError("Not enough parameters")

    elif isinstance(parameters, dict):
        missing_parameters = [key for key in argspec.args if key not in parameters]
        default_parameters = set(argspec.args[len(argspec.args) - default_length:])
        for key in missing_parameters:
            if key not in default_parameters:
                raise InvalidParamsError("Parameter {} has not been satisfied".format(key))

        extra_params = [key for key in parameters if key not in argspec.args]
        if len(extra_params) > 0 and argspec.keywords is None:
            raise InvalidParamsError("Too many parameters")
405
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/parameter_checker.py#L27-L55
[ "def", "new_result", "(", "self", ",", "job", ",", "update_model", "=", "True", ")", ":", "if", "not", "job", ".", "exception", "is", "None", ":", "self", ".", "logger", ".", "warning", "(", "\"job {} failed with exception\\n{}\"", ".", "format", "(", "job", ".", "id", ",", "job", ".", "exception", ")", ")" ]
Checks that the given parameters have the correct types.
def check_types(parameters, parameter_types, strict_floats):
    for name, parameter_type in parameter_types.items():
        if name not in parameters:
            raise InvalidParamsError("Parameter '{}' is missing.".format(name))
        if not _is_instance(parameters[name], parameter_type, strict_floats):
            raise InvalidParamsError("Value '{}' for parameter '{}' is not of expected type {}."
                                     .format(parameters[name], name, parameter_type))
406
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/parameter_checker.py#L58-L73
[ "def", "bounceStarter", "(", "url", ",", "nextSearch", ")", ":", "@", "classmethod", "def", "_starter", "(", "cls", ")", ":", "\"\"\"Get bounced start URL.\"\"\"", "data", "=", "cls", ".", "getPage", "(", "url", ")", "url1", "=", "cls", ".", "fetchUrl", "(", "url", ",", "data", ",", "cls", ".", "prevSearch", ")", "data", "=", "cls", ".", "getPage", "(", "url1", ")", "return", "cls", ".", "fetchUrl", "(", "url1", ",", "data", ",", "nextSearch", ")", "return", "_starter" ]
Checks that exactly the given parameter names have declared types.
def check_type_declaration(parameter_names, parameter_types):
    if len(parameter_names) != len(parameter_types):
        raise Exception("Number of method parameters ({}) does not match number of "
                        "declared types ({})"
                        .format(len(parameter_names), len(parameter_types)))
    for parameter_name in parameter_names:
        if parameter_name not in parameter_types:
            raise Exception("Parameter '{}' does not have a declared type".format(parameter_name))
407
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/parameter_checker.py#L76-L90
[ "def", "generate_http_manifest", "(", "self", ")", ":", "base_path", "=", "os", ".", "path", ".", "dirname", "(", "self", ".", "translate_path", "(", "self", ".", "path", ")", ")", "self", ".", "dataset", "=", "dtoolcore", ".", "DataSet", ".", "from_uri", "(", "base_path", ")", "admin_metadata_fpath", "=", "os", ".", "path", ".", "join", "(", "base_path", ",", "\".dtool\"", ",", "\"dtool\"", ")", "with", "open", "(", "admin_metadata_fpath", ")", "as", "fh", ":", "admin_metadata", "=", "json", ".", "load", "(", "fh", ")", "http_manifest", "=", "{", "\"admin_metadata\"", ":", "admin_metadata", ",", "\"manifest_url\"", ":", "self", ".", "generate_url", "(", "\".dtool/manifest.json\"", ")", ",", "\"readme_url\"", ":", "self", ".", "generate_url", "(", "\"README.yml\"", ")", ",", "\"overlays\"", ":", "self", ".", "generate_overlay_urls", "(", ")", ",", "\"item_urls\"", ":", "self", ".", "generate_item_urls", "(", ")", "}", "return", "bytes", "(", "json", ".", "dumps", "(", "http_manifest", ")", ",", "\"utf-8\"", ")" ]
Checks that the given return value has the correct type.
def check_return_type(value, expected_type, strict_floats):
    if expected_type is None:
        if value is not None:
            raise InvalidReturnTypeError("Returned value is '{}' but None was expected"
                                         .format(value))
    elif not _is_instance(value, expected_type, strict_floats):
        raise InvalidReturnTypeError("Type of return value '{}' does not match expected type {}"
                                     .format(value, expected_type))
408
https://github.com/palantir/typedjsonrpc/blob/274218fcd236ff9643506caa629029c9ba25a0fb/typedjsonrpc/parameter_checker.py#L93-L109
[ "def", "_create_settings", "(", "self", ")", ":", "self", ".", "settings", "=", "{", "\"columns\"", ":", "[", "{", "\"Header\"", ":", "s", ",", "\"accessor\"", ":", "s", "}", "for", "s", "in", "self", ".", "settings", "]", ",", "\"port\"", ":", "self", ".", "port", ",", "\"docs\"", ":", "construct_trie", "(", "self", ".", "docs", ")", "}" ]
Only intended to be called by the Phylesystem singleton.
def _make_phylesystem_cache_region(**kwargs):
    global _CACHE_REGION_CONFIGURED, _REGION
    if _CACHE_REGION_CONFIGURED:
        return _REGION
    _CACHE_REGION_CONFIGURED = True
    try:
        # noinspection PyPackageRequirements
        from dogpile.cache import make_region
    except:
        _LOG.debug('dogpile.cache not available')
        return
    region = None
    trial_key = 'test_key'
    trial_val = {'test_val': [4, 3]}
    trying_redis = True
    if trying_redis:
        try:
            a = {
                'host': 'localhost',
                'port': 6379,
                'db': 0,  # default is 0
                'redis_expiration_time': 60 * 60 * 24 * 2,  # 2 days
                'distributed_lock': False  # True if multiple processes will use redis
            }
            region = make_region().configure('dogpile.cache.redis', arguments=a)
            _LOG.debug('cache region set up with cache.redis.')
            _LOG.debug('testing redis caching...')
            region.set(trial_key, trial_val)
            assert trial_val == region.get(trial_key)
            _LOG.debug('redis caching works')
            region.delete(trial_key)
            _REGION = region
            return region
        except:
            _LOG.debug('redis cache set up failed.')
            region = None
    trying_file_dbm = False
    if trying_file_dbm:
        _LOG.debug('Going to try dogpile.cache.dbm ...')
        first_par = _get_phylesystem_parent(**kwargs)[0]
        cache_db_dir = os.path.split(first_par)[0]
        cache_db = os.path.join(cache_db_dir, 'phylesystem-cachefile.dbm')
        _LOG.debug('dogpile.cache region using "{}"'.format(cache_db))
        try:
            a = {'filename': cache_db}
            region = make_region().configure('dogpile.cache.dbm',
                                             expiration_time=36000,
                                             arguments=a)
            _LOG.debug('cache region set up with cache.dbm.')
            _LOG.debug('testing anydbm caching...')
            region.set(trial_key, trial_val)
            assert trial_val == region.get(trial_key)
            _LOG.debug('anydbm caching works')
            region.delete(trial_key)
            _REGION = region
            return region
        except:
            _LOG.debug('anydbm cache set up failed')
            _LOG.debug('exception in the configuration of the cache.')
    _LOG.debug('Phylesystem will not use caching')
    return None
409
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/phylesystem/helper.py#L125-L187
[ "def", "update", "(", "self", ",", "message", "=", "None", ",", "subject", "=", "None", ",", "days", "=", "None", ",", "downloads", "=", "None", ",", "notify", "=", "None", ")", ":", "method", ",", "url", "=", "get_URL", "(", "'update'", ")", "payload", "=", "{", "'apikey'", ":", "self", ".", "config", ".", "get", "(", "'apikey'", ")", ",", "'logintoken'", ":", "self", ".", "session", ".", "cookies", ".", "get", "(", "'logintoken'", ")", ",", "'transferid'", ":", "self", ".", "transfer_id", ",", "}", "data", "=", "{", "'message'", ":", "message", "or", "self", ".", "transfer_info", ".", "get", "(", "'message'", ")", ",", "'message'", ":", "subject", "or", "self", ".", "transfer_info", ".", "get", "(", "'subject'", ")", ",", "'days'", ":", "days", "or", "self", ".", "transfer_info", ".", "get", "(", "'days'", ")", ",", "'downloads'", ":", "downloads", "or", "self", ".", "transfer_info", ".", "get", "(", "'downloads'", ")", ",", "'notify'", ":", "notify", "or", "self", ".", "transfer_info", ".", "get", "(", "'notify'", ")", "}", "payload", ".", "update", "(", "data", ")", "res", "=", "getattr", "(", "self", ".", "session", ",", "method", ")", "(", "url", ",", "params", "=", "payload", ")", "if", "res", ".", "status_code", ":", "self", ".", "transfer_info", ".", "update", "(", "data", ")", "return", "True", "hellraiser", "(", "res", ")" ]
Returns doc_dir and doc_filepath for doc_id.
def path_for_doc(self, doc_id):
    full_path = self.path_for_doc_fn(self.repo, doc_id)
    # _LOG.debug('>>>>>>>>>> GitActionBase.path_for_doc_fn: {}'.format(self.path_for_doc_fn))
    # _LOG.debug('>>>>>>>>>> GitActionBase.path_for_doc returning: [{}]'.format(full_path))
    return full_path
410
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/git_storage/git_action.py#L143-L149
[ "def", "Reset", "(", "self", ",", "Channel", ")", ":", "try", ":", "res", "=", "self", ".", "__m_dllBasic", ".", "CAN_Reset", "(", "Channel", ")", "return", "TPCANStatus", "(", "res", ")", "except", ":", "logger", ".", "error", "(", "\"Exception on PCANBasic.Reset\"", ")", "raise" ]
Return the current branch name.
def current_branch(self):
    branch_name = git(self.gitdir, self.gitwd, "symbolic-ref", "HEAD")
    return branch_name.replace('refs/heads/', '').strip()
411
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/git_storage/git_action.py#L169-L172
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
Returns True or False depending on whether a branch exists.
def branch_exists(self, branch):
    try:
        git(self.gitdir, self.gitwd, "rev-parse", branch)
    except sh.ErrorReturnCode:
        return False
    return True
412
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/git_storage/git_action.py#L174-L180
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
Fetch from a remote.
def fetch(self, remote='origin'):
    git(self.gitdir, "fetch", remote, _env=self.env())
413
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/git_storage/git_action.py#L204-L206
[ "def", "localize_sql", "(", "self", ",", "sql", ":", "str", ")", "->", "str", ":", "# pyodbc seems happy with ? now (pyodbc.paramstyle is 'qmark');", "# using ? is much simpler, because we may want to use % with LIKE", "# fields or (in my case) with date formatting strings for", "# STR_TO_DATE().", "# If you get this wrong, you may see \"not all arguments converted", "# during string formatting\";", "# http://stackoverflow.com/questions/9337134", "if", "self", ".", "db_pythonlib", "in", "[", "PYTHONLIB_PYMYSQL", ",", "PYTHONLIB_MYSQLDB", "]", ":", "# These engines use %, so we need to convert ? to %, without", "# breaking literal % values.", "sql", "=", "_PERCENT_REGEX", ".", "sub", "(", "\"%%\"", ",", "sql", ")", "# ... replace all % with %% first", "sql", "=", "_QUERY_VALUE_REGEX", ".", "sub", "(", "\"%s\"", ",", "sql", ")", "# ... replace all ? with %s in the SQL", "# Otherwise: engine uses ?, so we don't have to fiddle.", "return", "sql" ]
Return a dict representation of this file's commit history.
def get_version_history_for_file(self, filepath):
    # define the desired fields for log output, matching the order in these lists!
    GIT_COMMIT_FIELDS = ['id', 'author_name', 'author_email',
                         'date', 'date_ISO_8601', 'relative_date',
                         'message_subject', 'message_body']
    GIT_LOG_FORMAT = ['%H', '%an', '%ae', '%aD', '%ai', '%ar', '%s', '%b']
    # make the final format string, using standard ASCII field/record delimiters
    GIT_LOG_FORMAT = '%x1f'.join(GIT_LOG_FORMAT) + '%x1e'
    try:
        log = git(self.gitdir,
                  self.gitwd,
                  '--no-pager',
                  'log',
                  '--format=%s' % GIT_LOG_FORMAT,
                  '--follow',             # Track file's history when moved/renamed...
                  '--find-renames=100%',  # ... but only if the contents are identical!
                  '--',
                  filepath)
        # _LOG.debug('log said "{}"'.format(log))
        log = log.strip('\n\x1e').split("\x1e")
        log = [row.strip().split("\x1f") for row in log]
        log = [dict(zip(GIT_COMMIT_FIELDS, row)) for row in log]
    except:
        _LOG.exception('git log failed')
        raise
    return log
414
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/git_storage/git_action.py#L235-L273
[ "def", "is_expired", "(", "self", ",", "max_idle_seconds", ")", ":", "now", "=", "current_time", "(", ")", "return", "(", "self", ".", "expiration_time", "is", "not", "None", "and", "self", ".", "expiration_time", "<", "now", ")", "or", "(", "max_idle_seconds", "is", "not", "None", "and", "self", ".", "last_access_time", "+", "max_idle_seconds", "<", "now", ")" ]
Low-level function used internally when you have an absolute filepath to add and commit.
def _add_and_commit(self, doc_filepath, author, commit_msg):
    try:
        git(self.gitdir, self.gitwd, "add", doc_filepath)
        git(self.gitdir, self.gitwd, "commit", author=author, message=commit_msg)
    except Exception as e:
        # We can ignore this if no changes are new,
        # otherwise raise a 400
        if "nothing to commit" in e.message:  # @EJM is this dangerous?
            _LOG.debug('"nothing to commit" found in error response')
        else:
            _LOG.exception('"git commit" failed')
            self.reset_hard()
            raise
415
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/git_storage/git_action.py#L275-L288
[ "def", "reserve", "(", "self", ",", "timeout", "=", "None", ")", ":", "if", "timeout", "is", "not", "None", ":", "command", "=", "'reserve-with-timeout %d\\r\\n'", "%", "timeout", "else", ":", "command", "=", "'reserve\\r\\n'", "try", ":", "return", "self", ".", "_interact_job", "(", "command", ",", "[", "'RESERVED'", "]", ",", "[", "'DEADLINE_SOON'", ",", "'TIMED_OUT'", "]", ")", "except", "CommandFailed", ":", "exc", "=", "sys", ".", "exc_info", "(", ")", "[", "1", "]", "_", ",", "status", ",", "results", "=", "exc", ".", "args", "if", "status", "==", "'TIMED_OUT'", ":", "return", "None", "elif", "status", "==", "'DEADLINE_SOON'", ":", "raise", "DeadlineSoon", "(", "results", ")" ]
Remove a document on the given branch and attribute the commit to author. Returns the SHA of the commit on branch.
def _remove_document(self, gh_user, doc_id, parent_sha, author, commit_msg=None):
    # _LOG.debug("@@@@@@@@ GitActionBase._remove_document, doc_id={}".format(doc_id))
    doc_filepath = self.path_for_doc(doc_id)
    # _LOG.debug("@@@@@@@@ GitActionBase._remove_document, doc_filepath={}".format(doc_filepath))
    branch = self.create_or_checkout_branch(gh_user, doc_id, parent_sha)
    prev_file_sha = None
    if commit_msg is None:
        msg = "Delete document '%s' via OpenTree API" % doc_id
    else:
        msg = commit_msg
    if os.path.exists(doc_filepath):
        prev_file_sha = self.get_blob_sha_for_file(doc_filepath)
        if self.doc_type == 'nexson':
            # delete the parent directory entirely
            doc_dir = os.path.split(doc_filepath)[0]
            # _LOG.debug("@@@@@@@@ GitActionBase._remove_document, doc_dir={}".format(doc_dir))
            git(self.gitdir, self.gitwd, "rm", "-rf", doc_dir)
        elif self.doc_type in ('collection', 'favorites', 'amendment'):
            # delete just the target file
            git(self.gitdir, self.gitwd, "rm", doc_filepath)
        else:
            raise NotImplementedError("No deletion rules for doc_type '{}'".format(self.doc_type))
        git(self.gitdir, self.gitwd, "commit", author=author, message=msg)
    new_sha = git(self.gitdir, self.gitwd, "rev-parse", "HEAD").strip()
    return {'commit_sha': new_sha,
            'branch': branch,
            'prev_file_sha': prev_file_sha,
            }
416
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/git_storage/git_action.py#L429-L465
[ "def", "_timestamp_regulator", "(", "self", ")", ":", "unified_timestamps", "=", "_PrettyDefaultDict", "(", "list", ")", "staged_files", "=", "self", ".", "_list_audio_files", "(", "sub_dir", "=", "\"staging\"", ")", "for", "timestamp_basename", "in", "self", ".", "__timestamps_unregulated", ":", "if", "len", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ">", "1", ":", "# File has been splitted", "timestamp_name", "=", "''", ".", "join", "(", "timestamp_basename", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ")", "staged_splitted_files_of_timestamp", "=", "list", "(", "filter", "(", "lambda", "staged_file", ":", "(", "timestamp_name", "==", "staged_file", "[", ":", "-", "3", "]", "and", "all", "(", "[", "(", "x", "in", "set", "(", "map", "(", "str", ",", "range", "(", "10", ")", ")", ")", ")", "for", "x", "in", "staged_file", "[", "-", "3", ":", "]", "]", ")", ")", ",", "staged_files", ")", ")", "if", "len", "(", "staged_splitted_files_of_timestamp", ")", "==", "0", ":", "self", ".", "__errors", "[", "(", "time", "(", ")", ",", "timestamp_basename", ")", "]", "=", "{", "\"reason\"", ":", "\"Missing staged file\"", ",", "\"current_staged_files\"", ":", "staged_files", "}", "continue", "staged_splitted_files_of_timestamp", ".", "sort", "(", ")", "unified_timestamp", "=", "list", "(", ")", "for", "staging_digits", ",", "splitted_file", "in", "enumerate", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ":", "prev_splits_sec", "=", "0", "if", "int", "(", "staging_digits", ")", "!=", "0", ":", "prev_splits_sec", "=", "self", ".", "_get_audio_duration_seconds", "(", "\"{}/staging/{}{:03d}\"", ".", "format", "(", "self", ".", "src_dir", ",", "timestamp_name", ",", "staging_digits", "-", "1", ")", ")", "for", "word_block", "in", "splitted_file", ":", "unified_timestamp", ".", "append", "(", "_WordBlock", "(", "word", "=", "word_block", ".", "word", ",", "start", "=", "round", "(", "word_block", ".", "start", "+", "prev_splits_sec", ",", "2", ")", ",", "end", "=", "round", "(", "word_block", ".", "end", "+", "prev_splits_sec", ",", "2", ")", ")", ")", "unified_timestamps", "[", "str", "(", "timestamp_basename", ")", "]", "+=", "unified_timestamp", "else", ":", "unified_timestamps", "[", "timestamp_basename", "]", "+=", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", "[", "0", "]", "self", ".", "__timestamps", ".", "update", "(", "unified_timestamps", ")", "self", ".", "__timestamps_unregulated", "=", "_PrettyDefaultDict", "(", "list", ")" ]
Given a document id, temporary filename of content, branch, and auth_info.
def write_document(self, gh_user, doc_id, file_content, branch, author, commit_msg=None):
    parent_sha = None
    fc = tempfile.NamedTemporaryFile()
    # N.B. we currently assume file_content is text/JSON, or should be serialized from a dict
    if is_str_type(file_content):
        fc.write(file_content)
    else:
        write_as_json(file_content, fc)
    fc.flush()
    try:
        doc_filepath = self.path_for_doc(doc_id)
        doc_dir = os.path.split(doc_filepath)[0]
        if parent_sha is None:
            self.checkout_master()
            parent_sha = self.get_master_sha()
        branch = self.create_or_checkout_branch(gh_user, doc_id, parent_sha,
                                                force_branch_name=True)
        # create a document directory if this is a new doc EJM- what if it isn't?
        if not os.path.isdir(doc_dir):
            os.makedirs(doc_dir)
        shutil.copy(fc.name, doc_filepath)
        git(self.gitdir, self.gitwd, "add", doc_filepath)
        if commit_msg is None:
            commit_msg = "Update document '%s' via OpenTree API" % doc_id
        try:
            git(self.gitdir, self.gitwd, "commit", author=author, message=commit_msg)
        except Exception as e:
            # We can ignore this if no changes are new,
            # otherwise raise a 400
            if "nothing to commit" in e.message:  # @EJM is this dangerous?
                pass
            else:
                _LOG.exception('"git commit" failed')
                self.reset_hard()
                raise
        new_sha = git(self.gitdir, self.gitwd, "rev-parse", "HEAD")
    except Exception as e:
        _LOG.exception('write_document exception')
        raise GitWorkflowError("Could not write to document #%s ! Details: \n%s" % (doc_id, e.message))
    finally:
        fc.close()
    return new_sha
417
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/git_storage/git_action.py#L467-L516
[ "def", "group_values", "(", "self", ",", "group_name", ")", ":", "group_index", "=", "self", ".", "groups", ".", "index", "(", "group_name", ")", "values", "=", "[", "]", "for", "key", "in", "self", ".", "data_keys", ":", "if", "key", "[", "group_index", "]", "not", "in", "values", ":", "values", ".", "append", "(", "key", "[", "group_index", "]", ")", "return", "values" ]
Given a doc_id, temporary filename of content, branch, and auth_info.
def write_doc_from_tmpfile(self,
                           doc_id,
                           tmpfi,
                           parent_sha,
                           auth_info,
                           commit_msg='',
                           doctype_display_name="document"):
    gh_user, author = get_user_author(auth_info)
    doc_filepath = self.path_for_doc(doc_id)
    doc_dir = os.path.split(doc_filepath)[0]
    if parent_sha is None:
        self.checkout_master()
        parent_sha = self.get_master_sha()
    branch = self.create_or_checkout_branch(gh_user, doc_id, parent_sha)
    # build complete (probably type-specific) commit message
    default_commit_msg = "Update %s '%s' via OpenTree API" % (doctype_display_name, doc_id)
    if commit_msg:
        commit_msg = "%s\n\n(%s)" % (commit_msg, default_commit_msg)
    else:
        commit_msg = default_commit_msg
    # create a doc directory if this is a new document EJM- what if it isn't?
    if not os.path.isdir(doc_dir):
        os.makedirs(doc_dir)
    if os.path.exists(doc_filepath):
        prev_file_sha = self.get_blob_sha_for_file(doc_filepath)
    else:
        prev_file_sha = None
    shutil.copy(tmpfi.name, doc_filepath)
    self._add_and_commit(doc_filepath, author, commit_msg)
    new_sha = git(self.gitdir, self.gitwd, "rev-parse", "HEAD")
    _LOG.debug('Committed document "{i}" to branch "{b}" commit SHA: "{s}"'.format(
        i=doc_id, b=branch, s=new_sha.strip()))
    return {'commit_sha': new_sha.strip(),
            'branch': branch,
            'prev_file_sha': prev_file_sha,
            }
418
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/git_storage/git_action.py#L518-L559
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]
Remove an amendment. Given an amendment_id, branch, and optionally an author, remove an amendment on the given branch and attribute the commit to author. Returns the SHA of the commit on branch.
def remove_amendment(self, first_arg, sec_arg, third_arg, fourth_arg=None, commit_msg=None):
    if fourth_arg is None:
        amendment_id, branch_name, author = first_arg, sec_arg, third_arg
        gh_user = branch_name.split('_amendment_')[0]
        parent_sha = self.get_master_sha()
    else:
        gh_user, amendment_id, parent_sha, author = first_arg, sec_arg, third_arg, fourth_arg
    if commit_msg is None:
        commit_msg = "Delete Amendment '%s' via OpenTree API" % amendment_id
    return self._remove_document(gh_user, amendment_id, parent_sha, author, commit_msg)
419
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/amendments/git_actions.py#L92-L107
[ "def", "health", "(", "self", ")", ":", "return", "json", ".", "dumps", "(", "dict", "(", "uptime", "=", "'{:.3f}s'", ".", "format", "(", "(", "time", ".", "time", "(", ")", "-", "self", ".", "_start_time", ")", ")", ")", ")" ]
Create a record inclusion request to a community.
def create(cls, community, record, user=None, expires_at=None, notify=True):
    if expires_at and expires_at < datetime.utcnow():
        raise InclusionRequestExpiryTimeError(community=community, record=record)

    if community.has_record(record):
        raise InclusionRequestObsoleteError(community=community, record=record)

    try:
        # Create inclusion request
        with db.session.begin_nested():
            obj = cls(id_community=community.id,
                      id_record=record.id,
                      user=user,
                      expires_at=expires_at)
            db.session.add(obj)
    except (IntegrityError, FlushError):
        raise InclusionRequestExistsError(community=community, record=record)

    # Send signal
    inclusion_request_created.send(
        current_app._get_current_object(),
        request=obj,
        notify=notify)
    return obj
420
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/models.py#L114-L152
[ "def", "add_dataframe", "(", "df", ",", "name", ",", "pkg", "=", "None", ",", "description", "=", "''", ")", ":", "from", "warnings", "import", "warn", "from", "metapack", ".", "cli", ".", "core", "import", "alt_col_name", ",", "type_map", "import", "numpy", "as", "np", "if", "name", "is", "None", "or", "df", "is", "None", ":", "warn", "(", "\"Did not find dataframe for reference '{}' \"", ".", "format", "(", "ref", ")", ")", "return", "pkg", "=", "pkg", "or", "open_source_package", "(", ")", "resource_ref", "=", "'file:'", "+", "get_notebook_rel_path", "(", "pkg", ")", "+", "'#'", "+", "name", "t", "=", "pkg", ".", "find_first", "(", "'Root.Datafile'", ",", "value", "=", "resource_ref", ")", "col_props", "=", "{", "}", "if", "t", ":", "print", "(", "\"Datafile exists for url '{}', deleting\"", ".", "format", "(", "resource_ref", ")", ")", "if", "t", ".", "schema_term", ":", "col_props", "=", "{", "c", "[", "'name'", "]", ":", "c", "for", "c", "in", "t", ".", "columns", "(", ")", "}", "pkg", ".", "remove_term", "(", "t", ".", "schema_term", ")", "pkg", ".", "remove_term", "(", "t", ")", "t", "=", "pkg", "[", "'Resources'", "]", ".", "new_term", "(", "'Root.Datafile'", ",", "resource_ref", ",", "name", "=", "name", ",", "description", "=", "description", ")", "st", "=", "pkg", "[", "'Schema'", "]", ".", "new_term", "(", "'Table'", ",", "t", ".", "schema_name", ",", "description", "=", "description", ")", "for", "i", ",", "name", "in", "enumerate", "(", "df", ".", "columns", ")", ":", "props", "=", "col_props", ".", "get", "(", "name", ",", "{", "}", ")", "try", ":", "native_type", "=", "type", "(", "np", ".", "asscalar", "(", "df", "[", "name", "]", ".", "dtype", ".", "type", "(", "0", ")", ")", ")", ".", "__name__", "except", "ValueError", ":", "native_type", "=", "df", "[", "name", "]", ".", "dtype", ".", "name", "except", "AttributeError", ":", "native_type", "=", "type", "(", "df", "[", "name", "]", "[", "0", "]", ")", ".", "__name__", "for", "pn", "in", "'datatype name pos header'", ".", "split", "(", ")", ":", "if", "pn", "in", "props", ":", "del", "props", "[", "pn", "]", "if", "'altname'", "in", "props", ":", "altname", "=", "props", "[", "'altname'", "]", "del", "props", "[", "'altname'", "]", "else", ":", "raw_alt_name", "=", "alt_col_name", "(", "name", ",", "i", ")", "altname", "=", "raw_alt_name", "if", "raw_alt_name", "!=", "name", "else", "''", "col", "=", "df", "[", "name", "]", "if", "hasattr", "(", "col", ",", "'description'", ")", ":", "# custom property", "props", "[", "'description'", "]", "=", "col", ".", "description", "t", "=", "st", ".", "new_child", "(", "'Column'", ",", "name", ",", "datatype", "=", "type_map", ".", "get", "(", "native_type", ",", "native_type", ")", ",", "altname", "=", "altname", ",", "*", "*", "props", ")", "pkg", ".", "write_csv", "(", ")" ]
Get an inclusion request.
def get(cls, community_id, record_uuid):
    return cls.query.filter_by(
        id_record=record_uuid, id_community=community_id
    ).one_or_none()
421
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/models.py#L155-L159
[ "def", "delete_entity", "(", "self", ",", "entity_id", ",", "mount_point", "=", "DEFAULT_MOUNT_POINT", ")", ":", "api_path", "=", "'/v1/{mount_point}/entity/id/{id}'", ".", "format", "(", "mount_point", "=", "mount_point", ",", "id", "=", "entity_id", ",", ")", "return", "self", ".", "_adapter", ".", "delete", "(", "url", "=", "api_path", ",", ")" ]
Search for communities.
def filter_communities(cls, p, so, with_deleted=False):
    query = cls.query if with_deleted else \
        cls.query.filter(cls.deleted_at.is_(None))
    if p:
        p = p.replace(' ', '%')
        query = query.filter(db.or_(
            cls.id.ilike('%' + p + '%'),
            cls.title.ilike('%' + p + '%'),
            cls.description.ilike('%' + p + '%'),
        ))
    if so in current_app.config['COMMUNITIES_SORTING_OPTIONS']:
        order = so == 'title' and db.asc or db.desc
        query = query.order_by(order(getattr(cls, so)))
    else:
        query = query.order_by(db.desc(cls.ranking))
    return query
422
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/models.py#L257-L284
[ "async", "def", "setup_streamer", "(", "self", ")", ":", "self", ".", "streamer", ".", "volume", "=", "self", ".", "volume", "/", "100", "self", ".", "streamer", ".", "start", "(", ")", "self", ".", "pause_time", "=", "None", "self", ".", "vclient_starttime", "=", "self", ".", "vclient", ".", "loop", ".", "time", "(", ")", "# Cache next song", "self", ".", "logger", ".", "debug", "(", "\"Caching next song\"", ")", "dl_thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "download_next_song_cache", ")", "dl_thread", ".", "start", "(", ")" ]
Add a record to the community.
def add_record(self, record):
    key = current_app.config['COMMUNITIES_RECORD_KEY']
    record.setdefault(key, [])
    if self.has_record(record):
        current_app.logger.warning(
            'Community addition: record {uuid} is already in community '
            '"{comm}"'.format(uuid=record.id, comm=self.id))
    else:
        record[key].append(self.id)
        record[key] = sorted(record[key])
    if current_app.config['COMMUNITIES_OAI_ENABLED']:
        if not self.oaiset.has_record(record):
            self.oaiset.add_record(record)
423
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/models.py#L286-L304
[ "def", "delete_binding", "(", "self", ",", "vhost", ",", "exchange", ",", "queue", ",", "rt_key", ")", ":", "vhost", "=", "quote", "(", "vhost", ",", "''", ")", "exchange", "=", "quote", "(", "exchange", ",", "''", ")", "queue", "=", "quote", "(", "queue", ",", "''", ")", "body", "=", "''", "path", "=", "Client", ".", "urls", "[", "'rt_bindings_between_exch_queue'", "]", "%", "(", "vhost", ",", "exchange", ",", "queue", ",", "rt_key", ")", "return", "self", ".", "_call", "(", "path", ",", "'DELETE'", ",", "headers", "=", "Client", ".", "json_headers", ")" ]
Remove an already accepted record from the community.
def remove_record(self, record):
    if not self.has_record(record):
        current_app.logger.warning(
            'Community removal: record {uuid} was not in community '
            '"{comm}"'.format(uuid=record.id, comm=self.id))
    else:
        key = current_app.config['COMMUNITIES_RECORD_KEY']
        record[key] = [c for c in record[key] if c != self.id]
    if current_app.config['COMMUNITIES_OAI_ENABLED']:
        if self.oaiset.has_record(record):
            self.oaiset.remove_record(record)
424
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/models.py#L306-L322
[ "def", "external_metadata", "(", "self", ",", "datasource_type", "=", "None", ",", "datasource_id", "=", "None", ")", ":", "if", "datasource_type", "==", "'druid'", ":", "datasource", "=", "ConnectorRegistry", ".", "get_datasource", "(", "datasource_type", ",", "datasource_id", ",", "db", ".", "session", ")", "elif", "datasource_type", "==", "'table'", ":", "database", "=", "(", "db", ".", "session", ".", "query", "(", "Database", ")", ".", "filter_by", "(", "id", "=", "request", ".", "args", ".", "get", "(", "'db_id'", ")", ")", ".", "one", "(", ")", ")", "Table", "=", "ConnectorRegistry", ".", "sources", "[", "'table'", "]", "datasource", "=", "Table", "(", "database", "=", "database", ",", "table_name", "=", "request", ".", "args", ".", "get", "(", "'table_name'", ")", ",", "schema", "=", "request", ".", "args", ".", "get", "(", "'schema'", ")", "or", "None", ",", ")", "external_metadata", "=", "datasource", ".", "external_metadata", "(", ")", "return", "self", ".", "json_response", "(", "external_metadata", ")" ]
Accept a record for inclusion in the community.
def accept_record(self, record):
    with db.session.begin_nested():
        req = InclusionRequest.get(self.id, record.id)
        if req is None:
            raise InclusionRequestMissingError(community=self, record=record)
        req.delete()
        self.add_record(record)
        self.last_record_accepted = datetime.utcnow()
425
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/models.py#L329-L341
[ "def", "delete_binding", "(", "self", ",", "vhost", ",", "exchange", ",", "queue", ",", "rt_key", ")", ":", "vhost", "=", "quote", "(", "vhost", ",", "''", ")", "exchange", "=", "quote", "(", "exchange", ",", "''", ")", "queue", "=", "quote", "(", "queue", ",", "''", ")", "body", "=", "''", "path", "=", "Client", ".", "urls", "[", "'rt_bindings_between_exch_queue'", "]", "%", "(", "vhost", ",", "exchange", ",", "queue", ",", "rt_key", ")", "return", "self", ".", "_call", "(", "path", ",", "'DELETE'", ",", "headers", "=", "Client", ".", "json_headers", ")" ]
Reject a record for inclusion in the community.
def reject_record(self, record):
    with db.session.begin_nested():
        req = InclusionRequest.get(self.id, record.id)
        if req is None:
            raise InclusionRequestMissingError(community=self, record=record)
        req.delete()
426
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/models.py#L343-L353
[ "def", "delete_binding", "(", "self", ",", "vhost", ",", "exchange", ",", "queue", ",", "rt_key", ")", ":", "vhost", "=", "quote", "(", "vhost", ",", "''", ")", "exchange", "=", "quote", "(", "exchange", ",", "''", ")", "queue", "=", "quote", "(", "queue", ",", "''", ")", "body", "=", "''", "path", "=", "Client", ".", "urls", "[", "'rt_bindings_between_exch_queue'", "]", "%", "(", "vhost", ",", "exchange", ",", "queue", ",", "rt_key", ")", "return", "self", ".", "_call", "(", "path", ",", "'DELETE'", ",", "headers", "=", "Client", ".", "json_headers", ")" ]
Mark the community for deletion.
def delete(self):
    if self.deleted_at is not None:
        raise CommunitiesError(community=self)
    else:
        self.deleted_at = datetime.utcnow()
427
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/models.py#L355-L365
[ "def", "mod_data", "(", "fsclient", ")", ":", "# TODO, change out for a fileserver backend", "sync_refs", "=", "[", "'modules'", ",", "'states'", ",", "'grains'", ",", "'renderers'", ",", "'returners'", ",", "]", "ret", "=", "{", "}", "envs", "=", "fsclient", ".", "envs", "(", ")", "ver_base", "=", "''", "for", "env", "in", "envs", ":", "files", "=", "fsclient", ".", "file_list", "(", "env", ")", "for", "ref", "in", "sync_refs", ":", "mods_data", "=", "{", "}", "pref", "=", "'_{0}'", ".", "format", "(", "ref", ")", "for", "fn_", "in", "sorted", "(", "files", ")", ":", "if", "fn_", ".", "startswith", "(", "pref", ")", ":", "if", "fn_", ".", "endswith", "(", "(", "'.py'", ",", "'.so'", ",", "'.pyx'", ")", ")", ":", "full", "=", "salt", ".", "utils", ".", "url", ".", "create", "(", "fn_", ")", "mod_path", "=", "fsclient", ".", "cache_file", "(", "full", ",", "env", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "mod_path", ")", ":", "continue", "mods_data", "[", "os", ".", "path", ".", "basename", "(", "fn_", ")", "]", "=", "mod_path", "chunk", "=", "salt", ".", "utils", ".", "hashutils", ".", "get_hash", "(", "mod_path", ")", "ver_base", "+=", "chunk", "if", "mods_data", ":", "if", "ref", "in", "ret", ":", "ret", "[", "ref", "]", ".", "update", "(", "mods_data", ")", "else", ":", "ret", "[", "ref", "]", "=", "mods_data", "if", "not", "ret", ":", "return", "{", "}", "if", "six", ".", "PY3", ":", "ver_base", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_bytes", "(", "ver_base", ")", "ver", "=", "hashlib", ".", "sha1", "(", "ver_base", ")", ".", "hexdigest", "(", ")", "ext_tar_path", "=", "os", ".", "path", ".", "join", "(", "fsclient", ".", "opts", "[", "'cachedir'", "]", ",", "'ext_mods.{0}.tgz'", ".", "format", "(", "ver", ")", ")", "mods", "=", "{", "'version'", ":", "ver", ",", "'file'", ":", "ext_tar_path", "}", "if", "os", ".", "path", ".", "isfile", "(", "ext_tar_path", ")", ":", "return", "mods", "tfp", "=", "tarfile", ".", "open", "(", "ext_tar_path", ",", "'w:gz'", ")", "verfile", "=", "os", ".", "path", ".", "join", "(", "fsclient", ".", "opts", "[", "'cachedir'", "]", ",", "'ext_mods.ver'", ")", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "verfile", ",", "'w+'", ")", "as", "fp_", ":", "fp_", ".", "write", "(", "ver", ")", "tfp", ".", "add", "(", "verfile", ",", "'ext_version'", ")", "for", "ref", "in", "ret", ":", "for", "fn_", "in", "ret", "[", "ref", "]", ":", "tfp", ".", "add", "(", "ret", "[", "ref", "]", "[", "fn_", "]", ",", "os", ".", "path", ".", "join", "(", "ref", ",", "fn_", ")", ")", "tfp", ".", "close", "(", ")", "return", "mods" ]
Get URL to collection logo.
def logo_url(self):
    if self.logo_ext:
        return '/api/files/{bucket}/{key}'.format(
            bucket=current_app.config['COMMUNITIES_BUCKET_UUID'],
            key='{0}/logo.{1}'.format(self.id, self.logo_ext),
        )
    return None
428
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/models.py#L380-L391
[ "def", "create_stream_subscription", "(", "self", ",", "stream", ",", "on_data", ",", "timeout", "=", "60", ")", ":", "options", "=", "rest_pb2", ".", "StreamSubscribeRequest", "(", ")", "options", ".", "stream", "=", "stream", "manager", "=", "WebSocketSubscriptionManager", "(", "self", ".", "_client", ",", "resource", "=", "'stream'", ",", "options", "=", "options", ")", "# Represent subscription as a future", "subscription", "=", "WebSocketSubscriptionFuture", "(", "manager", ")", "wrapped_callback", "=", "functools", ".", "partial", "(", "_wrap_callback_parse_stream_data", ",", "subscription", ",", "on_data", ")", "manager", ".", "open", "(", "wrapped_callback", ",", "instance", "=", "self", ".", "_instance", ")", "# Wait until a reply or exception is received", "subscription", ".", "reply", "(", "timeout", "=", "timeout", ")", "return", "subscription" ]
Return the corresponding OAISet for given community.
def oaiset(self):
    if current_app.config['COMMUNITIES_OAI_ENABLED']:
        from invenio_oaiserver.models import OAISet
        return OAISet.query.filter_by(spec=self.oaiset_spec).one()
    else:
        return None
429
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/models.py#L421-L433
[ "def", "to_td", "(", "frame", ",", "name", ",", "con", ",", "if_exists", "=", "'fail'", ",", "time_col", "=", "None", ",", "time_index", "=", "None", ",", "index", "=", "True", ",", "index_label", "=", "None", ",", "chunksize", "=", "10000", ",", "date_format", "=", "None", ")", ":", "database", ",", "table", "=", "name", ".", "split", "(", "'.'", ")", "uploader", "=", "StreamingUploader", "(", "con", ".", "client", ",", "database", ",", "table", ",", "show_progress", "=", "True", ",", "clear_progress", "=", "True", ")", "uploader", ".", "message", "(", "'Streaming import into: {0}.{1}'", ".", "format", "(", "database", ",", "table", ")", ")", "# check existence", "if", "if_exists", "==", "'fail'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "RuntimeError", "(", "'table \"%s\" already exists'", "%", "name", ")", "elif", "if_exists", "==", "'replace'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "pass", "else", ":", "uploader", ".", "message", "(", "'deleting old table...'", ")", "con", ".", "client", ".", "delete_table", "(", "database", ",", "table", ")", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "elif", "if_exists", "==", "'append'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "ValueError", "(", "'invalid value for if_exists: %s'", "%", "if_exists", ")", "# \"time_index\" implies \"index=False\"", "if", "time_index", ":", "index", "=", "None", "# convert", "frame", "=", "frame", ".", "copy", "(", ")", "frame", "=", "_convert_time_column", "(", "frame", ",", "time_col", ",", "time_index", ")", "frame", "=", "_convert_index_column", "(", "frame", ",", "index", ",", "index_label", ")", "frame", "=", "_convert_date_format", "(", "frame", ",", "date_format", ")", "# upload", "uploader", ".", "upload_frame", "(", "frame", ",", "chunksize", ")", "uploader", ".", "wait_for_import", "(", "len", "(", "frame", ")", ")" ]
Return the OAISet URL for given community.
def oaiset_url(self):
    return url_for(
        'invenio_oaiserver.response',
        verb='ListRecords',
        metadataPrefix='oai_dc',
        set=self.oaiset_spec,
        _external=True)
430
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/models.py#L436-L445
[ "def", "exit_resync", "(", "self", ")", ":", "print", "(", "\"********** exit & resync **********\"", ")", "try", ":", "if", "self", ".", "client_socket", ":", "self", ".", "client_socket", ".", "close", "(", ")", "self", ".", "client_socket", "=", "None", "try", ":", "self", ".", "exit", "(", ")", "except", "Exception", "as", "e", ":", "self", ".", "_log_error", "(", "e", ")", "print", "(", "\"Pause for exit(s) ...\"", ")", "time", ".", "sleep", "(", "60", ")", "except", "(", "socket", ".", "error", ",", "ConnectionError", ")", ":", "pass", "self", ".", "resync", "(", ")" ]
Return the version of the community.
def version_id(self):
    return hashlib.sha1('{0}__{1}'.format(
        self.id, self.updated).encode('utf-8')).hexdigest()
431
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/models.py#L448-L455
[ "def", "numRegisteredForRole", "(", "self", ",", "role", ",", "includeTemporaryRegs", "=", "False", ")", ":", "count", "=", "self", ".", "eventregistration_set", ".", "filter", "(", "cancelled", "=", "False", ",", "dropIn", "=", "False", ",", "role", "=", "role", ")", ".", "count", "(", ")", "if", "includeTemporaryRegs", ":", "count", "+=", "self", ".", "temporaryeventregistration_set", ".", "filter", "(", "dropIn", "=", "False", ",", "role", "=", "role", ")", ".", "exclude", "(", "registration__expirationDate__lte", "=", "timezone", ".", "now", "(", ")", ")", ".", "count", "(", ")", "return", "count" ]
Get the latest featured community.
def get_featured_or_none(cls, start_date=None):
    start_date = start_date or datetime.utcnow()
    comm = cls.query.filter(
        FeaturedCommunity.start_date <= start_date
    ).order_by(
        cls.start_date.desc()
    ).first()
    return comm if comm is None else comm.community
432
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/models.py#L486-L500
[ "def", "delete_entry", "(", "request", ",", "entry_id", ")", ":", "try", ":", "entry", "=", "Entry", ".", "no_join", ".", "get", "(", "pk", "=", "entry_id", ",", "user", "=", "request", ".", "user", ")", "except", "Entry", ".", "DoesNotExist", ":", "message", "=", "'No such entry found.'", "messages", ".", "info", "(", "request", ",", "message", ")", "url", "=", "request", ".", "GET", ".", "get", "(", "'next'", ",", "reverse", "(", "'dashboard'", ")", ")", "return", "HttpResponseRedirect", "(", "url", ")", "if", "request", ".", "method", "==", "'POST'", ":", "key", "=", "request", ".", "POST", ".", "get", "(", "'key'", ",", "None", ")", "if", "key", "and", "key", "==", "entry", ".", "delete_key", ":", "entry", ".", "delete", "(", ")", "message", "=", "'Deleted {0} for {1}.'", ".", "format", "(", "entry", ".", "activity", ".", "name", ",", "entry", ".", "project", ")", "messages", ".", "info", "(", "request", ",", "message", ")", "url", "=", "request", ".", "GET", ".", "get", "(", "'next'", ",", "reverse", "(", "'dashboard'", ")", ")", "return", "HttpResponseRedirect", "(", "url", ")", "else", ":", "message", "=", "'You are not authorized to delete this entry!'", "messages", ".", "error", "(", "request", ",", "message", ")", "return", "render", "(", "request", ",", "'timepiece/entry/delete.html'", ",", "{", "'entry'", ":", "entry", ",", "}", ")" ]
GET the current Connector version.
def getConnectorVersion(self):
    result = asyncResult()
    data = self._getURL("/", versioned=False)
    result.fill(data)
    if data.status_code == 200:
        result.error = False
    else:
        result.error = response_codes("get_mdc_version", data.status_code)
    result.is_done = True
    return result
433
https://github.com/ARMmbed/mbed-connector-api-python/blob/a5024a01dc67cc192c8bf7a70b251fcf0a3f279b/mbed_connector_api/mbed_connector_api.py#L72-L87
[ "def", "record", "(", "self", ")", ":", "while", "True", ":", "frames", "=", "[", "]", "self", ".", "stream", ".", "start_stream", "(", ")", "for", "i", "in", "range", "(", "self", ".", "num_frames", ")", ":", "data", "=", "self", ".", "stream", ".", "read", "(", "self", ".", "config", ".", "FRAMES_PER_BUFFER", ")", "frames", ".", "append", "(", "data", ")", "self", ".", "output", ".", "seek", "(", "0", ")", "w", "=", "wave", ".", "open", "(", "self", ".", "output", ",", "'wb'", ")", "w", ".", "setnchannels", "(", "self", ".", "config", ".", "CHANNELS", ")", "w", ".", "setsampwidth", "(", "self", ".", "audio", ".", "get_sample_size", "(", "self", ".", "config", ".", "FORMAT", ")", ")", "w", ".", "setframerate", "(", "self", ".", "config", ".", "RATE", ")", "w", ".", "writeframes", "(", "b''", ".", "join", "(", "frames", ")", ")", "w", ".", "close", "(", ")", "yield" ]
Register a handler for a particular notification type. Acceptable notification types are: async-responses, registrations-expired, de-registrations, reg-updates, registrations, notifications.
def setHandler(self, handler, cbfn):
    if handler == "async-responses":
        self.async_responses_callback = cbfn
    elif handler == "registrations-expired":
        self.registrations_expired_callback = cbfn
    elif handler == "de-registrations":
        self.de_registrations_callback = cbfn
    elif handler == "reg-updates":
        self.reg_updates_callback = cbfn
    elif handler == "registrations":
        self.registrations_callback = cbfn
    elif handler == "notifications":
        self.notifications_callback = cbfn
    else:
        self.log.warn("'%s' is not a legitimate notification channel option. "
                      "Please check your spelling.", handler)
434
https://github.com/ARMmbed/mbed-connector-api-python/blob/a5024a01dc67cc192c8bf7a70b251fcf0a3f279b/mbed_connector_api/mbed_connector_api.py#L554-L583
[ "def", "_deflate", "(", "cls", ")", ":", "data", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "vars", "(", "cls", ")", ".", "items", "(", ")", "if", "not", "k", ".", "startswith", "(", "\"_\"", ")", "}", "return", "{", "Constants", ".", "CONFIG_KEY", ":", "data", "}" ]
Return the python representation of the document
def as_python ( self , infile , include_original_shex : bool = False ) : self . _context . resolve_circular_references ( ) # add forwards for any circular entries body = '' for k in self . _context . ordered_elements ( ) : v = self . _context . grammarelts [ k ] if isinstance ( v , ( JSGLexerRuleBlock , JSGObjectExpr ) ) : body += v . as_python ( k ) if isinstance ( v , JSGObjectExpr ) and not self . _context . has_typeid : self . _context . directives . append ( f'_CONTEXT.TYPE_EXCEPTIONS.append("{k}")' ) elif isinstance ( v , JSGForwardRef ) : pass elif isinstance ( v , ( JSGValueType , JSGArrayExpr ) ) : body += f"\n\n\n{k} = {v.signature_type()}" else : raise NotImplementedError ( "Unknown grammar elt for {}" . format ( k ) ) self . _context . forward_refs . pop ( k , None ) body = '\n' + '\n' . join ( self . _context . directives ) + body return _jsg_python_template . format ( infile = infile , original_shex = '# ' + self . text if include_original_shex else "" , version = __version__ , gendate = datetime . datetime . now ( ) . strftime ( "%Y-%m-%d %H:%M" ) , body = body )
435
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/parser_impl/jsg_doc_parser.py#L36-L59
[ "def", "get_l2vpnfs_table", "(", "self", ")", ":", "l2vpnfs_table", "=", "self", ".", "_global_tables", ".", "get", "(", "RF_L2VPN_FLOWSPEC", ")", "# Lazy initialization of the table.", "if", "not", "l2vpnfs_table", ":", "l2vpnfs_table", "=", "L2VPNFlowSpecTable", "(", "self", ".", "_core_service", ",", "self", ".", "_signal_bus", ")", "self", ".", "_global_tables", "[", "RF_L2VPN_FLOWSPEC", "]", "=", "l2vpnfs_table", "self", ".", "_tables", "[", "(", "None", ",", "RF_L2VPN_FLOWSPEC", ")", "]", "=", "l2vpnfs_table", "return", "l2vpnfs_table" ]
Generate a dummy date list for testing without hitting the server
def __getDummyDateList ( ) : D = [ ] for y in xrange ( 2001 , 2010 ) : for d in xrange ( 1 , 365 , 1 ) : D . append ( 'A%04d%03d' % ( y , d ) ) return D
436
https://github.com/emirozer/bowshock/blob/9f5e053f1d54995b833b83616f37c67178c3e840/bowshock/modis.py#L92-L103
[ "def", "addResourceFile", "(", "self", ",", "pid", ",", "resource_file", ",", "resource_filename", "=", "None", ",", "progress_callback", "=", "None", ")", ":", "url", "=", "\"{url_base}/resource/{pid}/files/\"", ".", "format", "(", "url_base", "=", "self", ".", "url_base", ",", "pid", "=", "pid", ")", "params", "=", "{", "}", "close_fd", "=", "self", ".", "_prepareFileForUpload", "(", "params", ",", "resource_file", ",", "resource_filename", ")", "encoder", "=", "MultipartEncoder", "(", "params", ")", "if", "progress_callback", "is", "None", ":", "progress_callback", "=", "default_progress_callback", "monitor", "=", "MultipartEncoderMonitor", "(", "encoder", ",", "progress_callback", ")", "r", "=", "self", ".", "_request", "(", "'POST'", ",", "url", ",", "data", "=", "monitor", ",", "headers", "=", "{", "'Content-Type'", ":", "monitor", ".", "content_type", "}", ")", "if", "close_fd", ":", "fd", "=", "params", "[", "'file'", "]", "[", "1", "]", "fd", ".", "close", "(", ")", "if", "r", ".", "status_code", "!=", "201", ":", "if", "r", ".", "status_code", "==", "403", ":", "raise", "HydroShareNotAuthorized", "(", "(", "'POST'", ",", "url", ")", ")", "elif", "r", ".", "status_code", "==", "404", ":", "raise", "HydroShareNotFound", "(", "(", "pid", ",", ")", ")", "else", ":", "raise", "HydroShareHTTPException", "(", "(", "url", ",", "'POST'", ",", "r", ".", "status_code", ")", ")", "response", "=", "r", ".", "json", "(", ")", "# assert(response['resource_id'] == pid)", "return", "response" ]
Convert the webserver formatted dates to an integer format by stripping the leading char and casting
def mkIntDate ( s ) : n = s . __len__ ( ) d = int ( s [ - ( n - 1 ) : n ] ) return d
437
https://github.com/emirozer/bowshock/blob/9f5e053f1d54995b833b83616f37c67178c3e840/bowshock/modis.py#L118-L127
[ "def", "uniquenessRatio", "(", "self", ",", "value", ")", ":", "if", "value", ">=", "5", "and", "value", "<=", "15", ":", "self", ".", "_uniqueness", "=", "value", "else", ":", "raise", "InvalidUniquenessRatioError", "(", "\"Uniqueness ratio must be \"", "\"between 5 and 15.\"", ")", "self", ".", "_replace_bm", "(", ")" ]
Create an ID .
def create_id ( self , prefix = "guid" ) : if self . method == IDGenerator . METHOD_UUID : id_ = str ( uuid . uuid4 ( ) ) elif self . method == IDGenerator . METHOD_INT : id_ = self . next_int self . next_int += 1 else : raise InvalidMethodError ( self . method ) return "%s:%s-%s" % ( self . namespace . prefix , prefix , id_ )
438
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/idgen.py#L61-L75
[ "def", "configure_materials_manager", "(", "graph", ",", "key_provider", ")", ":", "if", "graph", ".", "config", ".", "materials_manager", ".", "enable_cache", ":", "return", "CachingCryptoMaterialsManager", "(", "cache", "=", "LocalCryptoMaterialsCache", "(", "graph", ".", "config", ".", "materials_manager", ".", "cache_capacity", ")", ",", "master_key_provider", "=", "key_provider", ",", "max_age", "=", "graph", ".", "config", ".", "materials_manager", ".", "cache_max_age", ",", "max_messages_encrypted", "=", "graph", ".", "config", ".", "materials_manager", ".", "cache_max_messages_encrypted", ",", ")", "return", "DefaultCryptoMaterialsManager", "(", "master_key_provider", "=", "key_provider", ")" ]
List of gray - scale colors in HSV space as web hex triplets .
def grayspec ( k ) : ll = .5 ul = .8 delta = ( ul - ll ) / k return [ GrayScale ( t ) for t in np . arange ( ll , ul , delta ) ]
439
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/spreadsheet.py#L477-L502
[ "def", "per_file_type_data", "(", "self", ")", ":", "ret", "=", "{", "}", "for", "cache_date", "in", "self", ".", "cache_dates", ":", "data", "=", "self", ".", "_cache_get", "(", "cache_date", ")", "if", "len", "(", "data", "[", "'by_file_type'", "]", ")", "==", "0", ":", "data", "[", "'by_file_type'", "]", "=", "{", "'other'", ":", "0", "}", "ret", "[", "cache_date", "]", "=", "data", "[", "'by_file_type'", "]", "return", "ret" ]
Append one or more records to the end of a numpy recarray or ndarray .
def addrecords ( X , new ) : if isinstance ( new , np . record ) or isinstance ( new , np . void ) or isinstance ( new , tuple ) : new = [ new ] return np . append ( X , utils . fromrecords ( new , type = np . ndarray , dtype = X . dtype ) , axis = 0 )
440
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/spreadsheet.py#L704-L737
[ "def", "lv_grid_generators_bus_bar", "(", "nd", ")", ":", "lv_stats", "=", "{", "}", "for", "la", "in", "nd", ".", "_mv_grid_districts", "[", "0", "]", ".", "lv_load_areas", "(", ")", ":", "for", "lvgd", "in", "la", ".", "lv_grid_districts", "(", ")", ":", "station_neighbors", "=", "list", "(", "lvgd", ".", "lv_grid", ".", "_graph", "[", "lvgd", ".", "lv_grid", ".", "_station", "]", ".", "keys", "(", ")", ")", "# check if nodes of a statio are members of list generators", "station_generators", "=", "[", "x", "for", "x", "in", "station_neighbors", "if", "x", "in", "lvgd", ".", "lv_grid", ".", "generators", "(", ")", "]", "lv_stats", "[", "repr", "(", "lvgd", ".", "lv_grid", ".", "_station", ")", "]", "=", "station_generators", "return", "lv_stats" ]
Add one or more columns to a numpy ndarray .
def addcols ( X , cols , names = None ) : if isinstance ( names , str ) : names = [ n . strip ( ) for n in names . split ( ',' ) ] if isinstance ( cols , list ) : if any ( [ isinstance ( x , np . ndarray ) or isinstance ( x , list ) or isinstance ( x , tuple ) for x in cols ] ) : assert all ( [ len ( x ) == len ( X ) for x in cols ] ) , 'Trying to add columns of wrong length.' assert names != None and len ( cols ) == len ( names ) , 'Number of columns to add must equal number of new names.' cols = utils . fromarrays ( cols , type = np . ndarray , names = names ) else : assert len ( cols ) == len ( X ) , 'Trying to add column of wrong length.' cols = utils . fromarrays ( [ cols ] , type = np . ndarray , names = names ) else : assert isinstance ( cols , np . ndarray ) if cols . dtype . names == None : cols = utils . fromarrays ( [ cols ] , type = np . ndarray , names = names ) Replacements = [ a for a in cols . dtype . names if a in X . dtype . names ] if len ( Replacements ) > 0 : print ( 'Replacing columns' , [ a for a in cols . dtype . names if a in X . dtype . names ] ) return utils . fromarrays ( [ X [ a ] if a not in cols . dtype . names else cols [ a ] for a in X . dtype . names ] + [ cols [ a ] for a in cols . dtype . names if a not in X . dtype . names ] , type = np . ndarray , names = list ( X . dtype . names ) + [ a for a in cols . dtype . names if a not in X . dtype . names ] )
441
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/spreadsheet.py#L740-L803
[ "def", "stop_vm", "(", "self", ")", ":", "if", "self", ".", "vagrant", "is", "not", "None", ":", "if", "self", ".", "destroy", ":", "self", ".", "vagrant", ".", "destroy", "(", ")", "shutil", ".", "rmtree", "(", "self", ".", "vagrant", ".", "root", ",", "ignore_errors", "=", "True", ")", "self", ".", "vagrant", "=", "None", "else", ":", "self", ".", "vagrant", ".", "halt", "(", ")" ]
Delete columns from a numpy ndarray or recarray .
def deletecols ( X , cols ) : if isinstance ( cols , str ) : cols = cols . split ( ',' ) retain = [ n for n in X . dtype . names if n not in cols ] if len ( retain ) > 0 : return X [ retain ] else : return None
442
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/spreadsheet.py#L806-L842
[ "def", "eval_facet_vars", "(", "data", ",", "vars", ",", "env", ")", ":", "# To allow expressions in facet formula", "def", "I", "(", "value", ")", ":", "return", "value", "env", "=", "env", ".", "with_outer_namespace", "(", "{", "'I'", ":", "I", "}", ")", "facet_vals", "=", "pd", ".", "DataFrame", "(", "index", "=", "data", ".", "index", ")", "for", "name", "in", "vars", ":", "if", "name", "in", "data", ":", "# This is a limited solution. If a keyword is", "# part of an expression it will fail in the", "# else statement below", "res", "=", "data", "[", "name", "]", "elif", "str", ".", "isidentifier", "(", "name", ")", ":", "# All other non-statements", "continue", "else", ":", "# Statements", "try", ":", "res", "=", "env", ".", "eval", "(", "name", ",", "inner_namespace", "=", "data", ")", "except", "NameError", ":", "continue", "facet_vals", "[", "name", "]", "=", "res", "return", "facet_vals" ]
Rename a column of a numpy ndarray with structured dtype in - place .
def renamecol ( X , old , new ) : NewNames = tuple ( [ n if n != old else new for n in X . dtype . names ] ) X . dtype . names = NewNames
443
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/spreadsheet.py#L845-L868
[ "async", "def", "set_max_ch_setpoint", "(", "self", ",", "temperature", ",", "timeout", "=", "OTGW_DEFAULT_TIMEOUT", ")", ":", "cmd", "=", "OTGW_CMD_SET_MAX", "status", "=", "{", "}", "ret", "=", "await", "self", ".", "_wait_for_cmd", "(", "cmd", ",", "temperature", ",", "timeout", ")", "if", "ret", "is", "None", ":", "return", "ret", "=", "float", "(", "ret", ")", "status", "[", "DATA_MAX_CH_SETPOINT", "]", "=", "ret", "self", ".", "_update_status", "(", "status", ")", "return", "ret" ]
Replace value old with new everywhere it appears in - place .
def replace ( X , old , new , strict = True , cols = None , rows = None ) : if cols == None : cols = X . dtype . names elif isinstance ( cols , str ) : cols = cols . split ( ',' ) if rows == None : rows = np . ones ( ( len ( X ) , ) , bool ) if strict : new = np . array ( new ) for a in cols : if X . dtype [ a ] < new . dtype : print ( 'WARNING: dtype of column' , a , 'is inferior to dtype of ' , new , 'which may cause problems.' ) try : X [ a ] [ ( X [ a ] == old ) [ rows ] ] = new except : print ( 'Replacement not made on column' , a , '.' ) else : for a in cols : QuickRep = True try : colstr = '' . join ( X [ a ] [ rows ] ) except TypeError : print ( 'Not replacing in column' , a , 'due to type mismatch.' ) else : avoid = [ ord ( o ) for o in utils . uniqify ( old + new + colstr ) ] ok = set ( range ( 256 ) ) . difference ( avoid ) if len ( ok ) > 0 : sep = chr ( list ( ok ) [ 0 ] ) else : ok = set ( range ( 65536 ) ) . difference ( avoid ) if len ( ok ) > 0 : sep = unichr ( list ( ok ) [ 0 ] ) else : print ( 'All unicode characters represented in column' , a , ', can\t replace quickly.' ) QuickRep = False if QuickRep : newrows = np . array ( sep . join ( X [ a ] [ rows ] ) . replace ( old , new ) . split ( sep ) ) else : newrows = np . array ( [ aa . replace ( old , new ) for aa in X [ a ] [ rows ] ] ) X [ a ] [ rows ] = np . cast [ X . dtype [ a ] ] ( newrows ) if newrows . dtype > X . dtype [ a ] : print ( 'WARNING: dtype of column' , a , 'is inferior to the ' 'dtype of its replacement which may cause problems ' '(ends of strings might get chopped off).' )
444
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/spreadsheet.py#L871-L964
[ "def", "configure", "(", "self", ",", "voltage_range", "=", "RANGE_32V", ",", "gain", "=", "GAIN_AUTO", ",", "bus_adc", "=", "ADC_12BIT", ",", "shunt_adc", "=", "ADC_12BIT", ")", ":", "self", ".", "__validate_voltage_range", "(", "voltage_range", ")", "self", ".", "_voltage_range", "=", "voltage_range", "if", "self", ".", "_max_expected_amps", "is", "not", "None", ":", "if", "gain", "==", "self", ".", "GAIN_AUTO", ":", "self", ".", "_auto_gain_enabled", "=", "True", "self", ".", "_gain", "=", "self", ".", "_determine_gain", "(", "self", ".", "_max_expected_amps", ")", "else", ":", "self", ".", "_gain", "=", "gain", "else", ":", "if", "gain", "!=", "self", ".", "GAIN_AUTO", ":", "self", ".", "_gain", "=", "gain", "else", ":", "self", ".", "_auto_gain_enabled", "=", "True", "self", ".", "_gain", "=", "self", ".", "GAIN_1_40MV", "logging", ".", "info", "(", "'gain set to %.2fV'", "%", "self", ".", "__GAIN_VOLTS", "[", "self", ".", "_gain", "]", ")", "logging", ".", "debug", "(", "self", ".", "__LOG_MSG_1", "%", "(", "self", ".", "_shunt_ohms", ",", "self", ".", "__BUS_RANGE", "[", "voltage_range", "]", ",", "self", ".", "__GAIN_VOLTS", "[", "self", ".", "_gain", "]", ",", "self", ".", "__max_expected_amps_to_string", "(", "self", ".", "_max_expected_amps", ")", ",", "bus_adc", ",", "shunt_adc", ")", ")", "self", ".", "_calibrate", "(", "self", ".", "__BUS_RANGE", "[", "voltage_range", "]", ",", "self", ".", "__GAIN_VOLTS", "[", "self", ".", "_gain", "]", ",", "self", ".", "_max_expected_amps", ")", "self", ".", "_configure", "(", "voltage_range", ",", "self", ".", "_gain", ",", "bus_adc", ",", "shunt_adc", ")" ]
Vertically stack a sequence of numpy ndarrays with structured dtype
def rowstack ( seq , mode = 'nulls' , nullvals = None ) : if nullvals == None : nullvals = utils . DEFAULT_NULLVALUEFORMAT #newseq = [ss for ss in seq if len(ss) > 0] if len ( seq ) > 1 : assert mode in [ 'commons' , 'nulls' , 'abort' ] , ( '"mode" argument must either by "commons", "abort", or "nulls".' ) if mode == 'abort' : if not all ( [ set ( l . dtype . names ) == set ( seq [ 0 ] . dtype . names ) for l in seq ] ) : raise ValueError ( 'Some column names are different.' ) else : mode = 'commons' if mode == 'nulls' : names = utils . uniqify ( utils . listunion ( [ list ( s . dtype . names ) for s in seq if s . dtype . names != None ] ) ) formats = [ max ( [ s . dtype [ att ] for s in seq if s . dtype . names != None and att in s . dtype . names ] ) . str for att in names ] dtype = np . dtype ( zip ( names , formats ) ) return utils . fromarrays ( [ utils . listunion ( [ s [ att ] . tolist ( ) if ( s . dtype . names != None and att in s . dtype . names ) else [ nullvals ( format ) ] * len ( s ) for s in seq ] ) for ( att , format ) in zip ( names , formats ) ] , type = np . ndarray , dtype = dtype ) elif mode == 'commons' : names = [ x for x in seq [ 0 ] . dtype . names if all ( [ x in l . dtype . names for l in seq [ 1 : ] ] ) ] formats = [ max ( [ a . dtype [ att ] for a in seq ] ) . str for att in names ] return utils . fromrecords ( utils . listunion ( [ ar . tolist ( ) for ar in seq ] ) , type = np . ndarray , names = names , formats = formats ) else : return seq [ 0 ]
445
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/spreadsheet.py#L967-L1044
[ "def", "remove_experiment", "(", "self", ",", "id", ")", ":", "if", "id", "in", "self", ".", "experiments", ":", "self", ".", "experiments", ".", "pop", "(", "id", ")", "self", ".", "write_file", "(", ")" ]
Horizontally stack a sequence of numpy ndarrays with structured dtypes
def colstack ( seq , mode = 'abort' , returnnaming = False ) : assert mode in [ 'first' , 'drop' , 'abort' , 'rename' ] , 'mode argument must take on value "first","drop", "rename", or "abort".' AllNames = utils . uniqify ( utils . listunion ( [ list ( l . dtype . names ) for l in seq ] ) ) NameList = [ ( x , [ i for i in range ( len ( seq ) ) if x in seq [ i ] . dtype . names ] ) for x in AllNames ] Commons = [ x [ 0 ] for x in NameList if len ( x [ 1 ] ) > 1 ] if len ( Commons ) > 0 or mode == 'first' : if mode == 'abort' : raise ValueError ( 'There are common column names with differing ' + 'values in the columns' ) elif mode == 'drop' : Names = [ ( L [ 0 ] , x , x ) for ( x , L ) in NameList if x not in Commons ] elif mode == 'rename' : NameDict = dict ( NameList ) Names = utils . listunion ( [ [ ( i , n , n ) if len ( NameDict [ n ] ) == 1 else ( i , n , n + '_' + str ( i ) ) for n in s . dtype . names ] for ( i , s ) in enumerate ( seq ) ] ) else : Names = [ ( L [ 0 ] , x , x ) for ( x , L ) in NameList ] if returnnaming : return utils . fromarrays ( [ seq [ i ] [ x ] for ( i , x , y ) in Names ] , type = np . ndarray , names = zip ( * Names ) [ 2 ] ) , Names else : return utils . fromarrays ( [ seq [ i ] [ x ] for ( i , x , y ) in Names ] , type = np . ndarray , names = zip ( * Names ) [ 2 ] )
446
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/spreadsheet.py#L1047-L1123
[ "def", "expire", "(", "self", ",", "name", ",", "time", ")", ":", "with", "self", ".", "pipe", "as", "pipe", ":", "return", "pipe", ".", "expire", "(", "self", ".", "redis_key", "(", "name", ")", ",", "time", ")" ]
Renames overlapping column names of numpy ndarrays with structured dtypes
def DEFAULT_RENAMER ( L , Names = None ) : if isinstance ( L , dict ) : Names = L . keys ( ) LL = L . values ( ) else : if Names == None : Names = range ( len ( L ) ) else : assert len ( Names ) == len ( L ) LL = L commons = Commons ( [ l . dtype . names for l in LL ] ) D = { } for ( i , l ) in zip ( Names , LL ) : d = { } for c in commons : if c in l . dtype . names : d [ c ] = c + '_' + str ( i ) if d : D [ i ] = d return D
447
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/spreadsheet.py#L1520-L1571
[ "def", "remove", "(", "self", ",", "id_option_pool", ")", ":", "if", "not", "is_valid_int_param", "(", "id_option_pool", ")", ":", "raise", "InvalidParameterError", "(", "u'The identifier of Option Pool is invalid or was not informed.'", ")", "url", "=", "'api/pools/options/'", "+", "str", "(", "id_option_pool", ")", "+", "'/'", "return", "self", ".", "delete", "(", "url", ")" ]
Helioviewer . org and JHelioviewer operate off of JPEG2000 formatted image data generated from science - quality FITS files . Use the APIs below to interact directly with these intermediary JPEG2000 files . Download a JP2 image for the specified datasource that is the closest match in time to the date requested .
def getjp2image ( date , sourceId = None , observatory = None , instrument = None , detector = None , measurement = None ) : base_url = 'http://helioviewer.org/api/v1/getJP2Image/?' req_url = '' try : validate_iso8601 ( date ) if not date [ - 1 : ] == 'Z' : date += 'Z' base_url += 'date=' + date + '&' except : raise ValueError ( "Your date input is not in iso8601 format. ex: 2014-01-01T23:59:59" ) if sourceId : if not isinstance ( sourceId , int ) : logger . error ( "The sourceId argument should be an int, ignoring it" ) else : base_url += "sourceId=" + str ( sourceId ) + "&" if observatory : if not isinstance ( observatory , str ) : logger . error ( "The observatory argument should be a str, ignoring it" ) else : base_url += "observatory=" + observatory + "&" if instrument : if not isinstance ( instrument , str ) : logger . error ( "The instrument argument should be a str, ignoring it" ) else : base_url += "instrument=" + instrument + "&" if detector : if not isinstance ( detector , str ) : logger . error ( "The detector argument should be a str, ignoring it" ) else : base_url += "detector=" + detector + "&" if measurement : if not isinstance ( measurement , str ) : logger . error ( "The measurement argument should be a str, ignoring it" ) else : base_url += "measurement=" + measurement + "&" req_url += base_url + "jpip=true" return dispatch_http_get ( req_url )
448
https://github.com/emirozer/bowshock/blob/9f5e053f1d54995b833b83616f37c67178c3e840/bowshock/helioviewer.py#L10-L85
[ "def", "clear_weights", "(", "self", ")", ":", "self", ".", "weighted", "=", "False", "for", "layer", "in", "self", ".", "layer_list", ":", "layer", ".", "weights", "=", "None" ]
JSON loader object hook
def loads_loader ( load_module : types . ModuleType , pairs : Dict [ str , str ] ) -> Optional [ JSGValidateable ] : cntxt = load_module . _CONTEXT # If the type element is a member of the JSON, load it possible_type = pairs [ cntxt . TYPE ] if cntxt . TYPE in pairs else None target_class = getattr ( load_module , possible_type , None ) if isinstance ( possible_type , str ) else None if target_class : return target_class ( * * pairs ) # See whether there are any exception types that are valid for the incoming data for type_exception in cntxt . TYPE_EXCEPTIONS : if not hasattr ( load_module , type_exception ) : raise ValueError ( UNKNOWN_TYPE_EXCEPTION . format ( type_exception ) ) target_class = getattr ( load_module , type_exception ) target_strict = target_class . _strict target_class . _strict = False try : rval = target_class ( * * pairs ) finally : target_class . _strict = target_strict if is_valid ( rval ) : return rval # If there is not a type variable and nothing fits, just load up the first (and perhaps only) exception # It will later fail any is_valid tests if not cntxt . TYPE and cntxt . TYPE_EXCEPTIONS : return getattr ( load_module , cntxt . TYPE_EXCEPTIONS [ 0 ] ) ( * * pairs ) if cntxt . TYPE in pairs : raise ValueError ( f'Unknown reference type: "{cntxt.TYPE}": "{pairs[cntxt.TYPE]}"' ) else : raise ValueError ( f'Missing "{cntxt.TYPE}" element' )
449
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/jsglib/loader.py#L17-L54
[ "def", "get_station_by_name", "(", "self", ",", "station_name", ",", "num_minutes", "=", "None", ",", "direction", "=", "None", ",", "destination", "=", "None", ",", "stops_at", "=", "None", ")", ":", "url", "=", "self", ".", "api_base_url", "+", "'getStationDataByNameXML'", "params", "=", "{", "'StationDesc'", ":", "station_name", "}", "if", "num_minutes", ":", "url", "=", "url", "+", "'_withNumMins'", "params", "[", "'NumMins'", "]", "=", "num_minutes", "response", "=", "requests", ".", "get", "(", "url", ",", "params", "=", "params", ",", "timeout", "=", "10", ")", "if", "response", ".", "status_code", "!=", "200", ":", "return", "[", "]", "trains", "=", "self", ".", "_parse_station_data", "(", "response", ".", "content", ")", "if", "direction", "is", "not", "None", "or", "destination", "is", "not", "None", ":", "return", "self", ".", "_prune_trains", "(", "trains", ",", "direction", "=", "direction", ",", "destination", "=", "destination", ",", "stops_at", "=", "stops_at", ")", "return", "trains" ]
Convert a JSON string into a JSGObject
def loads ( s : str , load_module : types . ModuleType , * * kwargs ) : return json . loads ( s , object_hook = lambda pairs : loads_loader ( load_module , pairs ) , * * kwargs )
450
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/jsglib/loader.py#L57-L65
[ "def", "kill", "(", "self", ")", ":", "BaseShellOperator", ".", "_close_process_input_stdin", "(", "self", ".", "_batcmd", ".", "batch_to_file_s", ")", "BaseShellOperator", ".", "_wait_process", "(", "self", ".", "_process", ",", "self", ".", "_batcmd", ".", "sh_cmd", ",", "self", ".", "_success_exitcodes", ")", "BaseShellOperator", ".", "_rm_process_input_tmpfiles", "(", "self", ".", "_batcmd", ".", "batch_to_file_s", ")", "self", ".", "_process", "=", "None" ]
Convert a file name or file - like object containing stringified JSON into a JSGObject
def load ( fp : Union [ TextIO , str ] , load_module : types . ModuleType , * * kwargs ) : if isinstance ( fp , str ) : with open ( fp ) as f : return loads ( f . read ( ) , load_module , * * kwargs ) else : return loads ( fp . read ( ) , load_module , * * kwargs )
451
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/jsglib/loader.py#L68-L80
[ "def", "_send_and_wait", "(", "self", ",", "*", "*", "kwargs", ")", ":", "frame_id", "=", "self", ".", "next_frame_id", "kwargs", ".", "update", "(", "dict", "(", "frame_id", "=", "frame_id", ")", ")", "self", ".", "_send", "(", "*", "*", "kwargs", ")", "timeout", "=", "datetime", ".", "now", "(", ")", "+", "const", ".", "RX_TIMEOUT", "while", "datetime", ".", "now", "(", ")", "<", "timeout", ":", "try", ":", "frame", "=", "self", ".", "_rx_frames", ".", "pop", "(", "frame_id", ")", "raise_if_error", "(", "frame", ")", "return", "frame", "except", "KeyError", ":", "sleep", "(", "0.1", ")", "continue", "_LOGGER", ".", "exception", "(", "\"Did not receive response within configured timeout period.\"", ")", "raise", "exceptions", ".", "ZigBeeResponseTimeout", "(", ")" ]
native isinstance_ with the test for typing . Union overridden
def isinstance_ ( x , A_tuple ) : if is_union ( A_tuple ) : return any ( isinstance_ ( x , t ) for t in A_tuple . __args__ ) elif getattr ( A_tuple , '__origin__' , None ) is not None : return isinstance ( x , A_tuple . __origin__ ) else : return isinstance ( x , A_tuple )
452
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/jsglib/loader.py#L83-L90
[ "def", "download_next_song", "(", "self", ",", "song", ")", ":", "dl_ydl_opts", "=", "dict", "(", "ydl_opts", ")", "dl_ydl_opts", "[", "\"progress_hooks\"", "]", "=", "[", "self", ".", "ytdl_progress_hook", "]", "dl_ydl_opts", "[", "\"outtmpl\"", "]", "=", "self", ".", "output_format", "# Move the songs from the next cache to the current cache", "self", ".", "move_next_cache", "(", ")", "self", ".", "state", "=", "'ready'", "self", ".", "play_empty", "(", ")", "# Download the file and create the stream", "with", "youtube_dl", ".", "YoutubeDL", "(", "dl_ydl_opts", ")", "as", "ydl", ":", "try", ":", "ydl", ".", "download", "(", "[", "song", "]", ")", "except", "DownloadStreamException", ":", "# This is a livestream, use the appropriate player", "future", "=", "asyncio", ".", "run_coroutine_threadsafe", "(", "self", ".", "create_stream_player", "(", "song", ",", "dl_ydl_opts", ")", ",", "client", ".", "loop", ")", "try", ":", "future", ".", "result", "(", ")", "except", "Exception", "as", "e", ":", "logger", ".", "exception", "(", "e", ")", "self", ".", "vafter_ts", "(", ")", "return", "except", "PermissionError", ":", "# File is still in use, it'll get cleared next time", "pass", "except", "youtube_dl", ".", "utils", ".", "DownloadError", "as", "e", ":", "self", ".", "logger", ".", "exception", "(", "e", ")", "self", ".", "statuslog", ".", "error", "(", "e", ")", "self", ".", "vafter_ts", "(", ")", "return", "except", "Exception", "as", "e", ":", "self", ".", "logger", ".", "exception", "(", "e", ")", "self", ".", "vafter_ts", "(", ")", "return" ]
Determine whether obj is valid
def is_valid ( obj : JSGValidateable , log : Optional [ Union [ TextIO , Logger ] ] = None ) -> bool : return obj . _is_valid ( log )
453
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/jsglib/loader.py#L93-L99
[ "def", "_restart_session", "(", "self", ",", "session", ")", ":", "# remove old session key, if socket is None, that means the", "# session was closed by user and there is no need to restart.", "if", "session", ".", "socket", "is", "not", "None", ":", "self", ".", "log", ".", "info", "(", "\"Attempting restart session for Monitor Id %s.\"", "%", "session", ".", "monitor_id", ")", "del", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "session", ".", "stop", "(", ")", "session", ".", "start", "(", ")", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "=", "session" ]
Given a set of argument tuples , set their values in a data dictionary if not blank
def arg_tup_to_dict ( argument_tuples ) : data = dict ( ) for arg_name , arg_val in argument_tuples : if arg_val is not None : if arg_val is True : arg_val = 'true' elif arg_val is False : arg_val = 'false' data [ arg_name ] = arg_val return data
454
https://github.com/cydrobolt/pifx/blob/c9de9c2695c3e6e72de4aa0de47b78fc13c457c3/pifx/util.py#L32-L43
[ "def", "patch_lustre_path", "(", "f_path", ")", ":", "if", "CHECK_LUSTRE_PATH_LEN", "and", "len", "(", "f_path", ")", "==", "60", ":", "if", "os", ".", "path", ".", "isabs", "(", "f_path", ")", ":", "f_path", "=", "'/.'", "+", "f_path", "else", ":", "f_path", "=", "'./'", "+", "f_path", "return", "f_path" ]
Raise appropriate exceptions if necessary .
def handle_error ( response ) : status_code = response . status_code if status_code not in A_OK_HTTP_CODES : error_explanation = A_ERROR_HTTP_CODES . get ( status_code ) raise_error = "{}: {}" . format ( status_code , error_explanation ) raise Exception ( raise_error ) else : return True
455
https://github.com/cydrobolt/pifx/blob/c9de9c2695c3e6e72de4aa0de47b78fc13c457c3/pifx/util.py#L54-L63
[ "def", "marker", "(", "self", ",", "marker_name", "=", "None", ",", "label", "=", "None", ",", "color", "=", "None", ",", "retina", "=", "False", ")", ":", "# Check for marker_name.", "if", "marker_name", "is", "None", ":", "raise", "ValidationError", "(", "\"marker_name is a required argument\"", ")", "# Validate marker_name and retina.", "marker_name", "=", "self", ".", "_validate_marker_name", "(", "marker_name", ")", "retina", "=", "self", ".", "_validate_retina", "(", "retina", ")", "# Create dict and start building URI resource path.", "path_values", "=", "dict", "(", "marker_name", "=", "marker_name", ")", "path_part", "=", "\"/marker/{marker_name}\"", "# Validate label, update dict,", "# and continue building URI resource path.", "if", "label", "is", "not", "None", ":", "label", "=", "self", ".", "_validate_label", "(", "label", ")", "path_values", "[", "\"label\"", "]", "=", "label", "path_part", "+=", "\"-{label}\"", "# Validate color, update dict,", "# and continue building URI resource path.", "if", "color", "is", "not", "None", ":", "color", "=", "self", ".", "_validate_color", "(", "color", ")", "path_values", "[", "\"color\"", "]", "=", "color", "path_part", "+=", "\"+{color}\"", "uri", "=", "URITemplate", "(", "self", ".", "base_uri", "+", "path_part", ")", ".", "expand", "(", "*", "*", "path_values", ")", "# Finish building URI resource path.", "path_part", "=", "\"{}.png\"", ".", "format", "(", "retina", ")", "uri", "+=", "path_part", "# Send HTTP GET request.", "response", "=", "self", ".", "session", ".", "get", "(", "uri", ")", "self", ".", "handle_http_error", "(", "response", ")", "return", "response" ]
Context manager entry ; open wallet . For use when keeping agent open across multiple calls .
async def open ( self ) -> '_BaseAgent' : LOGGER . debug ( '_BaseAgent.open >>>' ) # Do not open pool independently: let relying party decide when to go on-line and off-line await self . wallet . open ( ) LOGGER . debug ( '_BaseAgent.open <<<' ) return self
456
https://github.com/PSPC-SPAC-buyandsell/von_agent/blob/0b1c17cca3bd178b6e6974af84dbac1dfce5cf45/von_agent/agent/base.py#L129-L143
[ "def", "get_column_metadata", "(", "conn", ",", "table", ":", "str", ",", "schema", "=", "'public'", ")", ":", "query", "=", "\"\"\"\\\nSELECT\n attname as name,\n format_type(atttypid, atttypmod) AS data_type,\n NOT attnotnull AS nullable\nFROM pg_catalog.pg_attribute\nWHERE attrelid=%s::regclass\n AND attnum > 0 AND NOT attisdropped\nORDER BY attnum;\"\"\"", "qualified_name", "=", "compile_qualified_name", "(", "table", ",", "schema", "=", "schema", ")", "for", "record", "in", "select_dict", "(", "conn", ",", "query", ",", "params", "=", "(", "qualified_name", ",", ")", ")", ":", "yield", "record" ]
Get revocation registry definition from ledger by its identifier . Raise AbsentRevReg if there is no such revocation registry , logging any error condition and raising BadLedgerTxn on bad request .
async def _get_rev_reg_def ( self , rr_id : str ) -> str : LOGGER . debug ( '_BaseAgent._get_rev_reg_def >>> rr_id: %s' , rr_id ) rv_json = json . dumps ( { } ) with REVO_CACHE . lock : revo_cache_entry = REVO_CACHE . get ( rr_id , None ) rr_def = revo_cache_entry . rev_reg_def if revo_cache_entry else None if rr_def : LOGGER . info ( '_BaseAgent._get_rev_reg_def: rev reg def for %s from cache' , rr_id ) rv_json = json . dumps ( rr_def ) else : get_rrd_req_json = await ledger . build_get_revoc_reg_def_request ( self . did , rr_id ) resp_json = await self . _submit ( get_rrd_req_json ) try : ( _ , rv_json ) = await ledger . parse_get_revoc_reg_def_response ( resp_json ) rr_def = json . loads ( rv_json ) except IndyError : # ledger replied, but there is no such rev reg LOGGER . debug ( '_BaseAgent._get_rev_reg_def: <!< no rev reg exists on %s' , rr_id ) raise AbsentRevReg ( 'No rev reg exists on {}' . format ( rr_id ) ) if revo_cache_entry is None : REVO_CACHE [ rr_id ] = RevoCacheEntry ( rr_def , None ) else : REVO_CACHE [ rr_id ] . rev_reg_def = rr_def LOGGER . debug ( '_BaseAgent._get_rev_reg_def <<< %s' , rv_json ) return rv_json
457
https://github.com/PSPC-SPAC-buyandsell/von_agent/blob/0b1c17cca3bd178b6e6974af84dbac1dfce5cf45/von_agent/agent/base.py#L289-L329
[ "def", "blend", "(", "self", ",", "other", ",", "ratio", "=", "0.5", ")", ":", "keep", "=", "1.0", "-", "ratio", "if", "not", "self", ".", "space", "==", "other", ".", "space", ":", "raise", "Exception", "(", "\"Colors must belong to the same color space.\"", ")", "values", "=", "tuple", "(", "(", "(", "u", "*", "keep", ")", "+", "(", "v", "*", "ratio", ")", "for", "u", ",", "v", "in", "zip", "(", "self", ".", "values", ",", "other", ".", "values", ")", ")", ")", "return", "self", ".", "__class__", "(", "self", ".", "space", ",", "*", "values", ")" ]
Get credential definition from ledger by its identifier .
async def get_cred_def ( self , cd_id : str ) -> str : LOGGER . debug ( '_BaseAgent.get_cred_def >>> cd_id: %s' , cd_id ) rv_json = json . dumps ( { } ) with CRED_DEF_CACHE . lock : if cd_id in CRED_DEF_CACHE : LOGGER . info ( '_BaseAgent.get_cred_def: got cred def for %s from cache' , cd_id ) rv_json = json . dumps ( CRED_DEF_CACHE [ cd_id ] ) LOGGER . debug ( '_BaseAgent.get_cred_def <<< %s' , rv_json ) return rv_json req_json = await ledger . build_get_cred_def_request ( self . did , cd_id ) resp_json = await self . _submit ( req_json ) resp = json . loads ( resp_json ) if not ( 'result' in resp and resp [ 'result' ] . get ( 'data' , None ) ) : LOGGER . debug ( '_BaseAgent.get_cred_def: <!< no cred def exists on %s' , cd_id ) raise AbsentCredDef ( 'No cred def exists on {}' . format ( cd_id ) ) try : ( _ , rv_json ) = await ledger . parse_get_cred_def_response ( resp_json ) except IndyError : # ledger replied, but there is no such cred def LOGGER . debug ( '_BaseAgent.get_cred_def: <!< no cred def exists on %s' , cd_id ) raise AbsentCredDef ( 'No cred def exists on {}' . format ( cd_id ) ) CRED_DEF_CACHE [ cd_id ] = json . loads ( rv_json ) LOGGER . info ( '_BaseAgent.get_cred_def: got cred def %s from ledger' , cd_id ) LOGGER . debug ( '_BaseAgent.get_cred_def <<< %s' , rv_json ) return rv_json
458
https://github.com/PSPC-SPAC-buyandsell/von_agent/blob/0b1c17cca3bd178b6e6974af84dbac1dfce5cf45/von_agent/agent/base.py#L331-L371
[ "def", "_zero_pad_gaps", "(", "tr", ",", "gaps", ",", "fill_gaps", "=", "True", ")", ":", "start_in", ",", "end_in", "=", "(", "tr", ".", "stats", ".", "starttime", ",", "tr", ".", "stats", ".", "endtime", ")", "for", "gap", "in", "gaps", ":", "stream", "=", "Stream", "(", ")", "if", "gap", "[", "'starttime'", "]", ">", "tr", ".", "stats", ".", "starttime", ":", "stream", "+=", "tr", ".", "slice", "(", "tr", ".", "stats", ".", "starttime", ",", "gap", "[", "'starttime'", "]", ")", ".", "copy", "(", ")", "if", "gap", "[", "'endtime'", "]", "<", "tr", ".", "stats", ".", "endtime", ":", "# Note this can happen when gaps are calculated for a trace that", "# is longer than `length`, e.g. gaps are calculated pre-trim.", "stream", "+=", "tr", ".", "slice", "(", "gap", "[", "'endtime'", "]", ",", "tr", ".", "stats", ".", "endtime", ")", ".", "copy", "(", ")", "tr", "=", "stream", ".", "merge", "(", ")", "[", "0", "]", "if", "fill_gaps", ":", "tr", "=", "tr", ".", "split", "(", ")", "tr", "=", "tr", ".", "detrend", "(", ")", "tr", "=", "tr", ".", "merge", "(", "fill_value", "=", "0", ")", "[", "0", "]", "# Need to check length - if a gap happened overlapping the end or start", "# of the trace this will be lost.", "if", "tr", ".", "stats", ".", "starttime", "!=", "start_in", ":", "# pad with zeros", "tr", ".", "data", "=", "np", ".", "concatenate", "(", "[", "np", ".", "zeros", "(", "int", "(", "tr", ".", "stats", ".", "starttime", "-", "start_in", ")", ")", ",", "tr", ".", "data", "]", ")", "tr", ".", "stats", ".", "starttime", "=", "start_in", "if", "tr", ".", "stats", ".", "endtime", "!=", "end_in", ":", "tr", ".", "data", "=", "np", ".", "concatenate", "(", "[", "tr", ".", "data", ",", "np", ".", "zeros", "(", "int", "(", "end_in", "-", "tr", ".", "stats", ".", "endtime", ")", ")", "]", ")", "return", "tr" ]
Determine whether etype is a Union
def is_union ( etype ) -> bool : return getattr ( etype , '__origin__' , None ) is not None and getattr ( etype . __origin__ , '_name' , None ) and etype . __origin__ . _name == 'Union'
459
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/jsglib/typing_patch_37.py#L13-L17
[ "def", "WSGIHandler", "(", "self", ")", ":", "sdm", "=", "werkzeug_wsgi", ".", "SharedDataMiddleware", "(", "self", ",", "{", "\"/\"", ":", "config", ".", "CONFIG", "[", "\"AdminUI.document_root\"", "]", ",", "}", ")", "# Use DispatcherMiddleware to make sure that SharedDataMiddleware is not", "# used at all if the URL path doesn't start with \"/static\". This is a", "# workaround for cases when unicode URLs are used on systems with", "# non-unicode filesystems (as detected by Werkzeug). In this case", "# SharedDataMiddleware may fail early while trying to convert the", "# URL into the file path and not dispatch the call further to our own", "# WSGI handler.", "return", "werkzeug_wsgi", ".", "DispatcherMiddleware", "(", "self", ",", "{", "\"/static\"", ":", "sdm", ",", "}", ")" ]
Unset the TypedFields on the input entity .
def unset ( entity , * types ) : if not types : types = ( TypedField , ) fields = list ( entity . _fields . keys ( ) ) remove = ( x for x in fields if isinstance ( x , types ) ) for field in remove : del entity . _fields [ field ]
460
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/fields.py#L17-L32
[ "def", "gc_velocity_update", "(", "particle", ",", "social", ",", "state", ")", ":", "gbest", "=", "state", ".", "swarm", "[", "gbest_idx", "(", "state", ".", "swarm", ")", "]", ".", "position", "if", "not", "np", ".", "array_equal", "(", "gbest", ",", "particle", ".", "position", ")", ":", "return", "std_velocity", "(", "particle", ",", "social", ",", "state", ")", "rho", "=", "state", ".", "params", "[", "'rho'", "]", "inertia", "=", "state", ".", "params", "[", "'inertia'", "]", "v_max", "=", "state", ".", "params", "[", "'v_max'", "]", "size", "=", "particle", ".", "position", ".", "size", "r2", "=", "state", ".", "rng", ".", "uniform", "(", "0.0", ",", "1.0", ",", "size", ")", "velocity", "=", "__gc_velocity_equation__", "(", "inertia", ",", "rho", ",", "r2", ",", "particle", ",", "gbest", ")", "return", "__clamp__", "(", "velocity", ",", "v_max", ")" ]
Return True if the input TypedField field contains instance attributes that match the input parameters .
def _matches ( field , params ) : fieldattrs = six . iteritems ( params ) return all ( getattr ( field , attr ) == val for attr , val in fieldattrs )
461
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/fields.py#L35-L47
[ "def", "removeApplicationManifest", "(", "self", ",", "pchApplicationManifestFullPath", ")", ":", "fn", "=", "self", ".", "function_table", ".", "removeApplicationManifest", "result", "=", "fn", "(", "pchApplicationManifestFullPath", ")", "return", "result" ]
Iterate over the input class members and yield their TypedFields .
def iterfields ( klass ) : is_field = lambda x : isinstance ( x , TypedField ) for name , field in inspect . getmembers ( klass , predicate = is_field ) : yield name , field
462
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/fields.py#L50-L62
[ "def", "upload_prev", "(", "ver", ",", "doc_root", "=", "'./'", ")", ":", "local_dir", "=", "doc_root", "+", "'build/html'", "remote_dir", "=", "'/usr/share/nginx/pandas/pandas-docs/version/%s/'", "%", "ver", "cmd", "=", "'cd %s; rsync -avz . pandas@pandas.pydata.org:%s -essh'", "cmd", "=", "cmd", "%", "(", "local_dir", ",", "remote_dir", ")", "print", "cmd", "if", "os", ".", "system", "(", "cmd", ")", ":", "raise", "SystemExit", "(", "'Upload to %s from %s failed'", "%", "(", "remote_dir", ",", "local_dir", ")", ")", "local_dir", "=", "doc_root", "+", "'build/latex'", "pdf_cmd", "=", "'cd %s; scp pandas.pdf pandas@pandas.pydata.org:%s'", "pdf_cmd", "=", "pdf_cmd", "%", "(", "local_dir", ",", "remote_dir", ")", "if", "os", ".", "system", "(", "pdf_cmd", ")", ":", "raise", "SystemExit", "(", "'Upload PDF to %s from %s failed'", "%", "(", "ver", ",", "doc_root", ")", ")" ]
Validate and clean a candidate value for this field .
def _clean ( self , value ) : if value is None : return None elif self . type_ is None : return value elif self . check_type ( value ) : return value elif self . is_type_castable : # noqa return self . type_ ( value ) error_fmt = "%s must be a %s, not a %s" error = error_fmt % ( self . name , self . type_ , type ( value ) ) raise TypeError ( error )
463
https://github.com/CybOXProject/mixbox/blob/9097dae7a433f5b98c18171c4a5598f69a7d30af/mixbox/fields.py#L177-L190
[ "def", "writearff", "(", "data", ",", "filename", ",", "relation_name", "=", "None", ",", "index", "=", "True", ")", ":", "if", "isinstance", "(", "filename", ",", "str", ")", ":", "fp", "=", "open", "(", "filename", ",", "'w'", ")", "if", "relation_name", "is", "None", ":", "relation_name", "=", "os", ".", "path", ".", "basename", "(", "filename", ")", "else", ":", "fp", "=", "filename", "if", "relation_name", "is", "None", ":", "relation_name", "=", "\"pandas\"", "try", ":", "data", "=", "_write_header", "(", "data", ",", "fp", ",", "relation_name", ",", "index", ")", "fp", ".", "write", "(", "\"\\n\"", ")", "_write_data", "(", "data", ",", "fp", ")", "finally", ":", "fp", ".", "close", "(", ")" ]
Remove a collection . Given a collection_id , branch , and optionally an author , remove a collection on the given branch and attribute the commit to author . Returns the SHA of the commit on branch .
def remove_collection ( self , first_arg , sec_arg , third_arg , fourth_arg = None , commit_msg = None ) : if fourth_arg is None : collection_id , branch_name , author = first_arg , sec_arg , third_arg gh_user = branch_name . split ( '_collection_' ) [ 0 ] parent_sha = self . get_master_sha ( ) else : gh_user , collection_id , parent_sha , author = first_arg , sec_arg , third_arg , fourth_arg if commit_msg is None : commit_msg = "Delete Collection '%s' via OpenTree API" % collection_id return self . _remove_document ( gh_user , collection_id , parent_sha , author , commit_msg )
464
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/collections_store/git_actions.py#L92-L107
[ "def", "health", "(", "self", ")", ":", "return", "json", ".", "dumps", "(", "dict", "(", "uptime", "=", "'{:.3f}s'", ".", "format", "(", "(", "time", ".", "time", "(", ")", "-", "self", ".", "_start_time", ")", ")", ")", ")" ]
Load caches and archive enough to go offline and be able to verify proof on content marked as of interest in the configuration .
async def load_cache ( self , archive : bool = False ) -> int : LOGGER . debug ( 'Verifier.load_cache >>> archive: %s' , archive ) rv = int ( time ( ) ) for s_id in self . cfg . get ( 'archive-on-close' , { } ) . get ( 'schema_id' , { } ) : with SCHEMA_CACHE . lock : await self . get_schema ( s_id ) for cd_id in self . cfg . get ( 'archive-on-close' , { } ) . get ( 'cred_def_id' , { } ) : with CRED_DEF_CACHE . lock : await self . get_cred_def ( cd_id ) for rr_id in self . cfg . get ( 'archive-on-close' , { } ) . get ( 'rev_reg_id' , { } ) : await self . _get_rev_reg_def ( rr_id ) with REVO_CACHE . lock : revo_cache_entry = REVO_CACHE . get ( rr_id , None ) if revo_cache_entry : try : await revo_cache_entry . get_state_json ( self . _build_rr_state_json , rv , rv ) except ClosedPool : LOGGER . warning ( 'Verifier %s is offline from pool %s, cannot update revo cache reg state for %s to %s' , self . wallet . name , self . pool . name , rr_id , rv ) if archive : Caches . archive ( self . dir_cache ) LOGGER . debug ( 'Verifier.load_cache <<< %s' , rv ) return rv
465
https://github.com/PSPC-SPAC-buyandsell/von_agent/blob/0b1c17cca3bd178b6e6974af84dbac1dfce5cf45/von_agent/agent/verifier.py#L165-L204
[ "def", "console_get_default_background", "(", "con", ":", "tcod", ".", "console", ".", "Console", ")", "->", "Color", ":", "return", "Color", ".", "_new_from_cdata", "(", "lib", ".", "TCOD_console_get_default_background", "(", "_console", "(", "con", ")", ")", ")" ]
Grant permission if owner or admin .
def can ( self ) : return str ( current_user . get_id ( ) ) == str ( self . community . id_user ) or DynamicPermission ( ActionNeed ( 'admin-access' ) ) . can ( )
466
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/permissions.py#L46-L49
[ "def", "restart", "(", "self", ",", "container", ",", "timeout", "=", "10", ")", ":", "params", "=", "{", "'t'", ":", "timeout", "}", "url", "=", "self", ".", "_url", "(", "\"/containers/{0}/restart\"", ",", "container", ")", "conn_timeout", "=", "self", ".", "timeout", "if", "conn_timeout", "is", "not", "None", ":", "conn_timeout", "+=", "timeout", "res", "=", "self", ".", "_post", "(", "url", ",", "params", "=", "params", ",", "timeout", "=", "conn_timeout", ")", "self", ".", "_raise_for_status", "(", "res", ")" ]
Take the union of a list of lists .
def listunion ( ListOfLists ) : u = [ ] for s in ListOfLists : if s != None : u . extend ( s ) return u
467
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/utils.py#L49-L83
[ "def", "led", "(", "host", ",", "seq", ",", "anim", ",", "f", ",", "d", ")", ":", "at", "(", "host", ",", "'LED'", ",", "seq", ",", "[", "anim", ",", "float", "(", "f", ")", ",", "d", "]", ")" ]
Returns a null value for each of various kinds of test values .
def DEFAULT_NULLVALUE ( test ) : return False if isinstance ( test , bool ) else 0 if isinstance ( test , int ) else 0.0 if isinstance ( test , float ) else ''
468
https://github.com/yamins81/tabular/blob/1caf091c8c395960a9ad7078f95158b533cc52dd/tabular/utils.py#L369-L394
[ "def", "load", "(", "self", ",", "filename", ",", "offset", ")", ":", "try", ":", "self", ".", "offset", "=", "offset", "# self.fd = open(filename, 'rb')", "# self.fd.close()", "except", "IOError", "as", "e", ":", "print", "(", "e", ")" ]
Return the python representation of the class represented by this object
def as_python ( self , name : str ) -> str : if self . _map_valuetype : return self . map_as_python ( name ) else : return self . obj_as_python ( name )
469
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/parser_impl/jsg_objectexpr_parser.py#L87-L92
[ "def", "read_stats", "(", "self", ",", "*", "stats", ")", ":", "self", ".", "statistics", "=", "OrderedDict", "(", ")", "for", "port", "in", "self", ".", "ports", ":", "port_stats", "=", "IxeStatTotal", "(", "port", ")", ".", "get_attributes", "(", "FLAG_RDONLY", ",", "*", "stats", ")", "port_stats", ".", "update", "(", "{", "c", "+", "'_rate'", ":", "v", "for", "c", ",", "v", "in", "IxeStatRate", "(", "port", ")", ".", "get_attributes", "(", "FLAG_RDONLY", ",", "*", "stats", ")", ".", "items", "(", ")", "}", ")", "self", ".", "statistics", "[", "str", "(", "port", ")", "]", "=", "port_stats", "return", "self", ".", "statistics" ]
Return an ordered list of elements for the _members section
def members_entries ( self , all_are_optional : bool = False ) -> List [ Tuple [ str , str ] ] : rval = [ ] if self . _members : for member in self . _members : rval += member . members_entries ( all_are_optional ) elif self . _choices : for choice in self . _choices : rval += self . _context . reference ( choice ) . members_entries ( True ) else : return [ ] return rval
470
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/parser_impl/jsg_objectexpr_parser.py#L196-L211
[ "def", "start", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "is_running", "(", ")", ":", "self", ".", "websock_url", "=", "self", ".", "chrome", ".", "start", "(", "*", "*", "kwargs", ")", "self", ".", "websock", "=", "websocket", ".", "WebSocketApp", "(", "self", ".", "websock_url", ")", "self", ".", "websock_thread", "=", "WebsockReceiverThread", "(", "self", ".", "websock", ",", "name", "=", "'WebsockThread:%s'", "%", "self", ".", "chrome", ".", "port", ")", "self", ".", "websock_thread", ".", "start", "(", ")", "self", ".", "_wait_for", "(", "lambda", ":", "self", ".", "websock_thread", ".", "is_open", ",", "timeout", "=", "30", ")", "# tell browser to send us messages we're interested in", "self", ".", "send_to_chrome", "(", "method", "=", "'Network.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Page.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Console.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Runtime.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'ServiceWorker.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'ServiceWorker.setForceUpdateOnPageLoad'", ")", "# disable google analytics", "self", ".", "send_to_chrome", "(", "method", "=", "'Network.setBlockedURLs'", ",", "params", "=", "{", "'urls'", ":", "[", "'*google-analytics.com/analytics.js'", ",", "'*google-analytics.com/ga.js'", "]", "}", ")" ]
Optionally filters out aliases from standard doc - id list
def _get_filtered_study_ids ( shard , include_aliases = False ) : from peyotl . phylesystem . helper import DIGIT_PATTERN k = shard . get_doc_ids ( ) if shard . has_aliases and ( not include_aliases ) : x = [ ] for i in k : if DIGIT_PATTERN . match ( i ) or ( ( len ( i ) > 1 ) and ( i [ - 2 ] == '_' ) ) : pass else : x . append ( i ) return x
471
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/phylesystem/phylesystem_shard.py#L18-L29
[ "def", "set_table", "(", "self", ",", "schema", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "if", "self", ".", "has_table", "(", "str", "(", "schema", ")", ",", "*", "*", "kwargs", ")", ":", "return", "True", "try", ":", "with", "self", ".", "transaction", "(", "*", "*", "kwargs", ")", ":", "self", ".", "_set_table", "(", "schema", ",", "*", "*", "kwargs", ")", "for", "index_name", ",", "index", "in", "schema", ".", "indexes", ".", "items", "(", ")", ":", "self", ".", "set_index", "(", "schema", ",", "name", "=", "index", ".", "name", ",", "fields", "=", "index", ".", "fields", ",", "connection", "=", "connection", ",", "*", "*", "index", ".", "options", ")", "except", "InterfaceError", ":", "# check to see if this table now exists, it might have been created", "# in another thread", "if", "not", "self", ".", "has_table", "(", "schema", ",", "*", "*", "kwargs", ")", ":", "raise" ]
Return the numeric part of the newest study_id
def _determine_next_study_id ( self ) : if self . _doc_counter_lock is None : self . _doc_counter_lock = Lock ( ) prefix = self . _new_study_prefix lp = len ( prefix ) n = 0 # this function holds the lock for quite awhile, # but it only called on the first instance of # of creating a new study with self . _doc_counter_lock : with self . _index_lock : for k in self . study_index . keys ( ) : if k . startswith ( prefix ) : try : pn = int ( k [ lp : ] ) if pn > n : n = pn except : pass nsi_contents = self . _read_master_branch_resource ( self . _id_minting_file , is_json = True ) if nsi_contents : self . _next_study_id = nsi_contents [ 'next_study_id' ] if self . _next_study_id <= n : m = 'next_study_id in {} is set lower than the ID of an existing study!' m = m . format ( self . _id_minting_file ) raise RuntimeError ( m ) else : # legacy support for repo with no next_study_id.json file self . _next_study_id = n self . _advance_new_study_id ( )
472
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/phylesystem/phylesystem_shard.py#L246-L279
[ "def", "stream", "(", "self", ",", "muted", "=", "values", ".", "unset", ",", "hold", "=", "values", ".", "unset", ",", "coaching", "=", "values", ".", "unset", ",", "limit", "=", "None", ",", "page_size", "=", "None", ")", ":", "limits", "=", "self", ".", "_version", ".", "read_limits", "(", "limit", ",", "page_size", ")", "page", "=", "self", ".", "page", "(", "muted", "=", "muted", ",", "hold", "=", "hold", ",", "coaching", "=", "coaching", ",", "page_size", "=", "limits", "[", "'page_size'", "]", ",", ")", "return", "self", ".", "_version", ".", "stream", "(", "page", ",", "limits", "[", "'limit'", "]", ",", "limits", "[", "'page_limit'", "]", ")" ]
ASSUMES the caller holds the _doc_counter_lock ! Returns the current numeric part of the next study ID , advances the counter to the next value , and stores that value in the file in case the server is restarted .
def _advance_new_study_id ( self ) : c = self . _next_study_id self . _next_study_id = 1 + c content = u'{"next_study_id": %d}\n' % self . _next_study_id # The content is JSON, but we hand-rolled the string above # so that we can use it as a commit_msg self . _write_master_branch_resource ( content , self . _id_minting_file , commit_msg = content , is_json = False ) return c
473
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/phylesystem/phylesystem_shard.py#L281-L296
[ "def", "planetType", "(", "temperature", ",", "mass", ",", "radius", ")", ":", "if", "mass", "is", "not", "np", ".", "nan", ":", "sizeType", "=", "planetMassType", "(", "mass", ")", "elif", "radius", "is", "not", "np", ".", "nan", ":", "sizeType", "=", "planetRadiusType", "(", "radius", ")", "else", ":", "return", "None", "return", "'{0} {1}'", ".", "format", "(", "planetTempType", "(", "temperature", ")", ",", "sizeType", ")" ]
Return a list of all non-list items in l.
def flatten(l: Iterable) -> List:
    rval = []
    for e in l:
        if not isinstance(e, str) and isinstance(e, Iterable):
            if len(list(e)):
                rval += flatten(e)
        else:
            rval.append(e)
    return rval
474
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/parser_impl/parser_utils.py#L14-L27
[ "def", "dump", "(", "self", ")", ":", "for", "i", "in", "range", "(", "0", ",", "len", "(", "self", ".", "TA", ")", ")", ":", "if", "self", ".", "TA", "[", "i", "]", "is", "not", "None", ":", "print", "(", "\"TA%d: %x\"", "%", "(", "i", "+", "1", ",", "self", ".", "TA", "[", "i", "]", ")", ")", "if", "self", ".", "TB", "[", "i", "]", "is", "not", "None", ":", "print", "(", "\"TB%d: %x\"", "%", "(", "i", "+", "1", ",", "self", ".", "TB", "[", "i", "]", ")", ")", "if", "self", ".", "TC", "[", "i", "]", "is", "not", "None", ":", "print", "(", "\"TC%d: %x\"", "%", "(", "i", "+", "1", ",", "self", ".", "TC", "[", "i", "]", ")", ")", "if", "self", ".", "TD", "[", "i", "]", "is", "not", "None", ":", "print", "(", "\"TD%d: %x\"", "%", "(", "i", "+", "1", ",", "self", ".", "TD", "[", "i", "]", ")", ")", "print", "(", "'supported protocols '", "+", "','", ".", "join", "(", "self", ".", "getSupportedProtocols", "(", ")", ")", ")", "print", "(", "'T=0 supported: '", "+", "str", "(", "self", ".", "isT0Supported", "(", ")", ")", ")", "print", "(", "'T=1 supported: '", "+", "str", "(", "self", ".", "isT1Supported", "(", ")", ")", ")", "if", "self", ".", "getChecksum", "(", ")", ":", "print", "(", "'checksum: %d'", "%", "self", ".", "getChecksum", "(", ")", ")", "print", "(", "'\\tclock rate conversion factor: '", "+", "str", "(", "self", ".", "getClockRateConversion", "(", ")", ")", ")", "print", "(", "'\\tbit rate adjustment factor: '", "+", "str", "(", "self", ".", "getBitRateFactor", "(", ")", ")", ")", "print", "(", "'\\tmaximum programming current: '", "+", "str", "(", "self", ".", "getProgrammingCurrent", "(", ")", ")", ")", "print", "(", "'\\tprogramming voltage: '", "+", "str", "(", "self", ".", "getProgrammingVoltage", "(", ")", ")", ")", "print", "(", "'\\tguard time: '", "+", "str", "(", "self", ".", "getGuardTime", "(", ")", ")", ")", "print", "(", "'nb of interface bytes: %d'", "%", "self", ".", "getInterfaceBytesCount", "(", ")", ")", "print", "(", "'nb of historical bytes: %d'", "%", "self", ".", "getHistoricalBytesCount", "(", ")", ")" ]
Return a list of UNIQUE non-list items in l.
def flatten_unique(l: Iterable) -> List:
    rval = OrderedDict()
    for e in l:
        if not isinstance(e, str) and isinstance(e, Iterable):
            for ev in flatten_unique(e):
                rval[ev] = None
        else:
            rval[e] = None
    return list(rval.keys())
475
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/parser_impl/parser_utils.py#L30-L39
[ "def", "_fetch_dataframe", "(", "self", ")", ":", "def", "reshape", "(", "training_summary", ")", ":", "# Helper method to reshape a single training job summary into a dataframe record", "out", "=", "{", "}", "for", "k", ",", "v", "in", "training_summary", "[", "'TunedHyperParameters'", "]", ".", "items", "(", ")", ":", "# Something (bokeh?) gets confused with ints so convert to float", "try", ":", "v", "=", "float", "(", "v", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "pass", "out", "[", "k", "]", "=", "v", "out", "[", "'TrainingJobName'", "]", "=", "training_summary", "[", "'TrainingJobName'", "]", "out", "[", "'TrainingJobStatus'", "]", "=", "training_summary", "[", "'TrainingJobStatus'", "]", "out", "[", "'FinalObjectiveValue'", "]", "=", "training_summary", ".", "get", "(", "'FinalHyperParameterTuningJobObjectiveMetric'", ",", "{", "}", ")", ".", "get", "(", "'Value'", ")", "start_time", "=", "training_summary", ".", "get", "(", "'TrainingStartTime'", ",", "None", ")", "end_time", "=", "training_summary", ".", "get", "(", "'TrainingEndTime'", ",", "None", ")", "out", "[", "'TrainingStartTime'", "]", "=", "start_time", "out", "[", "'TrainingEndTime'", "]", "=", "end_time", "if", "start_time", "and", "end_time", ":", "out", "[", "'TrainingElapsedTimeSeconds'", "]", "=", "(", "end_time", "-", "start_time", ")", ".", "total_seconds", "(", ")", "return", "out", "# Run that helper over all the summaries.", "df", "=", "pd", ".", "DataFrame", "(", "[", "reshape", "(", "tjs", ")", "for", "tjs", "in", "self", ".", "training_job_summaries", "(", ")", "]", ")", "return", "df" ]
Return a stringified list of identifiers in ctx.
def as_tokens(ctx: List[ParserRuleContext]) -> List[str]:
    return [as_token(e) for e in ctx]
476
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/parser_impl/parser_utils.py#L85-L91
[ "def", "check_checksum_mismatch", "(", "self", ")", ":", "if", "self", ".", "pefile_handle", ".", "OPTIONAL_HEADER", ":", "if", "self", ".", "pefile_handle", ".", "OPTIONAL_HEADER", ".", "CheckSum", "!=", "self", ".", "pefile_handle", ".", "generate_checksum", "(", ")", ":", "return", "{", "'description'", ":", "'Reported Checksum does not match actual checksum'", ",", "'severity'", ":", "2", ",", "'category'", ":", "'MALFORMED'", "}", "return", "None" ]
Determine whether tkn is a valid Python identifier.
def is_valid_python(tkn: str) -> bool:
    try:
        root = ast.parse(tkn)
    except SyntaxError:
        return False
    return len(root.body) == 1 and \
        isinstance(root.body[0], ast.Expr) and \
        isinstance(root.body[0].value, ast.Name)
477
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/parser_impl/parser_utils.py#L94-L104
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
Remove a study. Given a study_id, a branch, and optionally an author, remove the study on the given branch and attribute the commit to the author. Returns the SHA of the commit on the branch.
def remove_study(self, first_arg, sec_arg, third_arg, fourth_arg=None, commit_msg=None):
    if fourth_arg is None:
        study_id, branch_name, author = first_arg, sec_arg, third_arg
        gh_user = branch_name.split('_study_')[0]
        parent_sha = self.get_master_sha()
    else:
        gh_user, study_id, parent_sha, author = first_arg, sec_arg, third_arg, fourth_arg
    if commit_msg is None:
        commit_msg = "Delete Study #%s via OpenTree API" % study_id
    return self._remove_document(gh_user, study_id, parent_sha, author, commit_msg)
478
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/phylesystem/git_actions.py#L108-L123
[ "def", "write_file", "(", "self", ")", ":", "name", "=", "\"kernel-{pid}.json\"", ".", "format", "(", "pid", "=", "os", ".", "getpid", "(", ")", ")", "path", "=", "os", ".", "path", ".", "join", "(", "jupyter_runtime_dir", "(", ")", ",", "name", ")", "# indentation, because why not.", "connection_json", "=", "json", ".", "dumps", "(", "self", ".", "connection_props", ",", "indent", "=", "2", ")", "with", "open", "(", "path", ",", "\"w\"", ")", "as", "connection_file", ":", "connection_file", ".", "write", "(", "connection_json", ")", "return", "path" ]
Initialize the communities file storage.
def init():
    try:
        initialize_communities_bucket()
        click.secho('Community init successful.', fg='green')
    except FilesException as e:
        click.secho(e.message, fg='red')
479
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/cli.py#L50-L56
[ "def", "_add_timeout", "(", "deferred", ",", "timeout", ")", ":", "try", ":", "deferred", ".", "addTimeout", "(", "timeout", ",", "reactor", ")", "except", "AttributeError", ":", "# Twisted 12.2 (in EL7) does not have the addTimeout API, so make do with", "# the slightly more annoying approach of scheduling a call to cancel which", "# is then canceled if the deferred succeeds before the timeout is up.", "delayed_cancel", "=", "reactor", ".", "callLater", "(", "timeout", ",", "deferred", ".", "cancel", ")", "def", "cancel_cancel_call", "(", "result", ")", ":", "\"\"\"Halt the delayed call to cancel if the deferred fires before the timeout.\"\"\"", "if", "not", "delayed_cancel", ".", "called", ":", "delayed_cancel", ".", "cancel", "(", ")", "return", "result", "deferred", ".", "addBoth", "(", "cancel_cancel_call", ")" ]
Add a logo to the community.
def addlogo(community_id, logo):
    # Create the bucket
    c = Community.get(community_id)
    if not c:
        click.secho('Community {0} does not exist.'.format(community_id), fg='red')
        return
    ext = save_and_validate_logo(logo, logo.name, c.id)
    c.logo_ext = ext
    db.session.commit()
480
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/cli.py#L63-L73
[ "def", "make_random_models_table", "(", "n_sources", ",", "param_ranges", ",", "random_state", "=", "None", ")", ":", "prng", "=", "check_random_state", "(", "random_state", ")", "sources", "=", "Table", "(", ")", "for", "param_name", ",", "(", "lower", ",", "upper", ")", "in", "param_ranges", ".", "items", "(", ")", ":", "# Generate a column for every item in param_ranges, even if it", "# is not in the model (e.g. flux). However, such columns will", "# be ignored when rendering the image.", "sources", "[", "param_name", "]", "=", "prng", ".", "uniform", "(", "lower", ",", "upper", ",", "n_sources", ")", "return", "sources" ]
Request acceptance of a record into a community.
def request(community_id, record_id, accept):
    c = Community.get(community_id)
    assert c is not None
    record = Record.get_record(record_id)
    if accept:
        c.add_record(record)
        record.commit()
    else:
        InclusionRequest.create(community=c, record=record, notify=False)
    db.session.commit()
    RecordIndexer().index_by_id(record.id)
481
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/cli.py#L81-L93
[ "def", "get_thumbnails", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# First, delete any related thumbnails.", "source_cache", "=", "self", ".", "get_source_cache", "(", ")", "if", "source_cache", ":", "thumbnail_storage_hash", "=", "utils", ".", "get_storage_hash", "(", "self", ".", "thumbnail_storage", ")", "for", "thumbnail_cache", "in", "source_cache", ".", "thumbnails", ".", "all", "(", ")", ":", "# Only iterate files which are stored using the current", "# thumbnail storage.", "if", "thumbnail_cache", ".", "storage_hash", "==", "thumbnail_storage_hash", ":", "yield", "ThumbnailFile", "(", "name", "=", "thumbnail_cache", ".", "name", ",", "storage", "=", "self", ".", "thumbnail_storage", ")" ]
Remove a record from a community.
def remove(community_id, record_id):
    c = Community.get(community_id)
    assert c is not None
    c.remove_record(record_id)
    db.session.commit()
    RecordIndexer().index_by_id(record_id)
482
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/cli.py#L100-L106
[ "def", "assignParameters", "(", "self", ",", "solution_next", ",", "IncomeDstn", ",", "LivPrb", ",", "DiscFac", ",", "CRRA", ",", "Rfree", ",", "PermGroFac", ",", "BoroCnstArt", ",", "aXtraGrid", ",", "vFuncBool", ",", "CubicBool", ")", ":", "ConsPerfForesightSolver", ".", "assignParameters", "(", "self", ",", "solution_next", ",", "DiscFac", ",", "LivPrb", ",", "CRRA", ",", "Rfree", ",", "PermGroFac", ")", "self", ".", "BoroCnstArt", "=", "BoroCnstArt", "self", ".", "IncomeDstn", "=", "IncomeDstn", "self", ".", "aXtraGrid", "=", "aXtraGrid", "self", ".", "vFuncBool", "=", "vFuncBool", "self", ".", "CubicBool", "=", "CubicBool" ]
Takes a NexSON object and returns a dict of otu_id -> otu_obj.
def gen_otu_dict(nex_obj, nexson_version=None):
    if nexson_version is None:
        nexson_version = detect_nexson_version(nex_obj)
    if _is_by_id_hbf(nexson_version):
        otus = nex_obj['nexml']['otusById']
        if len(otus) > 1:
            d = {}
            for v in otus.values():
                d.update(v['otuById'])
            return d
        else:
            # list() so this also works on Python 3, where dict views
            # are not subscriptable.
            return list(otus.values())[0]['otuById']
    o_dict = {}
    for ob in nex_obj.get('otus', []):
        for o in ob.get('otu', []):
            oid = o['@id']
            o_dict[oid] = o
    return o_dict
483
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/__init__.py#L33-L53
[ "def", "delete_blob", "(", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "# pylint: disable=unused-argument", "if", "kwargs", "is", "None", ":", "kwargs", "=", "{", "}", "if", "'container'", "not", "in", "kwargs", ":", "raise", "SaltCloudSystemExit", "(", "'A container must be specified'", ")", "if", "'blob'", "not", "in", "kwargs", ":", "raise", "SaltCloudSystemExit", "(", "'A blob must be specified'", ")", "storageservice", "=", "_get_block_blob_service", "(", "kwargs", ")", "storageservice", ".", "delete_blob", "(", "kwargs", "[", "'container'", "]", ",", "kwargs", "[", "'blob'", "]", ")", "return", "True" ]
Sets the chosen country in the session or cookie.
def set_country(request):
    if request.method == 'POST':
        next = request.POST.get('next', request.GET.get('next'))
        if is_safe_url(url=next, host=request.get_host()):
            response = http.HttpResponseRedirect(next)
        else:
            response = http.HttpResponse()
        country_code = request.POST.get('country', '').upper()
        if country_code != geo.get_supported_country(country_code):
            return http.HttpResponseBadRequest()
        if hasattr(request, 'session'):
            request.session[geo.COUNTRY_SESSION_KEY] = country_code
        else:
            response.set_cookie(geo.COUNTRY_COOKIE_NAME, country_code,
                                max_age=geo.COUNTRY_COOKIE_AGE,
                                path=geo.COUNTRY_COOKIE_PATH)
        return response
    else:
        return http.HttpResponseNotAllowed(['POST'])
484
https://github.com/color/django-country/blob/1d272a196d998e21bb8d407e2657b88211f35232/django_country/views.py#L8-L34
[ "def", "_build_index", "(", "maf_strm", ",", "ref_spec", ")", ":", "idx_strm", "=", "StringIO", ".", "StringIO", "(", ")", "bound_iter", "=", "functools", ".", "partial", "(", "genome_alignment_iterator", ",", "reference_species", "=", "ref_spec", ")", "hash_func", "=", "JustInTimeGenomeAlignmentBlock", ".", "build_hash", "idx", "=", "IndexedFile", "(", "maf_strm", ",", "bound_iter", ",", "hash_func", ")", "idx", ".", "write_index", "(", "idx_strm", ")", "idx_strm", ".", "seek", "(", "0", ")", "# seek to the start", "return", "idx_strm" ]
Return the element that tkn represents.
def reference(self, tkn: str):
    return self.grammarelts[tkn] if tkn in self.grammarelts else UndefinedElement(tkn)
485
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/parser_impl/jsg_doc_context.py#L111-L113
[ "def", "get_product_info", "(", "self", ",", "apps", "=", "[", "]", ",", "packages", "=", "[", "]", ",", "timeout", "=", "15", ")", ":", "if", "not", "apps", "and", "not", "packages", ":", "return", "message", "=", "MsgProto", "(", "EMsg", ".", "ClientPICSProductInfoRequest", ")", "for", "app", "in", "apps", ":", "app_info", "=", "message", ".", "body", ".", "apps", ".", "add", "(", ")", "app_info", ".", "only_public", "=", "False", "if", "isinstance", "(", "app", ",", "tuple", ")", ":", "app_info", ".", "appid", ",", "app_info", ".", "access_token", "=", "app", "else", ":", "app_info", ".", "appid", "=", "app", "for", "package", "in", "packages", ":", "package_info", "=", "message", ".", "body", ".", "packages", ".", "add", "(", ")", "if", "isinstance", "(", "package", ",", "tuple", ")", ":", "package_info", ".", "appid", ",", "package_info", ".", "access_token", "=", "package", "else", ":", "package_info", ".", "packageid", "=", "package", "message", ".", "body", ".", "meta_data_only", "=", "False", "job_id", "=", "self", ".", "send_job", "(", "message", ")", "data", "=", "dict", "(", "apps", "=", "{", "}", ",", "packages", "=", "{", "}", ")", "while", "True", ":", "chunk", "=", "self", ".", "wait_event", "(", "job_id", ",", "timeout", "=", "timeout", ")", "if", "chunk", "is", "None", ":", "return", "chunk", "=", "chunk", "[", "0", "]", ".", "body", "for", "app", "in", "chunk", ".", "apps", ":", "data", "[", "'apps'", "]", "[", "app", ".", "appid", "]", "=", "vdf", ".", "loads", "(", "app", ".", "buffer", "[", ":", "-", "1", "]", ".", "decode", "(", "'utf-8'", ",", "'replace'", ")", ")", "[", "'appinfo'", "]", "for", "pkg", "in", "chunk", ".", "packages", ":", "data", "[", "'packages'", "]", "[", "pkg", ".", "packageid", "]", "=", "vdf", ".", "binary_loads", "(", "pkg", ".", "buffer", "[", "4", ":", "]", ")", "[", "str", "(", "pkg", ".", "packageid", ")", "]", "if", "not", "chunk", ".", "response_pending", ":", "break", "return", "data" ]
Return a list of all of the grammarelts that tkn depends on.
def dependency_list(self, tkn: str) -> List[str]:
    if tkn not in self.dependency_map:
        self.dependency_map[tkn] = [tkn]  # Force a circular reference
        self.dependency_map[tkn] = self.reference(tkn).dependency_list()
    return self.dependency_map[tkn]
486
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/parser_impl/jsg_doc_context.py#L139-L148
[ "def", "send_http_error", "(", "self", ",", "http_code", ",", "cim_error", "=", "None", ",", "cim_error_details", "=", "None", ",", "headers", "=", "None", ")", ":", "self", ".", "send_response", "(", "http_code", ",", "http_client", ".", "responses", ".", "get", "(", "http_code", ",", "''", ")", ")", "self", ".", "send_header", "(", "\"CIMExport\"", ",", "\"MethodResponse\"", ")", "if", "cim_error", "is", "not", "None", ":", "self", ".", "send_header", "(", "\"CIMError\"", ",", "cim_error", ")", "if", "cim_error_details", "is", "not", "None", ":", "self", ".", "send_header", "(", "\"CIMErrorDetails\"", ",", "cim_error_details", ")", "if", "headers", "is", "not", "None", ":", "for", "header", ",", "value", "in", "headers", ":", "self", ".", "send_header", "(", "header", ",", "value", ")", "self", ".", "end_headers", "(", ")", "self", ".", "log", "(", "'%s: HTTP status %s; CIMError: %s, CIMErrorDetails: %s'", ",", "(", "self", ".", "_get_log_prefix", "(", ")", ",", "http_code", ",", "cim_error", ",", "cim_error_details", ")", ",", "logging", ".", "WARNING", ")" ]
Return all the items that tkn depends on, as a set.
def dependencies(self, tkn: str) -> Set[str]:
    return set(self.dependency_list(tkn))
487
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/parser_impl/jsg_doc_context.py#L150-L156
[ "def", "_is_expired_token_response", "(", "cls", ",", "response", ")", ":", "EXPIRED_MESSAGE", "=", "\"Expired oauth2 access token\"", "INVALID_MESSAGE", "=", "\"Invalid oauth2 access token\"", "if", "response", ".", "status_code", "==", "400", ":", "try", ":", "body", "=", "response", ".", "json", "(", ")", "if", "str", "(", "body", ".", "get", "(", "'error_description'", ")", ")", "in", "[", "EXPIRED_MESSAGE", ",", "INVALID_MESSAGE", "]", ":", "return", "True", "except", ":", "pass", "return", "False" ]
Return the set of tokens that are referenced but not defined.
def undefined_entries(self) -> Set[str]:
    return as_set([[d for d in self.dependencies(k) if d not in self.grammarelts]
                   for k in self.grammarelts.keys()])
488
https://github.com/hsolbrig/pyjsg/blob/9b2b8fa8e3b8448abe70b09f804a79f0f31b32b7/pyjsg/parser_impl/jsg_doc_context.py#L158-L161
[ "def", "recompress_archive", "(", "archive", ",", "verbosity", "=", "0", ",", "interactive", "=", "True", ")", ":", "util", ".", "check_existing_filename", "(", "archive", ")", "util", ".", "check_writable_filename", "(", "archive", ")", "if", "verbosity", ">=", "0", ":", "util", ".", "log_info", "(", "\"Recompressing %s ...\"", "%", "(", "archive", ",", ")", ")", "res", "=", "_recompress_archive", "(", "archive", ",", "verbosity", "=", "verbosity", ",", "interactive", "=", "interactive", ")", "if", "res", "and", "verbosity", ">=", "0", ":", "util", ".", "log_info", "(", "res", ")", "return", "0" ]
New request for inclusion.
def new_request(sender, request=None, notify=True, **kwargs):
    if current_app.config['COMMUNITIES_MAIL_ENABLED'] and notify:
        send_community_request_email(request)
489
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/receivers.py#L36-L39
[ "def", "update_consumer_offsets", "(", "self", ",", "partition_offsets", ")", ":", "self", ".", "logger", ".", "debug", "(", "\"Updating consumer offsets to: %s\"", ",", "partition_offsets", ")", "for", "partition", ",", "offset", "in", "partition_offsets", ".", "items", "(", ")", ":", "self", ".", "consumer", ".", "offsets", "[", "partition", "]", "=", "offset", "# consumer keeps other offset states beyond its `offsets` dictionary,", "# a relative seek with zero delta forces the consumer to reset to the", "# current value of the `offsets` dictionary", "self", ".", "consumer", ".", "seek", "(", "0", ",", "1", ")" ]
Inject the provisional_communities key into the ES index.
def inject_provisional_community(sender, json=None, record=None, index=None, **kwargs):
    if index and not index.startswith(current_app.config['COMMUNITIES_INDEX_PREFIX']):
        return
    json['provisional_communities'] = list(sorted(
        [r.id_community for r in InclusionRequest.get_by_record(record.id)]
    ))
490
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/receivers.py#L42-L51
[ "def", "signals_blocker", "(", "instance", ",", "attribute", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "value", "=", "None", "try", ":", "hasattr", "(", "instance", ",", "\"blockSignals\"", ")", "and", "instance", ".", "blockSignals", "(", "True", ")", "value", "=", "attribute", "(", "*", "args", ",", "*", "*", "kwargs", ")", "finally", ":", "hasattr", "(", "instance", ",", "\"blockSignals\"", ")", "and", "instance", ".", "blockSignals", "(", "False", ")", "return", "value" ]
Query on node properties. See the documentation for the _OTIWrapper class.
def find_nodes(self, query_dict=None, exact=False, verbose=False, **kwargs):
    assert self.use_v1
    return self._do_query('{p}/singlePropertySearchForTreeNodes'.format(p=self.query_prefix),
                          query_dict=query_dict,
                          exact=exact,
                          verbose=verbose,
                          valid_keys=self.node_search_term_set,
                          kwargs=kwargs)
491
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/api/oti.py#L119-L127
[ "def", "process_exception", "(", "self", ",", "request", ",", "exception", ")", ":", "gc", "=", "GithubCredentials", "(", "user", "=", "settings", ".", "EXREPORTER_GITHUB_USER", ",", "repo", "=", "settings", ".", "EXREPORTER_GITHUB_REPO", ",", "auth_token", "=", "settings", ".", "EXREPORTER_GITHUB_AUTH_TOKEN", ")", "gs", "=", "GithubStore", "(", "credentials", "=", "gc", ")", "reporter", "=", "ExReporter", "(", "store", "=", "gs", ",", "labels", "=", "settings", ".", "EXREPORTER_GITHUB_LABELS", ")", "reporter", ".", "report", "(", ")" ]
Query on tree properties. See the documentation for the _OTIWrapper class.
def find_trees(self, query_dict=None, exact=False, verbose=False, wrap_response=False, **kwargs):
    if self.use_v1:
        uri = '{p}/singlePropertySearchForTrees'.format(p=self.query_prefix)
    else:
        uri = '{p}/find_trees'.format(p=self.query_prefix)
    resp = self._do_query(uri,
                          query_dict=query_dict,
                          exact=exact,
                          verbose=verbose,
                          valid_keys=self.tree_search_term_set,
                          kwargs=kwargs)
    if wrap_response:
        return TreeRefList(resp)
    return resp
492
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/api/oti.py#L129-L143
[ "def", "process_exception", "(", "self", ",", "request", ",", "exception", ")", ":", "gc", "=", "GithubCredentials", "(", "user", "=", "settings", ".", "EXREPORTER_GITHUB_USER", ",", "repo", "=", "settings", ".", "EXREPORTER_GITHUB_REPO", ",", "auth_token", "=", "settings", ".", "EXREPORTER_GITHUB_AUTH_TOKEN", ")", "gs", "=", "GithubStore", "(", "credentials", "=", "gc", ")", "reporter", "=", "ExReporter", "(", "store", "=", "gs", ",", "labels", "=", "settings", ".", "EXREPORTER_GITHUB_LABELS", ")", "reporter", ".", "report", "(", ")" ]
Query on study properties. See the documentation for the _OTIWrapper class.
def find_studies(self, query_dict=None, exact=False, verbose=False, **kwargs):
    if self.use_v1:
        uri = '{p}/singlePropertySearchForStudies'.format(p=self.query_prefix)
    else:
        uri = '{p}/find_studies'.format(p=self.query_prefix)
    return self._do_query(uri,
                          query_dict=query_dict,
                          exact=exact,
                          verbose=verbose,
                          valid_keys=self.study_search_term_set,
                          kwargs=kwargs)
493
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/api/oti.py#L145-L156
[ "def", "expire_leaderboard_for", "(", "self", ",", "leaderboard_name", ",", "seconds", ")", ":", "pipeline", "=", "self", ".", "redis_connection", ".", "pipeline", "(", ")", "pipeline", ".", "expire", "(", "leaderboard_name", ",", "seconds", ")", "pipeline", ".", "expire", "(", "self", ".", "_member_data_key", "(", "leaderboard_name", ")", ",", "seconds", ")", "pipeline", ".", "execute", "(", ")" ]
Returns the requirements array for the package.
def get_requirements():
    packages = []
    with open("requirements.txt", "r") as req_doc:
        for package in req_doc:
            packages.append(package.replace("\n", ""))
    return packages
494
https://github.com/SpotlightData/preprocessing/blob/180c6472bc2642afbd7a1ece08d0b0d14968a708/setup.py#L5-L11
[ "def", "absent", "(", "name", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "False", ",", "'comment'", ":", "''", "}", "# Comment and change messages", "comment_bridge_deleted", "=", "'Bridge {0} deleted.'", ".", "format", "(", "name", ")", "comment_bridge_notdeleted", "=", "'Unable to delete bridge: {0}.'", ".", "format", "(", "name", ")", "comment_bridge_notexists", "=", "'Bridge {0} does not exist.'", ".", "format", "(", "name", ")", "changes_bridge_deleted", "=", "{", "name", ":", "{", "'old'", ":", "'Bridge {0} exists.'", ".", "format", "(", "name", ")", ",", "'new'", ":", "'Bridge {0} deleted.'", ".", "format", "(", "name", ")", ",", "}", "}", "bridge_exists", "=", "__salt__", "[", "'openvswitch.bridge_exists'", "]", "(", "name", ")", "# Dry run, test=true mode", "if", "__opts__", "[", "'test'", "]", ":", "if", "not", "bridge_exists", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "comment_bridge_notexists", "else", ":", "ret", "[", "'result'", "]", "=", "None", "ret", "[", "'comment'", "]", "=", "comment_bridge_deleted", "return", "ret", "if", "not", "bridge_exists", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "comment_bridge_notexists", "else", ":", "bridge_delete", "=", "__salt__", "[", "'openvswitch.bridge_delete'", "]", "(", "name", ")", "if", "bridge_delete", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "comment_bridge_deleted", "ret", "[", "'changes'", "]", "=", "changes_bridge_deleted", "else", ":", "ret", "[", "'result'", "]", "=", "False", "ret", "[", "'comment'", "]", "=", "comment_bridge_notdeleted", "return", "ret" ]
Factory function for a _TaxonomicAmendmentStore object.
def TaxonomicAmendmentStore(repos_dict=None,
                            repos_par=None,
                            with_caching=True,
                            assumed_doc_version=None,
                            git_ssh=None,
                            pkey=None,
                            git_action_class=TaxonomicAmendmentsGitAction,
                            mirror_info=None,
                            infrastructure_commit_author='OpenTree API <api@opentreeoflife.org>'):
    global _THE_TAXONOMIC_AMENDMENT_STORE
    if _THE_TAXONOMIC_AMENDMENT_STORE is None:
        _THE_TAXONOMIC_AMENDMENT_STORE = _TaxonomicAmendmentStore(
            repos_dict=repos_dict,
            repos_par=repos_par,
            with_caching=with_caching,
            assumed_doc_version=assumed_doc_version,
            git_ssh=git_ssh,
            pkey=pkey,
            git_action_class=git_action_class,
            mirror_info=mirror_info,
            infrastructure_commit_author=infrastructure_commit_author)
    return _THE_TAXONOMIC_AMENDMENT_STORE
495
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/amendments/amendments_umbrella.py#L352-L379
[ "def", "expose_event", "(", "self", ",", "widget", ",", "event", ")", ":", "x", ",", "y", ",", "width", ",", "height", "=", "event", ".", "area", "self", ".", "logger", ".", "debug", "(", "\"surface is %s\"", "%", "self", ".", "surface", ")", "if", "self", ".", "surface", "is", "not", "None", ":", "win", "=", "widget", ".", "get_window", "(", ")", "cr", "=", "win", ".", "cairo_create", "(", ")", "# set clip area for exposed region", "cr", ".", "rectangle", "(", "x", ",", "y", ",", "width", ",", "height", ")", "cr", ".", "clip", "(", ")", "# Paint from off-screen surface", "cr", ".", "set_source_surface", "(", "self", ".", "surface", ",", "0", ",", "0", ")", "cr", ".", "set_operator", "(", "cairo", ".", "OPERATOR_SOURCE", ")", "cr", ".", "paint", "(", ")", "return", "False" ]
Delete communities after the holdout time.
def delete_marked_communities():
    # TODO: Delete the community ID from all records' metadata first.
    raise NotImplementedError()
    # Unreachable until the TODO above is resolved. Note that a column
    # comparison must go through .filter(), not .filter_by(), and that
    # communities past their holdout time have delete_time in the past.
    Community.query.filter(Community.delete_time < datetime.utcnow()).delete()
    db.session.commit()
496
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/tasks.py#L38-L44
[ "def", "create_configmap", "(", "name", ",", "namespace", ",", "data", ",", "source", "=", "None", ",", "template", "=", "None", ",", "saltenv", "=", "'base'", ",", "*", "*", "kwargs", ")", ":", "if", "source", ":", "data", "=", "__read_and_render_yaml_file", "(", "source", ",", "template", ",", "saltenv", ")", "elif", "data", "is", "None", ":", "data", "=", "{", "}", "data", "=", "__enforce_only_strings_dict", "(", "data", ")", "body", "=", "kubernetes", ".", "client", ".", "V1ConfigMap", "(", "metadata", "=", "__dict_to_object_meta", "(", "name", ",", "namespace", ",", "{", "}", ")", ",", "data", "=", "data", ")", "cfg", "=", "_setup_conn", "(", "*", "*", "kwargs", ")", "try", ":", "api_instance", "=", "kubernetes", ".", "client", ".", "CoreV1Api", "(", ")", "api_response", "=", "api_instance", ".", "create_namespaced_config_map", "(", "namespace", ",", "body", ")", "return", "api_response", ".", "to_dict", "(", ")", "except", "(", "ApiException", ",", "HTTPError", ")", "as", "exc", ":", "if", "isinstance", "(", "exc", ",", "ApiException", ")", "and", "exc", ".", "status", "==", "404", ":", "return", "None", "else", ":", "log", ".", "exception", "(", "'Exception when calling '", "'CoreV1Api->create_namespaced_config_map'", ")", "raise", "CommandExecutionError", "(", "exc", ")", "finally", ":", "_cleanup", "(", "*", "*", "cfg", ")" ]
Delete expired inclusion requests.
def delete_expired_requests():
    # A column comparison must go through .filter(); .filter_by() only
    # accepts keyword equality tests. Expired requests are those whose
    # expiry_date has already passed.
    InclusionRequest.query.filter(
        InclusionRequest.expiry_date < datetime.utcnow()).delete()
    db.session.commit()
497
https://github.com/inveniosoftware/invenio-communities/blob/5c4de6783724d276ae1b6dd13a399a9e22fadc7a/invenio_communities/tasks.py#L48-L52
[ "def", "set_temperature", "(", "self", ",", "zone", ",", "temperature", ",", "until", "=", "None", ")", ":", "if", "until", "is", "None", ":", "data", "=", "{", "\"Value\"", ":", "temperature", ",", "\"Status\"", ":", "\"Hold\"", ",", "\"NextTime\"", ":", "None", "}", "else", ":", "data", "=", "{", "\"Value\"", ":", "temperature", ",", "\"Status\"", ":", "\"Temporary\"", ",", "\"NextTime\"", ":", "until", ".", "strftime", "(", "'%Y-%m-%dT%H:%M:%SZ'", ")", "}", "self", ".", "_set_heat_setpoint", "(", "zone", ",", "data", ")" ]
Sugar: a factory for a PhyloSchema object.
def create_content_spec(**kwargs):
    format_str = kwargs.get('format', 'nexson')
    nexson_version = kwargs.get('nexson_version', 'native')
    otu_label = kwargs.get('otu_label')
    if otu_label is None:
        otu_label = kwargs.get('tip_label')
    content = kwargs.get('content')
    if content is not None:
        content_id = kwargs.get('content_id')
        if content_id is None:
            content_id = _get_content_id_from(**kwargs)
    else:
        content, content_id = _sniff_content_from_kwargs(**kwargs)
    if content is None:
        content = 'study'
    return PhyloSchema(content=content,
                       content_id=content_id,
                       format_str=format_str,
                       version=nexson_version,
                       otu_label=otu_label,
                       repo_nexml2json=kwargs.get('repo_nexml2json'),
                       bracket_ingroup=bool(kwargs.get('bracket_ingroup', False)),
                       cull_nonmatching=kwargs.get('cull_nonmatching'))
498
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/nexson_syntax/__init__.py#L178-L205
[ "def", "list_slack", "(", ")", ":", "try", ":", "token", "=", "os", ".", "environ", "[", "'SLACK_TOKEN'", "]", "slack", "=", "Slacker", "(", "token", ")", "# Get channel list", "response", "=", "slack", ".", "channels", ".", "list", "(", ")", "channels", "=", "response", ".", "body", "[", "'channels'", "]", "for", "channel", "in", "channels", ":", "print", "(", "channel", "[", "'id'", "]", ",", "channel", "[", "'name'", "]", ")", "# if not channel['is_archived']:", "# slack.channels.join(channel['name'])", "print", "(", ")", "# Get users list", "response", "=", "slack", ".", "users", ".", "list", "(", ")", "users", "=", "response", ".", "body", "[", "'members'", "]", "for", "user", "in", "users", ":", "if", "not", "user", "[", "'deleted'", "]", ":", "print", "(", "user", "[", "'id'", "]", ",", "user", "[", "'name'", "]", ",", "user", "[", "'is_admin'", "]", ",", "user", "[", "'is_owner'", "]", ")", "print", "(", ")", "except", "KeyError", "as", "ex", ":", "print", "(", "'Environment variable %s not set.'", "%", "str", "(", "ex", ")", ")" ]
Takes a dict form of NexSON and converts its data structures to those needed to serialize as out_nexson_format. If current_format is not specified, it will be inferred. If remove_old_structs is False and different honeybadgerfish varieties are selected, the blob will be fat, containing both types of lookup structures. If pristine_if_invalid is False, the object may be corrupted if it is an invalid NexSON struct; setting it to False can result in faster translation, but if an exception is raised, the object may be polluted with partially constructed fields for the out_nexson_format.
def convert_nexson_format(blob,
                          out_nexson_format,
                          current_format=None,
                          remove_old_structs=True,
                          pristine_if_invalid=False,
                          sort_arbitrary=False):
    if not current_format:
        current_format = detect_nexson_version(blob)
    out_nexson_format = resolve_nexson_format(out_nexson_format)
    if current_format == out_nexson_format:
        if sort_arbitrary:
            sort_arbitrarily_ordered_nexson(blob)
        return blob
    two2zero = _is_by_id_hbf(out_nexson_format) and _is_badgerfish_version(current_format)
    zero2two = _is_by_id_hbf(current_format) and _is_badgerfish_version(out_nexson_format)
    if two2zero or zero2two:
        # Go from 0.0 -> 1.0; then the 1.0 -> 1.2 step should succeed without nexml...
        blob = convert_nexson_format(blob,
                                     DIRECT_HONEY_BADGERFISH,
                                     current_format=current_format,
                                     remove_old_structs=remove_old_structs,
                                     pristine_if_invalid=pristine_if_invalid)
        current_format = DIRECT_HONEY_BADGERFISH
    ccdict = {'output_format': out_nexson_format,
              'input_format': current_format,
              'remove_old_structs': remove_old_structs,
              'pristine_if_invalid': pristine_if_invalid}
    ccfg = ConversionConfig(ccdict)
    if _is_badgerfish_version(current_format):
        converter = Badgerfish2DirectNexson(ccfg)
    elif _is_badgerfish_version(out_nexson_format):
        assert _is_direct_hbf(current_format)
        converter = Direct2BadgerfishNexson(ccfg)
    elif _is_direct_hbf(current_format) and (out_nexson_format == BY_ID_HONEY_BADGERFISH):
        converter = Direct2OptimalNexson(ccfg)
    elif _is_direct_hbf(out_nexson_format) and (current_format == BY_ID_HONEY_BADGERFISH):
        converter = Optimal2DirectNexson(ccfg)
    else:
        raise NotImplementedError('Conversion from {i} to {o}'.format(i=current_format,
                                                                      o=out_nexson_format))
    blob = converter.convert(blob)
    if sort_arbitrary:
        sort_arbitrarily_ordered_nexson(blob)
    return blob
499
https://github.com/OpenTreeOfLife/peyotl/blob/5e4e52a0fdbd17f490aa644ad79fda6ea2eda7c0/peyotl/nexson_syntax/__init__.py#L646-L699
[ "def", "lock", "(", "self", ",", "key", ",", "timeout", "=", "0", ",", "sleep", "=", "0", ")", ":", "return", "MockRedisLock", "(", "self", ",", "key", ",", "timeout", ",", "sleep", ")" ]