Columns: idx (int64, 0 – 63k); question (string, lengths 53 – 5.28k); target (string, lengths 5 – 805)
300
def __dfs ( self , v , index , layers ) : if index == 0 : path = [ v ] while self . _dfs_parent [ v ] != v : path . append ( self . _dfs_parent [ v ] ) v = self . _dfs_parent [ v ] self . _dfs_paths . append ( path ) return True for neighbour in self . _graph [ v ] : if neighbour in layers [ index - 1 ] : if neighbour in self . _dfs_parent : continue if ( neighbour in self . _left and ( v not in self . _matching or neighbour != self . _matching [ v ] ) ) or ( neighbour in self . _right and ( v in self . _matching and neighbour == self . _matching [ v ] ) ) : self . _dfs_parent [ neighbour ] = v if self . __dfs ( neighbour , index - 1 , layers ) : return True return False
We recursively run DFS on each vertex in free_vertex.
301
def method ( self , symbol ) : assert issubclass ( symbol , SymbolBase ) def wrapped ( fn ) : setattr ( symbol , fn . __name__ , fn ) return wrapped
Symbol decorator .
302
def _simpleparsefun ( date ) : if hasattr ( date , 'year' ) : return date try : date = datetime . datetime . strptime ( date , '%Y-%m-%d' ) except ValueError : date = datetime . datetime . strptime ( date , '%Y-%m-%d %H:%M:%S' ) return date
Simple date parsing function
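A minimal usage sketch for the parser above, assuming _simpleparsefun is in scope:

import datetime
print(_simpleparsefun('2021-03-04'))               # datetime.datetime(2021, 3, 4, 0, 0)
print(_simpleparsefun('2021-03-04 05:06:07'))      # datetime.datetime(2021, 3, 4, 5, 6, 7)
print(_simpleparsefun(datetime.date(2021, 3, 4)))  # returned unchanged: it already has a 'year' attribute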
303
def _connect ( cls ) : post_save . connect ( notify_items , sender = cls , dispatch_uid = 'knocker_{0}' . format ( cls . __name__ ) )
Connect signal to current model
304
def _disconnect ( cls ) : post_save . disconnect ( notify_items , sender = cls , dispatch_uid = 'knocker_{0}' . format ( cls . __name__ ) )
Disconnect signal from current model
305
def as_knock ( self , created = False ) : knock = { } if self . should_knock ( created ) : for field , data in self . _retrieve_data ( None , self . _knocker_data ) : knock [ field ] = data return knock
Returns a dictionary with the knock data built from _knocker_data
306
def send_knock ( self , created = False ) : knock = self . as_knock ( created ) if knock : gr = Group ( 'knocker-{0}' . format ( knock [ 'language' ] ) ) gr . send ( { 'text' : json . dumps ( knock ) } )
Send the knock in the associated channels Group
307
def colorize ( printable , color , style = 'normal' , autoreset = True ) : if not COLORED : return printable if color not in COLOR_MAP : raise RuntimeError ( 'invalid color set, no {}' . format ( color ) ) return '{color}{printable}{reset}' . format ( printable = printable , color = COLOR_MAP [ color ] . format ( style = STYLE_MAP [ style ] ) , reset = COLOR_MAP [ 'reset' ] if autoreset else '' )
Colorize some message using the ANSI color specification.
308
def color ( string , status = True , warning = False , bold = True ) : attr = [ ] if status : attr . append ( '32' ) if warning : attr . append ( '31' ) if bold : attr . append ( '1' ) return '\x1b[%sm%s\x1b[0m' % ( ';' . join ( attr ) , string )
Change text color for the Linux terminal; defaults to green. Set warning=True for red.
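A minimal usage sketch, assuming the color() helper above is in scope; the strings are illustrative:

print(repr(color('ok')))                                 # '\x1b[32;1mok\x1b[0m' -- green, bold
print(repr(color('fail', status=False, warning=True)))   # '\x1b[31;1mfail\x1b[0m' -- red, bold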
309
def _patch ( ) : if not __debug__ : import warnings warnings . warn ( "A catgirl has died." , ImportWarning ) from pymongo . collection import Collection Collection . tail = tail
Patch pymongo's Collection object to add a tail method. While not necessarily recommended, you can use this to inject tail as a method into Collection, making it generally accessible.
310
def _prepare_find ( cls , * args , ** kw ) : cls , collection , query , options = cls . _prepare_query ( cls . FIND_MAPPING , cls . FIND_OPTIONS , * args , ** kw ) if 'await' in options : raise TypeError ( "Await is hard-deprecated as reserved keyword in Python 3.7, use wait instead." ) if 'cursor_type' in options and { 'tail' , 'wait' } & set ( options ) : raise TypeError ( "Can not combine cursor_type and tail/wait arguments." ) elif options . pop ( 'tail' , False ) : options [ 'cursor_type' ] = CursorType . TAILABLE_AWAIT if options . pop ( 'wait' , True ) else CursorType . TAILABLE elif 'wait' in options : raise TypeError ( "Wait option only applies to tailing cursors." ) modifiers = options . get ( 'modifiers' , dict ( ) ) if 'max_time_ms' in options : modifiers [ '$maxTimeMS' ] = options . pop ( 'max_time_ms' ) if modifiers : options [ 'modifiers' ] = modifiers return cls , collection , query , options
Execute a find and return the resulting queryset using combined plain and parametric query generation. Additionally performs argument case normalization; refer to the _prepare_query method's docstring.
311
def reload ( self , * fields , ** kw ) : Doc , collection , query , options = self . _prepare_find ( id = self . id , projection = fields , ** kw ) result = collection . find_one ( query , ** options ) if fields : for k in result : if k == ~ Doc . id : continue self . __data__ [ k ] = result [ k ] else : self . __data__ = result return self
Reload the entire document from the database or refresh specific named top - level fields .
312
def get ( cls ) : results = { } hierarchy = cls . __hierarchy hierarchy . reverse ( ) for storeMethod in hierarchy : cls . merger . merge ( results , storeMethod . get ( ) ) return results
Get values gathered from the previously set hierarchy .
313
def argv ( cls , name , short_name = None , type = None , help = None ) : cls . __hierarchy . append ( argv . Argv ( name , short_name , type , help ) )
Set command line arguments as a source
314
def env ( cls , separator = None , match = None , whitelist = None , parse_values = None , to_lower = None , convert_underscores = None ) : cls . __hierarchy . append ( env . Env ( separator , match , whitelist , parse_values , to_lower , convert_underscores ) )
Set environment variables as a source .
315
def file ( cls , path , encoding = None , parser = None ) : cls . __hierarchy . append ( file . File ( path , encoding , parser ) )
Set a file as a source .
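A minimal sketch of how the argv/env/file sources above can be layered; the class name Config and the file name and prefix are assumptions:

Config.file('defaults.json')
Config.env(match='MYAPP_')
Config.argv('port', short_name='p', type=int, help='listen port')
settings = Config.get()   # merges every registered source into a single dict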
316
def P ( Document , * fields , ** kw ) : __always__ = kw . pop ( '__always__' , set ( ) ) projected = set ( ) omitted = set ( ) for field in fields : if field [ 0 ] in ( '-' , '!' ) : omitted . add ( field [ 1 : ] ) elif field [ 0 ] == '+' : projected . add ( field [ 1 : ] ) else : projected . add ( field ) if not projected : names = set ( getattr ( Document , '__projection__' , Document . __fields__ ) or Document . __fields__ ) projected = { name for name in ( names - omitted ) } projected |= __always__ if not projected : projected = { '_id' } return { unicode ( traverse ( Document , name , name ) ) : True for name in projected }
Generate a MongoDB projection dictionary using the Django ORM style .
317
def is_valid ( self , context , sid ) : record = self . _Document . find_one ( sid , project = ( 'expires' , ) ) if not record : return return not record . _expired
Identify if the given session ID is currently valid. Return True if valid, False if explicitly invalid, None if unknown.
318
def invalidate ( self , context , sid ) : result = self . _Document . get_collection ( ) . delete_one ( { '_id' : sid } ) return result . deleted_count == 1
Immediately expire a session from the backing store .
319
def persist ( self , context ) : D = self . _Document document = context . session [ self . name ] D . get_collection ( ) . replace_one ( D . id == document . id , document , True )
Update or insert the session document into the configured collection
320
def ws_connect ( message ) : prefix , language = message [ 'path' ] . strip ( '/' ) . split ( '/' ) gr = Group ( 'knocker-{0}' . format ( language ) ) gr . add ( message . reply_channel ) message . channel_session [ 'knocker' ] = language message . reply_channel . send ( { "accept" : True } )
Channels connection setup . Register the current client on the related Group according to the language
321
def ws_disconnect ( message ) : language = message . channel_session [ 'knocker' ] gr = Group ( 'knocker-{0}' . format ( language ) ) gr . discard ( message . reply_channel )
Channels connection close . Deregister the client
322
def start ( self , autopush = True ) : if self . enabled : if autopush : self . push_message ( self . message ) self . spinner . message = ' - ' . join ( self . animation . messages ) if not self . spinner . running : self . animation . thread = threading . Thread ( target = _spinner , args = ( self . spinner , ) ) self . spinner . running = True self . animation . thread . start ( ) sys . stdout = stream . Clean ( sys . stdout , self . spinner . stream )
Start a new animation instance
323
def stop ( cls ) : if AnimatedDecorator . _enabled : if cls . spinner . running : cls . spinner . running = False cls . animation . thread . join ( ) if any ( cls . animation . messages ) : cls . pop_message ( ) sys . stdout = sys . __stdout__
Stop the thread animation gracefully and reset the message.
324
def auto_message ( self , args ) : if any ( args ) and callable ( args [ 0 ] ) and not self . message : return args [ 0 ] . __name__ elif not self . message : return self . default_message else : return self . message
Try to guess the message from the args passed.
325
def start ( self ) : self . streams . append ( sys . stdout ) sys . stdout = self . stream
Activate the TypingStream on stdout
326
def stop ( cls ) : if any ( cls . streams ) : sys . stdout = cls . streams . pop ( - 1 ) else : sys . stdout = sys . __stdout__
Restore the normal stdout after the end.
327
def prolong ( self ) : D = self . __class__ collection = self . get_collection ( ) identity = self . Lock ( ) query = D . id == self query &= D . lock . instance == identity . instance query &= D . lock . time >= ( identity . time - identity . __period__ ) previous = collection . find_one_and_update ( query , { '$set' : { ~ D . lock . time : identity . time } } , { ~ D . lock : True } ) if previous is None : lock = getattr ( self . find_one ( self , projection = { ~ D . lock : True } ) , 'lock' , None ) if lock and lock . expires <= identity . time : lock . expired ( self ) raise self . Locked ( "Unable to prolong lock." , lock ) identity . prolonged ( self ) return identity
Prolong the working duration of an already held lock . Attempting to prolong a lock not already owned will result in a Locked exception .
328
def release ( self , force = False ) : D = self . __class__ collection = self . get_collection ( ) identity = self . Lock ( ) query = D . id == self if not force : query &= D . lock . instance == identity . instance previous = collection . find_one_and_update ( query , { '$unset' : { ~ D . lock : True } } , { ~ D . lock : True } ) if previous is None : lock = getattr ( self . find_one ( self , projection = { ~ D . lock : True } ) , 'lock' , None ) raise self . Locked ( "Unable to release lock." , lock ) lock = self . Lock . from_mongo ( previous [ ~ D . lock ] ) if lock and lock . expires <= identity . time : lock . expired ( self ) identity . released ( self , force )
Release an exclusive lock on this integration task. Unless forcing, a Locked exception will be raised if we are not the current owner of the lock.
329
def write ( self , message , autoerase = True ) : super ( Animation , self ) . write ( message ) self . last_message = message if autoerase : time . sleep ( self . interval ) self . erase ( message )
Write something to stdout and erase it after a delay.
330
def write ( self , message , flush = False ) : with self . lock : self . paralell_stream . erase ( ) super ( Clean , self ) . write ( message , flush )
Write something on the default stream with a prefixed message
331
def write ( self , message , flush = True ) : if isinstance ( message , bytes ) : message = message . decode ( 'utf-8' ) for char in message : time . sleep ( self . delay * ( 4 if char == '\n' else 1 ) ) super ( Writting , self ) . write ( char , flush )
A typewriter-style write method that delays after each char (used by the Writting stream).
332
def _get_default_projection ( cls ) : projected = [ ] neutral = [ ] omitted = False for name , field in cls . __fields__ . items ( ) : if field . project is None : neutral . append ( name ) elif field . project : projected . append ( name ) else : omitted = True if not projected and not omitted : return None elif not projected and omitted : projected = neutral return { field : True for field in projected }
Construct the default projection document .
333
def adjust_attribute_sequence ( * fields ) : amount = None if fields and isinstance ( fields [ 0 ] , int ) : amount , fields = fields [ 0 ] , fields [ 1 : ] def adjust_inner ( cls ) : for field in fields : if field not in cls . __dict__ : raise TypeError ( "Can only override sequence on non-inherited attributes." ) if amount is None : cls . __dict__ [ field ] . __sequence__ = ElementMeta . sequence else : cls . __dict__ [ field ] . __sequence__ += amount cls . __attributes__ = OrderedDict ( ( k , v ) for k , v in sorted ( cls . __attributes__ . items ( ) , key = lambda i : i [ 1 ] . __sequence__ ) ) return cls return adjust_inner
Move marrow . schema fields around to control positional instantiation order .
334
def get_hashes ( path , exclude = None ) : out = { } for f in Path ( path ) . rglob ( '*' ) : if f . is_dir ( ) : continue if exclude and re . match ( exclude , f . as_posix ( ) ) : retox_log . debug ( "excluding '{}'" . format ( f . as_posix ( ) ) ) continue pytime = f . stat ( ) . st_mtime out [ f . as_posix ( ) ] = pytime return out
Get a dictionary of file paths and timestamps .
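A minimal usage sketch, assuming get_hashes() above is in scope; the exclude pattern is illustrative:

before = get_hashes('.', exclude=r'.*/\.tox/.*')
# ... run a build or test step that may touch files ...
after = get_hashes('.', exclude=r'.*/\.tox/.*')
changed = [p for p in after if after[p] != before.get(p)]
print(changed)   # paths whose modification time changed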
335
def request ( self , method , params = None , query_continue = None , files = None , auth = None , continuation = False ) : normal_params = _normalize_params ( params , query_continue ) if continuation : return self . _continuation ( method , params = normal_params , auth = auth , files = files ) else : return self . _request ( method , params = normal_params , auth = auth , files = files )
Sends an HTTP request to the API .
336
def login ( self , username , password , login_token = None ) : if login_token is None : token_doc = self . post ( action = 'query' , meta = 'tokens' , type = 'login' ) login_token = token_doc [ 'query' ] [ 'tokens' ] [ 'logintoken' ] login_doc = self . post ( action = "clientlogin" , username = username , password = password , logintoken = login_token , loginreturnurl = "http://example.org/" ) if login_doc [ 'clientlogin' ] [ 'status' ] == "UI" : raise ClientInteractionRequest . from_doc ( login_token , login_doc [ 'clientlogin' ] ) elif login_doc [ 'clientlogin' ] [ 'status' ] != 'PASS' : raise LoginError . from_doc ( login_doc [ 'clientlogin' ] ) return login_doc [ 'clientlogin' ]
Authenticate with the given credentials. If authentication is successful, all further requests sent will be signed as the authenticated user.
337
def continue_login ( self , login_token , ** params ) : login_params = { 'action' : "clientlogin" , 'logintoken' : login_token , 'logincontinue' : 1 } login_params . update ( params ) login_doc = self . post ( ** login_params ) if login_doc [ 'clientlogin' ] [ 'status' ] != 'PASS' : raise LoginError . from_doc ( login_doc [ 'clientlogin' ] ) return login_doc [ 'clientlogin' ]
Continues a login that requires an additional step. This is common when login requires completing a captcha or supplying a two-factor authentication token.
338
def get ( self , query_continue = None , auth = None , continuation = False , ** params ) : return self . request ( 'GET' , params = params , auth = auth , query_continue = query_continue , continuation = continuation )
Makes an API request with the GET method
339
def post ( self , query_continue = None , upload_file = None , auth = None , continuation = False , ** params ) : if upload_file is not None : files = { 'file' : upload_file } else : files = None return self . request ( 'POST' , params = params , auth = auth , query_continue = query_continue , files = files , continuation = continuation )
Makes an API request with the POST method
340
def promote ( self , cls , update = False , preserve = True ) : if not issubclass ( cls , self . __class__ ) : raise TypeError ( "Must promote to a subclass of " + self . __class__ . __name__ ) return self . _as ( cls , update , preserve )
Transform this record into an instance of a more specialized subclass .
341
def cut_levels ( nodes , start_level ) : final = [ ] removed = [ ] for node in nodes : if not hasattr ( node , 'level' ) : remove ( node , removed ) continue if node . attr . get ( 'soft_root' , False ) : remove ( node , removed ) continue if node . level == start_level : final . append ( node ) node . parent = None if not node . visible and not node . children : remove ( node , removed ) elif node . level == start_level + 1 : node . children = [ ] else : remove ( node , removed ) if not node . visible : keep_node = False for child in node . children : keep_node = keep_node or child . visible if not keep_node : remove ( node , removed ) for node in removed : if node in final : final . remove ( node ) return final
Cut nodes away from menus, keeping only those at the given start level.
342
def from_mongo ( cls , data , expired = False , ** kw ) : value = super ( Expires , cls ) . from_mongo ( data , ** kw ) if not expired and value . is_expired : return None return value
In the event a value that has technically already expired is loaded, swap it for None.
343
def S ( Document , * fields ) : result = [ ] for field in fields : if isinstance ( field , tuple ) : field , direction = field result . append ( ( field , direction ) ) continue direction = ASCENDING if not field . startswith ( '__' ) : field = field . replace ( '__' , '.' ) if field [ 0 ] == '-' : direction = DESCENDING if field [ 0 ] in ( '+' , '-' ) : field = field [ 1 : ] _field = traverse ( Document , field , default = None ) result . append ( ( ( ~ _field ) if _field else field , direction ) ) return result
Generate a MongoDB sort order list using the Django ORM style .
344
def run ( self ) : self . _assure_output_dir ( self . output ) companies = self . read ( ) print '%s CNPJs found' % len ( companies ) pbar = ProgressBar ( widgets = [ Counter ( ) , ' ' , Percentage ( ) , ' ' , Bar ( ) , ' ' , Timer ( ) ] , maxval = len ( companies ) ) . start ( ) resolved = 0 runner = Runner ( companies , self . days , self . token ) try : for data in runner : self . write ( data ) resolved = resolved + 1 pbar . update ( resolved ) except KeyboardInterrupt : print '\naborted: waiting current requests to finish.' runner . stop ( ) return pbar . finish ( )
Reads data from the CNPJ list and writes results to the output directory.
345
def read ( self ) : companies = [ ] with open ( self . file ) as f : reader = unicodecsv . reader ( f ) for line in reader : if len ( line ) >= 1 : cnpj = self . format ( line [ 0 ] ) if self . valid ( cnpj ) : companies . append ( cnpj ) return companies
Reads data from the CSV file .
346
def write ( self , data ) : cnpj , data = data path = os . path . join ( self . output , '%s.json' % cnpj ) with open ( path , 'w' ) as f : json . dump ( data , f , encoding = 'utf-8' )
Writes json data to the output directory .
347
def valid ( self , cnpj ) : if len ( cnpj ) != 14 : return False tam = 12 nums = cnpj [ : tam ] digs = cnpj [ tam : ] tot = 0 pos = tam - 7 for i in range ( tam , 0 , - 1 ) : tot = tot + int ( nums [ tam - i ] ) * pos pos = pos - 1 if pos < 2 : pos = 9 res = 0 if tot % 11 < 2 else 11 - ( tot % 11 ) if res != int ( digs [ 0 ] ) : return False tam = tam + 1 nums = cnpj [ : tam ] tot = 0 pos = tam - 7 for i in range ( tam , 0 , - 1 ) : tot = tot + int ( nums [ tam - i ] ) * pos pos = pos - 1 if pos < 2 : pos = 9 res = 0 if tot % 11 < 2 else 11 - ( tot % 11 ) if res != int ( digs [ 1 ] ) : return False return True
Check if a CNPJ is valid .
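A minimal check of the CNPJ validator, assuming checker is an instance of the class defining valid() above; 11222333000181 is a check-digit-valid test number:

print(checker.valid('11222333000181'))   # True
print(checker.valid('11222333000180'))   # False -- second check digit is wrong
print(checker.valid('123'))              # False -- wrong length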
348
def get_default_config_filename ( ) : global _CONFIG_FN if _CONFIG_FN is not None : return _CONFIG_FN with _CONFIG_FN_LOCK : if _CONFIG_FN is not None : return _CONFIG_FN if 'PEYOTL_CONFIG_FILE' in os . environ : cfn = os . path . abspath ( os . environ [ 'PEYOTL_CONFIG_FILE' ] ) else : cfn = os . path . expanduser ( "~/.peyotl/config" ) if not os . path . isfile ( cfn ) : if 'PEYOTL_CONFIG_FILE' in os . environ : from peyotl . utility . get_logger import warn_from_util_logger msg = 'Filepath "{}" specified via PEYOTL_CONFIG_FILE={} was not found' . format ( cfn , os . environ [ 'PEYOTL_CONFIG_FILE' ] ) warn_from_util_logger ( msg ) from pkg_resources import Requirement , resource_filename pr = Requirement . parse ( 'peyotl' ) cfn = resource_filename ( pr , 'peyotl/default.conf' ) if not os . path . isfile ( cfn ) : raise RuntimeError ( 'The peyotl configuration file cascade failed looking for "{}"' . format ( cfn ) ) _CONFIG_FN = os . path . abspath ( cfn ) return _CONFIG_FN
Returns the configuration filepath .
349
def get_raw_default_config_and_read_file_list ( ) : global _CONFIG , _READ_DEFAULT_FILES if _CONFIG is not None : return _CONFIG , _READ_DEFAULT_FILES with _CONFIG_LOCK : if _CONFIG is not None : return _CONFIG , _READ_DEFAULT_FILES try : from ConfigParser import SafeConfigParser except ImportError : from configparser import ConfigParser as SafeConfigParser cfg = SafeConfigParser ( ) read_files = cfg . read ( get_default_config_filename ( ) ) _CONFIG , _READ_DEFAULT_FILES = cfg , read_files return _CONFIG , _READ_DEFAULT_FILES
Returns a ConfigParser object and a list of filenames that were parsed to initialize it
350
def get_config_object ( ) : global _DEFAULT_CONFIG_WRAPPER if _DEFAULT_CONFIG_WRAPPER is not None : return _DEFAULT_CONFIG_WRAPPER with _DEFAULT_CONFIG_WRAPPER_LOCK : if _DEFAULT_CONFIG_WRAPPER is not None : return _DEFAULT_CONFIG_WRAPPER _DEFAULT_CONFIG_WRAPPER = ConfigWrapper ( ) return _DEFAULT_CONFIG_WRAPPER
Thread - safe accessor for the immutable default ConfigWrapper object
351
def get_from_config_setting_cascade ( self , sec_param_list , default = None , warn_on_none_level = logging . WARN ) : for section , param in sec_param_list : r = self . get_config_setting ( section , param , default = None , warn_on_none_level = None ) if r is not None : return r section , param = sec_param_list [ - 1 ] if default is None : _warn_missing_setting ( section , param , self . _config_filename , warn_on_none_level ) return default
Return the first non-None setting from a series, where each element in sec_param_list is a (section, param) pair suitable for a get_config_setting call.
352
def parse ( input_ : Union [ str , FileStream ] , source : str ) -> Optional [ str ] : error_listener = ParseErrorListener ( ) if not isinstance ( input_ , FileStream ) : input_ = InputStream ( input_ ) lexer = jsgLexer ( input_ ) lexer . addErrorListener ( error_listener ) tokens = CommonTokenStream ( lexer ) tokens . fill ( ) if error_listener . n_errors : return None parser = jsgParser ( tokens ) parser . addErrorListener ( error_listener ) parse_tree = parser . doc ( ) if error_listener . n_errors : return None parser = JSGDocParser ( ) parser . visit ( parse_tree ) if parser . undefined_tokens ( ) : for tkn in parser . undefined_tokens ( ) : print ( "Undefined token: " + tkn ) return None return parser . as_python ( source )
Parse the JSG text in input_ and return the generated Python for source, or None on parse errors.
353
def fetch ( self , request , callback = None , raise_error = True , ** kwargs ) : if isinstance ( request , str ) : request = HTTPRequest ( request , ** kwargs ) try : response = yield self . _authorized_fetch ( request , callback , raise_error = False , ** kwargs ) if response . code == BAD_TOKEN : yield self . _token_manager . reset_token ( ) elif response . error and raise_error : raise response . error else : raise gen . Return ( response ) response = yield self . _authorized_fetch ( request , callback , raise_error = raise_error , ** kwargs ) raise gen . Return ( response ) except TokenError as err : yield self . _token_manager . reset_token ( ) raise err
Executes a request via AsyncHTTPClient asynchronously, returning a tornado HTTPResponse.
354
def validate_config ( key : str , config : dict ) -> None : try : jsonschema . validate ( config , CONFIG_JSON_SCHEMA [ key ] ) except jsonschema . ValidationError as x_validation : raise JSONValidation ( 'JSON validation error on {} configuration: {}' . format ( key , x_validation . message ) ) except jsonschema . SchemaError as x_schema : raise JSONValidation ( 'JSON schema error on {} specification: {}' . format ( key , x_schema . message ) )
Call jsonschema validation to raise JSONValidation on non - compliance or silently pass .
355
def __make_id ( receiver ) : if __is_bound_method ( receiver ) : return ( id ( receiver . __func__ ) , id ( receiver . __self__ ) ) return id ( receiver )
Generate an identifier for a callable signal receiver .
356
def __purge ( ) : global __receivers newreceivers = collections . defaultdict ( list ) for signal , receivers in six . iteritems ( __receivers ) : alive = [ x for x in receivers if not __is_dead ( x ) ] newreceivers [ signal ] = alive __receivers = newreceivers
Remove all dead signal receivers from the global receivers collection .
357
def __live_receivers ( signal ) : with __lock : __purge ( ) receivers = [ funcref ( ) for funcref in __receivers [ signal ] ] return receivers
Return all signal handlers that are currently still alive for the input signal .
358
def __is_bound_method ( method ) : if not ( hasattr ( method , "__func__" ) and hasattr ( method , "__self__" ) ) : return False return six . get_method_self ( method ) is not None
Return True if the method is a bound method (attached to a class instance).
359
def disconnect ( signal , receiver ) : inputkey = __make_id ( receiver ) with __lock : __purge ( ) receivers = __receivers . get ( signal ) for idx in six . moves . range ( len ( receivers ) ) : connected = receivers [ idx ] ( ) if inputkey != __make_id ( connected ) : continue del receivers [ idx ] return True return False
Disconnect the receiver func from the signal identified by signal_id .
360
def emit ( signal , * args , ** kwargs ) : if signal not in __receivers : return receivers = __live_receivers ( signal ) for func in receivers : func ( * args , ** kwargs )
Emit a signal by serially calling each registered signal receiver for the signal .
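A minimal usage sketch of this dispatcher; a matching connect() registration helper is assumed (it is not shown in this section) and the signal name is illustrative:

def on_saved(item):
    print('saved', item)

connect('item-saved', on_saved)    # hypothetical registration call
emit('item-saved', {'id': 1})      # serially calls each live receiver
disconnect('item-saved', on_saved)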
361
def arrayuniqify ( X , retainorder = False ) : s = X . argsort ( ) X = X [ s ] D = np . append ( [ True ] , X [ 1 : ] != X [ : - 1 ] ) if retainorder : DD = np . append ( D . nonzero ( ) [ 0 ] , len ( X ) ) ind = [ min ( s [ x : DD [ i + 1 ] ] ) for ( i , x ) in enumerate ( DD [ : - 1 ] ) ] ind . sort ( ) return ind else : return [ D , s ]
Very fast uniqify routine for numpy arrays .
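A minimal usage sketch, assuming arrayuniqify() above is in scope and numpy is imported as np:

import numpy as np
X = np.array([3, 1, 3, 2])
D, s = arrayuniqify(X)
print(X[s][D])                             # [1 2 3] -- unique values in sorted order
print(arrayuniqify(X, retainorder=True))   # [0, 1, 3] -- first-occurrence indices, in order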
362
def equalspairs ( X , Y ) : T = Y . copy ( ) R = ( T [ 1 : ] != T [ : - 1 ] ) . nonzero ( ) [ 0 ] R = np . append ( R , np . array ( [ len ( T ) - 1 ] ) ) M = R [ R . searchsorted ( range ( len ( T ) ) ) ] D = T . searchsorted ( X ) T = np . append ( T , np . array ( [ 0 ] ) ) M = np . append ( M , np . array ( [ 0 ] ) ) A = ( T [ D ] == X ) * D B = ( T [ D ] == X ) * ( M [ D ] + 1 ) return [ A , B ]
Indices of elements in a sorted numpy array equal to those in another .
363
def isin ( X , Y ) : if len ( Y ) > 0 : T = Y . copy ( ) T . sort ( ) D = T . searchsorted ( X ) T = np . append ( T , np . array ( [ 0 ] ) ) W = ( T [ D ] == X ) if isinstance ( W , bool ) : return np . zeros ( ( len ( X ) , ) , bool ) else : return ( T [ D ] == X ) else : return np . zeros ( ( len ( X ) , ) , bool )
Indices of elements in a numpy array that appear in another .
364
def arraydifference ( X , Y ) : if len ( Y ) > 0 : Z = isin ( X , Y ) return X [ np . invert ( Z ) ] else : return X
Elements of a numpy array that do not appear in another .
365
def arraymax ( X , Y ) : Z = np . zeros ( ( len ( X ) , ) , int ) A = X <= Y B = Y < X Z [ A ] = Y [ A ] Z [ B ] = X [ B ] return Z
Fast vectorized max function for element - wise comparison of two numpy arrays .
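A minimal usage sketch, assuming arraymax() above is in scope:

import numpy as np
print(arraymax(np.array([1, 5, 3]), np.array([4, 2, 3])))   # [4 5 3]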
366
async def _seed2did ( self ) -> str : rv = None dids_with_meta = json . loads ( await did . list_my_dids_with_meta ( self . handle ) ) if dids_with_meta : for did_with_meta in dids_with_meta : if 'metadata' in did_with_meta : try : meta = json . loads ( did_with_meta [ 'metadata' ] ) if isinstance ( meta , dict ) and meta . get ( 'seed' , None ) == self . _seed : rv = did_with_meta . get ( 'did' ) except json . decoder . JSONDecodeError : continue if not rv : temp_wallet = await Wallet ( self . _seed , '{}.seed2did' . format ( self . name ) , None , { 'auto-remove' : True } ) . create ( ) rv = temp_wallet . did await temp_wallet . remove ( ) return rv
Derive DID as per indy - sdk from seed .
367
async def remove ( self ) -> None : LOGGER . debug ( 'Wallet.remove >>>' ) try : LOGGER . info ( 'Removing wallet: %s' , self . name ) await wallet . delete_wallet ( json . dumps ( self . cfg ) , json . dumps ( self . access_creds ) ) except IndyError as x_indy : LOGGER . info ( 'Abstaining from wallet removal; indy-sdk error code %s' , x_indy . error_code ) LOGGER . debug ( 'Wallet.remove <<<' )
Remove serialized wallet if it exists .
368
def loadSV ( fname , shape = None , titles = None , aligned = False , byteorder = None , renamer = None , ** kwargs ) : [ columns , metadata ] = loadSVcols ( fname , ** kwargs ) if 'names' in metadata . keys ( ) : names = metadata [ 'names' ] else : names = None if 'formats' in metadata . keys ( ) : formats = metadata [ 'formats' ] else : formats = None if 'dtype' in metadata . keys ( ) : dtype = metadata [ 'dtype' ] else : dtype = None if renamer is not None : print 'Trying user-given renamer ...' renamed = renamer ( names ) if len ( renamed ) == len ( uniqify ( renamed ) ) : names = renamed print else : print '... renamer failed to produce unique names, not using.' if names and len ( names ) != len ( uniqify ( names ) ) : print 'Names are not unique, reverting to default naming scheme.' names = None return [ utils . fromarrays ( columns , type = np . ndarray , dtype = dtype , shape = shape , formats = formats , names = names , titles = titles , aligned = aligned , byteorder = byteorder ) , metadata ]
Load a delimited text file to a numpy record array .
369
def loadSVrecs ( fname , uselines = None , skiprows = 0 , linefixer = None , delimiter_regex = None , verbosity = DEFAULT_VERBOSITY , ** metadata ) : if delimiter_regex and isinstance ( delimiter_regex , types . StringType ) : import re delimiter_regex = re . compile ( delimiter_regex ) [ metadata , inferedlines , WHOLETHING ] = getmetadata ( fname , skiprows = skiprows , linefixer = linefixer , delimiter_regex = delimiter_regex , verbosity = verbosity , ** metadata ) if uselines is None : uselines = ( 0 , False ) if is_string_like ( fname ) : fh = file ( fname , 'rU' ) elif hasattr ( fname , 'readline' ) : fh = fname else : raise ValueError ( 'fname must be a string or file handle' ) for _ind in range ( skiprows + uselines [ 0 ] + metadata [ 'headerlines' ] ) : fh . readline ( ) if linefixer or delimiter_regex : fh2 = tempfile . TemporaryFile ( 'w+b' ) F = fh . read ( ) . strip ( '\n' ) . split ( '\n' ) if linefixer : F = map ( linefixer , F ) if delimiter_regex : F = map ( lambda line : delimiter_regex . sub ( metadata [ 'dialect' ] . delimiter , line ) , F ) fh2 . write ( '\n' . join ( F ) ) fh2 . seek ( 0 ) fh = fh2 reader = csv . reader ( fh , dialect = metadata [ 'dialect' ] ) if uselines [ 1 ] : linelist = [ ] for ln in reader : if reader . line_num <= uselines [ 1 ] - uselines [ 0 ] : linelist . append ( ln ) else : break else : linelist = list ( reader ) fh . close ( ) if linelist [ - 1 ] == [ ] : linelist . pop ( - 1 ) return [ linelist , metadata ]
Load a separated-value text file into a list of records, where each record is a list of strings.
370
def parsetypes ( dtype ) : return [ dtype [ i ] . name . strip ( '1234567890' ) . rstrip ( 'ing' ) for i in range ( len ( dtype ) ) ]
Parse the types from a structured numpy dtype object .
371
def thresholdcoloring ( coloring , names ) : for key in coloring . keys ( ) : if len ( [ k for k in coloring [ key ] if k in names ] ) == 0 : coloring . pop ( key ) else : coloring [ key ] = utils . uniqify ( [ k for k in coloring [ key ] if k in names ] ) return coloring
Threshold a coloring dictionary for a given list of column names .
372
def makedir ( dir_name ) : if os . path . exists ( dir_name ) : delete ( dir_name ) os . mkdir ( dir_name )
Strong directory maker: delete any existing directory at dir_name, then create it fresh.
373
def pass_community ( f ) : @ wraps ( f ) def inner ( community_id , * args , ** kwargs ) : c = Community . get ( community_id ) if c is None : abort ( 404 ) return f ( c , * args , ** kwargs ) return inner
Decorator to pass community .
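A minimal usage sketch, assuming a Flask blueprint is in scope; the route and the title attribute are illustrative:

@blueprint.route('/communities/<string:community_id>/')
@pass_community
def detail(community):
    return community.title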
374
def permission_required ( action ) : def decorator ( f ) : @ wraps ( f ) def inner ( community , * args , ** kwargs ) : permission = current_permission_factory ( community , action = action ) if not permission . can ( ) : abort ( 403 ) return f ( community , * args , ** kwargs ) return inner return decorator
Decorator to require permission .
375
def format_item ( item , template , name = 'item' ) : ctx = { name : item } return render_template_to_string ( template , ** ctx )
Render a template to a string with the provided item in context .
376
def new ( ) : form = CommunityForm ( formdata = request . values ) ctx = mycommunities_ctx ( ) ctx . update ( { 'form' : form , 'is_new' : True , 'community' : None , } ) if form . validate_on_submit ( ) : data = copy . deepcopy ( form . data ) community_id = data . pop ( 'identifier' ) del data [ 'logo' ] community = Community . create ( community_id , current_user . get_id ( ) , ** data ) file = request . files . get ( 'logo' , None ) if file : if not community . save_logo ( file . stream , file . filename ) : form . logo . errors . append ( _ ( 'Cannot add this file as a logo. Supported formats: ' 'PNG, JPG and SVG. Max file size: 1.5 MB.' ) ) db . session . rollback ( ) community = None if community : db . session . commit ( ) flash ( "Community was successfully created." , category = 'success' ) return redirect ( url_for ( '.edit' , community_id = community . id ) ) return render_template ( current_app . config [ 'COMMUNITIES_NEW_TEMPLATE' ] , community_form = form , ** ctx )
Create a new community .
377
def edit ( community ) : form = EditCommunityForm ( formdata = request . values , obj = community ) deleteform = DeleteCommunityForm ( ) ctx = mycommunities_ctx ( ) ctx . update ( { 'form' : form , 'is_new' : False , 'community' : community , 'deleteform' : deleteform , } ) if form . validate_on_submit ( ) : for field , val in form . data . items ( ) : setattr ( community , field , val ) file = request . files . get ( 'logo' , None ) if file : if not community . save_logo ( file . stream , file . filename ) : form . logo . errors . append ( _ ( 'Cannot add this file as a logo. Supported formats: ' 'PNG, JPG and SVG. Max file size: 1.5 MB.' ) ) if not form . logo . errors : db . session . commit ( ) flash ( "Community successfully edited." , category = 'success' ) return redirect ( url_for ( '.edit' , community_id = community . id ) ) return render_template ( current_app . config [ 'COMMUNITIES_EDIT_TEMPLATE' ] , ** ctx )
Create or edit a community .
378
def delete ( community ) : deleteform = DeleteCommunityForm ( formdata = request . values ) ctx = mycommunities_ctx ( ) ctx . update ( { 'deleteform' : deleteform , 'is_new' : False , 'community' : community , } ) if deleteform . validate_on_submit ( ) : community . delete ( ) db . session . commit ( ) flash ( "Community was deleted." , category = 'success' ) return redirect ( url_for ( '.index' ) ) else : flash ( "Community could not be deleted." , category = 'warning' ) return redirect ( url_for ( '.edit' , community_id = community . id ) )
Delete a community .
379
def ot_find_tree ( arg_dict , exact = True , verbose = False , oti_wrapper = None ) : if oti_wrapper is None : from peyotl . sugar import oti oti_wrapper = oti return oti_wrapper . find_trees ( arg_dict , exact = exact , verbose = verbose , wrap_response = True )
Uses a peyotl wrapper around an Open Tree web service to get a list of trees including values value for a given property to be searched on porperty .
380
def is_iterable ( etype ) -> bool : return type ( etype ) is GenericMeta and issubclass ( etype . __extra__ , Iterable )
Determine whether etype is a List or other iterable
381
def main ( argv ) : import argparse description = 'Uses Open Tree of Life web services to the MRCA for a set of OTT IDs.' parser = argparse . ArgumentParser ( prog = 'ot-tree-of-life-mrca' , description = description ) parser . add_argument ( 'ottid' , nargs = '*' , type = int , help = 'OTT IDs' ) parser . add_argument ( '--subtree' , action = 'store_true' , default = False , required = False , help = 'write a newick representation of the subtree rooted at this mrca' ) parser . add_argument ( '--induced-subtree' , action = 'store_true' , default = False , required = False , help = 'write a newick representation of the topology of the requested taxa in the synthetic tree (the subtree pruned to just the queried taxa)' ) parser . add_argument ( '--details' , action = 'store_true' , default = False , required = False , help = 'report more details about the mrca node' ) args = parser . parse_args ( argv ) id_list = args . ottid if not id_list : sys . stderr . write ( 'No OTT IDs provided. Running a dummy query with 770302 770315\n' ) id_list = [ 770302 , 770315 ] fetch_and_write_mrca ( id_list , args . details , args . subtree , args . induced_subtree , sys . stdout , sys . stderr )
This function sets up a command - line option parser and then calls fetch_and_write_mrca to do all of the real work .
382
async def send_schema ( self , schema_data_json : str ) -> str : LOGGER . debug ( 'Origin.send_schema >>> schema_data_json: %s' , schema_data_json ) schema_data = json . loads ( schema_data_json ) s_key = schema_key ( schema_id ( self . did , schema_data [ 'name' ] , schema_data [ 'version' ] ) ) with SCHEMA_CACHE . lock : try : rv_json = await self . get_schema ( s_key ) LOGGER . error ( 'Schema %s version %s already exists on ledger for origin-did %s: not sending' , schema_data [ 'name' ] , schema_data [ 'version' ] , self . did ) except AbsentSchema : ( _ , schema_json ) = await anoncreds . issuer_create_schema ( self . did , schema_data [ 'name' ] , schema_data [ 'version' ] , json . dumps ( schema_data [ 'attr_names' ] ) ) req_json = await ledger . build_schema_request ( self . did , schema_json ) resp_json = await self . _sign_submit ( req_json ) resp = json . loads ( resp_json ) resp_result_txn = resp [ 'result' ] [ 'txn' ] rv_json = await self . get_schema ( schema_key ( schema_id ( resp_result_txn [ 'metadata' ] [ 'from' ] , resp_result_txn [ 'data' ] [ 'data' ] [ 'name' ] , resp_result_txn [ 'data' ] [ 'data' ] [ 'version' ] ) ) ) LOGGER . debug ( 'Origin.send_schema <<< %s' , rv_json ) return rv_json
Send schema to the ledger, then retrieve it as written to the ledger and return it. If the schema already exists on the ledger, log an error and return the schema.
383
def _locked_refresh_doc_ids ( self ) : d = { } for s in self . _shards : for k in s . doc_index . keys ( ) : if k in d : raise KeyError ( 'doc "{i}" found in multiple repos' . format ( i = k ) ) d [ k ] = s self . _doc2shard_map = d
Assumes that the caller has the _index_lock !
384
def push_doc_to_remote ( self , remote_name , doc_id = None ) : if doc_id is None : ret = True for shard in self . _shards : if not shard . push_to_remote ( remote_name ) : ret = False return ret shard = self . get_shard ( doc_id ) return shard . push_to_remote ( remote_name )
This will push the master branch to the remote named remote_name using the mirroring strategy to cut down on locking of the working repo .
385
def iter_doc_filepaths ( self , ** kwargs ) : for shard in self . _shards : for doc_id , blob in shard . iter_doc_filepaths ( ** kwargs ) : yield doc_id , blob
Generator that iterates over all detected documents and returns the filesystem path to each doc. Order is by shard, but arbitrary within shards.
386
def data ( self ) : d = super ( CommunityForm , self ) . data d . pop ( 'csrf_token' , None ) return d
Form data .
387
def validate_identifier ( self , field ) : if field . data : field . data = field . data . lower ( ) if Community . get ( field . data , with_deleted = True ) : raise validators . ValidationError ( _ ( 'The identifier already exists. ' 'Please choose a different one.' ) )
Validate field identifier .
388
def read_filepath ( filepath , encoding = 'utf-8' ) : with codecs . open ( filepath , 'r' , encoding = encoding ) as fo : return fo . read ( )
Returns the text content of filepath
389
def download ( url , encoding = 'utf-8' ) : import requests response = requests . get ( url ) response . encoding = encoding return response . text
Returns the text fetched via HTTP GET from the URL, decoded as encoding.
390
def pretty_dict_str ( d , indent = 2 ) : b = StringIO ( ) write_pretty_dict_str ( b , d , indent = indent ) return b . getvalue ( )
shows JSON indented representation of d
391
def write_pretty_dict_str ( out , obj , indent = 2 ) : json . dump ( obj , out , indent = indent , sort_keys = True , separators = ( ',' , ': ' ) , ensure_ascii = False , encoding = "utf-8" )
writes JSON indented representation of obj to out
392
def community_responsify ( schema_class , mimetype ) : def view ( data , code = 200 , headers = None , links_item_factory = None , page = None , urlkwargs = None , links_pagination_factory = None ) : if isinstance ( data , Community ) : last_modified = data . updated response_data = schema_class ( context = dict ( item_links_factory = links_item_factory ) ) . dump ( data ) . data else : last_modified = None response_data = schema_class ( context = dict ( total = data . query . count ( ) , item_links_factory = links_item_factory , page = page , urlkwargs = urlkwargs , pagination_links_factory = links_pagination_factory ) ) . dump ( data . items , many = True ) . data response = current_app . response_class ( json . dumps ( response_data , ** _format_args ( ) ) , mimetype = mimetype ) response . status_code = code if last_modified : response . last_modified = last_modified if headers is not None : response . headers . extend ( headers ) return response return view
Create a community response serializer .
393
def from_error ( exc_info , json_encoder , debug_url = None ) : exc = exc_info [ 1 ] data = exc . __dict__ . copy ( ) for key , value in data . items ( ) : try : json_encoder . encode ( value ) except TypeError : data [ key ] = repr ( value ) data [ "traceback" ] = "" . join ( traceback . format_exception ( * exc_info ) ) if debug_url is not None : data [ "debug_url" ] = debug_url return InternalError ( data )
Wraps another Exception in an InternalError .
394
def contains ( self , index : Union [ SchemaKey , int , str ] ) -> bool : LOGGER . debug ( 'SchemaCache.contains >>> index: %s' , index ) rv = None if isinstance ( index , SchemaKey ) : rv = ( index in self . _schema_key2schema ) elif isinstance ( index , int ) or ( isinstance ( index , str ) and ':2:' not in index ) : rv = ( int ( index ) in self . _seq_no2schema_key ) elif isinstance ( index , str ) : rv = ( schema_key ( index ) in self . _schema_key2schema ) else : rv = False LOGGER . debug ( 'SchemaCache.contains <<< %s' , rv ) return rv
Return whether the cache contains a schema for the input key sequence number or schema identifier .
395
def cull ( self , delta : bool ) -> None : LOGGER . debug ( 'RevoCacheEntry.cull >>> delta: %s' , delta ) rr_frames = self . rr_delta_frames if delta else self . rr_state_frames mark = 4096 ** 0.5 if len ( rr_frames ) > int ( mark * 1.25 ) : rr_frames . sort ( key = lambda x : - x . qtime ) del rr_frames [ int ( mark * 0.75 ) : ] LOGGER . info ( 'Pruned revocation cache entry %s to %s %s frames' , self . rev_reg_def [ 'id' ] , len ( rr_frames ) , 'delta' if delta else 'state' ) LOGGER . debug ( 'RevoCacheEntry.cull <<<' )
Cull cache entry frame list to size favouring most recent query time .
396
def dflt_interval ( self , cd_id : str ) -> ( int , int ) : LOGGER . debug ( 'RevocationCache.dflt_interval >>>' ) fro = None to = None for rr_id in self : if cd_id != rev_reg_id2cred_def_id ( rr_id ) : continue entry = self [ rr_id ] if entry . rr_delta_frames : to = max ( entry . rr_delta_frames , key = lambda f : f . to ) . to fro = min ( fro or to , to ) if not ( fro and to ) : LOGGER . debug ( 'RevocationCache.dflt_interval <!< No data for default non-revoc interval on cred def id %s' , cd_id ) raise CacheIndex ( 'No data for default non-revoc interval on cred def id {}' . format ( cd_id ) ) rv = ( fro , to ) LOGGER . debug ( 'RevocationCache.dflt_interval <<< %s' , rv ) return rv
Return default non - revocation interval from latest to times on delta frames of revocation cache entries on indices stemming from input cred def id .
397
def parse ( base_dir : str , timestamp : int = None ) -> int : LOGGER . debug ( 'parse >>> base_dir: %s, timestamp: %s' , base_dir , timestamp ) if not isdir ( base_dir ) : LOGGER . info ( 'No cache archives available: not feeding cache' ) LOGGER . debug ( 'parse <<< None' ) return None if not timestamp : timestamps = [ int ( t ) for t in listdir ( base_dir ) if t . isdigit ( ) ] if timestamps : timestamp = max ( timestamps ) else : LOGGER . info ( 'No cache archives available: not feeding cache' ) LOGGER . debug ( 'parse <<< None' ) return None timestamp_dir = join ( base_dir , str ( timestamp ) ) if not isdir ( timestamp_dir ) : LOGGER . error ( 'No such archived cache directory: %s' , timestamp_dir ) LOGGER . debug ( 'parse <<< None' ) return None with SCHEMA_CACHE . lock : with open ( join ( timestamp_dir , 'schema' ) , 'r' ) as archive : schemata = json . loads ( archive . read ( ) ) SCHEMA_CACHE . feed ( schemata ) with CRED_DEF_CACHE . lock : with open ( join ( timestamp_dir , 'cred_def' ) , 'r' ) as archive : cred_defs = json . loads ( archive . read ( ) ) for cd_id in cred_defs : if cd_id in CRED_DEF_CACHE : LOGGER . warning ( 'Cred def cache already has cred def on %s: skipping' , cd_id ) else : CRED_DEF_CACHE [ cd_id ] = cred_defs [ cd_id ] LOGGER . info ( 'Cred def cache imported cred def for cred def id %s' , cd_id ) with REVO_CACHE . lock : with open ( join ( timestamp_dir , 'revocation' ) , 'r' ) as archive : rr_cache_entries = json . loads ( archive . read ( ) ) for ( rr_id , entry ) in rr_cache_entries . items ( ) : if rr_id in REVO_CACHE : LOGGER . warning ( 'Revocation cache already has entry on %s: skipping' , rr_id ) else : rr_cache_entry = RevoCacheEntry ( entry [ 'rev_reg_def' ] ) rr_cache_entry . rr_delta_frames = [ RevRegUpdateFrame ( f [ '_to' ] , f [ '_timestamp' ] , f [ '_rr_update' ] ) for f in entry [ 'rr_delta_frames' ] ] rr_cache_entry . cull ( True ) rr_cache_entry . rr_state_frames = [ RevRegUpdateFrame ( f [ '_to' ] , f [ '_timestamp' ] , f [ '_rr_update' ] ) for f in entry [ 'rr_state_frames' ] ] rr_cache_entry . cull ( False ) REVO_CACHE [ rr_id ] = rr_cache_entry LOGGER . info ( 'Revocation cache imported entry for rev reg id %s' , rr_id ) LOGGER . debug ( 'parse <<< %s' , timestamp ) return timestamp
Parse and update from archived cache files . Only accept new content ; do not overwrite any existing cache content .
398
def detect_nexson_version ( blob ) : n = get_nexml_el ( blob ) assert isinstance ( n , dict ) return n . get ( '@nexml2json' , BADGER_FISH_NEXSON_VERSION )
Returns the nexml2json attribute or the default code for badgerfish
399
def _add_value_to_dict_bf ( d , k , v ) : prev = d . get ( k ) if prev is None : d [ k ] = v elif isinstance ( prev , list ) : if isinstance ( v , list ) : prev . extend ( v ) else : prev . append ( v ) else : if isinstance ( v , list ) : x = [ prev ] x . extend ( v ) d [ k ] = x else : d [ k ] = [ prev , v ]
Adds the k -> v mapping to d, but if a previous element exists it changes the value for that key to a list.
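A minimal usage sketch, assuming _add_value_to_dict_bf() above is in scope:

d = {}
_add_value_to_dict_bf(d, 'a', 1)
_add_value_to_dict_bf(d, 'a', 2)
_add_value_to_dict_bf(d, 'a', [3, 4])
print(d)   # {'a': [1, 2, 3, 4]}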