idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
1,800
def by_readings ( self , role_names = [ '' , 'Author' ] ) : if not spectator_apps . is_enabled ( 'reading' ) : raise ImproperlyConfigured ( "To use the CreatorManager.by_readings() method, 'spectator.reading' must by in INSTALLED_APPS." ) qs = self . get_queryset ( ) qs = qs . filter ( publication_roles__role_name__in ...
The Creators who have been most - read ordered by number of readings .
1,801
def by_events ( self , kind = None ) : if not spectator_apps . is_enabled ( 'events' ) : raise ImproperlyConfigured ( "To use the CreatorManager.by_events() method, 'spectator.events' must by in INSTALLED_APPS." ) qs = self . get_queryset ( ) if kind is not None : qs = qs . filter ( events__kind = kind ) qs = qs . anno...
Get the Creators involved in the most Events .
1,802
def by_works ( self , kind = None , role_name = None ) : if not spectator_apps . is_enabled ( 'events' ) : raise ImproperlyConfigured ( "To use the CreatorManager.by_works() method, 'spectator.events' must by in INSTALLED_APPS." ) qs = self . get_queryset ( ) filter_kwargs = { } if kind is not None : filter_kwargs [ 'w...
Get the Creators involved in the most Works .
1,803
def index ( ) : page = request . values . get ( 'page' , 1 , type = int ) size = request . values . get ( 'size' , 2 , type = int ) search = ExampleSearch ( ) [ ( page - 1 ) * size : page * size ] if 'q' in request . values : search = search . query ( QueryString ( query = request . values . get ( 'q' ) ) ) search = se...
Query Elasticsearch using Invenio query syntax .
1,804
def clean_options ( self , using_keytab = False , principal = None , keytab_file = None , ccache_file = None , password = None ) : cleaned = { } if using_keytab : if principal is None : raise ValueError ( 'Principal is required when using key table.' ) princ_name = gssapi . names . Name ( principal , gssapi . names . N...
Clean argument to related object
1,805
def init_with_keytab ( self ) : creds_opts = { 'usage' : 'initiate' , 'name' : self . _cleaned_options [ 'principal' ] , } store = { } if self . _cleaned_options [ 'keytab' ] != DEFAULT_KEYTAB : store [ 'client_keytab' ] = self . _cleaned_options [ 'keytab' ] if self . _cleaned_options [ 'ccache' ] != DEFAULT_CCACHE : ...
Initialize credential cache with keytab
1,806
def init_with_password ( self ) : creds_opts = { 'usage' : 'initiate' , 'name' : self . _cleaned_options [ 'principal' ] , } if self . _cleaned_options [ 'ccache' ] != DEFAULT_CCACHE : creds_opts [ 'store' ] = { 'ccache' : self . _cleaned_options [ 'ccache' ] } cred = gssapi . creds . Credentials ( ** creds_opts ) try ...
Initialize credential cache with password
1,807
def templates ( self ) : templates = { } result = [ ] if self . entry_point_group_templates : result = self . load_entry_point_group_templates ( self . entry_point_group_templates ) or [ ] for template in result : for name , path in template . items ( ) : templates [ name ] = path return templates
Generate a dictionary with template names and file paths .
1,808
def register_mappings ( self , alias , package_name ) : if ES_VERSION [ 0 ] == 2 : try : resource_listdir ( package_name , 'v2' ) package_name += '.v2' except ( OSError , IOError ) as ex : if getattr ( ex , 'errno' , 0 ) != errno . ENOENT : raise warnings . warn ( "Having mappings in a path which doesn't specify the " ...
Register mappings from a package under given alias .
1,809
def register_templates ( self , directory ) : try : resource_listdir ( directory , 'v{}' . format ( ES_VERSION [ 0 ] ) ) directory = '{}/v{}' . format ( directory , ES_VERSION [ 0 ] ) except ( OSError , IOError ) as ex : if getattr ( ex , 'errno' , 0 ) == errno . ENOENT : raise OSError ( "Please move your templates to ...
Register templates from the provided directory .
1,810
def _client_builder ( self ) : client_config = self . app . config . get ( 'SEARCH_CLIENT_CONFIG' ) or { } client_config . setdefault ( 'hosts' , self . app . config . get ( 'SEARCH_ELASTIC_HOSTS' ) ) client_config . setdefault ( 'connection_class' , RequestsHttpConnection ) return Elasticsearch ( ** client_config )
Build Elasticsearch client .
1,811
def client ( self ) : if self . _client is None : self . _client = self . _client_builder ( ) return self . _client
Return client for current application .
1,812
def flush_and_refresh ( self , index ) : self . client . indices . flush ( wait_if_ongoing = True , index = index ) self . client . indices . refresh ( index = index ) self . client . cluster . health ( wait_for_status = 'yellow' , request_timeout = 30 ) return True
Flush and refresh one or more indices .
1,813
def cluster_version ( self ) : versionstr = self . client . info ( ) [ 'version' ] [ 'number' ] return [ int ( x ) for x in versionstr . split ( '.' ) ]
Get version of Elasticsearch running on the cluster .
1,814
def active_aliases ( self ) : whitelisted_aliases = self . app . config . get ( 'SEARCH_MAPPINGS' ) if whitelisted_aliases is None : return self . aliases else : return { k : v for k , v in self . aliases . items ( ) if k in whitelisted_aliases }
Get a filtered list of aliases based on configuration .
1,815
def create ( self , ignore = None ) : ignore = ignore or [ ] def _create ( tree_or_filename , alias = None ) : for name , value in tree_or_filename . items ( ) : if isinstance ( value , dict ) : for result in _create ( value , alias = name ) : yield result else : with open ( value , 'r' ) as body : yield name , self . ...
Yield tuple with created index name and responses from a client .
1,816
def put_templates ( self , ignore = None ) : ignore = ignore or [ ] def _replace_prefix ( template_path , body ) : pattern = '__SEARCH_INDEX_PREFIX__' prefix = self . app . config [ 'SEARCH_INDEX_PREFIX' ] or '' if prefix : assert pattern in body , "You are using the prefix `{0}`, " "but the template `{1}` does not con...
Yield tuple with registered template and response from client .
1,817
def delete ( self , ignore = None ) : ignore = ignore or [ ] def _delete ( tree_or_filename , alias = None ) : if alias : yield alias , self . client . indices . delete_alias ( index = list ( _get_indices ( tree_or_filename ) ) , name = alias , ignore = ignore , ) for name , value in tree_or_filename . items ( ) : if i...
Yield tuple with deleted index name and responses from a client .
1,818
def main ( ) : try : opts , args = getopt . getopt ( sys . argv [ 1 : ] , "h:v" , [ "help" , "nack=" , "servers=" , "queues=" ] ) except getopt . GetoptError as err : print str ( err ) usage ( ) sys . exit ( ) nack = 0.0 verbose = False servers = "localhost:7712,localhost:7711" queues = "test" for o , a in opts : if o ...
Start the poor_consumer .
1,819
def connect ( self ) : self . connected_node = None for i , node in self . nodes . items ( ) : host , port = i . split ( ':' ) port = int ( port ) redis_client = redis . Redis ( host , port , ** self . client_kw_args ) try : ret = redis_client . execute_command ( 'HELLO' ) format_version , node_id = ret [ 0 ] , ret [ 1...
Connect to one of the Disque nodes .
1,820
def execute_command ( self , * args , ** kwargs ) : try : return self . get_connection ( ) . execute_command ( * args , ** kwargs ) except ConnectionError as e : logger . warn ( 'trying to reconnect' ) self . connect ( ) logger . warn ( 'connected' ) raise
Execute a command on the connected server .
1,821
def add_job ( self , queue_name , job , timeout = 200 , replicate = None , delay = None , retry = None , ttl = None , maxlen = None , asynchronous = None ) : command = [ 'ADDJOB' , queue_name , job , timeout ] if replicate : command += [ 'REPLICATE' , replicate ] if delay : command += [ 'DELAY' , delay ] if retry is no...
Add a job to a queue .
1,822
def get_job ( self , queues , timeout = None , count = None , nohang = False , withcounters = False ) : assert queues command = [ 'GETJOB' ] if nohang : command += [ 'NOHANG' ] if timeout : command += [ 'TIMEOUT' , timeout ] if count : command += [ 'COUNT' , count ] if withcounters : command += [ 'WITHCOUNTERS' ] comma...
Return some number of jobs from specified queues .
1,823
def show ( self , job_id , return_dict = False ) : rtn = self . execute_command ( 'SHOW' , job_id ) if return_dict : grouped = self . _grouper ( rtn , 2 ) rtn = dict ( ( a , b ) for a , b in grouped ) return rtn
Describe the job .
1,824
def pause ( self , queue_name , kw_in = None , kw_out = None , kw_all = None , kw_none = None , kw_state = None , kw_bcast = None ) : command = [ "PAUSE" , queue_name ] if kw_in : command += [ "in" ] if kw_out : command += [ "out" ] if kw_all : command += [ "all" ] if kw_none : command += [ "none" ] if kw_state : comma...
Pause a queue .
1,825
def qscan ( self , cursor = 0 , count = None , busyloop = None , minlen = None , maxlen = None , importrate = None ) : command = [ "QSCAN" , cursor ] if count : command += [ "COUNT" , count ] if busyloop : command += [ "BUSYLOOP" ] if minlen : command += [ "MINLEN" , minlen ] if maxlen : command += [ "MAXLEN" , maxlen ...
Iterate all the existing queues in the local node .
1,826
def jscan ( self , cursor = 0 , count = None , busyloop = None , queue = None , state = None , reply = None ) : command = [ "JSCAN" , cursor ] if count : command += [ "COUNT" , count ] if busyloop : command += [ "BUSYLOOP" ] if queue : command += [ "QUEUE" , queue ] if type ( state ) is list : for s in state : command ...
Iterate all the existing jobs in the local node .
1,827
def build_index_name ( app , * parts ) : base_index = os . path . splitext ( '-' . join ( [ part for part in parts if part ] ) ) [ 0 ] return prefix_index ( app = app , index = base_index )
Build an index name from parts .
1,828
def es_version_check ( f ) : @ wraps ( f ) def inner ( * args , ** kwargs ) : cluster_ver = current_search . cluster_version [ 0 ] client_ver = ES_VERSION [ 0 ] if cluster_ver != client_ver : raise click . ClickException ( 'Elasticsearch version mismatch. Invenio was installed with ' 'Elasticsearch v{client_ver}.x supp...
Decorator to check Elasticsearch version .
1,829
def init ( force ) : click . secho ( 'Creating indexes...' , fg = 'green' , bold = True , file = sys . stderr ) with click . progressbar ( current_search . create ( ignore = [ 400 ] if force else None ) , length = current_search . number_of_indexes ) as bar : for name , response in bar : bar . label = name click . sech...
Initialize registered aliases and mappings .
1,830
def destroy ( force ) : click . secho ( 'Destroying indexes...' , fg = 'red' , bold = True , file = sys . stderr ) with click . progressbar ( current_search . delete ( ignore = [ 400 , 404 ] if force else None ) , length = current_search . number_of_indexes ) as bar : for name , response in bar : bar . label = name
Destroy all indexes .
1,831
def delete ( index_name , force , verbose ) : result = current_search_client . indices . delete ( index = index_name , ignore = [ 400 , 404 ] if force else None , ) if verbose : click . echo ( json . dumps ( result ) )
Delete index by its name .
1,832
def put ( index_name , doc_type , identifier , body , force , verbose ) : result = current_search_client . index ( index = index_name , doc_type = doc_type or index_name , id = identifier , body = json . load ( body ) , op_type = 'index' if force or identifier is None else 'create' , ) if verbose : click . echo ( json ...
Index input data .
1,833
def get_records ( self , ids ) : return self . query ( Ids ( values = [ str ( id_ ) for id_ in ids ] ) )
Return records by their identifiers .
1,834
def faceted_search ( cls , query = None , filters = None , search = None ) : search_ = search or cls ( ) class RecordsFacetedSearch ( FacetedSearch ) : index = prefix_index ( app = current_app , index = search_ . _index [ 0 ] ) doc_types = getattr ( search_ . Meta , 'doc_types' , [ '_all' ] ) fields = getattr ( search_...
Return faceted search instance with defaults set .
1,835
def with_preference_param ( self ) : user_hash = self . _get_user_hash ( ) if user_hash : return self . params ( preference = user_hash ) return self
Add the preference param to the ES request and return a new Search .
1,836
def _get_user_agent ( self ) : user_agent = request . headers . get ( 'User-Agent' ) if user_agent : user_agent = user_agent . encode ( 'utf-8' ) return user_agent or ''
Retrieve the request's User-Agent if available.
1,837
def _get_user_hash ( self ) : if request : user_hash = '{ip}-{ua}' . format ( ip = request . remote_addr , ua = self . _get_user_agent ( ) ) alg = hashlib . md5 ( ) alg . update ( user_hash . encode ( 'utf8' ) ) return alg . hexdigest ( ) return None
Calculate a digest based on the request's User-Agent and IP address.
1,838
def beautify ( filename = None , json_str = None ) : if filename is not None : with open ( filename ) as json_file : json_str = json . load ( json_file ) return json . dumps ( json_str , indent = 4 , sort_keys = True )
Beautify JSON string or file .
1,839
def replace ( pretty , old_str , new_str ) : out_str = '' line_number = 1 changes = 0 for line in pretty . splitlines ( keepends = True ) : new_line = line . replace ( old_str , new_str ) if line . find ( old_str ) != - 1 : logging . debug ( '%s' , line_number ) logging . debug ( '< %s' , line ) logging . debug ( '> %s...
Replace strings giving some info on where the replacement was done
1,840
def receive_connection ( ) : server = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) server . setsockopt ( socket . SOL_SOCKET , socket . SO_REUSEADDR , 1 ) server . bind ( ( "localhost" , 8080 ) ) server . listen ( 1 ) client = server . accept ( ) [ 0 ] server . close ( ) return client
Wait for and then return a connected socket.
1,841
def send_message ( client , message ) : print ( message ) client . send ( "HTTP/1.1 200 OK\r\n\r\n{}" . format ( message ) . encode ( "utf-8" ) ) client . close ( )
Send message to client and close the connection .
1,842
def watch ( logger_name , level = DEBUG , out = stdout ) : watcher = Watcher ( logger_name ) watcher . watch ( level , out ) return watcher
Quick wrapper for using the Watcher .
1,843
def get_user_agent ( ) : from sys import platform , version_info template = "neobolt/{} Python/{}.{}.{}-{}-{} ({})" fields = ( version , ) + tuple ( version_info ) + ( platform , ) return template . format ( * fields )
Obtain the default user agent string sent to the server after a successful handshake .
1,844
def import_best ( c_module , py_module ) : from importlib import import_module from os import getenv pure_python = getenv ( "PURE_PYTHON" , "" ) if pure_python : return import_module ( py_module ) else : try : return import_module ( c_module ) except ImportError : return import_module ( py_module )
Import the best available module with C preferred to pure Python .
1,845
def hydrate ( self , values ) : def hydrate_ ( obj ) : if isinstance ( obj , Structure ) : try : f = self . hydration_functions [ obj . tag ] except KeyError : return obj else : return f ( * map ( hydrate_ , obj . fields ) ) elif isinstance ( obj , list ) : return list ( map ( hydrate_ , obj ) ) elif isinstance ( obj ,...
Convert PackStream values into native values .
1,846
def authorize_url ( self , duration , scopes , state , implicit = False ) : if self . redirect_uri is None : raise InvalidInvocation ( "redirect URI not provided" ) if implicit and not isinstance ( self , UntrustedAuthenticator ) : raise InvalidInvocation ( "Only UntrustedAuthentictor instances can " "use the implicit ...
Return the URL used out - of - band to grant access to your application .
1,847
def revoke_token ( self , token , token_type = None ) : data = { "token" : token } if token_type is not None : data [ "token_type_hint" ] = token_type url = self . _requestor . reddit_url + const . REVOKE_TOKEN_PATH self . _post ( url , success_status = codes [ "no_content" ] , ** data )
Ask Reddit to revoke the provided token .
1,848
def authorize ( self , code ) : if self . _authenticator . redirect_uri is None : raise InvalidInvocation ( "redirect URI not provided" ) self . _request_token ( code = code , grant_type = "authorization_code" , redirect_uri = self . _authenticator . redirect_uri , )
Obtain and set authorization tokens based on code .
1,849
def refresh ( self ) : if self . refresh_token is None : raise InvalidInvocation ( "refresh token not provided" ) self . _request_token ( grant_type = "refresh_token" , refresh_token = self . refresh_token )
Obtain a new access token from the refresh_token .
1,850
def refresh ( self ) : grant_type = "https://oauth.reddit.com/grants/installed_client" self . _request_token ( grant_type = grant_type , device_id = self . _device_id )
Obtain a new access token .
1,851
def refresh ( self ) : self . _request_token ( grant_type = "password" , username = self . _username , password = self . _password , )
Obtain a new personal - use script type access token .
1,852
def request ( self , * args , ** kwargs ) : try : return self . _http . request ( * args , timeout = TIMEOUT , ** kwargs ) except Exception as exc : raise RequestException ( exc , args , kwargs )
Issue the HTTP request capturing any errors that may occur .
1,853
def _jamo_to_hangul_char ( lead , vowel , tail = 0 ) : lead = ord ( lead ) - _JAMO_LEAD_OFFSET vowel = ord ( vowel ) - _JAMO_VOWEL_OFFSET tail = ord ( tail ) - _JAMO_TAIL_OFFSET if tail else 0 return chr ( tail + ( vowel - 1 ) * 28 + ( lead - 1 ) * 588 + _JAMO_OFFSET )
Return the Hangul character for the given jamo characters .
1,854
def _get_unicode_name ( char ) : if char not in _JAMO_TO_NAME . keys ( ) and char not in _HCJ_TO_NAME . keys ( ) : raise InvalidJamoError ( "Not jamo or nameless jamo character" , char ) else : if is_hcj ( char ) : return _HCJ_TO_NAME [ char ] return _JAMO_TO_NAME [ char ]
Fetch the unicode name for jamo characters .
1,855
def is_jamo ( character ) : code = ord ( character ) return 0x1100 <= code <= 0x11FF or 0xA960 <= code <= 0xA97C or 0xD7B0 <= code <= 0xD7C6 or 0xD7CB <= code <= 0xD7FB or is_hcj ( character )
Test if a single character is a jamo character . Valid jamo includes all modern and archaic jamo as well as all HCJ . Non - assigned code points are invalid .
1,856
def is_jamo_compound ( character ) : if len ( character ) != 1 : return False if is_jamo ( character ) : return character in JAMO_COMPOUNDS return False
Test if a single character is a compound, i.e. a consonant cluster, double consonant, or diphthong.
1,857
def get_jamo_class ( jamo ) : if jamo in JAMO_LEADS or jamo == chr ( 0x115F ) : return "lead" if jamo in JAMO_VOWELS or jamo == chr ( 0x1160 ) or 0x314F <= ord ( jamo ) <= 0x3163 : return "vowel" if jamo in JAMO_TAILS : return "tail" else : raise InvalidJamoError ( "Invalid or classless jamo argument." , jamo )
Determine if a jamo character is a lead vowel or tail . Integers and U + 11xx characters are valid arguments . HCJ consonants are not valid here .
1,858
def hangul_to_jamo ( hangul_string ) : return ( _ for _ in chain . from_iterable ( _hangul_char_to_jamo ( _ ) for _ in hangul_string ) )
Convert a string of Hangul to jamo . Arguments may be iterables of characters .
1,859
def jamo_to_hangul ( lead , vowel , tail = '' ) : lead = hcj_to_jamo ( lead , "lead" ) vowel = hcj_to_jamo ( vowel , "vowel" ) if not tail or ord ( tail ) == 0 : tail = None elif is_hcj ( tail ) : tail = hcj_to_jamo ( tail , "tail" ) if ( is_jamo ( lead ) and get_jamo_class ( lead ) == "lead" ) and ( is_jamo ( vowel ) ...
Return the Hangul character for the given jamo input . Integers corresponding to U + 11xx jamo codepoints U + 11xx jamo characters or HCJ are valid inputs .
1,860
def compose_jamo ( * parts ) : for p in parts : if not ( type ( p ) == str and len ( p ) == 1 and 2 <= len ( parts ) <= 3 ) : raise TypeError ( "compose_jamo() expected 2-3 single characters " + "but received " + str ( parts ) , '\x00' ) hcparts = [ j2hcj ( _ ) for _ in parts ] hcparts = tuple ( hcparts ) if hcparts in...
Return the compound jamo for the given jamo input . Integers corresponding to U + 11xx jamo codepoints U + 11xx jamo characters or HCJ are valid inputs .
1,861
def synth_hangul ( string ) : raise NotImplementedError return '' . join ( [ '' . join ( '' . join ( jamo_to_hcj ( _ ) ) for _ in string ) ] )
Convert jamo characters in a string into hcj as much as possible .
1,862
def authorization_error_class ( response ) : message = response . headers . get ( "www-authenticate" ) if message : error = message . replace ( '"' , "" ) . rsplit ( "=" , 1 ) [ 1 ] else : error = response . status_code return _auth_error_mapping [ error ] ( response )
Return an exception instance that maps to the OAuth Error .
1,863
def _last_bookmark ( b0 , b1 ) : n = [ None , None ] _ , _ , n [ 0 ] = b0 . rpartition ( ":" ) _ , _ , n [ 1 ] = b1 . rpartition ( ":" ) for i in range ( 2 ) : try : n [ i ] = int ( n [ i ] ) except ValueError : raise ValueError ( "Invalid bookmark: {}" . format ( b0 ) ) return b0 if n [ 0 ] > n [ 1 ] else b1
Return the latest of two bookmarks by looking for the maximum integer value following the last colon in the bookmark string .
1,864
def connect ( address , ** config ) : ssl_context = make_ssl_context ( ** config ) last_error = None log_debug ( "[#0000] C: <RESOLVE> %s" , address ) resolver = Resolver ( custom_resolver = config . get ( "resolver" ) ) resolver . addresses . append ( address ) resolver . custom_resolve ( ) resolver . dns_resolve ( )...
Connect and perform a handshake and return a valid Connection object assuming a protocol version can be agreed .
1,865
def _append ( self , signature , fields = ( ) , response = None ) : self . packer . pack_struct ( signature , fields ) self . output_buffer . chunk ( ) self . output_buffer . chunk ( ) self . responses . append ( response )
Add a message to the outgoing queue .
1,866
def reset ( self ) : def fail ( metadata ) : raise ProtocolError ( "RESET failed %r" % metadata ) log_debug ( "[#%04X] C: RESET" , self . local_port ) self . _append ( b"\x0F" , response = Response ( self , on_failure = fail ) ) self . sync ( )
Add a RESET message to the outgoing queue send it and consume all remaining messages .
1,867
def _send ( self ) : data = self . output_buffer . view ( ) if not data : return if self . closed ( ) : raise self . Error ( "Failed to write to closed connection {!r}" . format ( self . server . address ) ) if self . defunct ( ) : raise self . Error ( "Failed to write to defunct connection {!r}" . format ( self . serv...
Send all queued messages to the server .
1,868
def _fetch ( self ) : if self . closed ( ) : raise self . Error ( "Failed to read from closed connection {!r}" . format ( self . server . address ) ) if self . defunct ( ) : raise self . Error ( "Failed to read from defunct connection {!r}" . format ( self . server . address ) ) if not self . responses : return 0 , 0 s...
Receive at least one message from the server if available .
1,869
def sync ( self ) : self . send ( ) detail_count = summary_count = 0 while self . responses : response = self . responses [ 0 ] while not response . complete : detail_delta , summary_delta = self . fetch ( ) detail_count += detail_delta summary_count += summary_delta return detail_count , summary_count
Send and fetch all outstanding messages .
1,870
def acquire_direct ( self , address ) : if self . closed ( ) : raise ServiceUnavailable ( "Connection pool closed" ) with self . lock : try : connections = self . connections [ address ] except KeyError : connections = self . connections [ address ] = deque ( ) connection_acquisition_start_timestamp = perf_counter ( ) ...
Acquire a connection to a given address from the pool . The address supplied should always be an IP address not a host name .
1,871
def release ( self , connection ) : with self . lock : connection . in_use = False self . cond . notify_all ( )
Release a connection back into the pool . This method is thread safe .
1,872
def in_use_connection_count ( self , address ) : try : connections = self . connections [ address ] except KeyError : return 0 else : return sum ( 1 if connection . in_use else 0 for connection in connections )
Count the number of connections currently in use to a given address .
1,873
def deactivate ( self , address ) : with self . lock : try : connections = self . connections [ address ] except KeyError : return for conn in list ( connections ) : if not conn . in_use : connections . remove ( conn ) try : conn . close ( ) except IOError : pass if not connections : self . remove ( address )
Deactivate an address from the connection pool, if present, closing all idle connections to that address.
1,874
def remove ( self , address ) : with self . lock : for connection in self . connections . pop ( address , ( ) ) : try : connection . close ( ) except IOError : pass
Remove an address from the connection pool if present closing all connections to that address .
1,875
def close ( self ) : if self . _closed : return try : with self . lock : if not self . _closed : self . _closed = True for address in list ( self . connections ) : self . remove ( address ) except TypeError as e : pass
Close all connections and empty the pool . This method is thread safe .
1,876
def on_records ( self , records ) : handler = self . handlers . get ( "on_records" ) if callable ( handler ) : handler ( records )
Called when one or more RECORD messages have been received .
1,877
def on_success ( self , metadata ) : handler = self . handlers . get ( "on_success" ) if callable ( handler ) : handler ( metadata ) handler = self . handlers . get ( "on_summary" ) if callable ( handler ) : handler ( )
Called when a SUCCESS message has been received .
1,878
def on_failure ( self , metadata ) : self . connection . reset ( ) handler = self . handlers . get ( "on_failure" ) if callable ( handler ) : handler ( metadata ) handler = self . handlers . get ( "on_summary" ) if callable ( handler ) : handler ( ) raise CypherError . hydrate ( ** metadata )
Called when a FAILURE message has been received .
1,879
def on_ignored ( self , metadata = None ) : handler = self . handlers . get ( "on_ignored" ) if callable ( handler ) : handler ( metadata ) handler = self . handlers . get ( "on_summary" ) if callable ( handler ) : handler ( )
Called when an IGNORED message has been received .
1,880
def cached_property ( prop ) : def cache_wrapper ( self ) : if not hasattr ( self , "_cache" ) : self . _cache = { } if prop . __name__ not in self . _cache : return_value = prop ( self ) if isgenerator ( return_value ) : return_value = tuple ( return_value ) self . _cache [ prop . __name__ ] = return_value return self...
A replacement for the property decorator that will only compute the attribute's value on the first call and serve a cached copy from then on.
1,881
def _convert_value_to_native ( value ) : if isinstance ( value , Counter32 ) : return int ( value . prettyPrint ( ) ) if isinstance ( value , Counter64 ) : return int ( value . prettyPrint ( ) ) if isinstance ( value , Gauge32 ) : return int ( value . prettyPrint ( ) ) if isinstance ( value , Integer ) : return int ( v...
Converts pysnmp objects into native Python objects .
1,882
def get ( self , oid ) : snmpsecurity = self . _get_snmp_security ( ) try : engine_error , pdu_error , pdu_error_index , objects = self . _cmdgen . getCmd ( snmpsecurity , cmdgen . UdpTransportTarget ( ( self . host , self . port ) , timeout = self . timeout , retries = self . retries ) , oid , ) except Exception as e ...
Get a single OID value .
1,883
def table ( self , oid , columns = None , column_value_mapping = None , non_repeaters = 0 , max_repetitions = 20 , fetch_all_columns = True ) : snmpsecurity = self . _get_snmp_security ( ) base_oid = oid . strip ( "." ) if not fetch_all_columns and not columns : raise ValueError ( "please use the columns argument to " ...
Get a table of values with the given OID prefix .
1,884
def get_parser ( ) : desc = Colors . LIGHTBLUE + textwrap . dedent ( ) + Colors . ENDC usage_info = Colors . LGREEN + textwrap . dedent ( ) + Colors . ENDC epi = Colors . LIGHTPURPLE + textwrap . dedent ( ) + Colors . ENDC parser = argparse . ArgumentParser ( add_help = True , formatter_class = argparse . RawTextHelpFo...
Load parser for command line arguments .
1,885
def insert ( args ) : string_search = args . str_search mode_search = MODES [ args . mode ] page = list ( TORRENTS [ args . torr_page ] . keys ( ) ) [ 0 ] key_search = TORRENTS [ args . torr_page ] [ page ] [ 'key_search' ] torrent_page = TORRENTS [ args . torr_page ] [ page ] [ 'page' ] domain = TORRENTS [ args . torr...
Insert args values into instance variables .
1,886
def run_it ( ) : initialize ( ) parser = get_parser ( ) args = None first_parse = True while ( True ) : if first_parse is True : first_parse = False args = parser . parse_args ( ) else : print ( textwrap . dedent ( ) ) print ( 'Or.. if you want to exit just write "' + Colors . LRED + 'Q' + Colors . ENDC + '" or "' + Co...
Search and download torrents until the user says it so .
1,887
def open_magnet ( self ) : if sys . platform . startswith ( 'linux' ) : subprocess . Popen ( [ 'xdg-open' , self . magnet ] , stdout = subprocess . PIPE , stderr = subprocess . PIPE ) elif sys . platform . startswith ( 'win32' ) : os . startfile ( self . magnet ) elif sys . platform . startswith ( 'cygwin' ) : os . sta...
Open magnet according to os .
1,888
def get_magnet ( self , url ) : content_most_rated = requests . get ( url ) rated_soup = BeautifulSoup ( content_most_rated . content , 'lxml' ) if self . page == 'torrent_project' : self . magnet = rated_soup . find ( 'a' , href = True , text = re . compile ( 'Download' ) ) [ 'href' ] elif self . page == 'the_pirate_b...
Get the magnet link from the torrent page. The URL already includes the domain.
1,889
def download_torrent ( self ) : try : if self . back_to_menu is True : return if self . found_torrents is False : print ( 'Nothing found.' ) return if self . mode_search == 'best_rated' : print ( 'Downloading..' ) self . open_magnet ( ) elif self . mode_search == 'list' : if self . selected is not None : if self . page...
Download torrent .
1,890
def handle_select ( self ) : self . selected = input ( '>> ' ) if self . selected in [ 'Q' , 'q' ] : sys . exit ( 1 ) elif self . selected in [ 'B' , 'b' ] : self . back_to_menu = True return True elif is_num ( self . selected ) : if 0 <= int ( self . selected ) <= len ( self . hrefs ) - 1 : self . back_to_menu = False...
Handle the user's input in list mode.
1,891
def select_torrent ( self ) : try : self . found_torrents = not bool ( self . key_search in self . content_page . text ) if not self . found_torrents : print ( 'No torrents found.' ) sys . exit ( 1 ) self . soupify ( ) if self . mode_search == 'list' : self . build_table ( ) if len ( self . hrefs ) == 1 : print ( 'Pres...
Select torrent .
1,892
def build_url ( self ) : url = requests . utils . requote_uri ( self . torrent_page + self . string_search ) if self . page == '1337x' : return ( url + '/1/' ) elif self . page == 'limetorrents' : return ( url + '/' ) else : return ( url )
Build an appropriately encoded URL.
1,893
def get_content ( self ) : url = self . build_url ( ) try : self . content_page = requests . get ( url ) if not ( self . content_page . status_code == requests . codes . ok ) : self . content_page . raise_for_status ( ) except requests . exceptions . RequestException as ex : logging . info ( 'A requests exception has o...
Get content of the page through url .
1,894
def _recycle ( self ) : origin = self . _origin if origin == 0 : return False available = self . _extent - origin self . _data [ : available ] = self . _data [ origin : self . _extent ] self . _extent = available self . _origin = 0 return True
Reclaim buffer space before the origin .
1,895
def frame_message ( self ) : if self . _frame is not None : self . discard_message ( ) panes = [ ] p = origin = self . _origin extent = self . _extent while p < extent : available = extent - p if available < 2 : break chunk_size , = struct_unpack ( ">H" , self . _view [ p : ( p + 2 ) ] ) p += 2 if chunk_size == 0 : sel...
Construct a frame around the first complete message in the buffer .
1,896
def call ( self , request_function , set_header_callback , * args , ** kwargs ) : self . delay ( ) kwargs [ "headers" ] = set_header_callback ( ) response = request_function ( * args , ** kwargs ) self . update ( response . headers ) return response
Rate limit the call to request_function .
1,897
def delay ( self ) : if self . next_request_timestamp is None : return sleep_seconds = self . next_request_timestamp - time . time ( ) if sleep_seconds <= 0 : return message = "Sleeping: {:0.2f} seconds prior to" " call" . format ( sleep_seconds ) log . debug ( message ) time . sleep ( sleep_seconds )
Sleep for an amount of time to remain under the rate limit .
1,898
def update ( self , response_headers ) : if "x-ratelimit-remaining" not in response_headers : if self . remaining is not None : self . remaining -= 1 self . used += 1 return now = time . time ( ) prev_remaining = self . remaining seconds_to_reset = int ( response_headers [ "x-ratelimit-reset" ] ) self . remaining = flo...
Update the state of the rate limiter based on the response headers .
1,899
def custom_resolve ( self ) : if not callable ( self . custom_resolver ) : return new_addresses = [ ] for address in self . addresses : for new_address in self . custom_resolver ( address ) : new_addresses . append ( new_address ) self . addresses = new_addresses
If a custom resolver is defined perform custom resolution on the contained addresses .