idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
1,900
def dns_resolve ( self ) : new_addresses = [ ] for address in self . addresses : try : info = getaddrinfo ( address [ 0 ] , address [ 1 ] , 0 , SOCK_STREAM , IPPROTO_TCP ) except gaierror : raise AddressError ( "Cannot resolve address {!r}" . format ( address ) ) else : for _ , _ , _ , _ , address in info : if len ( ad...
Perform DNS resolution on the contained addresses .
1,901
def matching_line(lines, keyword):
    """Return the first match produced by ``match(line, keyword)`` over
    *lines*, or None if no line matches.

    :param lines: iterable of lines to scan
    :param keyword: keyword forwarded to the module-level ``match`` helper
    """
    for line in lines:
        matching = match(line, keyword)
        # PEP 8: compare against None with `is not`, never `!=`.
        if matching is not None:
            return matching
    return None
Returns the first matching line in a list of lines .
1,902
def request ( self , method , path , data = None , files = None , json = None , params = None ) : params = deepcopy ( params ) or { } params [ "raw_json" ] = 1 if isinstance ( data , dict ) : data = deepcopy ( data ) data [ "api_type" ] = "json" data = sorted ( data . items ( ) ) url = urljoin ( self . _requestor . oau...
Return the json content from the resource at path .
1,903
def request ( self , method , url , params = None , ** kwargs ) : params_key = tuple ( params . items ( ) ) if params else ( ) if method . upper ( ) == "GET" : if ( url , params_key ) in self . get_cache : print ( "Returning cached response for:" , method , url , params ) return self . get_cache [ ( url , params_key ) ...
Perform a request or return a cached response if available .
1,904
def parse_routing_info ( cls , records ) : if len ( records ) != 1 : raise RoutingProtocolError ( "Expected exactly one record" ) record = records [ 0 ] routers = [ ] readers = [ ] writers = [ ] try : servers = record [ "servers" ] for server in servers : role = server [ "role" ] addresses = [ ] for address in server [...
Parse the records returned from a getServers call and return a new RoutingTable instance .
1,905
def is_fresh ( self , access_mode ) : log_debug ( "[#0000] C: <ROUTING> Checking table freshness for %r" , access_mode ) expired = self . last_updated_time + self . ttl <= self . timer ( ) has_server_for_mode = bool ( access_mode == READ_ACCESS and self . readers ) or bool ( access_mode == WRITE_ACCESS and self . writ...
Indicator for whether routing information is still usable .
1,906
def update ( self , new_routing_table ) : self . routers . replace ( new_routing_table . routers ) self . readers . replace ( new_routing_table . readers ) self . writers . replace ( new_routing_table . writers ) self . last_updated_time = self . timer ( ) self . ttl = new_routing_table . ttl log_debug ( "[#0000] S: <...
Update the current routing table with new routing information from a replacement table .
1,907
def fetch_routing_info ( self , address ) : metadata = { } records = [ ] def fail ( md ) : if md . get ( "code" ) == "Neo.ClientError.Procedure.ProcedureNotFound" : raise RoutingProtocolError ( "Server {!r} does not support routing" . format ( address ) ) else : raise RoutingProtocolError ( "Routing support broken on s...
Fetch raw routing info from a given router address .
1,908
def fetch_routing_table ( self , address ) : new_routing_info = self . fetch_routing_info ( address ) if new_routing_info is None : return None new_routing_table = RoutingTable . parse_routing_info ( new_routing_info ) num_routers = len ( new_routing_table . routers ) num_readers = len ( new_routing_table . readers ) n...
Fetch a routing table from a given router address .
1,909
def update_routing_table_from(self, *routers):
    """Try each router in turn until one yields a routing table.

    The first table obtained is applied to ``self.routing_table``.
    Returns True on success, False if no router could provide one.
    """
    for candidate in routers:
        table = self.fetch_routing_table(candidate)
        if table is None:
            continue
        self.routing_table.update(table)
        return True
    return False
Try to update routing tables with the given routers .
1,910
def update_routing_table ( self ) : existing_routers = list ( self . routing_table . routers ) has_tried_initial_routers = False if self . missing_writer : has_tried_initial_routers = True if self . update_routing_table_from ( self . initial_address ) : return if self . update_routing_table_from ( * existing_routers ) ...
Update the routing table from the first router able to provide valid routing information .
1,911
def ensure_routing_table_is_fresh ( self , access_mode ) : if self . routing_table . is_fresh ( access_mode ) : return False with self . refresh_lock : if self . routing_table . is_fresh ( access_mode ) : if access_mode == READ_ACCESS : self . missing_writer = not self . routing_table . is_fresh ( WRITE_ACCESS ) return...
Update the routing table if stale .
1,912
def deactivate ( self , address ) : log_debug ( "[#0000] C: <ROUTING> Deactivating address %r" , address ) self . routing_table . routers . discard ( address ) self . routing_table . readers . discard ( address ) self . routing_table . writers . discard ( address ) log_debug ( "[#0000] C: <ROUTING> table=%r" , self ....
Deactivate an address: if present, remove it from the routing table and from the connection pool, and close all idle connections to that address.
1,913
def remove_writer(self, address):
    """Remove a writer address from the routing table, if present.

    :param address: the server address to stop using as a writer
    """
    log_debug("[#0000] C: <ROUTING> Removing writer %r", address)
    # set-style discard: no error when the address is not currently a writer
    self.routing_table.writers.discard(address)
    log_debug("[#0000] C: <ROUTING> table=%r", self.routing_table)
Remove a writer address from the routing table if present .
1,914
def handle ( self , error , connection ) : error_class = error . __class__ if error_class in ( ConnectionExpired , ServiceUnavailable , DatabaseUnavailableError ) : self . deactivate ( connection . address ) elif error_class in ( NotALeaderError , ForbiddenOnReadOnlyDatabaseError ) : self . remove_writer ( connection ....
Handle any cleanup or similar activity related to an error occurring on a pooled connection .
1,915
def point_type ( name , fields , srid_map ) : def srid ( self ) : try : return srid_map [ len ( self ) ] except KeyError : return None attributes = { "srid" : property ( srid ) } for index , subclass_field in enumerate ( fields ) : def accessor ( self , i = index , f = subclass_field ) : try : return self [ i ] except ...
Dynamically create a Point subclass .
1,916
def main ( ) : args = parse_args ( ) configure_logging ( args . debug ) src_path = args . src_path dest_path = args . dest_path old_str1 = '\\"size\\":' + args . old_size old_str2 = '\\"size\\": ' + args . old_size new_str = '\\"size\\":' + args . new_size logging . info ( 'Input path: %s' , src_path ) logging . info (...
Read a directory containing JSON files for Kibana panels, beautify them, and replace the size value in aggregations as specified through the corresponding params.
1,917
def signal_handler(signal_name, frame):
    """Quit signal handler: flush pending output and exit with status 0."""
    out = sys.stdout
    out.flush()
    out.write("\nSIGINT in frame signal received. Quitting...\n")
    out.flush()
    sys.exit(0)
Quit signal handler .
1,918
def graph_format ( new_mem , old_mem , is_firstiteration = True ) : if is_firstiteration : output = " n/a " elif new_mem - old_mem > 50000000 : output = " +++++" elif new_mem - old_mem > 20000000 : output = " ++++ " elif new_mem - old_mem > 5000000 : output = " +++ " elif new_mem - old_mem > 1000000 : output ...
Show changes graphically in memory consumption
1,919
def get_cur_mem_use ( ) : lines = open ( "/proc/meminfo" , 'r' ) . readlines ( ) emptySpace = re . compile ( '[ ]+' ) for line in lines : if "MemTotal" in line : memtotal = float ( emptySpace . split ( line ) [ 1 ] ) if "SwapFree" in line : swapfree = float ( emptySpace . split ( line ) [ 1 ] ) if "SwapTotal" in line :...
return utilization of memory
1,920
def check_py_version ( ) : try : if sys . version_info >= ( 2 , 7 ) : return except : pass print ( " " ) print ( " ERROR - memtop needs python version at least 2.7" ) print ( ( "Chances are that you can install newer version from your " "repositories, or even that you have some newer version " "installed yet." ) ) prin...
Check whether a proper Python version is being used.
1,921
def character(prompt=None, empty=False):
    """Prompt until exactly one character is entered.

    Returns None when *empty* is allowed and the input is blank.
    """
    while True:
        entered = _prompt_input(prompt)
        if empty and not entered:
            return None
        if len(entered) == 1:
            return entered
Prompt a single character .
1,922
def email(prompt=None, empty=False, mode="simple"):
    """Prompt until a valid email address is entered.

    Returns None for blank input when *empty* is allowed. Only the
    "simple" validation mode is supported; any other mode raises
    ValueError.
    """
    if mode != "simple":
        raise ValueError
    while True:
        entered = _prompt_input(prompt)
        if empty and not entered:
            return None
        if RE_EMAIL_SIMPLE.match(entered):
            return entered
Prompt an email address .
1,923
def integer(prompt=None, empty=False):
    """Prompt until the input parses as an int (or blank if *empty*)."""
    while True:
        entered = _prompt_input(prompt)
        if empty and not entered:
            return None
        try:
            return int(entered)
        except ValueError:
            continue
Prompt an integer .
1,924
def real(prompt=None, empty=False):
    """Prompt until the input parses as a float (or blank if *empty*)."""
    while True:
        entered = _prompt_input(prompt)
        if empty and not entered:
            return None
        try:
            return float(entered)
        except ValueError:
            continue
Prompt a real number .
1,925
def regex(pattern, prompt=None, empty=False, flags=0):
    """Prompt until the input matches *pattern*; return the match object.

    Returns None for blank input when *empty* is allowed.
    """
    while True:
        entered = _prompt_input(prompt)
        if empty and not entered:
            return None
        found = re.match(pattern, entered, flags=flags)
        if found:
            return found
Prompt a string that matches a regular expression .
1,926
def secret(prompt=None, empty=False):
    """Prompt for a string without echoing it to the terminal.

    Retries on blank input unless *empty* allows returning None.
    """
    if prompt is None:
        prompt = PROMPT
    while True:
        entered = getpass.getpass(prompt=prompt)
        if entered:
            return entered
        if empty:
            return None
Prompt a string without echoing .
1,927
def string(prompt=None, empty=False):
    """Prompt for a non-empty string (or None when *empty* permits blank)."""
    while True:
        entered = _prompt_input(prompt)
        if entered:
            return entered
        if empty:
            return None
Prompt a string .
1,928
def _get_cache_plus_key ( self ) : key = getattr ( self , '_cache_key' , self . key_from_query ( ) ) return self . _cache . cache , key
Return a cache region plus key .
1,929
def get_value ( self , merge = True , createfunc = None , expiration_time = None , ignore_expiration = False ) : cache , cache_key = self . _get_cache_plus_key ( ) assert not ignore_expiration or not createfunc , "Can't ignore expiration and also provide createfunc" if ignore_expiration or not createfunc : cached_value...
Return the value from the cache for this query .
1,930
def set_value(self, value):
    """Store *value* in the cache region under this query's cache key."""
    region, key = self._get_cache_plus_key()
    region.set(key, value)
Set the value in the cache for this query .
1,931
def key_from_query ( self , qualifier = None ) : stmt = self . with_labels ( ) . statement compiled = stmt . compile ( ) params = compiled . params values = [ str ( compiled ) ] for k in sorted ( params ) : values . append ( repr ( params [ k ] ) ) key = u" " . join ( values ) return md5 ( key . encode ( 'utf8' ) ) . h...
Given a Query create a cache key .
1,932
def process_query_conditionally(self, query):
    """Attach a configured cache region to *query* when it runs inside a
    lazy loader.

    The last two entries of the query's load path identify the mapper and
    the relationship property being loaded; the mapped class's MRO is
    walked so options registered on a base class also apply.
    """
    path = query._current_path
    if not path:
        return
    mapper, prop = path[-2:]
    for klass in mapper.class_.__mro__:
        option = self._relationship_options.get((klass, prop.key))
        if option:
            query._cache = option
            return
Process a Query that is used within a lazy loader .
1,933
def fit(self, t, y, dy=1, presorted=False):
    """Validate the inputs, fit the smoother, and return self (fluent)."""
    validated = self._validate_inputs(t, y, dy, presorted)
    self.t, self.y, self.dy = validated
    self._fit(self.t, self.y, self.dy)
    return self
Fit the smoother
1,934
def predict(self, t):
    """Evaluate the smoothed function at *t*, preserving its array shape."""
    t = np.asarray(t)
    flat = self._predict(np.ravel(t))
    return flat.reshape(t.shape)
Predict the smoothed function value at time t
1,935
def cv_residuals(self, cv=True):
    """Return the error-normalized cross-validation residuals."""
    predicted = self.cv_values(cv)
    return (self.y - predicted) / self.dy
Return the residuals of the cross - validation for the fit data
1,936
def cv_error(self, cv=True, skip_endpoints=True):
    """Mean absolute cross-validation residual, optionally dropping the
    two endpoint residuals."""
    residuals = self.cv_residuals(cv)
    if skip_endpoints:
        residuals = residuals[1:-1]
    return np.mean(np.abs(residuals))
Return the sum of cross - validation residuals for the input data
1,937
def arcfour_drop(key, n=3072):
    """Return an RC4-drop keystream generator for *key*, discarding the
    first *n* bytes (default 3072, the conservative RC4-drop parameter,
    NOT the SCAN default of 768).

    The original used the Python-2-only ``af.next()`` and built a
    throwaway list just to advance the generator; ``next()`` in a plain
    loop works on both Python 2 and 3.
    """
    stream = arcfour(key)
    for _ in range(n):
        next(stream)
    return stream
Return a generator for the RC4 - drop pseudorandom keystream given by the key and number of bytes to drop passed as arguments . Dropped bytes default to the more conservative 3072 NOT the SCAN default of 768 .
1,938
def reconnect(self):
    """Re-establish the HTTP(S) connection to the remote server.

    The lock is held via a ``with`` block so it is released even if the
    connection constructor raises (the original acquire()/release() pair
    leaked the lock on exception).
    """
    with self.lock:
        if self.use_ssl:
            self.client = http.client.HTTPSConnection(
                self.host, self.port, context=self.ssl_context)
        else:
            self.client = http.client.HTTPConnection(self.host, self.port)
Reconnect to the remote server .
1,939
def call ( self , method , * args , ** kwargs ) : if kwargs : options = self . encode ( dict ( args = args , kwargs = kwargs ) ) else : options = self . encode ( args ) headers = { } if self . headers : headers . update ( self . headers ) headers [ 'Content-Type' ] = self . serializer . content_type headers [ 'Content-...
Issue a call to the remote end point to execute the specified procedure .
1,940
def cache_call_refresh ( self , method , * options ) : options_hash = self . encode ( options ) if len ( options_hash ) > 20 : options_hash = hashlib . new ( 'sha1' , options ) . digest ( ) options_hash = sqlite3 . Binary ( options_hash ) with self . cache_lock : cursor = self . cache_db . cursor ( ) cursor . execute (...
Call a remote method and update the local cache with the result if it already existed .
1,941
def cache_clear(self):
    """Delete every row from the local RPC result cache."""
    with self.cache_lock:
        self.cache_db.cursor().execute('DELETE FROM cache')
        self.cache_db.commit()
        self.logger.info('the RPC cache has been purged')
Purge the local store of all cached function information .
1,942
def respond_file ( self , file_path , attachment = False , query = None ) : del query file_path = os . path . abspath ( file_path ) try : file_obj = open ( file_path , 'rb' ) except IOError : self . respond_not_found ( ) return self . send_response ( 200 ) self . send_header ( 'Content-Type' , self . guess_mime_type ( ...
Respond to the client by serving a file either directly or as an attachment .
1,943
def respond_list_directory ( self , dir_path , query = None ) : del query try : dir_contents = os . listdir ( dir_path ) except os . error : self . respond_not_found ( ) return if os . path . normpath ( dir_path ) != self . __config [ 'serve_files_root' ] : dir_contents . append ( '..' ) dir_contents . sort ( key = lam...
Respond to the client with an HTML page listing the contents of the specified directory .
1,944
def respond_redirect(self, location='/'):
    """Send a 301 response redirecting the client to *location*."""
    self.send_response(301)
    self.send_header('Content-Length', 0)
    self.send_header('Location', location)
    self.end_headers()
Respond to the client with a 301 message and redirect them with a Location header .
1,945
def respond_server_error ( self , status = None , status_line = None , message = None ) : ( ex_type , ex_value , ex_traceback ) = sys . exc_info ( ) if ex_type : ( ex_file_name , ex_line , _ , _ ) = traceback . extract_tb ( ex_traceback ) [ - 1 ] line_info = "{0}:{1}" . format ( ex_file_name , ex_line ) log_msg = "enco...
Handle an internal server error logging a traceback if executed within an exception handler .
1,946
def respond_unauthorized(self, request_authentication=False):
    """Send a 401 Unauthorized response, optionally asking the client to
    authenticate via HTTP Basic (realm = configured server version)."""
    headers = {}
    if request_authentication:
        realm = self.__config['server_version']
        headers['WWW-Authenticate'] = 'Basic realm="' + realm + '"'
    self.send_response_full(b'Unauthorized', status=401, headers=headers)
    return
Respond to the client that the request is unauthorized .
1,947
def dispatch_handler ( self , query = None ) : query = ( query or { } ) self . path = self . path . split ( '?' , 1 ) [ 0 ] self . path = self . path . split ( '#' , 1 ) [ 0 ] original_path = urllib . parse . unquote ( self . path ) self . path = posixpath . normpath ( original_path ) words = self . path . split ( '/' ...
Dispatch functions based on the established handler_map . It is generally not necessary to override this function and doing so will prevent any handlers from being executed . This function is executed automatically when requests of either GET HEAD or POST are received .
1,948
def guess_mime_type(self, path):
    """Map *path*'s extension to a MIME type.

    Tries the extension as given, then lower-cased, and finally falls
    back to the default entry keyed by the empty string.
    """
    extension = posixpath.splitext(path)[1]
    if extension in self.extensions_map:
        return self.extensions_map[extension]
    extension = extension.lower()
    if extension in self.extensions_map:
        return self.extensions_map[extension]
    return self.extensions_map['']
Guess an appropriate MIME type based on the extension of the provided path .
1,949
def check_authorization ( self ) : try : store = self . __config . get ( 'basic_auth' ) if store is None : return True auth_info = self . headers . get ( 'Authorization' ) if not auth_info : return False auth_info = auth_info . split ( ) if len ( auth_info ) != 2 or auth_info [ 0 ] != 'Basic' : return False auth_info =...
Check for the presence of a basic auth Authorization header and if the credentials contained within in are valid .
1,950
def cookie_get(self, name):
    """Return the value of the named client cookie, or None when the
    handler has no cookies or the cookie is unset.

    Performs a single lookup instead of the original's double
    ``self.cookies.get(name)`` call.
    """
    cookies = getattr(self, 'cookies', None)
    if cookies is None:
        return None
    morsel = cookies.get(name)
    return morsel.value if morsel else None
Check for a cookie value by name .
1,951
def cookie_set(self, name, value):
    """Send a Set-Cookie header for *name*=*value* (HttpOnly, path /).

    Raises RuntimeError once the header section has already been ended.
    """
    if not self.headers_active:
        raise RuntimeError('headers have already been ended')
    self.send_header('Set-Cookie', "{0}={1}; Path=/; HttpOnly".format(name, value))
Set the value of a client cookie . This can only be called while headers can be sent .
1,952
def get_content_type_charset(self, default='UTF-8'):
    """Return the charset declared in the Content-Type header, or
    *default* when none is present.

    Fixes in this version: a ``charset=`` at offset 0 is honored
    (original tested ``idx > 0``), and a following ``;parameter`` or
    surrounding quotes no longer leak into the returned value.
    """
    header = self.headers.get('Content-Type', '')
    idx = header.find('charset=')
    if idx < 0:
        return default
    value = header[idx + 8:].split(';', 1)[0].split(' ', 1)[0].strip('"')
    return value or default
Inspect the Content - Type header to retrieve the charset that the client has specified .
1,953
def close(self):
    """Close the WebSocket connection and stop processing results.

    If the underlying file object is still open and writable, a close
    frame (FIN + opcode 0x8, empty payload) is sent to the peer before
    the on_closed() callback runs.
    """
    if not self.connected:
        return
    # Flip the flag first so concurrent callers observe the socket as
    # closed and do not attempt a second close handshake.
    self.connected = False
    if self.handler.wfile.closed:
        return
    # Zero-timeout select: only write the close frame if the socket is
    # writable right now, rather than blocking during shutdown.
    if select.select([], [self.handler.wfile], [], 0)[1]:
        with self.lock:
            # b'\x88\x00' = FIN|close opcode, zero-length payload.
            self.handler.wfile.write(b'\x88\x00')
            self.handler.wfile.flush()
    self.on_closed()
Close the web socket connection and stop processing results . If the connection is still open a WebSocket close message will be sent to the peer .
1,954
def send_message ( self , opcode , message ) : if not isinstance ( message , bytes ) : message = message . encode ( 'utf-8' ) length = len ( message ) if not select . select ( [ ] , [ self . handler . wfile ] , [ ] , 0 ) [ 1 ] : self . logger . error ( 'the socket is not ready for writing' ) self . close ( ) return buf...
Send a message to the peer over the socket .
1,955
def on_message ( self , opcode , message ) : self . logger . debug ( "processing {0} (opcode: 0x{1:02x}) message" . format ( self . _opcode_names . get ( opcode , 'UNKNOWN' ) , opcode ) ) if opcode == self . _opcode_close : self . close ( ) elif opcode == self . _opcode_ping : if len ( message ) > 125 : self . close ( ...
The primary dispatch function to handle incoming WebSocket messages .
1,956
def from_content_type ( cls , content_type ) : name = content_type options = { } if ';' in content_type : name , options_str = content_type . split ( ';' , 1 ) for part in options_str . split ( ';' ) : part = part . strip ( ) if '=' in part : key , value = part . split ( '=' ) else : key , value = ( part , None ) optio...
Build a serializer object from a MIME Content - Type string .
1,957
def dumps(self, data):
    """Serialize *data* for transmission or storage.

    Dispatches to the registered driver for this serializer's name,
    encodes str results on Python 3, and optionally zlib-compresses.
    Always returns bytes.
    """
    serialized = g_serializer_drivers[self.name]['dumps'](data)
    if sys.version_info[0] == 3 and isinstance(serialized, str):
        serialized = serialized.encode(self._charset)
    if self._compression == 'zlib':
        serialized = zlib.compress(serialized)
    assert isinstance(serialized, bytes)
    return serialized
Serialize a python data type for transmission or storage .
1,958
def loads ( self , data ) : if not isinstance ( data , bytes ) : raise TypeError ( "loads() argument 1 must be bytes, not {0}" . format ( type ( data ) . __name__ ) ) if self . _compression == 'zlib' : data = zlib . decompress ( data ) if sys . version_info [ 0 ] == 3 and self . name . startswith ( 'application/' ) : d...
Deserialize the data into its original Python object.
1,959
def shutdown ( self ) : self . __should_stop . set ( ) if self . __server_thread == threading . current_thread ( ) : self . __is_shutdown . set ( ) self . __is_running . clear ( ) else : if self . __wakeup_fd is not None : os . write ( self . __wakeup_fd . write_fd , b'\x00' ) self . __is_shutdown . wait ( ) if self . ...
Shutdown the server and stop responding to requests .
1,960
def auth_set(self, status):
    """Enable (truthy *status*) or disable basic authentication for all
    incoming requests; enabling starts from an empty credential store."""
    if status:
        self.__config['basic_auth'] = {}
        self.logger.info('basic authentication has been enabled')
    else:
        self.__config['basic_auth'] = None
        self.logger.info('basic authentication has been disabled')
Enable or disable requiring authentication on all incoming requests .
1,961
def auth_delete_creds(self, username=None):
    """Delete stored credentials for *username*, or clear the whole
    credential database when no username is given."""
    if username:
        del self.__config['basic_auth'][username]
        return
    self.__config['basic_auth'] = {}
    self.logger.info('basic authentication database has been cleared of all entries')
Delete the credentials for a specific username if specified or all stored credentials .
1,962
def setattr_context(obj, **kwargs):
    """Context-manager generator that temporarily sets attributes on *obj*.

    The previous values are recorded up front and restored in a finally
    block, so they come back even if the managed body raises. The
    original used list comprehensions purely for their setattr side
    effects; plain loops make the intent explicit.
    """
    saved = {key: getattr(obj, key) for key in kwargs}
    for key, val in kwargs.items():
        setattr(obj, key, val)
    try:
        yield
    finally:
        for key, val in saved.items():
            setattr(obj, key, val)
Context manager to temporarily change the values of object attributes while executing a function .
1,963
def validate_inputs ( * arrays , ** kwargs ) : arrays = np . broadcast_arrays ( * arrays ) sort_by = kwargs . pop ( 'sort_by' , None ) if kwargs : raise ValueError ( "unrecognized arguments: {0}" . format ( kwargs . keys ( ) ) ) if arrays [ 0 ] . ndim != 1 : raise ValueError ( "Input arrays should be one-dimensional." ...
Validate input arrays
1,964
def _prep_smooth ( t , y , dy , span , t_out , span_out , period ) : if period : t = t % period if t_out is not None : t_out = t_out % period t , y , dy = validate_inputs ( t , y , dy , sort_by = t ) if span_out is not None : if t_out is None : raise ValueError ( "Must specify t_out when span_out is given" ) if span is...
Private function to prepare & check variables for smooth utilities
1,965
def moving_average_smooth ( t , y , dy , span = None , cv = True , t_out = None , span_out = None , period = None ) : prep = _prep_smooth ( t , y , dy , span , t_out , span_out , period ) t , y , dy , span , t_out , span_out , indices = prep w = 1. / ( dy ** 2 ) w , yw = windowed_sum ( [ w , y * w ] , t = t , span = sp...
Perform a moving - average smooth of the data
1,966
def linear_smooth ( t , y , dy , span = None , cv = True , t_out = None , span_out = None , period = None ) : t_input = t prep = _prep_smooth ( t , y , dy , span , t_out , span_out , period ) t , y , dy , span , t_out , span_out , indices = prep if period : t_input = np . asarray ( t_input ) % period w = 1. / ( dy ** 2...
Perform a linear smooth of the data
1,967
def multinterp ( x , y , xquery , slow = False ) : x , y , xquery = map ( np . asarray , ( x , y , xquery ) ) assert x . ndim == 1 assert xquery . ndim == 1 assert y . shape == x . shape + xquery . shape xquery = np . clip ( xquery , x . min ( ) , x . max ( ) ) if slow : from scipy . interpolate import interp1d return ...
Multiple linear interpolations
1,968
def _create_session(self, test_connection=False):
    """Create a consulate.Session for the configured host and port.

    When *test_connection* is True, the cluster leader is queried so a
    broken connection fails fast here rather than on first real use.
    """
    session = consulate.Session(host=self.host, port=self.port)
    if test_connection:
        session.status.leader()
    return session
Create a consulate . session object and query for its leader to ensure that the connection is made .
1,969
def apply_remote_config ( self , namespace = None ) : if namespace is None : namespace = "config/{service}/{environment}/" . format ( service = os . environ . get ( 'SERVICE' , 'generic_service' ) , environment = os . environ . get ( 'ENVIRONMENT' , 'generic_environment' ) ) for k , v in iteritems ( self . session . kv...
Applies all config values defined in Consul's KV store to self.app.
1,970
def register_service(self, **kwargs):
    """Register this service with consul.

    All keyword arguments are forwarded to
    ``Consul.agent.service.register``; the service name defaults to the
    application's name.
    """
    kwargs.setdefault('name', self.app.name)
    self.session.agent.service.register(**kwargs)
Register this service with Consul; kwargs are passed to Consul.agent.service.register.
1,971
def _resolve ( self ) : endpoints = { } r = self . resolver . query ( self . service , 'SRV' ) for rec in r . response . additional : name = rec . name . to_text ( ) addr = rec . items [ 0 ] . address endpoints [ name ] = { 'addr' : addr } for rec in r . response . answer [ 0 ] . items : name = '.' . join ( rec . targe...
Query the consul DNS server for the service IP and port
1,972
def crop(gens, seconds=5, cropper=None):
    """Crop one generator, or a sequence of generators, to a finite length.

    With the default *cropper*, each generator is sliced to
    ``seconds * sampler.FRAME_RATE`` frames. When a bare generator is
    passed (an object with a ``.next`` method, i.e. Python 2), a bare
    cropped generator is returned instead of a one-element list.
    """
    if hasattr(gens, "next"):
        gens = (gens,)
    if cropper is None:
        def cropper(gen):
            return itertools.islice(gen, 0, seconds * sampler.FRAME_RATE)
    cropped = [cropper(gen) for gen in gens]
    return cropped[0] if len(cropped) == 1 else cropped
Crop the generator to a finite number of frames
1,973
def crop_at_zero_crossing ( gen , seconds = 5 , error = 0.1 ) : source = iter ( gen ) buffer_length = int ( 2 * error * sampler . FRAME_RATE ) start = itertools . islice ( source , 0 , int ( ( seconds - error ) * sampler . FRAME_RATE ) ) end = itertools . islice ( source , 0 , buffer_length ) for sample in start : yiel...
Crop the generator ending at a zero - crossing
1,974
def volume(gen, dB=0):
    """Scale *gen* by dB decibels; *dB* may itself be a generator, in
    which case the gain tracks its values frame by frame."""
    if hasattr(dB, 'next'):
        def scale_gen():
            while True:
                yield 10 ** (next(dB) / 20.)
        scale = scale_gen()
    else:
        scale = 10 ** (dB / 20.)
    return envelope(gen, scale)
Change the volume of gen by dB decibels.
1,975
def mixer ( inputs , mix = None ) : if mix == None : mix = ( [ constant ( 1.0 / len ( inputs ) ) ] * len ( inputs ) , ) duped_inputs = zip ( * [ itertools . tee ( i , len ( mix ) ) for i in inputs ] ) return [ sum ( * [ multiply ( m , i ) for m , i in zip ( channel_mix , channel_inputs ) ] ) for channel_mix , channel_i...
Mix inputs together based on mix tuple
1,976
def channelize(gen, channels):
    """Split a multi-channel frame generator into one generator per channel."""
    copies = itertools.tee(gen, channels)
    def select(source, channel):
        for frame in source:
            yield frame[channel]
    return [select(copy, channel) for channel, copy in enumerate(copies)]
Break multi - channel generator into one sub - generator per channel
1,977
def file_is_seekable(f):
    """Return True if *f* supports tell(), False when it is a pipe
    (ESPIPE), e.g. STDOUT redirected into a pipe.

    Any IOError other than ESPIPE is re-raised. Uses the
    ``except IOError as e`` form, valid on Python 2.6+ and Python 3
    (the original comma form is Python-2-only syntax).
    """
    try:
        f.tell()
        logger.info("File is seekable!")
    except IOError as e:
        if e.errno == errno.ESPIPE:
            return False
        raise
    return True
Returns True if file f is seekable and False if not. Useful to determine, for example, whether f is STDOUT connected to a pipe.
1,978
def sample(generator, min=-1, max=1, width=SAMPLE_WIDTH):
    """Pack an audio waveform generator into a little-endian sample
    generator of *width* bytes per sample, clipping to [min, max]."""
    fmt = {1: '<B', 2: '<h', 4: '<i'}[width]
    half_range = 2 ** (width * 8 - 1)
    normalized = normalize(hard_clip(generator, min, max),
                           min, max, -half_range, half_range - 1)
    return (struct.pack(fmt, int(value)) for value in normalized)
Convert audio waveform generator into packed sample generator .
1,979
def sample_all(generators, *args, **kwargs):
    """Apply sample() to every waveform generator, forwarding any extra
    arguments, and return the packed sample generators as a list."""
    return [sample(source, *args, **kwargs) for source in generators]
Convert list of audio waveform generators into list of packed sample generators .
1,980
def buffer(stream, buffer_size=BUFFER_SIZE):
    """Group the sample stream into strings of *buffer_size* samples,
    yielding chunks until the stream is exhausted (empty-string sentinel)."""
    source = iter(stream)
    def next_chunk():
        return "".join(itertools.islice(source, buffer_size))
    return iter(next_chunk, "")
Buffer the generator into byte strings of buffer_size samples
1,981
def wave_module_patched ( ) : f = StringIO ( ) w = wave . open ( f , "wb" ) w . setparams ( ( 1 , 2 , 44100 , 0 , "NONE" , "no compression" ) ) patched = True try : w . setnframes ( ( 0xFFFFFFFF - 36 ) / w . getnchannels ( ) / w . getsampwidth ( ) ) w . _ensure_header_written ( 0 ) except struct . error : patched = Fal...
True if wave module can write data size of 0xFFFFFFFF False otherwise .
1,982
def cache_finite_samples(f):
    """Decorator that memoizes the finite sample stream produced by *f*.

    Entries are keyed on (FRAME_RATE, args) so a change of the global
    frame rate does not replay stale samples; each call returns a fresh
    generator over the cached list.
    """
    memo = {}
    def wrap(*args):
        key = (FRAME_RATE, args)
        if key not in memo:
            memo[key] = list(f(*args))
        return (item for item in memo[key])
    return wrap
Decorator to cache audio samples produced by the wrapped generator .
1,983
def play ( channels , blocking = True , raw_samples = False ) : if not pyaudio_loaded : raise Exception ( "Soundcard playback requires PyAudio. Install with `pip install pyaudio`." ) channel_count = 1 if hasattr ( channels , "next" ) else len ( channels ) wavgen = wav_samples ( channels , raw_samples = raw_samples ) p ...
Play the contents of the generator using PyAudio
1,984
def _pad_arrays ( t , arrays , indices , span , period ) : N = len ( t ) if indices is None : indices = np . arange ( N ) pad_left = max ( 0 , 0 - np . min ( indices - span // 2 ) ) pad_right = max ( 0 , np . max ( indices + span - span // 2 ) - ( N - 1 ) ) if pad_left + pad_right > 0 : Nright , pad_right = divmod ( pa...
Internal routine to pad arrays for periodic models .
1,985
def get_i2c_bus_numbers(glober=glob.glob):
    r"""Return the bus numbers of all /dev/i2c-* devices on the system.

    Fixes: the original pattern ``([\d]){1,2}`` repeated the capture
    group, so ``.group(1)`` held only the LAST digit (bus 12 -> 2); a
    single anchored multi-digit group captures the full number. Devices
    that do not match are skipped instead of raising on ``None``.

    :param glober: injectable glob function (for testing)
    """
    numbers = []
    for device in glober("/dev/i2c-*"):
        found = re.match(r"/dev/i2c-(\d{1,2})$", device)
        if found:
            numbers.append(int(found.group(1)))
    return numbers
Search all the available I2C devices in the system
1,986
def get_led_register_from_name ( self , name ) : res = re . match ( '^led_([0-9]{1,2})$' , name ) if res is None : raise AttributeError ( "Unknown attribute: '%s'" % name ) led_num = int ( res . group ( 1 ) ) if led_num < 0 or led_num > 15 : raise AttributeError ( "Unknown attribute: '%s'" % name ) return self . calc_l...
Parse the name for led number
1,987
def set_pwm(self, led_num, value):
    """Set the PWM value for the specified LED.

    Both arguments are range-checked first; the value is then split by
    value_low()/value_high() across two consecutive registers (low byte
    at the LED's base register, high byte at base + 1).
    """
    self.__check_range('led_number', led_num)
    self.__check_range('led_value', value)
    register_low = self.calc_led_register(led_num)
    self.write(register_low, value_low(value))
    self.write(register_low + 1, value_high(value))
Set PWM value for the specified LED
1,988
def get_pwm(self, led_num):
    """Return the current PWM value of the given LED.

    Validates the LED number, then reads starting at the LED's low
    register via the private value reader.
    """
    self.__check_range('led_number', led_num)
    register_low = self.calc_led_register(led_num)
    return self.__get_led_value(register_low)
Generic getter for all LED PWM value
1,989
def sleep(self):
    """Send the controller to sleep.

    Sets the SLEEP bit in the MODE_1 register while preserving the
    cached mode bits in ``self.mode_1``.
    """
    logger.debug("Sleep the controller")
    self.write(Registers.MODE_1, self.mode_1 | (1 << Mode1.SLEEP))
Send the controller to sleep
1,990
def write(self, reg, value):
    """Write a raw byte value to the specified register over the I2C bus.

    The value is validated via the private range check before the bus
    transaction is issued.
    """
    self.__check_range('register_value', value)
    logger.debug("Write '%s' to register '%s'" % (value, reg))
    self.__bus.write_byte_data(self.__address, reg, value)
Write raw byte value to the specified register
1,991
def set_pwm_frequency(self, value):
    """Set the frequency for all PWM output.

    The requested frequency is validated and converted to a prescale
    register value; the code sleeps the controller, writes PRE_SCALE,
    then wakes it (the prescale write is bracketed by sleep/wake).
    """
    self.__check_range('pwm_frequency', value)
    reg_val = self.calc_pre_scale(value)
    logger.debug("Calculated prescale value is %s" % reg_val)
    self.sleep()
    self.write(Registers.PRE_SCALE, reg_val)
    self.wake()
Set the frequency for all PWM output
1,992
def check_valid_color ( color ) : if color in list ( mcolors . CSS4_COLORS . keys ( ) ) + [ "#4CB391" ] : logging . info ( "Nanoplotter: Valid color {}." . format ( color ) ) return color else : logging . info ( "Nanoplotter: Invalid color {}, using default." . format ( color ) ) sys . stderr . write ( "Invalid color {...
Check if the color provided by the user is valid .
1,993
def check_valid_format ( figformat ) : fig = plt . figure ( ) if figformat in list ( fig . canvas . get_supported_filetypes ( ) . keys ( ) ) : logging . info ( "Nanoplotter: valid output format {}" . format ( figformat ) ) return figformat else : logging . info ( "Nanoplotter: invalid output format {}" . format ( figfo...
Check if the specified figure format is valid .
1,994
def spatial_heatmap ( array , path , title = None , color = "Greens" , figformat = "png" ) : logging . info ( "Nanoplotter: Creating heatmap of reads per channel using {} reads." . format ( array . size ) ) activity_map = Plot ( path = path + "." + figformat , title = "Number of reads generated per channel" ) layout = ...
Taking channel information and creating post run channel activity plots .
1,995
def check_valid_time_and_sort ( df , timescol , days = 5 , warning = True ) : timediff = ( df [ timescol ] . max ( ) - df [ timescol ] . min ( ) ) . days if timediff < days : return df . sort_values ( timescol ) . reset_index ( drop = True ) . reset_index ( ) else : if warning : sys . stderr . write ( "\nWarning: data ...
Check if the data contains reads created within the same days timeframe .
1,996
def time_plots ( df , path , title = None , color = "#4CB391" , figformat = "png" , log_length = False , plot_settings = None ) : dfs = check_valid_time_and_sort ( df , "start_time" ) logging . info ( "Nanoplotter: Creating timeplots using {} reads." . format ( len ( dfs ) ) ) cumyields = cumulative_yield ( dfs = dfs ....
Making plots of time vs read length time vs quality and cumulative yield .
1,997
def violin_or_box_plot ( df , y , figformat , path , y_name , title = None , plot = "violin" , log = False , palette = None ) : comp = Plot ( path = path + "NanoComp_" + y . replace ( ' ' , '_' ) + '.' + figformat , title = "Comparing {}" . format ( y ) ) if y == "quals" : comp . title = "Comparing base call quality sc...
Create a violin or boxplot from the received DataFrame .
1,998
def output_barplot ( df , figformat , path , title = None , palette = None ) : logging . info ( "Nanoplotter: Creating barplots for number of reads and total throughput." ) read_count = Plot ( path = path + "NanoComp_number_of_reads." + figformat , title = "Comparing number of reads" ) ax = sns . countplot ( x = "datas...
Create barplots based on number of reads and total sum of nucleotides sequenced .
1,999
def overlay_histogram(df, path, palette=None):
    """Create an overlaid histogram of read lengths with plotly.

    The palette defaults to the plotly colour cycle, repeated five times
    so many datasets remain distinguishable. The interactive HTML and
    the figure object are stored on the returned-by-side-effect Plot
    wrapper (hist.html / hist.fig).
    """
    if palette is None:
        palette = plotly.colors.DEFAULT_PLOTLY_COLORS * 5
    hist = Plot(path=path + "NanoComp_OverlayHistogram.html",
                title="Histogram of read lengths")
    hist.html, hist.fig = plot_overlay_histogram(df, palette, title=hist.title)
Use plotly to create an overlay of length histograms. Returns HTML code, but also saves a PNG.