Columns: signature (string, 29 to 44.1k characters) and implementation (string, 0 to 85.2k characters).
def request(self, url: str, method: str, raise_for_status: bool = True,
            path_to_errors: tuple = None, *args, **kwargs) -> tuple:
    """A wrapper method for :meth:`~requests.Session.request`, which adds some defaults and logging

    :param url: The URL to send the reply to
    :param method: The method to use
    :param raise_for_status: Should an exception be raised for a failed response. Default is **True**
    :param args: Additional args to be sent to the request
    :param kwargs: Additional kwargs to be sent to the request
    :return: Dict of response body or original :class:`requests.Response`
    """
    session = kwargs.get("session", requests.Session())
    log.debug("sending a %s request to %s with args: %s kwargs: %s",
              method.upper(), url, args, kwargs)
    rsp = session.request(method, url, *args, **kwargs)
    log.debug("response: %s", rsp.text)
    errors = None
    if raise_for_status:
        try:
            rsp.raise_for_status()
        except requests.RequestException as e:
            if e.response is not None:
                rsp = e.response
                if path_to_errors:
                    try:
                        errors = rsp.json()
                        for arg in path_to_errors:
                            if errors.get(arg):
                                errors = errors[arg]
                    except json.decoder.JSONDecodeError:
                        errors = [rsp.text]
                else:
                    errors = [rsp.text]
                if not isinstance(errors, list):
                    errors = [errors]
            else:
                rsp = None
                errors = [str(e)]
            log.debug("errors when trying to access %s: %s", url, errors)
    log.debug("returning response %s, errors %s", rsp, errors)
    return rsp, errors
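A minimal standalone sketch of the path_to_errors traversal used above; the error payload shape here is made up for illustration.

# Sketch of the error-extraction walk, with a hypothetical nested error payload.
payload = {"error": {"message": "invalid token", "code": 401}}
path_to_errors = ("error", "message")

errors = payload
for arg in path_to_errors:
    if errors.get(arg):
        errors = errors[arg]
if not isinstance(errors, list):
    errors = [errors]

print(errors)  # ['invalid token']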
def get_logs(self, container_id):
    """Return the full stdout/stderr of a container"""
    stdout = self._docker.containers.get(container_id).logs(stdout=True, stderr=False).decode('utf8')
    stderr = self._docker.containers.get(container_id).logs(stdout=False, stderr=True).decode('utf8')
    return stdout, stderr
def stop(pid):
    """Shut down a specific process.

    Args:
        pid: the pid of the process to shutdown.
    """
    if psutil.pid_exists(pid):
        try:
            p = psutil.Process(pid)
            p.kill()
        except Exception:
            pass
def mdgel_metadata ( self ) : """Return consolidated metadata from MD GEL tags as dict ."""
for page in self . pages [ : 2 ] : if 'MDFileTag' in page . tags : tags = page . tags break else : return None result = { } for code in range ( 33445 , 33453 ) : name = TIFF . TAGS [ code ] if name not in tags : continue result [ name [ 2 : ] ] = tags [ name ] . value return result
async def getChatMember(self, chat_id, user_id):
    """See: https://core.telegram.org/bots/api#getchatmember"""
    p = _strip(locals())
    return await self._api_request('getChatMember', _rectify(p))
def output_prefixdata_code ( prefixdata , outfilename , module_prefix , varprefix , per_locale , chunks ) : """Output the per - prefix data in Python form to the given file"""
sorted_keys = sorted ( prefixdata . keys ( ) ) total_keys = len ( sorted_keys ) if chunks == - 1 : chunk_size = PREFIXDATA_CHUNK_SIZE total_chunks = int ( math . ceil ( total_keys / float ( chunk_size ) ) ) else : chunk_size = int ( math . ceil ( total_keys / float ( chunks ) ) ) total_chunks = chunks outdirname = os . path . dirname ( outfilename ) longest_prefix = 0 for chunk_num in range ( total_chunks ) : chunk_index = chunk_size * chunk_num chunk_keys = sorted_keys [ chunk_index : chunk_index + chunk_size ] chunk_data = { } for key in chunk_keys : chunk_data [ key ] = prefixdata [ key ] chunk_file = os . path . join ( outdirname , 'data%d.py' % chunk_num ) chunk_longest = output_prefixdata_chunk ( chunk_data , chunk_file , module_prefix , per_locale ) if chunk_longest > longest_prefix : longest_prefix = chunk_longest with open ( outfilename , "w" ) as outfile : if per_locale : prnt ( PREFIXDATA_LOCALE_FILE_PROLOG % { 'module' : module_prefix } , file = outfile ) else : prnt ( PREFIXDATA_FILE_PROLOG % { 'module' : module_prefix } , file = outfile ) prnt ( COPYRIGHT_NOTICE , file = outfile ) prnt ( "%s_DATA = {}" % varprefix , file = outfile ) for chunk_num in range ( total_chunks ) : prnt ( "from .data%d import data" % chunk_num , file = outfile ) prnt ( "%s_DATA.update(data)" % varprefix , file = outfile ) prnt ( "del data" , file = outfile ) prnt ( "%s_LONGEST_PREFIX = %d" % ( varprefix , longest_prefix ) , file = outfile )
def dw ( self ) : r"""The weighted degree of vertices . For undirected graphs , the weighted degree of the vertex : math : ` v _ i ` is defined as . . math : : d [ i ] = \ sum _ j W [ j , i ] = \ sum _ j W [ i , j ] , where : math : ` W ` is the weighted adjacency matrix : attr : ` W ` . For directed graphs , the weighted degree of the vertex : math : ` v _ i ` is defined as . . math : : d [ i ] = \ frac12 ( d ^ \ text { in } [ i ] + d ^ \ text { out } [ i ] ) = \ frac12 ( \ sum _ j W [ j , i ] + \ sum _ j W [ i , j ] ) , i . e . , as the average of the in and out degrees . Examples Undirected graph : > > > graph = graphs . Graph ( [ . . . [ 0 , 1 , 0 ] , . . . [ 1 , 0 , 2 ] , . . . [ 0 , 2 , 0 ] , > > > print ( graph . d ) # Number of neighbors . [1 2 1] > > > print ( graph . dw ) # Weighted degree . [1 3 2] Directed graph : > > > graph = graphs . Graph ( [ . . . [ 0 , 1 , 0 ] , . . . [ 0 , 0 , 2 ] , . . . [ 0 , 2 , 0 ] , > > > print ( graph . d ) # Number of neighbors . [0.5 1.5 1 . ] > > > print ( graph . dw ) # Weighted degree . [0.5 2.5 2 . ]"""
if self . _dw is None : if not self . is_directed ( ) : # Shortcut for undirected graphs . self . _dw = np . ravel ( self . W . sum ( axis = 0 ) ) else : degree_in = np . ravel ( self . W . sum ( axis = 0 ) ) degree_out = np . ravel ( self . W . sum ( axis = 1 ) ) self . _dw = ( degree_in + degree_out ) / 2 return self . _dw
def compose(self, *args):
    """Returns a function that is the composition of a list of functions, each consuming
    the return value of the function that follows."""
    args = list(args)

    def composed(*ar, **kwargs):
        lastRet = self.obj(*ar, **kwargs)
        for i in args:
            lastRet = i(lastRet)
        return lastRet

    return self._wrap(composed)
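A plain-function sketch of the same wrapping order, without the wrapper object: the bound callable runs first and each composed function consumes the previous return value.

# Standalone illustration of the composition order used above.
def compose_plain(obj, *args):
    def composed(*ar, **kwargs):
        last_ret = obj(*ar, **kwargs)
        for f in args:
            last_ret = f(last_ret)
        return last_ret
    return composed

double_then_inc = compose_plain(lambda x: x * 2, lambda x: x + 1)
print(double_then_inc(10))  # 21: (10 * 2) then (+ 1)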
def load_yaml(yaml_file: str) -> Any:
    """Load YAML from file.

    :param yaml_file: path to YAML file
    :return: content of the YAML as dict/list
    """
    with open(yaml_file, 'r') as file:
        return ruamel.yaml.load(file, ruamel.yaml.RoundTripLoader)
def persons_significant_control ( self , num , statements = False , ** kwargs ) : """Search for a list of persons with significant control . Searches for persons of significant control based on company number for a specified company . Specify statements = True to only search for officers with statements . Args : num ( str , int ) : Company number to search on . statements ( Optional [ bool ] ) : Search only for persons with statements . Default is False . kwargs ( dict ) : additional keywords passed into requests . session . get * params * keyword ."""
baseuri = ( self . _BASE_URI + 'company/{}/persons-with-significant-control' . format ( num ) ) # Only append statements to the URL if statements is True if statements is True : baseuri += '-statements' res = self . session . get ( baseuri , params = kwargs ) self . handle_http_error ( res ) return res
def get_function_for_aws_event ( self , record ) : """Get the associated function to execute for a triggered AWS event Support S3 , SNS , DynamoDB , kinesis and SQS events"""
if 's3' in record : if ':' in record [ 's3' ] [ 'configurationId' ] : return record [ 's3' ] [ 'configurationId' ] . split ( ':' ) [ - 1 ] arn = None if 'Sns' in record : try : message = json . loads ( record [ 'Sns' ] [ 'Message' ] ) if message . get ( 'command' ) : return message [ 'command' ] except ValueError : pass arn = record [ 'Sns' ] . get ( 'TopicArn' ) elif 'dynamodb' in record or 'kinesis' in record : arn = record . get ( 'eventSourceARN' ) elif 'eventSource' in record and record . get ( 'eventSource' ) == 'aws:sqs' : arn = record . get ( 'eventSourceARN' ) elif 's3' in record : arn = record [ 's3' ] [ 'bucket' ] [ 'arn' ] if arn : return self . settings . AWS_EVENT_MAPPING . get ( arn ) return None
def ekssum(handle, segno):
    """Return summary information for a specified segment in a specified EK.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekssum_c.html

    :param handle: Handle of EK.
    :type handle: int
    :param segno: Number of segment to be summarized.
    :type segno: int
    :return: EK segment summary.
    :rtype: spiceypy.utils.support_types.SpiceEKSegSum
    """
    handle = ctypes.c_int(handle)
    segno = ctypes.c_int(segno)
    segsum = stypes.SpiceEKSegSum()
    libspice.ekssum_c(handle, segno, ctypes.byref(segsum))
    return segsum
def _collate(*iterables, key=lambda a: a, reverse=False):
    """Helper for ``collate()``, called when the user is using the ``reverse`` or ``key``
    keyword arguments on Python versions below 3.5."""
    min_or_max = partial(max if reverse else min, key=itemgetter(0))
    peekables = [peekable(it) for it in iterables]
    peekables = [p for p in peekables if p]  # Kill empties.
    while peekables:
        _, p = min_or_max((key(p.peek()), p) for p in peekables)
        yield next(p)
        peekables = [x for x in peekables if x]
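For comparison, on Python 3.5+ the standard library's heapq.merge accepts the same key and reverse arguments and yields the same ordering over pre-sorted inputs.

import heapq

# Merging pre-sorted iterables with the stdlib equivalent of the helper above.
a = [1, 4, 9]
b = [2, 3, 10]
print(list(heapq.merge(a, b)))                                  # [1, 2, 3, 4, 9, 10]
print(list(heapq.merge([9, 4, 1], [10, 3, 2], reverse=True)))   # [10, 9, 4, 3, 2, 1]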
def account_setup ( remote , token = None , response = None , account_setup = None ) : """Setup user account ."""
gh = GitHubAPI ( user_id = token . remote_account . user_id ) with db . session . begin_nested ( ) : gh . init_account ( ) # Create user < - > external id link . oauth_link_external_id ( token . remote_account . user , dict ( id = str ( gh . account . extra_data [ 'id' ] ) , method = "github" ) )
def hash_and_serialize_chunk ( chunk_pii_data , # type : Sequence [ Sequence [ str ] ] keys , # type : Sequence [ Sequence [ bytes ] ] schema # type : Schema ) : # type : ( . . . ) - > Tuple [ List [ str ] , Sequence [ int ] ] """Generate Bloom filters ( ie hash ) from chunks of PII then serialize the generated Bloom filters . It also computes and outputs the Hamming weight ( or popcount ) - - the number of bits set to one - - of the generated Bloom filters . : param chunk _ pii _ data : An iterable of indexable records . : param keys : A tuple of two lists of secret keys used in the HMAC . : param Schema schema : Schema specifying the entry formats and hashing settings . : return : A list of serialized Bloom filters and a list of corresponding popcounts"""
clk_data = [ ] clk_popcounts = [ ] for clk in stream_bloom_filters ( chunk_pii_data , keys , schema ) : clk_data . append ( serialize_bitarray ( clk [ 0 ] ) . strip ( ) ) clk_popcounts . append ( clk [ 2 ] ) return clk_data , clk_popcounts
def set_data ( self , data ) : """Sets the content data . arg : data ( osid . transport . DataInputStream ) : the content data raise : InvalidArgument - ` ` data ` ` is invalid raise : NoAccess - ` ` Metadata . isReadOnly ( ) ` ` is ` ` true ` ` raise : NullArgument - ` ` data ` ` is ` ` null ` ` * compliance : mandatory - - This method must be implemented . *"""
if data is None : raise errors . NullArgument ( 'data cannot be None' ) if not isinstance ( data , DataInputStream ) : raise errors . InvalidArgument ( 'data must be instance of DataInputStream' ) dbase = JSONClientValidated ( 'repository' , runtime = self . _runtime ) . raw ( ) filesys = gridfs . GridFS ( dbase ) self . _my_map [ 'data' ] = filesys . put ( data . _my_data ) data . _my_data . seek ( 0 ) self . _my_map [ 'base64' ] = base64 . b64encode ( data . _my_data . read ( ) )
def is_intercepted(target):
    """True iff input target is intercepted.

    :param target: target to check such as an intercepted target.
    :return: True iff input target is intercepted.
    :rtype: bool
    """
    result = False
    # get interception function from input target
    function = _get_function(target)
    result = hasattr(function, _INTERCEPTED)
    return result
def kill_speech_dispatcher ( self ) : '''kill speech dispatcher processs'''
if not 'HOME' in os . environ : return pidpath = os . path . join ( os . environ [ 'HOME' ] , '.speech-dispatcher' , 'pid' , 'speech-dispatcher.pid' ) if os . path . exists ( pidpath ) : try : import signal pid = int ( open ( pidpath ) . read ( ) ) if pid > 1 and os . kill ( pid , 0 ) is None : print ( "Killing speech dispatcher pid %u" % pid ) os . kill ( pid , signal . SIGINT ) time . sleep ( 1 ) except Exception as e : pass
def list_objects ( self , prefix = None , delimiter = None ) : """List the objects for this bucket . : param str prefix : If specified , only objects that start with this prefix are listed . : param str delimiter : If specified , return only objects whose name do not contain the delimiter after the prefix . For the other objects , the response contains ( in the prefix response parameter ) the name truncated after the delimiter . Duplicates are omitted ."""
return self . _client . list_objects ( instance = self . _instance , bucket_name = self . name , prefix = prefix , delimiter = delimiter )
def Normalize ( self , period , start_time , stop_time , mode = NORMALIZE_MODE_GAUGE ) : """Normalize the series to have a fixed period over a fixed time range . Supports two modes , depending on the type of data : NORMALIZE _ MODE _ GAUGE : support gauge values . If multiple original data points lie within an output interval , the output value is an average of the original data point . if no original data points lie within an output interval , the output value is None . NORMALIZE _ MODE _ COUNTER : supports counter values . Assumes that the sequence is already increasing ( typically , MakeIncreasing will have been called ) . Each output value is the largest value seen during or before the corresponding output interval . Args : period : The desired time between points . Should be an rdfvalue . Duration or a count of microseconds . start _ time : The first timestamp will be at start _ time . Should be an rdfvalue . RDFDatetime or a count of microseconds since epoch . stop _ time : The last timestamp will be at stop _ time - period . Should be an rdfvalue . RDFDatetime or a count of microseconds since epoch . mode : The type of normalization to perform . May be NORMALIZE _ MODE _ GAUGE or NORMALIZE _ MODE _ COUNTER . Raises : RuntimeError : In case the sequence timestamps are misordered ."""
period = self . _NormalizeTime ( period ) start_time = self . _NormalizeTime ( start_time ) stop_time = self . _NormalizeTime ( stop_time ) if not self . data : return self . FilterRange ( start_time , stop_time ) grouped = { } for value , timestamp in self . data : offset = timestamp - start_time shifted_offset = offset - ( offset % period ) grouped . setdefault ( shifted_offset , [ ] ) . append ( value ) self . data = [ ] last_value = None for offset in range ( 0 , stop_time - start_time , period ) : g = grouped . get ( offset ) if mode == NORMALIZE_MODE_GAUGE : v = None if g : v = sum ( g ) / len ( g ) self . data . append ( [ v , offset + start_time ] ) else : if g : for v in g : if v < last_value : raise RuntimeError ( "Next value must not be smaller." ) last_value = v self . data . append ( [ last_value , offset + start_time ] )
def fetch_liked_projects ( self , ** kwargs ) : """List liked projects Fetch projects that the currently authenticated user likes . This method makes a synchronous HTTP request by default . To make an asynchronous HTTP request , please define a ` callback ` function to be invoked when receiving the response . > > > def callback _ function ( response ) : > > > pprint ( response ) > > > thread = api . fetch _ liked _ projects ( callback = callback _ function ) : param callback function : The callback function for asynchronous request . ( optional ) : return : PaginatedProjectResults If the method is called asynchronously , returns the request thread ."""
kwargs [ '_return_http_data_only' ] = True if kwargs . get ( 'callback' ) : return self . fetch_liked_projects_with_http_info ( ** kwargs ) else : ( data ) = self . fetch_liked_projects_with_http_info ( ** kwargs ) return data
def registration_settings(request):
    '''Expose selected settings to templates'''
    context = {}
    for setting in ('WAFER_SSO', 'WAFER_HIDE_LOGIN', 'WAFER_REGISTRATION_OPEN',
                    'WAFER_REGISTRATION_MODE', 'WAFER_TALKS_OPEN',
                    'WAFER_VIDEO_LICENSE'):
        context[setting] = getattr(settings, setting, None)
    return context
def decode ( self , text ) : """Decode a Lua string to an dictionary : type text : str : rtype : dict : param text : string to decode : return : dictionary"""
LOGGER . debug ( 'decoding text to dictionary' ) if not text or type ( text ) is not str : raise SLTPParsingError ( ERRORS [ 'unexp_type_str' ] ) LOGGER . debug ( 'extracting qualifier' ) qual = re . compile ( r'^(?P<value>(dictionary|mission|mapResource|warehouses) = ?)\n' ) match = qual . match ( text ) if match is None : raise ValueError ( 'qualifier not found; first line: {}' . format ( text . split ( '\n' ) [ 0 ] ) ) self . qual = match . group ( 'value' ) text = qual . sub ( '' , text ) reg = re . compile ( r' -- .*[^(\\|",)]$' , re . M ) text = reg . sub ( '' , text ) self . text = text self . at , self . ch , self . depth = 0 , '' , 0 self . len = len ( text ) self . next_chr ( ) result = self . value ( ) return result , self . qual
def _readBatchOutputForFile ( self , directory , fileIO , filename , session , spatial , spatialReferenceID , replaceParamFile = None , maskMap = None ) : """When batch mode is run in GSSHA , the files of the same type are prepended with an integer to avoid filename conflicts . This will attempt to read files in this format and throw warnings if the files aren ' t found ."""
# Get contents of directory directoryList = os . listdir ( directory ) # Compile a list of files with that include the filename in them batchFiles = [ ] for thing in directoryList : if filename in thing : batchFiles . append ( thing ) numFilesRead = 0 for batchFile in batchFiles : instance = fileIO ( ) instance . projectFile = self if isinstance ( instance , WMSDatasetFile ) : instance . read ( directory = directory , filename = batchFile , session = session , maskMap = maskMap , spatial = spatial , spatialReferenceID = spatialReferenceID ) else : instance . read ( directory , batchFile , session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile ) # Increment runCounter for next file numFilesRead += 1 # Issue warnings if '[' in filename or ']' in filename : log . info ( 'A file cannot be read, because the path to the ' 'file in the project file has been replaced with ' 'replacement variable {0}.' . format ( filename ) ) elif numFilesRead == 0 : log . warning ( '{0} listed in project file, but no such ' 'file exists.' . format ( filename ) ) else : log . info ( 'Batch mode output detected. {0} files read ' 'for file {1}' . format ( numFilesRead , filename ) )
async def listener ( self ) : """Listener task for receiving ops from Lavalink ."""
while self . _ws . open and self . _is_shutdown is False : try : data = json . loads ( await self . _ws . recv ( ) ) except websockets . ConnectionClosed : break raw_op = data . get ( "op" ) try : op = LavalinkIncomingOp ( raw_op ) except ValueError : socket_log . debug ( "Received unknown op: %s" , data ) else : socket_log . debug ( "Received known op: %s" , data ) self . loop . create_task ( self . _handle_op ( op , data ) ) self . ready . clear ( ) log . debug ( "Listener exited: ws %s SHUTDOWN %s." , self . _ws . open , self . _is_shutdown ) self . loop . create_task ( self . _reconnect ( ) )
def bifurcate_base(cls, newick):
    """Rewrites a newick string so that the base is a bifurcation (rooted tree)"""
    t = cls(newick)
    t._tree.resolve_polytomies()
    return t.newick
def release(self, connection):
    """Releases the connection back to the pool"""
    self._checkpid()
    if connection.pid == self.pid:
        idx = connection._pattern_idx
        self._in_use_connections[idx].remove(connection)
        self._available_connections[idx].append(connection)
def tcp_traceflow ( packet , timestamp , * , data_link , count = NotImplemented ) : """Trace packet flow for TCP ."""
if getattr ( packet , 'ip' , None ) : ip = packet [ 'ip' ] elif getattr ( packet , 'ip6' , None ) : ip = packet [ 'ip6' ] else : return False , None tcp = getattr ( ip , 'tcp' , None ) if tcp is not None : flags = bin ( tcp . flags ) [ 2 : ] . zfill ( 8 ) data = dict ( protocol = data_link , # data link type from global header index = count , # frame number frame = packet2dict ( packet , timestamp , data_link = data_link ) , # extracted packet syn = bool ( int ( flags [ 6 ] ) ) , # TCP synchronise ( SYN ) flag fin = bool ( int ( flags [ 7 ] ) ) , # TCP finish ( FIN ) flag src = ipaddress . ip_address ( ip . src ) , # source IP dst = ipaddress . ip_address ( ip . dst ) , # destination IP srcport = tcp . sport , # TCP source port dstport = tcp . dport , # TCP destination port timestamp = timestamp , # timestamp ) return True , data return False , None
def start_background_task ( self , target , * args , ** kwargs ) : """Start a background task using the appropriate async model . This is a utility function that applications can use to start a background task using the method that is compatible with the selected async mode . : param target : the target function to execute . : param args : arguments to pass to the function . : param kwargs : keyword arguments to pass to the function . This function returns an object compatible with the ` Thread ` class in the Python standard library . The ` start ( ) ` method on this object is already called by this function ."""
return self . server . start_background_task ( target , * args , ** kwargs )
def _get_edge_sign ( im , edge ) : """Get the polarity of the influence by examining the edge sign ."""
edge_data = im [ edge [ 0 ] ] [ edge [ 1 ] ] # Handle possible multiple edges between nodes signs = list ( set ( [ v [ 'sign' ] for v in edge_data . values ( ) if v . get ( 'sign' ) ] ) ) if len ( signs ) > 1 : logger . warning ( "Edge %s has conflicting polarities; choosing " "positive polarity by default" % str ( edge ) ) sign = 1 else : sign = signs [ 0 ] if sign is None : raise Exception ( 'No sign attribute for edge.' ) elif abs ( sign ) == 1 : return sign else : raise Exception ( 'Unexpected edge sign: %s' % edge . attr [ 'sign' ] )
def should_sample(self, trace_id):
    """Make the sampling decision based on the lower 8 bytes of the trace ID.
    If the value is less than the bound, return True, else False.

    :type trace_id: str
    :param trace_id: Trace ID of the current trace.

    :rtype: bool
    :returns: The sampling decision.
    """
    lower_long = get_lower_long_from_trace_id(trace_id)
    bound = self.rate * MAX_VALUE
    if lower_long <= bound:
        return True
    else:
        return False
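A rough numeric sketch of the decision above. It assumes MAX_VALUE is the largest unsigned 64-bit value and that the "lower long" is parsed from the last 16 hex digits of the trace ID; both are assumptions, not confirmed by the snippet.

# Hypothetical worked example of the sampling bound.
MAX_VALUE = 0xFFFFFFFFFFFFFFFF
rate = 0.25

trace_id = "6e0c63257de34c92bf9efcd03927272e"
lower_long = int(trace_id[-16:], 16)
bound = rate * MAX_VALUE
print(lower_long <= bound)  # sampled only if the lower 8 bytes fall within 25% of the range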
def bottomAt(self, offset=0):
    """Returns point in the center of the region's bottom side (offset to the bottom by ``offset``)"""
    return Location(self.getX() + (self.getW() / 2), self.getY() + self.getH() + offset)
def value ( self ) : """Read the value from BACnet network"""
try : res = self . properties . device . properties . network . read ( "{} {} {} presentValue" . format ( self . properties . device . properties . address , self . properties . type , str ( self . properties . address ) , ) ) self . _trend ( res ) except Exception : raise Exception ( "Problem reading : {}" . format ( self . properties . name ) ) if res == "inactive" : self . _key = 0 self . _boolKey = False else : self . _key = 1 self . _boolKey = True return res
def ssh_interface ( vm_ ) : '''Return the ssh _ interface type to connect to . Either ' public _ ips ' ( default ) or ' private _ ips ' .'''
ret = config . get_cloud_config_value ( 'ssh_interface' , vm_ , __opts__ , default = 'public_ips' , search_global = False ) if ret not in ( 'public_ips' , 'private_ips' ) : log . warning ( 'Invalid ssh_interface: %s. ' 'Allowed options are ("public_ips", "private_ips"). ' 'Defaulting to "public_ips".' , ret ) ret = 'public_ips' return ret
def update_saved_search ( self , id , ** kwargs ) : # noqa : E501 """Update a specific saved search # noqa : E501 # noqa : E501 This method makes a synchronous HTTP request by default . To make an asynchronous HTTP request , please pass async _ req = True > > > thread = api . update _ saved _ search ( id , async _ req = True ) > > > result = thread . get ( ) : param async _ req bool : param str id : ( required ) : param SavedSearch body : Example Body : < pre > { \" query \" : { \" foo \" : \" { \\ \" searchTerms \\ \" : [ { \\ \" type \\ \" : \\ \" freetext \\ \" , \\ \" value \\ \" : \\ \" foo \\ \" } ] } \" } , \" entityType \" : \" DASHBOARD \" } < / pre > : return : ResponseContainerSavedSearch If the method is called asynchronously , returns the request thread ."""
kwargs [ '_return_http_data_only' ] = True if kwargs . get ( 'async_req' ) : return self . update_saved_search_with_http_info ( id , ** kwargs ) # noqa : E501 else : ( data ) = self . update_saved_search_with_http_info ( id , ** kwargs ) # noqa : E501 return data
def compress(func):
    """Compress route return data with gzip compression"""
    @wraps(func)
    def wrapper(*args, **kwargs):
        result = func(*args, **kwargs)
        if ('gzip' in bottle.request.headers.get('Accept-Encoding', '')
                and isinstance(result, string_type)
                and len(result) > 1024):
            if isinstance(result, unicode):
                result = result.encode('utf-8')
            tmp_fo = BytesIO()
            with gzip.GzipFile(mode='wb', fileobj=tmp_fo) as gzip_fo:
                gzip_fo.write(result)
            result = tmp_fo.getvalue()
            bottle.response.add_header('Content-Encoding', 'gzip')
        return result
    return wrapper
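One possible way to apply the decorator above to a Bottle route; the route path and payload are made up, and `compress` refers to the decorator defined just above.

import bottle

@bottle.route('/report')
@compress
def report():
    # Anything larger than 1 KiB is gzipped when the client sends Accept-Encoding: gzip.
    return 'x' * 4096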
def load_template ( self , template : str ) -> Template : """Load a Jinja2 template from the source directory ."""
env = dict ( trim_blocks = True , lstrip_blocks = True , keep_trailing_newline = False ) jinja2_ext = ".jinja2" if not template . endswith ( jinja2_ext ) : self . _log . error ( "Template file name must end with %s" % jinja2_ext ) raise ValueError if not template [ : - len ( jinja2_ext ) ] . endswith ( ".md" ) : self . _log . error ( "Template file should be a Markdown file." ) raise ValueError if not os . path . isabs ( template ) : template = os . path . join ( os . path . dirname ( __file__ ) , template ) with open ( template , encoding = "utf-8" ) as fin : template_obj = Template ( fin . read ( ) , ** env ) template_obj . filename = template self . _log . info ( "Loaded %s" , template ) return template_obj
def GetAttachmentIdFromMediaId(media_id):
    """Gets attachment id from media id.

    :param str media_id:

    :return: The attachment id from the media id.
    :rtype: str
    """
    altchars = '+-'
    if not six.PY2:
        altchars = altchars.encode('utf-8')
    # altchars for '+' and '/'. We keep '+' but replace '/' with '-'
    buffer = base64.b64decode(str(media_id), altchars)
    resource_id_length = 20
    attachment_id = ''
    if len(buffer) > resource_id_length:
        # We are cutting off the storage index.
        attachment_id = base64.b64encode(buffer[0:resource_id_length], altchars)
        if not six.PY2:
            attachment_id = attachment_id.decode('utf-8')
    else:
        attachment_id = media_id
    return attachment_id
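A standalone round-trip of the '+-' altchars handling above, using only the stdlib base64 module; the 20-byte resource id and the trailing storage-index bytes are invented sample data.

import base64

# Encode a 20-byte resource id plus a storage index, then recover just the resource id part.
resource_id = b'R' * 20
storage_index = b'\x01\x02\x03\x04'
media_id = base64.b64encode(resource_id + storage_index, b'+-').decode('utf-8')

decoded = base64.b64decode(media_id, b'+-')
attachment_id = base64.b64encode(decoded[:20], b'+-').decode('utf-8')
print(attachment_id == base64.b64encode(resource_id, b'+-').decode('utf-8'))  # True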
def delete_row(self, index):
    """Deletes the row from the worksheet at the specified index.

    :param index: Index of a row for deletion.
    :type index: int
    """
    body = {
        "requests": [{
            "deleteDimension": {
                "range": {
                    "sheetId": self.id,
                    "dimension": "ROWS",
                    "startIndex": index - 1,
                    "endIndex": index
                }
            }
        }]
    }
    return self.spreadsheet.batch_update(body)
def _get_datatable_options ( self ) : """Helps to keep the promise that we only run ` ` get _ datatable _ options ( ) ` ` once ."""
if not hasattr ( self , '_datatable_options' ) : self . _datatable_options = self . get_datatable_options ( ) # Convert sources from list to tuple , so that modern Column tracking dicts can hold the # field definitions as keys . columns = self . _datatable_options . get ( 'columns' , [ ] ) for i , column in enumerate ( columns ) : if len ( column ) >= 2 and isinstance ( column [ 1 ] , list ) : column = list ( column ) column [ 1 ] = tuple ( column [ 1 ] ) columns [ i ] = tuple ( column ) return self . _datatable_options
def set_broad_fig_style ( self ) : '''4 times width , 1.5 times height'''
plt . rcParams . update ( { 'figure.figsize' : [ self . frontierswidth / self . inchpercm * 4 , self . frontierswidth / self . inchpercm * 1.5 ] , } )
def create_title_node ( field_name , field , field_id , state , lineno ) : """Create docutils nodes for the configuration field ' s title and reference target node . Parameters field : ` ` lsst . pex . config . Field ` ` A configuration field . state : ` ` docutils . statemachine . State ` ` Usually the directive ' s ` ` state ` ` attribute . Returns ` ` docutils . nodes . title ` ` Title containing nodes for the title of the ` ` field ` ` and reference target ."""
# Reference target env = state . document . settings . env ref_target = create_configfield_ref_target_node ( field_id , env , lineno ) # Title is the field ' s attribute name title = nodes . title ( text = field_name ) title += ref_target return title
def _initializer_wrapper(actual_initializer, *rest):
    """We ignore SIGINT. It's up to our parent to kill us in the typical condition of this
    arising from ``^C`` on a terminal. If someone is manually killing us with that signal,
    well... nothing will happen."""
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    if actual_initializer is not None:
        actual_initializer(*rest)
def copy(self):
    """Copies the BeliefState by recursively deep-copying all of its parts. Domains are not
    copied, as they do not change during the interpretation or generation."""
    copied = BeliefState(self.__dict__['referential_domain'])
    for key in ['environment_variables', 'deferred_effects', 'pos', 'p']:
        copied.__dict__[key] = copy.deepcopy(self.__dict__[key])
    return copied
def translate(line, ascent, offs=0):
    """offs -> shifts parallel to line
    ascent -> rotate line"""
    # TODO: why do I have this factor here?
    ascent *= -2
    offs *= -2
    l0 = length(line)
    # change relative to line:
    t0 = offs  # -h + offs
    t1 = l0 * ascent + offs
    return translate2P(line, t0, t1)
def is_instance_running ( self , instance_id ) : """Checks if the instance is up and running . : param str instance _ id : instance identifier : return : bool - True if running , False otherwise"""
instance = self . _load_instance ( instance_id ) if instance . update ( ) == "running" : # If the instance is up & running , ensure it has an IP # address . if not instance . ip_address and self . request_floating_ip : log . debug ( "Public ip address has to be assigned through " "elasticluster." ) self . _allocate_address ( instance ) instance . update ( ) return True else : return False
def compile_theme ( theme_id = None ) : """Compiles a theme ."""
from engineer . processors import convert_less from engineer . themes import ThemeManager if theme_id is None : themes = ThemeManager . themes ( ) . values ( ) else : themes = [ ThemeManager . theme ( theme_id ) ] with ( indent ( 2 ) ) : puts ( colored . yellow ( "Compiling %s themes." % len ( themes ) ) ) for theme in themes : theme_output_path = ( theme . static_root / ( 'stylesheets/%s_precompiled.css' % theme . id ) ) . normpath ( ) puts ( colored . cyan ( "Compiling theme %s to %s" % ( theme . id , theme_output_path ) ) ) with indent ( 4 ) : puts ( "Compiling..." ) convert_less ( theme . static_root / ( 'stylesheets/%s.less' % theme . id ) , theme_output_path , minify = True ) puts ( colored . green ( "Done." , bold = True ) )
def from_dict(cls, d):
    """Returns a SlabEntry by reading in a dictionary"""
    structure = SlabEntry.from_dict(d["structure"])
    energy = SlabEntry.from_dict(d["energy"])
    miller_index = d["miller_index"]
    label = d["label"]
    coverage = d["coverage"]
    adsorbates = d["adsorbates"]
    # 'self' is not available in a classmethod; read the clean entry from the dict instead.
    clean_entry = d["clean_entry"]
    return SlabEntry(structure, energy, miller_index, label=label, coverage=coverage,
                     adsorbates=adsorbates, clean_entry=clean_entry)
def obfn_g1var ( self ) : r"""Variable to be evaluated in computing the : math : ` \ ell _ 1 ` regularisation term , depending on the ` ` gEvalY ` ` option value ."""
# Use of self . block _ sep1 ( self . AXnr ) instead of self . cnst _ A1 ( self . X ) # reduces number of calls to self . cnst _ A0 return self . var_y1 ( ) if self . opt [ 'gEvalY' ] else self . block_sep1 ( self . AXnr )
def with_output(verbosity=1):
    """Decorator that configures output verbosity."""
    def make_wrapper(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            configure_output(verbosity=verbosity)
            return func(*args, **kwargs)
        return wrapper
    return make_wrapper
def determine_drift ( self ) : """Determine the drift of the stack . Args : None Returns : Good or Bad ; True or False"""
try : response = self . _cloud_formation . detect_stack_drift ( StackName = self . _stack_name ) drift_request_id = response . get ( 'StackDriftDetectionId' , None ) if drift_request_id : logging . info ( 'drift_request_id: %s - polling' , drift_request_id ) drift_calc_done = False while not drift_calc_done : time . sleep ( self . nap_time ) response = self . _cloud_formation . describe_stack_drift_detection_status ( StackDriftDetectionId = drift_request_id ) current_state = response . get ( 'DetectionStatus' , None ) logging . info ( 'describe_stack_drift_detection_status(): {}' . format ( current_state ) ) drift_calc_done = current_state in CALC_DONE_STATES drift_answer = response . get ( 'StackDriftStatus' , 'UNKNOWN' ) logging . info ( 'drift of {}: {}' . format ( self . _stack_name , drift_answer ) ) if drift_answer == 'DRIFTED' : if self . _verbose : self . _print_drift_report ( ) return False else : return True else : logging . warning ( 'drift_request_id is None' ) return False except Exception as wtf : logging . error ( wtf , exc_info = True ) return False
def pause ( self , container_id = None , sudo = None ) : '''pause a running OciImage container , if it exists Equivalent command line example : singularity oci pause < container _ ID > Parameters container _ id : the id to stop . sudo : Add sudo to the command . If the container was created by root , you need sudo to interact and get its state . Returns return _ code : the return code to indicate if the container was paused .'''
return self . _state_command ( container_id , command = 'pause' , sudo = sudo )
def encryption_key(self, alg, **kwargs):
    """Return an encryption key as per
    http://openid.net/specs/openid-connect-core-1_0.html#Encryption

    :param alg: encryption algorithm
    :param kwargs:
    :return: encryption key as byte string
    """
    if not self.key:
        self.deserialize()

    try:
        tsize = ALG2KEYLEN[alg]
    except KeyError:
        raise UnsupportedAlgorithm(alg)

    if tsize <= 32:  # SHA256
        _enc_key = sha256_digest(self.key)[:tsize]
    elif tsize <= 48:  # SHA384
        _enc_key = sha384_digest(self.key)[:tsize]
    elif tsize <= 64:  # SHA512
        _enc_key = sha512_digest(self.key)[:tsize]
    else:
        raise JWKException("No support for symmetric keys > 512 bits")

    logger.debug('Symmetric encryption key: {}'.format(as_unicode(b64e(_enc_key))))
    return _enc_key
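A sketch of the same truncated-digest derivation using hashlib directly. The ALG2KEYLEN values below (16/24/32 bytes for A128KW/A192KW/A256KW) are illustrative assumptions, not taken from the snippet.

import hashlib

ALG2KEYLEN = {"A128KW": 16, "A192KW": 24, "A256KW": 32}  # assumed example mapping

def derive_enc_key(secret: bytes, alg: str) -> bytes:
    # Pick a SHA-2 digest wide enough for the target size, then truncate.
    tsize = ALG2KEYLEN[alg]
    if tsize <= 32:
        return hashlib.sha256(secret).digest()[:tsize]
    elif tsize <= 48:
        return hashlib.sha384(secret).digest()[:tsize]
    return hashlib.sha512(secret).digest()[:tsize]

print(len(derive_enc_key(b"shared-secret", "A128KW")))  # 16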
def to_phalf_from_pfull(arr, val_toa=0, val_sfc=0):
    """Compute data at half pressure levels from values at full levels.

    Could be the pressure array itself, but it could also be any other data defined at
    pressure levels. Requires specification of values at surface and top of atmosphere.
    """
    phalf = np.zeros((arr.shape[0] + 1, arr.shape[1], arr.shape[2]))
    phalf[0] = val_toa
    phalf[-1] = val_sfc
    phalf[1:-1] = 0.5 * (arr[:-1] + arr[1:])
    return phalf
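A tiny worked example calling the function above: three full levels become four half levels, with interior values as midpoints and the ends taken from val_toa and val_sfc.

import numpy as np

pfull = np.array([250., 500., 1000.]).reshape(3, 1, 1)
phalf = to_phalf_from_pfull(pfull, val_toa=0., val_sfc=1013.)
print(phalf[:, 0, 0])  # [   0.   375.   750.  1013.]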
def new ( self ) : # type : ( ) - > None '''A method to create a new UDF Partition Volume Descriptor . Parameters : None . Returns : Nothing .'''
if self . _initialized : raise pycdlibexception . PyCdlibInternalError ( 'UDF Partition Volume Descriptor already initialized' ) self . desc_tag = UDFTag ( ) self . desc_tag . new ( 5 ) # FIXME : we should let the user set serial _ number self . vol_desc_seqnum = 2 self . part_flags = 1 # FIXME : how should we set this ? self . part_num = 0 # FIXME : how should we set this ? self . part_contents = UDFEntityID ( ) self . part_contents . new ( 2 , b'+NSR02' ) self . part_contents_use = UDFPartitionHeaderDescriptor ( ) self . part_contents_use . new ( ) self . access_type = 1 self . part_start_location = 0 # This will get set later self . part_length = 3 # This will get set later self . impl_ident = UDFEntityID ( ) self . impl_ident . new ( 0 , b'*pycdlib' ) self . implementation_use = b'\x00' * 128 # FIXME : we should let the user set this self . _initialized = True
def iter_predict_proba ( self , X , include_init = False ) : """Returns the predicted probabilities for ` ` X ` ` at every stage of the boosting procedure . Arguments : X ( array - like or sparse matrix of shape ( n _ samples , n _ features ) ) : The input samples . Sparse matrices are accepted only if they are supported by the weak model . include _ init ( bool , default = False ) : If ` ` True ` ` then the prediction from ` ` init _ estimator ` ` will also be returned . Returns : iterator of arrays of shape ( n _ samples , n _ classes ) containing the predicted probabilities at each stage"""
utils . validation . check_is_fitted ( self , 'init_estimator_' ) X = utils . check_array ( X , accept_sparse = [ 'csr' , 'csc' ] , dtype = None , force_all_finite = False ) probas = np . empty ( shape = ( len ( X ) , len ( self . classes_ ) ) , dtype = np . float64 ) for y_pred in super ( ) . iter_predict ( X , include_init = include_init ) : if len ( self . classes_ ) == 2 : probas [ : , 1 ] = sigmoid ( y_pred [ : , 0 ] ) probas [ : , 0 ] = 1. - probas [ : , 1 ] else : probas [ : ] = softmax ( y_pred ) yield probas
def blue ( self , memo = None ) : """Constructs a BlueDispatcher out of the current object . : param memo : A dictionary to cache Blueprints . : type memo : dict [ T , schedula . utils . blue . Blueprint ] : return : A BlueDispatcher of the current object . : rtype : schedula . utils . blue . BlueDispatcher"""
memo = { } if memo is None else memo if self in memo : return memo [ self ] from . utils . dsp import map_list from . utils . blue import BlueDispatcher , _parent_blue memo [ self ] = blue = BlueDispatcher ( executor = self . executor , name = self . name , raises = self . raises , description = self . __doc__ ) dfl = self . default_values key_map_data = [ 'data_id' , { 'value' : 'default_value' } ] pred , succ = self . dmap . pred , self . dmap . succ def _set_weight ( n , r , d ) : d = { i : j [ 'weight' ] for i , j in d . items ( ) if 'weight' in j } if d : r [ n ] = d for k , v in sorted ( self . nodes . items ( ) , key = lambda x : x [ 1 ] [ 'index' ] ) : v = v . copy ( ) t = v . pop ( 'type' ) del v [ 'index' ] if t == 'data' : method = 'add_data' combine_dicts ( map_list ( key_map_data , k , dfl . get ( k , { } ) ) , base = v ) elif t in ( 'function' , 'dispatcher' ) : method = 'add_%s' % t if t == 'dispatcher' : t = 'dsp' v [ '%s_id' % t ] = k del v [ 'wait_inputs' ] _set_weight ( 'inp_weight' , v , pred [ k ] ) _set_weight ( 'out_weight' , v , succ [ k ] ) if 'function' in v : v [ t ] = _parent_blue ( v . pop ( 'function' ) , memo ) blue . deferred . append ( ( method , v ) ) return blue
def read_moc_fits ( moc , filename , include_meta = False , ** kwargs ) : """Read data from a FITS file into a MOC . Any additional keyword arguments are passed to the astropy . io . fits . open method ."""
hl = fits . open ( filename , mode = 'readonly' , ** kwargs ) read_moc_fits_hdu ( moc , hl [ 1 ] , include_meta )
def __wait_and_restart():
    """Delay and then execute the restart. Do not call directly. Instead, call `do_restart()`."""
    log.info('Restarting server')
    sleep(1)
    # We can use the default event loop here because this is actually running in a thread.
    # We use aiohttp here because urllib is painful and we don't have `requests`.
    loop = asyncio.new_event_loop()
    loop.run_until_complete(_resin_supervisor_restart())
def AddTrip ( self , schedule = None , headsign = None , service_period = None , trip_id = None ) : """Add a trip to this route . Args : schedule : a Schedule object which will hold the new trip or None to use the schedule of this route . headsign : headsign of the trip as a string service _ period : a ServicePeriod object or None to use schedule . GetDefaultServicePeriod ( ) trip _ id : optional trip _ id for the new trip Returns : a new Trip object"""
if schedule is None : assert self . _schedule is not None schedule = self . _schedule if trip_id is None : trip_id = util . FindUniqueId ( schedule . trips ) if service_period is None : service_period = schedule . GetDefaultServicePeriod ( ) trip_class = self . GetGtfsFactory ( ) . Trip trip_obj = trip_class ( route = self , headsign = headsign , service_period = service_period , trip_id = trip_id ) schedule . AddTripObject ( trip_obj ) return trip_obj
def from_http ( cls , raw_body : MutableMapping , verification_token : Optional [ str ] = None , team_id : Optional [ str ] = None , ) -> "Event" : """Create an event with data coming from the HTTP Event API . If the event type is a message a : class : ` slack . events . Message ` is returned . Args : raw _ body : Decoded body of the Event API request verification _ token : Slack verification token used to verify the request came from slack team _ id : Verify the event is for the correct team Returns : : class : ` slack . events . Event ` or : class : ` slack . events . Message ` Raises : : class : ` slack . exceptions . FailedVerification ` : when ` verification _ token ` or ` team _ id ` does not match the incoming event ' s ."""
if verification_token and raw_body [ "token" ] != verification_token : raise exceptions . FailedVerification ( raw_body [ "token" ] , raw_body [ "team_id" ] ) if team_id and raw_body [ "team_id" ] != team_id : raise exceptions . FailedVerification ( raw_body [ "token" ] , raw_body [ "team_id" ] ) if raw_body [ "event" ] [ "type" ] . startswith ( "message" ) : return Message ( raw_body [ "event" ] , metadata = raw_body ) else : return Event ( raw_body [ "event" ] , metadata = raw_body )
def export_c ( self , target = C_TARGET , c_indent = C_INDENTATION , headerf = None ) : '''Export the grammar to a c ( source and header ) file which can be used with the libcleri module .'''
language = [ ] indent = 0 enums = set ( ) for name in self . _order : elem = getattr ( self , name , None ) if not isinstance ( elem , Element ) : continue if not hasattr ( elem , '_export_c' ) : continue language . append ( '{indent}cleri_t * {name} = {value};' . format ( indent = c_indent , name = name , value = elem . _export_c ( c_indent , indent , enums ) ) ) for name , ref in self . _refs . items ( ) : language . append ( '{indent}cleri_ref_set({name}, {value});' . format ( indent = c_indent , name = name , value = ref . _element . _export_c ( c_indent , indent , enums , ref ) ) ) pattern = self . RE_KEYWORDS . pattern . replace ( '\\' , '\\\\' ) if not pattern . startswith ( '^' ) : pattern = '^' + pattern enums = ',\n' . join ( [ '{}{}' . format ( c_indent , gid ) for gid in sorted ( enums ) ] ) + ',' header_file = '"{}.h"' . format ( target ) if headerf is None else headerf fun = target . strip ( '/' ) . replace ( '/' , '_' ) return ( self . __class__ . C_TEMPLATE_C . format ( name = self . __class__ . __name__ , target = target , header_file = header_file , fun = fun , indent = c_indent , datetime = time . strftime ( '%Y-%m-%d %H:%M:%S' , time . localtime ( ) ) , language = '\n' . join ( language ) , re_keywords = pattern ) , self . __class__ . C_TEMPLATE_H . format ( name = self . __class__ . __name__ , target = target , fun = fun , guard = target . upper ( ) . replace ( '/' , '_' ) . replace ( '\\' , '_' ) , datetime = time . strftime ( '%Y-%m-%d %H:%M:%S' , time . localtime ( ) ) , language = '\n' . join ( language ) , enums = enums ) )
def merge_configs ( a , b , copy_trees = False ) : """Merge config b into a : param a : target config : type a : ConfigTree : param b : source config : type b : ConfigTree : return : merged config a"""
for key , value in b . items ( ) : # if key is in both a and b and both values are dictionary then merge it otherwise override it if key in a and isinstance ( a [ key ] , ConfigTree ) and isinstance ( b [ key ] , ConfigTree ) : if copy_trees : a [ key ] = a [ key ] . copy ( ) ConfigTree . merge_configs ( a [ key ] , b [ key ] , copy_trees = copy_trees ) else : if isinstance ( value , ConfigValues ) : value . parent = a value . key = key if key in a : value . overriden_value = a [ key ] a [ key ] = value if a . root : if b . root : a . history [ key ] = a . history . get ( key , [ ] ) + b . history . get ( key , [ value ] ) else : a . history [ key ] = a . history . get ( key , [ ] ) + [ value ] return a
def filter(self, info, releases):
    """Remove all release versions that match any of the specified patterns."""
    for version in list(releases.keys()):
        if any(pattern.match(version) for pattern in self.patterns):
            del releases[version]
def _get_conda_version(stdout, stderr):
    """Callback for get_conda_version."""
    # argparse outputs version to stderr in Python < 3.4.
    # http://bugs.python.org/issue18920
    pat = re.compile(r'conda:?\s+(\d+\.\d\S+|unknown)')
    m = pat.match(stderr.decode().strip())
    if m is None:
        m = pat.match(stdout.decode().strip())
    if m is None:
        raise Exception('output did not match: {0}'.format(stderr))
    return m.group(1)
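The same pattern applied to typical version strings; the version text may land on either stream depending on the Python version, which is why both are checked above.

import re

pat = re.compile(r'conda:?\s+(\d+\.\d\S+|unknown)')
print(pat.match('conda 4.6.14').group(1))    # 4.6.14
print(pat.match('conda: unknown').group(1))  # unknown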
def transfer(self, receiver_address, amount, sender_account):
    """Transfer a number of tokens from `sender_account` to `receiver_address`

    :param receiver_address: hex str ethereum address to receive this transfer of tokens
    :param amount: int number of tokens to transfer
    :param sender_account: Account instance to take the tokens from
    :return: bool
    """
    self._keeper.token.token_approve(receiver_address, amount, sender_account)
    self._keeper.token.transfer(receiver_address, amount, sender_account)
def _add(self, codeobj):
    """Add a child (namespace, function, variable, class) to this object."""
    assert isinstance(codeobj, (CodeNamespace, CodeClass, CodeFunction, CodeVariable))
    self.children.append(codeobj)
def iterallitems(self, key=_absent):
    '''Example:
      omd = omdict([(1,1), (1,11), (1,111), (2,2), (3,3)])
      omd.iterallitems() == (1,1) -> (1,11) -> (1,111) -> (2,2) -> (3,3)
      omd.iterallitems(1) == (1,1) -> (1,11) -> (1,111)

    Raises: KeyError if <key> is provided and not in the dictionary.
    Returns: An iterator over every item in the dictionary. If <key> is provided, only items
      with the key <key> are iterated over.
    '''
    if key is not _absent:
        # Raises KeyError if <key> is not in self._map.
        return self.iteritems(key)
    return self._items.iteritems()
def _init_metadata ( self ) : """stub"""
super ( edXNumericResponseQuestionFormRecord , self ) . _init_metadata ( ) QuestionTextFormRecord . _init_metadata ( self ) QuestionFilesFormRecord . _init_metadata ( self )
def blockstart_tolineno ( self ) : """The line on which the beginning of this block ends . : type : int"""
if self . name : return self . name . tolineno if self . type : return self . type . tolineno return self . lineno
def connectionLost(self, reason):
    """Called when the response body has been completely delivered.

    @param reason: Either a twisted.web.client.ResponseDone exception or a
        twisted.web.http.PotentialDataLoss exception.
    """
    self.remaining.reset()
    try:
        result = json.load(self.remaining)
    except Exception as e:
        self.finished.errback(e)
        return
    returnValue = result
    if self.heartbeater:
        self.heartbeater.nextToken = result['token']
        returnValue = (result, self.heartbeater)
    self.finished.callback(returnValue)
def is_running ( self ) : """Return true if the node is running"""
self . __update_status ( ) return self . status == Status . UP or self . status == Status . DECOMMISSIONED
def get_pinned_version(ireq):
    """Get the pinned version of an InstallRequirement.

    An InstallRequirement is considered pinned if:
    - Is not editable
    - It has exactly one specifier
    - That specifier is "=="
    - The version does not contain a wildcard

    Examples:
        django==1.8   # pinned
        django>1.8    # NOT pinned
        django~=1.8   # NOT pinned
        django==1.*   # NOT pinned

    Raises `TypeError` if the input is not a valid InstallRequirement, or `ValueError` if
    the InstallRequirement is not pinned.
    """
    try:
        specifier = ireq.specifier
    except AttributeError:
        raise TypeError("Expected InstallRequirement, not {}".format(type(ireq).__name__))
    if ireq.editable:
        raise ValueError("InstallRequirement is editable")
    if not specifier:
        raise ValueError("InstallRequirement has no version specification")
    if len(specifier._specs) != 1:
        raise ValueError("InstallRequirement has multiple specifications")
    op, version = next(iter(specifier._specs))._spec
    if op not in ('==', '===') or version.endswith('.*'):
        raise ValueError("InstallRequirement not pinned (is {0!r})".format(op + version))
    return version
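The same "pinned" rules can be expressed against the public API of the packaging library instead of pip internals; this is an illustrative analogue, not the function above.

from packaging.specifiers import SpecifierSet

def is_pinned(spec_string: str) -> bool:
    # Pinned means exactly one specifier, operator == (or ===), and no trailing wildcard.
    specs = list(SpecifierSet(spec_string))
    if len(specs) != 1:
        return False
    spec = specs[0]
    return spec.operator in ('==', '===') and not spec.version.endswith('.*')

print(is_pinned('==1.8'))   # True
print(is_pinned('~=1.8'))   # False
print(is_pinned('==1.*'))   # False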
def terminate ( self , devices ) : """Terminate one or more running or stopped instances ."""
for device in devices : self . logger . info ( 'Terminating: %s' , device . id ) try : device . delete ( ) except packet . baseapi . Error : raise PacketManagerException ( 'Unable to terminate instance "{}"' . format ( device . id ) )
def wait ( self , task_id ) : """Blocking method which wait end of task . It ' s prefered to use : class : ` carotte . Task ` object directly : param string task _ id : Task ID : returns : Task dict : rtype : dict"""
data = { 'action' : 'wait' , 'id' : task_id } self . __send_pyobj ( data ) task = self . __recv_pyobj ( notimeout = True ) return task
def Connect ( self , Skype ) : """Connects this call channel manager instance to Skype . This is the first thing you should do after creating this object . : Parameters : Skype : ` Skype ` The Skype object . : see : ` Disconnect `"""
self . _Skype = Skype self . _Skype . RegisterEventHandler ( 'CallStatus' , self . _CallStatus ) del self . _Channels [ : ]
def get_jobs_from_argument ( self , raw_job_string ) : """return a list of jobs corresponding to the raw _ job _ string"""
jobs = [ ] if raw_job_string . startswith ( ":" ) : job_keys = raw_job_string . strip ( " :" ) jobs . extend ( [ job for job in self . jobs ( job_keys ) ] ) # we assume a job code else : assert "/" in raw_job_string , "Job Code {0} is improperly formatted!" . format ( raw_job_string ) host , job_name = raw_job_string . rsplit ( "/" , 1 ) host_url = self . _config_dict . get ( host , { } ) . get ( 'url' , host ) host = self . get_host ( host_url ) if host . has_job ( job_name ) : jobs . append ( JenksJob ( None , host , job_name , lambda : self . _get_job_api_instance ( host_url , job_name ) ) ) else : raise JenksDataException ( "Could not find Job {0}/{1}!" . format ( host , job_name ) ) return jobs
def _to_reader_home ( self ) : """Navigate to the Cloud Reader library page . Raises : BrowserError : If the KCR homepage could not be loaded . ConnectionError : If there was a connection error ."""
# NOTE : Prevents QueryInterface error caused by getting a URL # while switched to an iframe self . switch_to_default_content ( ) self . get ( _KindleCloudReaderBrowser . _CLOUD_READER_URL ) if self . title == u'Problem loading page' : raise ConnectionError # Wait for either the login page or the reader to load login_or_reader_loaded = lambda br : ( br . find_elements_by_id ( 'amzn_kcr' ) or br . find_elements_by_id ( 'KindleLibraryIFrame' ) ) self . _wait ( 5 ) . until ( login_or_reader_loaded ) try : self . _wait ( 5 ) . until ( lambda br : br . title == u'Amazon.com Sign In' ) except TimeoutException : raise BrowserError ( 'Failed to load Kindle Cloud Reader.' ) else : self . _login ( )
def create_migration ( self , app , fixture_path ) : """Create a data migration for app that uses fixture _ path ."""
self . monkey_patch_migration_template ( app , fixture_path ) out = StringIO ( ) management . call_command ( 'makemigrations' , app . label , empty = True , stdout = out ) self . restore_migration_template ( ) self . stdout . write ( out . getvalue ( ) )
def clear_alert_destination ( self , destination = 0 , channel = None ) : """Clear an alert destination Remove the specified alert destination configuration . : param destination : The destination to clear ( defaults to 0)"""
if channel is None : channel = self . get_network_channel ( ) self . set_alert_destination ( '0.0.0.0' , False , 0 , 0 , destination , channel )
def validate_config_has_one_of(config, one_of_keys):
    """Validate a config dictionary to make sure it has one and only one key in one_of_keys.

    Args:
        config: the config to validate.
        one_of_keys: the list of possible keys that config can have one and only one.
    Raises:
        Exception if the config does not have any of them, or multiple of them.
    """
    intersection = set(config).intersection(one_of_keys)
    if len(intersection) > 1:
        raise Exception('Only one of the values in "%s" is needed' % ', '.join(intersection))
    if len(intersection) == 0:
        raise Exception('One of the values in "%s" is needed' % ', '.join(one_of_keys))
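A quick usage check of the validator above; the config keys are made-up examples.

validate_config_has_one_of({'csv': 'a.csv', 'sep': ','}, ['csv', 'json'])  # passes: exactly one key present
try:
    validate_config_has_one_of({'csv': 'a.csv', 'json': 'a.json'}, ['csv', 'json'])
except Exception as exc:
    print(exc)  # Only one of the values in "..." is needed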
def cli_main(pid, include_greenlet, debugger, verbose):
    '''Print stack of python process.

    $ pystack <pid>
    '''
    try:
        print_stack(pid, include_greenlet, debugger, verbose)
    except DebuggerNotFound as e:
        click.echo('DebuggerNotFound: %s' % e.args[0], err=True)
        click.get_current_context().exit(1)
def iter_processed_text ( self , file , encoding = None , base_url = None ) : '''Return the file text and processed absolute links . Args : file : A file object containing the document . encoding ( str ) : The encoding of the document . base _ url ( str ) : The URL at which the document is located . Returns : iterator : Each item is a tuple : 1 . str : The text 2 . bool : Whether the text a link'''
for text , is_link in self . iter_text ( file , encoding ) : if is_link and base_url : new_link = urljoin_safe ( base_url , text , allow_fragments = False ) if new_link : yield ( new_link , is_link ) else : yield ( new_link , False ) else : yield ( text , is_link )
def is_false(self):
    """Ensures :attr:`subject` is ``False``."""
    self._run(unittest_case.assertFalse, (self._subject,))
    return ChainInspector(self._subject)
def split_key ( d , key , new_keys , before = True , list_of_dicts = False , deepcopy = True ) : """split an existing key ( s ) into multiple levels Parameters d : dict or dict like key : str existing key value new _ keys : list [ str ] new levels to add before : bool add level before existing key ( else after ) list _ of _ dicts : bool treat list of dicts as additional branches Examples > > > from pprint import pprint > > > d = { ' a ' : 1 , ' b ' : 2} > > > pprint ( split _ key ( d , ' a ' , [ ' c ' , ' d ' ] ) ) { ' b ' : 2 , ' c ' : { ' d ' : { ' a ' : 1 } } } > > > pprint ( split _ key ( d , ' a ' , [ ' c ' , ' d ' ] , before = False ) ) { ' a ' : { ' c ' : { ' d ' : 1 } } , ' b ' : 2} > > > d2 = [ { ' a ' : 1 } , { ' a ' : 2 } , { ' a ' : 3 } ] > > > pprint ( split _ key ( d2 , ' a ' , [ ' b ' ] , list _ of _ dicts = True ) ) [ { ' b ' : { ' a ' : 1 } } , { ' b ' : { ' a ' : 2 } } , { ' b ' : { ' a ' : 3 } } ]"""
list_of_dicts = '__list__' if list_of_dicts else None flatd = flatten ( d , list_of_dicts = list_of_dicts ) newd = { } for path , v in flatd . items ( ) : if key in path : newk = [ ] for k in path : if k == key : if before : newk = newk + new_keys + [ k ] else : newk = newk + [ k ] + new_keys else : newk . append ( k ) newd [ tuple ( newk ) ] = v else : newd [ path ] = v return unflatten ( newd , list_of_dicts = list_of_dicts , deepcopy = deepcopy )
def members ( self , ref , cuts = None , order = None , page = None , page_size = None ) : """List all the distinct members of the given reference , filtered and paginated . If the reference describes a dimension , all attributes are returned ."""
def prep ( cuts , ref , order , columns = None ) : q = select ( columns = columns ) bindings = [ ] cuts , q , bindings = Cuts ( self ) . apply ( q , bindings , cuts ) fields , q , bindings = Fields ( self ) . apply ( q , bindings , ref , distinct = True ) ordering , q , bindings = Ordering ( self ) . apply ( q , bindings , order , distinct = fields [ 0 ] ) q = self . restrict_joins ( q , bindings ) return q , bindings , cuts , fields , ordering # Count count = count_results ( self , prep ( cuts , ref , order , [ 1 ] ) [ 0 ] ) # Member list q , bindings , cuts , fields , ordering = prep ( cuts , ref , order ) page , q = Pagination ( self ) . apply ( q , page , page_size ) q = self . restrict_joins ( q , bindings ) return { 'total_member_count' : count , 'data' : list ( generate_results ( self , q ) ) , 'cell' : cuts , 'fields' : fields , 'order' : ordering , 'page' : page [ 'page' ] , 'page_size' : page [ 'page_size' ] }
def sessions_scope ( local_session , commit = False ) : """Provide a transactional scope around a series of operations ."""
try : yield local_session if commit : local_session . commit ( ) logger . debug ( "DB session auto-committed as requested" ) except Exception as e : # We log the exception before re - raising it , in case the rollback also # fails logger . exception ( "Exception during scoped worker transaction, " "rolling back." ) # This rollback is potentially redundant with the remove call below , # depending on how the scoped session is configured , but we ' ll be # explicit here . local_session . rollback ( ) raise e finally : local_session . remove ( ) logger . debug ( "Session complete, db session closed" )
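A hedged usage sketch, assuming the function is wrapped with contextlib.contextmanager where it is defined (the yield suggests this) and that `Session` is a SQLAlchemy scoped session; `record` is a placeholder model instance:

with sessions_scope(Session, commit=True) as session:
    session.add(record)
# Normal exit: the session is committed because commit=True.
# An exception inside the block triggers a rollback and is re-raised.
# In both cases Session.remove() runs, closing the scoped session.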
def great_circle_distance ( pt1 , pt2 ) : """Return the great - circle distance in kilometers between two points , defined by a tuple ( lat , lon ) . Examples > > > brussels = ( 50.8503 , 4.3517) > > > paris = ( 48.8566 , 2.3522) > > > great _ circle _ distance ( brussels , paris ) 263.9754164080347"""
r = 6371. delta_latitude = math . radians ( pt1 [ 0 ] - pt2 [ 0 ] ) delta_longitude = math . radians ( pt1 [ 1 ] - pt2 [ 1 ] ) latitude1 = math . radians ( pt1 [ 0 ] ) latitude2 = math . radians ( pt2 [ 0 ] ) a = math . sin ( delta_latitude / 2 ) ** 2 + math . cos ( latitude1 ) * math . cos ( latitude2 ) * math . sin ( delta_longitude / 2 ) ** 2 return r * 2. * math . asin ( math . sqrt ( a ) )
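The body is the haversine formula with an Earth radius of 6371 km; a quick check against the docstring's own example:

brussels = (50.8503, 4.3517)
paris = (48.8566, 2.3522)
print(great_circle_distance(brussels, paris))  # 263.9754164080347 km, matching the docstring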
def callback_result ( self ) : """Block the main thread until the future finishes , then return the future . callback _ result ."""
if self . _state in [ PENDING , RUNNING ] : self . x if self . _user_callbacks : return self . _callback_result else : return self . x
def read_authentication_config ( self ) : """Read configuration options in section " authentication " ."""
section = "authentication" password_fields = [ ] if self . has_option ( section , "entry" ) : for val in read_multiline ( self . get ( section , "entry" ) ) : auth = val . split ( ) if len ( auth ) == 3 : self . config . add_auth ( pattern = auth [ 0 ] , user = auth [ 1 ] , password = auth [ 2 ] ) password_fields . append ( "entry/%s/%s" % ( auth [ 0 ] , auth [ 1 ] ) ) elif len ( auth ) == 2 : self . config . add_auth ( pattern = auth [ 0 ] , user = auth [ 1 ] ) else : raise LinkCheckerError ( _ ( "missing auth part in entry %(val)r" ) % { "val" : val } ) # read login URL and field names if self . has_option ( section , "loginurl" ) : val = self . get ( section , "loginurl" ) . strip ( ) if not ( val . lower ( ) . startswith ( "http:" ) or val . lower ( ) . startswith ( "https:" ) ) : raise LinkCheckerError ( _ ( "invalid login URL `%s'. Only " "HTTP and HTTPS URLs are supported." ) % val ) self . config [ "loginurl" ] = val self . read_string_option ( section , "loginuserfield" ) self . read_string_option ( section , "loginpasswordfield" ) # read login extra fields if self . has_option ( section , "loginextrafields" ) : for val in read_multiline ( self . get ( section , "loginextrafields" ) ) : name , value = val . split ( ":" , 1 ) self . config [ "loginextrafields" ] [ name ] = value self . check_password_readable ( section , password_fields )
def add_unique_template_variables ( self , options ) : """Update map template variables specific to the choropleth visual"""
# set line stroke dash interval based on line _ stroke property if self . line_stroke in [ "dashed" , "--" ] : self . line_dash_array = [ 6 , 4 ] elif self . line_stroke in [ "dotted" , ":" ] : self . line_dash_array = [ 0.5 , 4 ] elif self . line_stroke in [ "dash dot" , "-." ] : self . line_dash_array = [ 6 , 4 , 0.5 , 4 ] elif self . line_stroke in [ "solid" , "-" ] : self . line_dash_array = [ 1 , 0 ] else : # default to solid line self . line_dash_array = [ 1 , 0 ] # check if choropleth map should include 3 - D extrusion self . extrude = all ( [ bool ( self . height_property ) , bool ( self . height_stops ) ] ) # common variables for vector and geojson - based choropleths options . update ( dict ( colorStops = self . color_stops , colorProperty = self . color_property , colorType = self . color_function_type , defaultColor = self . color_default , lineColor = self . line_color , lineDashArray = self . line_dash_array , lineStroke = self . line_stroke , lineWidth = self . line_width , extrudeChoropleth = self . extrude , highlightColor = self . highlight_color ) ) if self . extrude : options . update ( dict ( heightType = self . height_function_type , heightProperty = self . height_property , heightStops = self . height_stops , defaultHeight = self . height_default , ) ) # vector - based choropleth map variables if self . vector_source : options . update ( vectorColorStops = self . generate_vector_color_map ( ) ) if self . extrude : options . update ( vectorHeightStops = self . generate_vector_numeric_map ( 'height' ) ) # geojson - based choropleth map variables else : options . update ( geojson_data = json . dumps ( self . data , ensure_ascii = False ) )
def handle_basic_executor_options ( options , parser ) : """Handle the options specified by add _ basic _ executor _ options ( ) ."""
# setup logging logLevel = logging . INFO if options . debug : logLevel = logging . DEBUG elif options . quiet : logLevel = logging . WARNING util . setup_logging ( level = logLevel )
def getOrDefault ( self , param ) : """Gets the value of a param in the user - supplied param map or its default value . Raises an error if neither is set ."""
param = self . _resolveParam ( param ) if param in self . _paramMap : return self . _paramMap [ param ] else : return self . _defaultParamMap [ param ]
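A hedged PySpark usage sketch; it assumes a standard estimator such as LogisticRegression, whose maxIter param defaults to 100:

from pyspark.ml.classification import LogisticRegression

lr = LogisticRegression()
lr.getOrDefault(lr.maxIter)   # 100: falls back to the default param map
lr.setMaxIter(5)
lr.getOrDefault(lr.maxIter)   # 5: the user-supplied value now wins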
def listar_por_grupo_equipamento ( self , id_grupo_equipamento ) : """List all user-group rights on an equipment group . : param id _ grupo _ equipamento : Identifier of the equipment group used to filter the search . : return : Dictionary with the following structure : { ' direito _ grupo _ equipamento ' : [ { ' id _ grupo _ equipamento ' : < id _ grupo _ equipamento > , ' exclusao ' : < exclusao > , ' alterar _ config ' : < alterar _ config > , ' nome _ grupo _ equipamento ' : < nome _ grupo _ equipamento > , ' id _ grupo _ usuario ' : < id _ grupo _ usuario > , ' escrita ' : < escrita > , ' nome _ grupo _ usuario ' : < nome _ grupo _ usuario > , ' id ' : < id > , ' leitura ' : < leitura > } , . . . remaining rights . . . ] } : raise InvalidParameterError : The equipment group identifier is null or invalid . : raise DataBaseError : Failure in networkapi while accessing the database . : raise XMLError : Failure in networkapi while generating the response XML ."""
if not is_valid_int_param ( id_grupo_equipamento ) : raise InvalidParameterError ( u'O identificador do grupo de equipamento é inválido ou não foi informado.' ) url = 'direitosgrupoequipamento/egrupo/' + str ( id_grupo_equipamento ) + '/' code , map = self . submit ( None , 'GET' , url ) key = 'direito_grupo_equipamento' return get_list_map ( self . response ( code , map , [ key ] ) , key )
def escape_url ( url , lowercase_urlencoding = False ) : """Escape the non - safe symbols in a URL . The encoding used by ADFS 3.0 is not compatible with python ' s quote _ plus ( ADFS produces lower case hex numbers and quote _ plus produces upper case hex numbers ) : param url : the url to escape : type url : str : param lowercase _ urlencoding : whether to lowercase the percent - encoded hex digits : type lowercase _ urlencoding : boolean : return : the escaped url : rtype str"""
encoded = quote_plus ( url ) return re . sub ( r"%[A-F0-9]{2}" , lambda m : m . group ( 0 ) . lower ( ) , encoded ) if lowercase_urlencoding else encoded
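A quick sketch of the two modes; the URL is illustrative:

escape_url('https://sp.example.com/acs?x=a b')
# 'https%3A%2F%2Fsp.example.com%2Facs%3Fx%3Da+b'  (quote_plus default, upper-case hex)

escape_url('https://sp.example.com/acs?x=a b', lowercase_urlencoding=True)
# 'https%3a%2f%2fsp.example.com%2facs%3fx%3da+b'  (hex digits lowered for ADFS 3.0)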
def get_domain_resolver ( self , domain_name , cur = None ) : """Get the last - known resolver entry for a domain name . Returns None if not found ."""
get_cmd = "SELECT resolver FROM {} WHERE domain=? AND resolver != '' AND accepted=1 ORDER BY sequence DESC, parent_zonefile_index DESC LIMIT 1;" . format ( self . subdomain_table ) cursor = None if cur is None : cursor = self . conn . cursor ( ) else : cursor = cur db_query_execute ( cursor , get_cmd , ( domain_name , ) ) rowdata = cursor . fetchone ( ) if not rowdata : return None return rowdata [ 'resolver' ]
def _set_meter_id ( self , v , load = False ) : """Setter method for meter _ id , mapped from YANG variable / openflow _ state / meter _ id ( container ) If this variable is read - only ( config : false ) in the source YANG file , then _ set _ meter _ id is considered as a private method . Backends looking to populate this variable should do so via calling thisObj . _ set _ meter _ id ( ) directly ."""
if hasattr ( v , "_utype" ) : v = v . _utype ( v ) try : t = YANGDynClass ( v , base = meter_id . meter_id , is_container = 'container' , presence = False , yang_name = "meter-id" , rest_name = "meter-id" , parent = self , path_helper = self . _path_helper , extmethods = self . _extmethods , register_paths = True , extensions = { u'tailf-common' : { u'callpoint' : u'openflow-meter-info-meter-id-1' } } , namespace = 'urn:brocade.com:mgmt:brocade-openflow-operational' , defining_module = 'brocade-openflow-operational' , yang_type = 'container' , is_config = False ) except ( TypeError , ValueError ) : raise ValueError ( { 'error-string' : """meter_id must be of a type compatible with container""" , 'defined-type' : "container" , 'generated-type' : """YANGDynClass(base=meter_id.meter_id, is_container='container', presence=False, yang_name="meter-id", rest_name="meter-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'openflow-meter-info-meter-id-1'}}, namespace='urn:brocade.com:mgmt:brocade-openflow-operational', defining_module='brocade-openflow-operational', yang_type='container', is_config=False)""" , } ) self . __meter_id = t if hasattr ( self , '_set' ) : self . _set ( )
def query ( self , coords , order = 1 ) : """Returns the P & G ( 2010 ) correction to the SFD ' 98 E ( B - V ) at the specified location ( s ) on the sky . If component is ' err ' , then return the uncertainty in the correction . Args : coords ( : obj : ` astropy . coordinates . SkyCoord ` ) : The coordinates to query . order ( Optional [ : obj : ` int ` ] ) : Interpolation order to use . Defaults to ` ` 1 ` ` , for linear interpolation . Returns : A float array containing the P & G ( 2010 ) correction ( or its uncertainty ) to SFD ' 98 at every input coordinate . The shape of the output will be the same as the shape of the coordinates stored by : obj : ` coords ` ."""
return super ( PG2010Query , self ) . query ( coords , order = order )
def get_outside_collaborators ( self , filter_ = github . GithubObject . NotSet ) : """: calls : ` GET / orgs / : org / outside _ collaborators < http : / / developer . github . com / v3 / orgs / outside _ collaborators > ` _ : param filter _ : string : rtype : : class : ` github . PaginatedList . PaginatedList ` of : class : ` github . NamedUser . NamedUser `"""
assert ( filter_ is github . GithubObject . NotSet or isinstance ( filter_ , ( str , unicode ) ) ) , filter_ url_parameters = { } if filter_ is not github . GithubObject . NotSet : url_parameters [ "filter" ] = filter_ return github . PaginatedList . PaginatedList ( github . NamedUser . NamedUser , self . _requester , self . url + "/outside_collaborators" , url_parameters )
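A hedged PyGithub usage sketch; the access token and organization name are placeholders:

from github import Github

g = Github("<access-token>")
org = g.get_organization("my-org")
# Optionally filter, e.g. to collaborators without two-factor auth enabled.
for user in org.get_outside_collaborators(filter_="2fa_disabled"):
    print(user.login)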
def parse_raw ( s , lineno = 0 ) : """Parse a date from a raw string . The format must be exactly " seconds - since - epoch offset - utc " . See the spec for details ."""
timestamp_str , timezone_str = s . split ( b' ' , 1 ) timestamp = float ( timestamp_str ) try : timezone = parse_tz ( timezone_str ) except ValueError : raise errors . InvalidTimezone ( lineno , timezone_str ) return timestamp , timezone
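A hedged usage sketch; the raw value follows the "seconds-since-epoch offset-utc" format, and the exact type returned for the timezone depends on parse_tz:

timestamp, timezone = parse_raw(b'1546300800 +0100')
# timestamp == 1546300800.0; timezone is whatever parse_tz returns for b'+0100'.
# A malformed offset raises errors.InvalidTimezone with the given line number.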