signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def create_return_line_item(cls, return_line_item, **kwargs):
    """Create a new ReturnLineItem.

    Synchronous by default; passing ``async=True`` in ``kwargs``
    returns the handle produced by the low-level helper instead of
    the unwrapped response data.

    :param return_line_item: the ReturnLineItem payload to create
    :return: the created item data, or the async handle
    """
    # Only the response payload is wanted from the low-level call.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        return cls._create_return_line_item_with_http_info(
            return_line_item, **kwargs)
    # Synchronous path: the helper already returns the data directly.
    return cls._create_return_line_item_with_http_info(
        return_line_item, **kwargs)
async def xgroup_set_id(self, name: str, group: str, stream_id: str) -> bool:
    """Reposition the last-delivered ID of a consumer group.

    [NOTICE] Not officially released yet.

    :param name: name of the stream
    :param group: name of the consumer group
    :param stream_id: ID from which the group resumes delivering;
        ``$`` means only new messages arriving in the stream.
    """
    command_args = ('XGROUP SETID', name, group, stream_id)
    return await self.execute_command(*command_args)
def get(self, queue_name, task_id):
    """Pop a specific task off the queue by identifier.

    :param queue_name: the name of the queue (usually handled by the
        ``Gator`` instance)
    :param task_id: the identifier of the task
    :returns: the task body, or ``None`` when no such task exists
    """
    self._only_watch_from(queue_name)
    job = self.conn.peek(task_id)
    if job:
        # Remove the job from the queue before handing its payload back.
        job.delete()
        return job.body
    return None
def session ( request ) : """Get the information about the current session or modify the current session . GET parameters : html turn on the HTML version of the API POST parameters : locale : client ' s locale time _ zone : client ' s time zone display _ width : width of the client ' s display ...
if request . user . id is None : # Google Bot return render_json ( request , { 'error' : _ ( 'There is no user available to create a session.' ) , 'error_type' : 'user_undefined' } , status = 400 , template = 'user_json.html' ) if request . method == 'GET' : return render_json ( request , Session . objects . ge...
def backward_delete_char(self, e):  # (Rubout)
    u"""Delete the character behind the cursor.

    A numeric argument means to kill the characters instead of
    deleting them.
    """
    # argument_reset supplies any pending numeric argument to the buffer op.
    line_buffer = self.l_buffer
    line_buffer.backward_delete_char(self.argument_reset)
    self.finalize()
def clear_if_finalized ( iteration : TransitionResult , channelidentifiers_to_channels : ChannelMap , ) -> TransitionResult [ MediatorTransferState ] : """Clear the mediator task if all the locks have been finalized . A lock is considered finalized if it has been removed from the merkle tree offchain , either b...
state = cast ( MediatorTransferState , iteration . new_state ) if state is None : return iteration # Only clear the task if all channels have the lock cleared . secrethash = state . secrethash for pair in state . transfers_pair : payer_channel = get_payer_channel ( channelidentifiers_to_channels , pair ) if...
def put_conf ( self , configuration , test = False ) : """Send the configuration to the satellite HTTP request to the satellite ( POST / push _ configuration ) If test is True , store the configuration internally : param configuration : The conf to send ( data depend on the satellite ) : type configuration ...
logger . debug ( "Sending configuration to %s, %s %s" , self . name , self . alive , self . reachable ) if test : setattr ( self , 'unit_test_pushed_configuration' , configuration ) # print ( " * * * unit tests - sent configuration % s : % s " % ( self . name , configuration ) ) return True return self . co...
def has_export_permission(self, request):
    """Return whether *request* has export permission.

    When no ``IMPORT_EXPORT_EXPORT_PERMISSION_CODE`` setting is
    configured, export is allowed for everyone.
    """
    permission_code = getattr(
        settings, 'IMPORT_EXPORT_EXPORT_PERMISSION_CODE', None)
    if permission_code is None:
        return True
    opts = self.opts
    codename = get_permission_codename(permission_code, opts)
    return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def customize ( func ) : """Decorator to set plotting context and axes style during function call ."""
@ wraps ( func ) def call_w_context ( * args , ** kwargs ) : set_context = kwargs . pop ( 'set_context' , True ) if set_context : color_palette = sns . color_palette ( 'colorblind' ) with plotting_context ( ) , axes_style ( ) , color_palette : sns . despine ( left = True ) ...
def _map_arguments ( self , args ) : """Map from the top - level arguments to the arguments provided to the indiviudal links"""
comp_file = args . get ( 'comp' , None ) datafile = args . get ( 'data' , None ) do_ltsum = args . get ( 'do_ltsum' , False ) NAME_FACTORY . update_base_dict ( datafile ) outdir_base = os . path . join ( NAME_FACTORY . base_dict [ 'basedir' ] , 'counts_cubes' ) num_files = args . get ( 'nfiles' , 96 ) self . comp_dict ...
def quantile_gaussianize(x):
    """Normalize a sequence of values via rank and the Normal c.d.f.

    Finite entries are mapped to Gaussian quantiles according to their
    rank; non-finite entries are passed through unchanged.

    :param x: array_like sequence of values
    :returns: Gaussian-normalized values
    """
    from scipy.stats import norm, rankdata
    values = asarray(x, float).copy()
    finite = isfinite(values)
    # Negating before ranking, combined with isf below, is equivalent to
    # applying ppf to ascending ranks.
    values[finite] *= -1
    result = empty_like(values)
    result[finite] = rankdata(values[finite])
    result[finite] = norm.isf(result[finite] / (sum(finite) + 1))
    # Non-finite inputs (nan/inf) are left as-is.
    result[~finite] = values[~finite]
    return result
def fit ( self , y , exogenous = None , ** fit_args ) : """Fit the auto - arima estimator Fit an AutoARIMA to a vector , ` ` y ` ` , of observations with an optional matrix of ` ` exogenous ` ` variables . Parameters y : array - like or iterable , shape = ( n _ samples , ) The time - series to which to fi...
self . model_ = auto_arima ( y , exogenous = exogenous , start_p = self . start_p , d = self . d , start_q = self . start_q , max_p = self . max_p , max_d = self . max_d , max_q = self . max_q , start_P = self . start_P , D = self . D , start_Q = self . start_Q , max_P = self . max_P , max_D = self . max_D , max_Q = se...
def train ( self , ftrain ) : '''Trains the polynomial expansion . : param numpy . ndarray / function ftrain : output values corresponding to the quadrature points given by the getQuadraturePoints method to which the expansion should be trained . Or a function that should be evaluated at the quadrature poin...
self . coeffs = 0 * self . coeffs upoints , wpoints = self . getQuadraturePointsAndWeights ( ) try : fpoints = [ ftrain ( u ) for u in upoints ] except TypeError : fpoints = ftrain for ipoly in np . arange ( self . N_poly ) : inds = tuple ( self . index_polys [ ipoly ] ) coeff = 0.0 for ( u , q , w ...
def log(args, number=None, oneline=False, quiet=False):
    """Run a ``git log ...`` command and return its stdout.

    :param args: anything which can be added after a normal ``git log``
    :param number: if truthy, added as a ``-n`` option
    :param oneline: if truthy, adds the ``--oneline`` option
    :param quiet: passed through to :func:`run`
    :returns: the command output, or ``''`` on an unknown revision
    """
    number_option = '-n %s' % number if number else ''
    oneline_option = '--oneline' if oneline else ''
    options = '%s %s' % (number_option, oneline_option)
    try:
        return run('log %s %s' % (options, args), quiet=quiet)
    except UnknownRevision:
        return ''
def sample ( self ) : """This is the core sampling method . Samples a state from a demonstration , in accordance with the configuration ."""
# chooses a sampling scheme randomly based on the mixing ratios seed = random . uniform ( 0 , 1 ) ratio = np . cumsum ( self . scheme_ratios ) ratio = ratio > seed for i , v in enumerate ( ratio ) : if v : break sample_method = getattr ( self , self . sample_method_dict [ self . sampling_schemes [ i ] ] ) r...
def run_dssp ( pdb , path = True , outfile = None ) : """Uses DSSP to find helices and extracts helices from a pdb file or string . Parameters pdb : str Path to pdb file or string . path : bool , optional Indicates if pdb is a path or a string . outfile : str , optional Filepath for storing the dssp o...
if not path : if type ( pdb ) == str : pdb = pdb . encode ( ) try : temp_pdb = tempfile . NamedTemporaryFile ( delete = False ) temp_pdb . write ( pdb ) temp_pdb . seek ( 0 ) dssp_out = subprocess . check_output ( [ global_settings [ 'dssp' ] [ 'path' ] , temp_pdb . name ...
def find_handlers ( event_name , registry = HANDLER_REGISTRY ) : """Small helper to find all handlers associated to a given event If the event can ' t be found , an empty list will be returned , since this is an internal function and all validation against the event name and its existence was already performe...
handlers = [ ] # event _ name can be a BaseEvent or the string representation if isinstance ( event_name , basestring ) : matched_events = [ event for event in registry . keys ( ) if fnmatch . fnmatchcase ( event_name , event ) ] for matched_event in matched_events : handlers . extend ( registry . get (...
def get_edu_text(text_subtree):
    """Return the text of the given EDU subtree."""
    assert text_subtree.label() == SubtreeType.text
    # Leaves are utf-8 encoded byte strings; decode before joining.
    decoded_words = (word.decode('utf-8') for word in text_subtree.leaves())
    return u' '.join(decoded_words)
def _to_unit_base ( self , base_unit , values , unit , from_unit ) : """Return values in a given unit given the input from _ unit ."""
self . _is_numeric ( values ) namespace = { 'self' : self , 'values' : values } if not from_unit == base_unit : self . is_unit_acceptable ( from_unit , True ) statement = '[self._{}_to_{}(val) for val in values]' . format ( self . _clean ( from_unit ) , self . _clean ( base_unit ) ) values = eval ( statemen...
def parse_eggs_list ( path ) : """Parse eggs list from the script at the given path"""
with open ( path , 'r' ) as script : data = script . readlines ( ) start = 0 end = 0 for counter , line in enumerate ( data ) : if not start : if 'sys.path[0:0]' in line : start = counter + 1 if counter >= start and not end : if ']' in line : ...
def select_cell(self, row, col, add_to_selected=False):
    """Select a single cell.

    :param row: row index of the cell
    :param col: column index of the cell
    :param add_to_selected: keep the existing selection when True
    """
    # A single cell is a 1x1 block whose corners coincide.
    self.grid.SelectBlock(row, col, row, col,
                          addToSelected=add_to_selected)
def checkmagic ( self ) : """Overridable . Check to see if the file object self . lib actually has a file we understand ."""
self . lib . seek ( self . start ) # default - magic is at start of file if self . lib . read ( len ( self . MAGIC ) ) != self . MAGIC : raise ArchiveReadError ( "%s is not a valid %s archive file" % ( self . path , self . __class__ . __name__ ) ) if self . lib . read ( len ( self . pymagic ) ) != self . pymagic : ...
def get_list ( self , question , splitter = "," , at_least = 0 , at_most = float ( "inf" ) ) : """Parses answer and gets list : param question : Question : to ask user : param splitter : Split list elements with this char : param at _ least : List must have at least this amount of elements : param at _ most...
try : user_answer = self . get_answer ( question ) # ask question user_answer = user_answer . split ( splitter ) # split items user_answer = [ str ( item ) . strip ( ) for item in user_answer ] # strip if at_least < len ( user_answer ) < at_most : return user_answer exc = "List i...
def split_matrix(M, contigs):
    """Split a labeled multi-chromosome matrix into single-chromosome blocks.

    Yields the unlabeled square diagonal block for each contiguous run of
    equal labels in *contigs*; inter-chromosomal contacts are discarded.

    :param M: array_like multiple-chromosome matrix to be split
    :param contigs: per-bin chromosome labels, grouped contiguously
    """
    index = 0
    # Fixed: original called the nonexistent ``itertools.groubpy`` and took
    # ``len()`` of the grouper iterator, which has no length.
    for _, run in itertools.groupby(contigs):
        n = len(list(run))  # materialize the run iterator to count it
        yield M[index:index + n, index:index + n]
        index += n
def save_webdriver_logs_by_type ( self , log_type , test_name ) : """Get webdriver logs of the specified type and write them to a log file : param log _ type : browser , client , driver , performance , server , syslog , crashlog or logcat : param test _ name : test that has generated these logs"""
try : logs = self . driver_wrapper . driver . get_log ( log_type ) except Exception : return if len ( logs ) > 0 : log_file_name = '{}_{}.txt' . format ( get_valid_filename ( test_name ) , log_type ) log_file_name = os . path . join ( DriverWrappersPool . logs_directory , log_file_name ) with open (...
def peek(self, size):
    """Nondestructively retrieve a given number of characters.

    The next :meth:`read` operation behaves as though this method was
    never called.

    :param size: the number of characters to retrieve
    :type size: ``integer``
    """
    chunk = self.read(size)
    # Push the consumed characters back so a later read sees them again.
    self._buffer = self._buffer + chunk
    return chunk
def unpack_to_nibbles(bindata):
    """Unpack packed binary data to nibbles.

    :param bindata: binary packed from nibbles
    :return: nibbles sequence, may have a terminator
    """
    nibbles = bin_to_nibbles(bindata)
    flags = nibbles[0]
    # Flag bit 2: the sequence carries a terminator nibble.
    if flags & 2:
        nibbles.append(NIBBLE_TERMINATOR)
    # Flag bit 1: odd-length payload, so only one flag nibble to strip.
    return nibbles[1:] if flags & 1 else nibbles[2:]
def sort(self, *, key: Optional[Callable[[Any], Any]] = None,
         reverse: bool = False) -> None:
    """Sort the _WeakList in place.

    :param key: key by which to sort, default None
    :param reverse: sort in reverse order when True, False by default
    """
    # Delegate to list.sort with the key wrapped for weak references.
    list.sort(self, key=self._sort_key(key), reverse=reverse)
def remove ( path : str , max_retries : int = 3 ) -> bool : """Removes the specified path from the local filesystem if it exists . Directories will be removed along with all files and folders within them as well as files . : param path : The location of the file or folder to remove . : param max _ retries...
if not path : return False if not os . path . exists ( path ) : return True remover = os . remove if os . path . isfile ( path ) else shutil . rmtree for attempt in range ( max_retries ) : try : remover ( path ) return True except Exception : # Pause briefly in case there ' s a race cond...
def register_intent_parser(self, intent_parser):
    """Enforce the intent parser interface at registration time.

    :param intent_parser: intent to be registered
    :raises ValueError: on invalid intent
    """
    validate = getattr(intent_parser, 'validate', None)
    if not callable(validate):
        raise ValueError("%s is not an intent parser" % str(intent_parser))
    self.intent_parsers.append(intent_parser)
def add_request_handler_chain(self, request_handler_chain):
    # type: (GenericRequestHandlerChain) -> None
    """Check the type before adding it to request_handler_chains.

    :param request_handler_chain: Request Handler Chain instance
    :raises DispatchException: when the argument is not a
        GenericRequestHandlerChain
    """
    # isinstance is False for None too, so one check covers both cases.
    if not isinstance(request_handler_chain, GenericRequestHandlerChain):
        raise DispatchException(
            "Request Handler Chain is not a GenericRequestHandlerChain "
            "instance")
    self._request_handler_chains.append(request_handler_chain)
def open_python ( self , message , namespace ) : """Open interactive python console"""
# Importing readline will in some cases print weird escape # characters to stdout . To avoid this we only import readline # and related packages at this point when we are certain # they are needed . from code import InteractiveConsole import readline import rlcompleter readline . set_completer ( rlcompleter . Completer...
def get_column ( self , column_name ) : """Returns a column as a Series . Parameters column _ name : str Returns column : pandas . Series"""
with log_start_finish ( 'getting single column {!r} from table {!r}' . format ( column_name , self . name ) , logger ) : extra_cols = _columns_for_table ( self . name ) if column_name in extra_cols : with log_start_finish ( 'computing column {!r} for table {!r}' . format ( column_name , self . name ) , ...
def tostr ( s , encoding = 'ascii' ) : """Convert string - like - thing s to the ' str ' type , in all Pythons , even back before Python 2.6 . What ' str ' means varies by PY3K or not . In Pythons before 3.0 , str and bytes are the same type . In Python 3 + , this may require a decoding step ."""
if PY3K : if isinstance ( s , str ) : # str = = unicode in PY3K return s else : # s is type bytes return s . decode ( encoding ) else : # for py2.6 on ( before 3.0 ) , bytes is same as str ; 2.5 has no bytes # but handle if unicode is passed if isinstance ( s , unicode ) : return s ....
def getAllNodeUids(self):
    '''getAllNodeUids - Gets the internal uids of all nodes, their
    children, and all their descendants.

    @return set<uuid.UUID>
    '''
    uids = set()
    for child in self:
        uids |= child.getAllNodeUids()
    return uids
def _addToBuffers ( self , logname , data ) : """Add data to the buffer for logname Start a timer to send the buffers if BUFFER _ TIMEOUT elapses . If adding data causes the buffer size to grow beyond BUFFER _ SIZE , then the buffers will be sent ."""
n = len ( data ) self . buflen += n self . buffered . append ( ( logname , data ) ) if self . buflen > self . BUFFER_SIZE : self . _sendBuffers ( ) elif not self . sendBuffersTimer : self . sendBuffersTimer = self . _reactor . callLater ( self . BUFFER_TIMEOUT , self . _bufferTimeout )
def aliased_as(self, name):
    """Create an alias of this stream.

    Returns a shallow copy of this stream whose alias is *name*.  This
    can be used when an SPL operator invocation needs an expression
    that matches a specific input port name.
    """
    aliased = copy.copy(self)
    aliased._alias = name
    return aliased
def add_mesh ( self , mesh , color = None , style = None , scalars = None , rng = None , stitle = None , show_edges = None , point_size = 5.0 , opacity = 1.0 , line_width = None , flip_scalars = False , lighting = None , n_colors = 256 , interpolate_before_map = False , cmap = None , label = None , reset_camera = None ...
# fixes lighting issue when using precalculated normals if isinstance ( mesh , vtk . vtkPolyData ) : if mesh . GetPointData ( ) . HasArray ( 'Normals' ) : mesh . point_arrays [ 'Normals' ] = mesh . point_arrays . pop ( 'Normals' ) if scalar_bar_args is None : scalar_bar_args = { } if isinstance ( mesh ,...
def convert_dict ( obj , ids , parent , attr_type , item_func , cdata ) : """Converts a dict into an XML string ."""
LOG . info ( 'Inside convert_dict(): obj type is: "%s", obj="%s"' % ( type ( obj ) . __name__ , unicode_me ( obj ) ) ) output = [ ] addline = output . append item_name = item_func ( parent ) for key , val in obj . items ( ) : LOG . info ( 'Looping inside convert_dict(): key="%s", val="%s", type(val)="%s"' % ( unico...
def load_entry_point_group(self, entry_point_group):
    """Load actions from an entry point group.

    :param entry_point_group: the entrypoint group name to load
        plugins from
    """
    for entry_point in pkg_resources.iter_entry_points(
            group=entry_point_group):
        self.register_scope(entry_point.load())
def sqlmany(self, stringname, *args):
    """Execute many SQL calls on my connection.

    *stringname* names a query: either a key in the precompiled JSON
    or a method name in ``allegedb.alchemy.Alchemist``.  The remaining
    arguments are tuples of argument sequences for the query.
    """
    # Prefer the SQLAlchemy path when an alchemist is attached.
    if hasattr(self, 'alchemist'):
        return getattr(self.alchemist.many, stringname)(*args)
    query = self.strings[stringname]
    return self.connection.cursor().executemany(query, args)
def get_compression_filter ( byte_counts ) : """Determine whether or not to use a compression on the array stored in a hierarchical data format , and which compression library to use to that purpose . Compression reduces the HDF5 file size and also helps improving I / O efficiency for large datasets . Param...
assert isinstance ( byte_counts , numbers . Integral ) and byte_counts > 0 if 2 * byte_counts > 1000 * memory ( ) [ 'free' ] : try : FILTERS = tables . filters ( complevel = 5 , complib = 'blosc' , shuffle = True , least_significant_digit = 6 ) except tables . FiltersWarning : FILTERS = tables ....
def detect_unused_return_values ( self , f ) : """Return the nodes where the return value of a call is unused Args : f ( Function ) Returns : list ( Node )"""
values_returned = [ ] nodes_origin = { } for n in f . nodes : for ir in n . irs : if isinstance ( ir , HighLevelCall ) : # if a return value is stored in a state variable , it ' s ok if ir . lvalue and not isinstance ( ir . lvalue , StateVariable ) : values_returned . append ( ir...
def _tokenize ( self , text ) : """Tokenizes a piece of text ."""
text = self . _clean_text ( text ) # This was added on November 1st , 2018 for the multilingual and Chinese # models . This is also applied to the English models now , but it doesn ' t # matter since the English models were not trained on any Chinese data # and generally don ' t have any Chinese data in them ( there ar...
def load ( self , dtype_out_time , dtype_out_vert = False , region = False , plot_units = False , mask_unphysical = False ) : """Load the data from the object if possible or from disk ."""
msg = ( "Loading data from disk for object={0}, dtype_out_time={1}, " "dtype_out_vert={2}, and region=" "{3}" . format ( self , dtype_out_time , dtype_out_vert , region ) ) logging . info ( msg + ' ({})' . format ( ctime ( ) ) ) # Grab from the object if its there . try : data = self . data_out [ dtype_out_time ] e...
def parametrized_function(decorator):
    '''Decorator used to create decorators with arguments.

    Should be used on a function returning another function that will
    be called with the original function as the first parameter.  No
    distinction is made between methods and functions.
    '''
    def meta_decorator(*decorator_args, **decorator_kwargs):
        # Defer to the project helper that binds the arguments.
        return _NormalMetaDecorator(decorator, decorator_args,
                                    decorator_kwargs)
    return meta_decorator
def is_github_ip(ip_str):
    """Verify that an IP address is owned by GitHub."""
    if isinstance(ip_str, bytes):
        ip_str = ip_str.decode()
    ip = ipaddress.ip_address(ip_str)
    # Normalize IPv4-mapped IPv6 addresses to their IPv4 form.
    if ip.version == 6 and ip.ipv4_mapped:
        ip = ip.ipv4_mapped
    return any(ip in ipaddress.ip_network(block)
               for block in load_github_hooks())
def to_dict(self):
    """Convert self to a dict object for serialization."""
    serialized = {
        'level': self.level,
        'id': self.id,
        'text': self.text,
        'inner_html': self.inner_html,
        # Children serialize themselves recursively.
        'children': [child.to_dict() for child in self.children],
    }
    return serialized
def refresh_state_in_ec ( self , ec_index ) : '''Get the up - to - date state of the component in an execution context . This function will update the state , rather than using the cached value . This may take time , if the component is executing on a remote node . @ param ec _ index The index of the execut...
with self . _mutex : if ec_index >= len ( self . owned_ecs ) : ec_index -= len ( self . owned_ecs ) if ec_index >= len ( self . participating_ecs ) : raise exceptions . BadECIndexError ( ec_index ) state = self . _get_ec_state ( self . participating_ecs [ ec_index ] ) sel...
def timeit_grid ( stmt_list , setup = '' , iterations = 10000 , input_sizes = None , verbose = True , show = False ) : """Timeit : : import utool as ut setup = ut . codeblock ( import utool as ut from six . moves import range , zip import time def time _ append ( size ) : start _ time = time . time ( ...
import timeit # iterations = timeit . default _ number if input_sizes is None : input_sizes = [ 2 ** count for count in range ( 7 , 14 ) ] time_grid = [ ] for size in input_sizes : time_list = [ ] for stmt in stmt_list : stmt_ = stmt + '(' + str ( size ) + ')' if verbose : print ...
def get_design(self, design_name):
    """Return a dict representation of the design document.

    :param design_name: name of the design document
    :raises: any error from the HTTP request or result parsing
    """
    # The original wrapped this in ``try: ... except: raise``, which is a
    # no-op (a bare except that only re-raises); removed so errors
    # propagate directly without obscuring the code.
    r = requests.request(
        "GET",
        "%s/%s/_design/%s" % (self.host, self.database_name, design_name),
        auth=self.auth)
    return self.result(r.text)
def adjust_all_to_360(dictionary):
    """Check each key/value pair of *dictionary* in place.

    If a key is of type declination/longitude/azimuth/direction, its
    value is adjusted to be within 0-360 as required by the MagIC data
    model.
    """
    for field_name in dictionary:
        dictionary[field_name] = adjust_to_360(dictionary[field_name],
                                               field_name)
    return dictionary
def p_array(self, t):
    """expression : '{' commalist '}'
        | kw_array '[' commalist ']'"""
    # NOTE: the docstring above is a PLY grammar rule consumed by yacc at
    # runtime — it must not be edited as prose.
    # Brace form '{' commalist '}': 4 symbols, element list at t[2].
    if len(t) == 4:
        t[0] = ArrayLit(t[2].children)
    # Keyword form kw_array '[' commalist ']': 5 symbols, elements at t[3].
    elif len(t) == 5:
        t[0] = ArrayLit(t[3].children)
    else:
        raise NotImplementedError('unk_len', len(t))  # pragma: no cover
def text_to_speech ( self , text , file , voice_name = None , language = None ) : """Saves given text synthesized audio file , via ' CreateSpeech ' endpoint Docs : http : / / developer . ivona . com / en / speechcloud / actions . html # CreateSpeech : param text : text to synthesize : type text : str : pa...
endpoint = 'CreateSpeech' data = { 'Input' : { 'Data' : text , } , 'OutputFormat' : { 'Codec' : self . codec . upper ( ) , } , 'Parameters' : { 'Rate' : self . rate , 'Volume' : self . volume , 'SentenceBreak' : self . sentence_break , 'ParagraphBreak' : self . paragraph_break , } , 'Voice' : { 'Name' : voice_name or s...
def _PreParse ( self , key , value ) : """Executed against each field of each row read from index table ."""
if key == "Command" : return re . sub ( r"(\[\[.+?\]\])" , self . _Completion , value ) else : return value
def c2r(self):
    """Get a real matrix from the complex one, suitable for solving a
    complex linear system with a real solver.

    The result gains an extra mode of size 2 on both the row and column
    indices (the real/imaginary components).
    """
    row_modes = _np.concatenate((self.n, [2]))
    col_modes = _np.concatenate((self.m, [2]))
    # __complex_op('M') builds the underlying real-equivalent operator
    # (name-mangled private helper on the tt object).
    return matrix(a=self.tt.__complex_op('M'), n=row_modes, m=col_modes)
def concat_expr(operator, conditions):
    """Concatenate *conditions* with *operator* and wrap it by ().

    Returns a string in a list, or an empty list if *conditions* is
    empty.
    """
    joined = " {0} ".format(operator).join(conditions)
    if not joined:
        return []
    return ["({0})".format(joined)]
def remove_edge_fun ( graph ) : """Returns a function that removes an edge from the ` graph ` . . . note : : The out node is removed if this is isolate . : param graph : A directed graph . : type graph : networkx . classes . digraph . DiGraph : return : A function that remove an edge from the ` graph ` ...
# Namespace shortcut for speed . rm_edge , rm_node = graph . remove_edge , graph . remove_node from networkx import is_isolate def remove_edge ( u , v ) : rm_edge ( u , v ) # Remove the edge . if is_isolate ( graph , v ) : # Check if v is isolate . rm_node ( v ) # Remove the isolate out node . retur...
def get1Dcut ( cam : List [ Cam ] , odir : Path = None , verbose : bool = False ) -> List [ Cam ] : """i . get az / el of each pixel ( rotated / transposed as appropriate ) ii . get cartesian ECEF of each pixel end , a point outside the grid ( to create rays to check intersections with grid ) iii . put cameras ...
# % % determine slant range between other camera and magnetic zenith to evaluate at # 4.5 had zero discards for hst0 # 6.8 didn ' t quite get to zenith srpts = logspace ( 4.3 , 6.9 , 25 ) # % % ( i ) load az / el data from Astrometry . net for C in cam : if C . usecam : C . doorient ( ) C . toecef (...
def get_field_mappings ( self , field ) : """Converts ES field mappings to . kibana field mappings"""
retdict = { } retdict [ 'indexed' ] = False retdict [ 'analyzed' ] = False for ( key , val ) in iteritems ( field ) : if key in self . mappings : if ( key == 'type' and ( val == "long" or val == "integer" or val == "double" or val == "float" ) ) : val = "number" # self . pr _ dbg ( " \ t...
def click_and_hold(self, on_element=None):
    """Holds down the left mouse button on an element.

    :Args:
     - on_element: The element to mouse down.
       If None, clicks on current mouse position.
    """
    if on_element:
        self.move_to_element(on_element)
    if not self._driver.w3c:
        # Legacy protocol: queue the raw MOUSE_DOWN command.
        self._actions.append(
            lambda: self._driver.execute(Command.MOUSE_DOWN, {}))
        return self
    actions = self.w3c_actions
    actions.pointer_action.click_and_hold()
    actions.key_action.pause()
    return self
def holidays ( self , start = None , end = None , return_name = False ) : """Returns a curve with holidays between start _ date and end _ date Parameters start : starting date , datetime - like , optional end : ending date , datetime - like , optional return _ name : bool , optional If True , return a ser...
if self . rules is None : raise Exception ( 'Holiday Calendar {name} does not have any ' 'rules specified' . format ( name = self . name ) ) if start is None : start = AbstractHolidayCalendar . start_date if end is None : end = AbstractHolidayCalendar . end_date start = Timestamp ( start ) end = Timestamp (...
def _python3_record_factory(*args, **kwargs):
    """Python 3 approach to custom logging via a log record factory.

    Inspired by the Python logging cookbook's "customizing LogRecord"
    recipe.

    :return: a log record augmented by ``_update_record``
    """
    log_record = _python_record_factory(*args, **kwargs)
    _update_record(log_record)
    return log_record
def coerce(cls, key, value):
    """Convert a plain dictionary to a MutationDict."""
    # Renamed the misleading local (originally ``self``) — this builds a
    # new MutationDict rather than operating on an instance.
    mutated = MutationDict((k, MutationObj.coerce(key, v))
                           for k, v in value.items())
    mutated._key = key
    return mutated
def extract_nodes(soup, nodename, attr=None, value=None):
    """Return a list of tags (nodes) from *soup* matching *nodename*.

    If an optional attribute and value are given, these are used to
    filter the results further.

    :param soup: a BeautifulSoup-like object exposing ``find_all``
    :param nodename: tag name to search for
    :param attr: optional attribute name to filter on
    :param value: required value of *attr* for a tag to be kept
    """
    tags = soup.find_all(nodename)
    # Fixed idiom: ``is not None`` instead of ``!= None``, and a list
    # comprehension instead of filter() with a lambda.
    if attr is not None and value is not None:
        return [tag for tag in tags if tag.get(attr) == value]
    return list(tags)
def model ( self , data , ind_col = None , dep_col = None , project_ind_col = None , baseline_period = [ None , None ] , projection_period = None , exclude_time_period = None , alphas = np . logspace ( - 4 , 1 , 30 ) , cv = 3 , plot = True , figsize = None , custom_model_func = None ) : """Split data into baseline ...
# Check to ensure data is a pandas dataframe if not isinstance ( data , pd . DataFrame ) : raise SystemError ( 'data has to be a pandas dataframe.' ) # Create instance model_data_obj = Model_Data ( data , ind_col , dep_col , alphas , cv , exclude_time_period , baseline_period , projection_period ) # Split data into...
def _validate_parameters ( self ) : """Validate Connection Parameters . : return :"""
if not compatibility . is_string ( self . parameters [ 'hostname' ] ) : raise AMQPInvalidArgument ( 'hostname should be a string' ) elif not compatibility . is_integer ( self . parameters [ 'port' ] ) : raise AMQPInvalidArgument ( 'port should be an integer' ) elif not compatibility . is_string ( self . paramet...
def middleMouseClickEvent(argosPgPlotItem, axisNumber, mouseClickEvent):
    """Emit sigAxisReset when the middle mouse button is clicked on an
    axis of the plot item."""
    # Guard clause: only middle-button clicks are handled.
    if mouseClickEvent.button() != QtCore.Qt.MiddleButton:
        return
    mouseClickEvent.accept()
    argosPgPlotItem.emitResetAxisSignal(axisNumber)
def feed(self, url_template, keyword, offset, max_num, page_step):
    """Feed urls once.

    :param url_template: string with ``{}`` placeholders for the
        keyword and the page index
    :param keyword: the searching keyword
    :param offset: the starting index
    :param max_num: how many indices to cover from *offset*
    :param page_step: stride between successive page indices
    """
    for page_index in range(offset, offset + max_num, page_step):
        url = url_template.format(keyword, page_index)
        self.out_queue.put(url)
        self.logger.debug('put url to url_queue: {}'.format(url))
def fmt_tag ( cur_namespace , tag , val ) : """Processes a documentation reference ."""
if tag == 'type' : fq_val = val if '.' not in val and cur_namespace is not None : fq_val = cur_namespace . name + '.' + fq_val return fq_val elif tag == 'route' : if ':' in val : val , version = val . split ( ':' , 1 ) version = int ( version ) else : version = 1 ...
def updateProgress(self, time, state='stopped'):
    """Report the watched progress for this video to the server.

    Note that setting the time to 0 will not work. Use `markWatched` or
    `markUnwatched` to achieve that goal.

    Parameters:
        time (int): milliseconds watched
        state (string): playback state of the video (default 'stopped')
    """
    endpoint = ('/:/progress?key=%s&identifier=com.plexapp.plugins.library'
                '&time=%d&state=%s' % (self.ratingKey, time, state))
    self._server.query(endpoint)
    # refresh cached attributes so they reflect the new progress
    self.reload()
def setup_owner ( self , name , new_owner = default , transact = { } ) : """Set the owner of the supplied name to ` new _ owner ` . For typical scenarios , you ' ll never need to call this method directly , simply call : meth : ` setup _ name ` or : meth : ` setup _ address ` . This method does * not * set up...
( super_owner , unowned , owned ) = self . _first_owner ( name ) if new_owner is default : new_owner = super_owner elif not new_owner : new_owner = EMPTY_ADDR_HEX else : new_owner = to_checksum_address ( new_owner ) current_owner = self . owner ( name ) if new_owner == EMPTY_ADDR_HEX and not current_owner :...
def create_items ( portal_type = None , uid = None , endpoint = None , ** kw ) : """create items 1 . If the uid is given , get the object and create the content in there ( assumed that it is folderish ) 2 . If the uid is 0 , the target folder is assumed the portal . 3 . If there is no uid given , the payloa...
# disable CSRF req . disable_csrf_protection ( ) # destination where to create the content container = uid and get_object_by_uid ( uid ) or None # extract the data from the request records = req . get_request_data ( ) results = [ ] for record in records : # get the portal _ type if portal_type is None : # try to fe...
def add_primary_key(conn, schema, table, pk_col):
    r"""Add a primary key to a database table.

    Parameters
    ----------
    conn : sqlalchemy connection object
        A valid connection to a database
    schema : str
        The database schema
    table : str
        The database table
    pk_col : str
        Column that primary key is applied to
    """
    # NOTE(review): identifiers are interpolated directly into the DDL
    # string; callers must not pass untrusted schema/table/column names.
    ddl = """alter table {schema}.{table} add primary key ({col})""".format(
        schema=schema, table=table, col=pk_col)
    conn.execute(ddl)
def stream_data(self, host=HOST, port=GPSD_PORT, enable=True, gpsd_protocol=PROTOCOL, devicepath=None):
    """Connect to the gpsd daemon and start watching the data stream.

    Opens the socket to ``host:port`` and issues the WATCH command with
    the requested protocol and device options.
    """
    gpsd_socket = self.socket
    gpsd_socket.connect(host, port)
    gpsd_socket.watch(enable, gpsd_protocol, devicepath)
def broadcast_dimension_size(
    variables: List[Variable],
) -> 'OrderedDict[Any, int]':
    """Extract dimension sizes from a dictionary of variables.

    Raises ValueError if any dimensions have different sizes.
    """
    sizes = OrderedDict()  # type: OrderedDict[Any, int]
    for variable in variables:
        for name, length in zip(variable.dims, variable.shape):
            # a dimension seen before must keep the same size everywhere
            if name in sizes and length != sizes[name]:
                raise ValueError('index %r not aligned' % name)
            sizes[name] = length
    return sizes
def md5 ( self ) : """MD5 of scene which will change when meshes or transforms are changed Returns hashed : str , MD5 hash of scene"""
# start with transforms hash hashes = [ self . graph . md5 ( ) ] for g in self . geometry . values ( ) : if hasattr ( g , 'md5' ) : hashes . append ( g . md5 ( ) ) elif hasattr ( g , 'tostring' ) : hashes . append ( str ( hash ( g . tostring ( ) ) ) ) else : # try to just straight up hash ...
def trigger ( queue , user = None , group = None , mode = None , trigger = _c . FSQ_TRIGGER ) : '''Installs a trigger for the specified queue .'''
# default our owners and mode user , group , mode = _dflts ( user , group , mode ) trigger_path = fsq_path . trigger ( queue , trigger = trigger ) created = False try : # mkfifo is incapable of taking unicode , coerce back to str try : os . mkfifo ( trigger_path . encode ( _c . FSQ_CHARSET ) , mode ) ...
def generate_data(method, args):
    """Assign arguments to the body or URL of an HTTP request.

    Parameters
        method (str) HTTP Method. (e.g. 'POST')
        args (dict) Dictionary of data to attach to each Request.
            e.g. {'latitude': 37.561, 'longitude': -122.742}

    Returns
        (data, params) tuple: the serialized body for body-carrying
        methods, otherwise the args as URL query parameters.
    """
    if method in http.BODY_METHODS:
        return dumps(args), {}
    return {}, args
def start_of_day(dtime_at=None):
    """Return the local (user timezone) start of day, that is, time
    00:00:00 for a given datetime (now when omitted)."""
    moment = datetime_or_now(dtime_at)
    return datetime.datetime(
        moment.year, moment.month, moment.day, tzinfo=tzlocal())
def get_process_work_item_type ( self , process_id , wit_ref_name , expand = None ) : """GetProcessWorkItemType . [ Preview API ] Returns a single work item type in a process . : param str process _ id : The ID of the process : param str wit _ ref _ name : The reference name of the work item type : param st...
route_values = { } if process_id is not None : route_values [ 'processId' ] = self . _serialize . url ( 'process_id' , process_id , 'str' ) if wit_ref_name is not None : route_values [ 'witRefName' ] = self . _serialize . url ( 'wit_ref_name' , wit_ref_name , 'str' ) query_parameters = { } if expand is not None...
def get_remote_mgmt_addr(self, tlv_data):
    """Return the Remote Mgmt Addr parsed from the TLV.

    Returns the address prefixed with its family ('IPv4:'), or None
    when the TLV does not have the expected format.
    """
    family = 'IPv4:'
    ok, parsed_val = self._check_common_tlv_format(
        tlv_data, "IPv4:", "Management Address TLV")
    if not ok:
        return None
    # the address is the first line of the second parsed component
    address = parsed_val[1].split('\n')[0].strip()
    return family + address
def InitLocCheck ( self ) : """make an interactive grid in which users can edit specimen names as well as which sample a specimen belongs to"""
self . panel = wx . Panel ( self , style = wx . SIMPLE_BORDER ) text = """Step 5: Check that locations are correctly named. Fill in any blank cells using controlled vocabularies. (See Help button for details) ** Denotes controlled vocabulary""" label = wx . StaticText ( self . panel , label = text ) # self . Data _ hi...
def substitute_url_with_ref ( self , txt ) : """In the string ` txt ` , replace links to online docs with corresponding sphinx cross - references ."""
# Find links mi = re . finditer ( r'\[([^\]]+|\[[^\]]+\])\]\(([^\)]+)\)' , txt ) if mi : # Iterate over match objects in iterator returned by # re . finditer for mo in mi : # Get components of current match : full matching text , # the link label , and the postfix to the base url in the # link url m...
def fetch(self, sql, *args, **kwargs):
    """Execute an SQL SELECT query and return the first row or `None`.

    :param sql: statement to execute
    :param args: parameters iterable
    :param kwargs: parameters iterable
    :return: the first row or `None`
    """
    with self.locked() as conn:
        result = conn.query(sql, *args, **kwargs)
        return result.fetch()
def get_dev_asset_details ( ipaddress , auth , url ) : """Takes in ipaddress as input to fetch device assett details from HP IMC RESTFUL API : param ipaddress : IP address of the device you wish to gather the asset details : param auth : requests auth object # usually auth . creds from auth pyhpeimc . auth . cl...
get_dev_asset_url = "/imcrs/netasset/asset?assetDevice.ip=" + str ( ipaddress ) f_url = url + get_dev_asset_url # creates the URL using the payload variable as the contents r = requests . get ( f_url , auth = auth , headers = HEADERS ) # r . status _ code try : if r . status_code == 200 : dev_asset_info = (...
def on_mismatch ( self , pair ) : """Called for pairs that don ' t match ` match ` and ` exclude ` filters . If - - delete - unmatched is on , remove the remote resource ."""
remote_entry = pair . remote if self . options . get ( "delete_unmatched" ) and remote_entry : self . _log_action ( "delete" , "unmatched" , ">" , remote_entry ) if remote_entry . is_dir ( ) : self . _remove_dir ( remote_entry ) else : self . _remove_file ( remote_entry ) else : self . _...
def resolution_profile ( self , graph , partition_type , resolution_range , weights = None , bisect_func = lambda p : p . bisect_value ( ) , min_diff_bisect_value = 1 , min_diff_resolution = 1e-3 , linear_bisection = False , number_iterations = 1 , ** kwargs ) : """Use bisectioning on the resolution parameter in or...
# Helper function for cleaning values to be a stepwise function def clean_stepwise ( bisect_values ) : # Check best partition for each resolution parameter for res , bisect in bisect_values . iteritems ( ) : best_bisect = bisect best_quality = bisect . partition . quality ( res ) for res2 , ...
def get_recipes_in_node(node):
    """Get the name of all recipes present in the run_list of a node."""
    # entries look like "recipe[name]"; keep the part between the brackets
    return [
        entry.split('[')[1].split(']')[0]
        for entry in node.get('run_list', [])
        if entry.startswith("recipe")
    ]
def new(cls, num=None, *args, **kwargs):
    """Create a new main project.

    Parameters
    ----------
    num : int
        The number of the project

    Returns
    -------
    Project
        The project with the given `num` (created if it does not
        already exist)

    See Also
    --------
    scp : Sets the current project
    """
    created = cls(*args, num=num, **kwargs)
    # register the freshly created project as the current one
    scp(created)
    return created
def _parse_perfdata ( self , s ) : """Parse performance data from a perfdata string"""
metrics = [ ] counters = re . findall ( self . TOKENIZER_RE , s ) if counters is None : self . log . warning ( "Failed to parse performance data: {s}" . format ( s = s ) ) return metrics for ( key , value , uom , warn , crit , min , max ) in counters : try : norm_value = self . _normalize_to_unit ( ...
def update ( self , alert_condition_id , policy_id , type = None , condition_scope = None , name = None , entities = None , metric = None , runbook_url = None , terms = None , user_defined = None , enabled = None ) : """Updates any of the optional parameters of the alert condition : type alert _ condition _ id : ...
conditions_dict = self . list ( policy_id ) target_condition = None for condition in conditions_dict [ 'conditions' ] : if int ( condition [ 'id' ] ) == alert_condition_id : target_condition = condition break if target_condition is None : raise NoEntityException ( 'Target alert condition is not ...
def removePhenotypeAssociationSet(self, phenotypeAssociationSet):
    """Remove a phenotype association set from the repo."""
    target_id = phenotypeAssociationSet.getId()
    delete_query = models.Phenotypeassociationset.delete().where(
        models.Phenotypeassociationset.id == target_id)
    delete_query.execute()
def view_vector(self, vector, viewup=None):
    """Point the camera in the direction of the given vector.

    The focal point stays at the scene center; the camera is placed at
    center + vector. ``viewup`` defaults to the configured rcParams value.
    """
    focal_point = self.center
    if viewup is None:
        viewup = rcParams['camera']['viewup']
    position = vector + np.array(focal_point)
    self.camera_position = [position, focal_point, viewup]
    return self.reset_camera()
def coinc ( self , s0 , s1 , slide , step ) : # pylint : disable = unused - argument """Calculate the final coinc ranking statistic"""
# Approximate log likelihood ratio by summing single - ifo negative # log noise likelihoods loglr = - s0 - s1 # add squares of threshold stat values via idealized Gaussian formula threshes = [ self . fits_by_tid [ i ] [ 'thresh' ] for i in self . ifos ] loglr += sum ( [ t ** 2. / 2. for t in threshes ] ) # convert back...
def threshold_monitor_hidden_threshold_monitor_Memory_high_limit ( self , ** kwargs ) : """Auto Generated Code"""
config = ET . Element ( "config" ) threshold_monitor_hidden = ET . SubElement ( config , "threshold-monitor-hidden" , xmlns = "urn:brocade.com:mgmt:brocade-threshold-monitor" ) threshold_monitor = ET . SubElement ( threshold_monitor_hidden , "threshold-monitor" ) Memory = ET . SubElement ( threshold_monitor , "Memory" ...
def cbc_mac(key, data):
    """AES-CBC-MAC over *data* as used in ProcessData.

    Does not use padding (data has to be already padded). The IV is a
    zero block, and the MAC is the final 16-byte cipher block.

    :param key: AES key bytes
    :param data: padded message bytes
    :return: 16-byte MAC
    """
    cipher = AES.new(key, AES.MODE_CBC, get_zero_vector(16))
    ciphertext = cipher.encrypt(data)
    return ciphertext[-16:]
def parse_grasp_gwas ( fn ) : """Read GRASP database and filter for unique hits . Parameters fn : str Path to ( subset of ) GRASP database . Returns df : pandas . DataFrame Pandas dataframe with de - duplicated , significant SNPs . The index is of the form chrom : pos where pos is the one - based posi...
df = pd . read_table ( fn , low_memory = False ) df = df [ df . Pvalue < 1e-5 ] df = df . sort ( columns = [ 'chr(hg19)' , 'pos(hg19)' , 'Pvalue' ] ) df = df . drop_duplicates ( subset = [ 'chr(hg19)' , 'pos(hg19)' ] ) df = df [ df . Pvalue < 1e-5 ] df [ 'chrom' ] = 'chr' + df [ 'chr(hg19)' ] . astype ( str ) df [ 'end...
def filterStack(self, filters):
    """Filter the ObjectGraph in-place by removing all edges to nodes
    that do not match every filter in the given filter list.

    Returns a tuple containing the number of:
    (nodes_visited, nodes_removed, nodes_orphaned)
    """
    visited_nodes, removed_nodes, orphaned_pairs = filter_stack(
        self.graph, self, filters)
    # re-attach each orphan to its last surviving ancestor
    for last_good, tail in orphaned_pairs:
        self.graph.add_edge(last_good, tail, edge_data='orphan')
    for node in removed_nodes:
        self.graph.hide_node(node)
    return (len(visited_nodes) - 1, len(removed_nodes), len(orphaned_pairs))
def _is_path ( instance , attribute , s , exists = True ) : "Validator for path - yness"
if not s : # allow False as a default return if exists : if os . path . exists ( s ) : return else : raise OSError ( "path does not exist" ) else : # how do we tell if it ' s a path if it doesn ' t exist ? raise TypeError ( "Not a path?" )