signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def topic_exists(name, region=None, key=None, keyid=None, profile=None):
    '''
    Check to see if an SNS topic exists.

    CLI example::

        salt myminion boto3_sns.topic_exists mytopic region=us-east-1
    '''
    topics = list_topics(region=region, key=key, keyid=keyid, profile=profile)
    # Accept a match against either side of the mapping (short name or ARN).
    # Bug fix: dict views cannot be concatenated with `+` on Python 3, so
    # materialize each view to a list before joining them.
    return name in list(topics.values()) + list(topics.keys())
def convert_bidirectional ( builder , layer , input_names , output_names , keras_layer ) : """Convert a bidirectional layer from keras to coreml . Currently assumes the units are LSTMs . Parameters keras _ layer : layer A keras layer object . builder : NeuralNetworkBuilder A neural network builder objec...
input_size = keras_layer . input_shape [ - 1 ] lstm_layer = keras_layer . forward_layer if ( type ( lstm_layer ) != keras . layers . recurrent . LSTM ) : raise TypeError ( 'Bidirectional layers only supported with LSTM' ) if lstm_layer . go_backwards : raise TypeError ( ' \'go_backwards\' mode not supported wit...
def put(self, key, value, ttl=-1):
    """Associate *value* with *key* in this map.

    If the map previously contained a mapping for the key, the old value
    is replaced by the specified value. If *ttl* is provided, the entry
    will expire and get evicted after the ttl.

    :param key: Key to store under (must not be None).
    :param value: Value to store (must not be None).
    :param ttl: Time-to-live for the entry; -1 presumably means no
        expiry — confirm against the map's configuration docs.
    :return: Result of the internal put operation.
    """
    check_not_none(key, "key can't be None")
    check_not_none(value, "value can't be None")
    # Serialize both sides before handing off to the wire-level call.
    serialized_key = self._to_data(key)
    serialized_value = self._to_data(value)
    return self._put_internal(serialized_key, serialized_value, ttl)
def _jacobian_both(nodes, degree, dimension):
    r"""Compute the :math:`s`- and :math:`t`-partials of :math:`B`, stacked.

    .. note:: There is also a Fortran implementation of this function,
        which will be used if it can be built.

    Args:
        nodes (numpy.ndarray): Array of nodes in a surface.
        degree (int): Degree of the surface.
        dimension (int): Dimension the surface lives in.

    Returns:
        numpy.ndarray: Fortran-ordered array holding the ``s``-partial in
        the first ``dimension`` rows and the ``t``-partial in the rest.
    """
    num_nodes = nodes.shape[1]
    num_cols = num_nodes - degree - 1
    # Fortran order so the array can interoperate with the Fortran path.
    stacked = np.empty((2 * dimension, num_cols), order="F")
    stacked[:dimension, :] = jacobian_s(nodes, degree, dimension)
    stacked[dimension:, :] = jacobian_t(nodes, degree, dimension)
    return stacked
def make_exporter_resources ( nb_name , out_folder , images_folder = None ) : """Creates resources dict for the exporter"""
resources = defaultdict ( str ) resources [ 'metadata' ] = defaultdict ( str ) resources [ 'metadata' ] [ 'name' ] = nb_name resources [ 'metadata' ] [ 'path' ] = out_folder # This results in images like AB _ 5_1 . png for a notebook called AB . ipynb resources [ 'unique_key' ] = nb_name resources [ 'output_files_dir' ...
def handle_comm_opened ( comm , msg ) : """Static method , called when a widget is constructed ."""
version = msg . get ( 'metadata' , { } ) . get ( 'version' , '' ) if version . split ( '.' ) [ 0 ] != PROTOCOL_VERSION_MAJOR : raise ValueError ( "Incompatible widget protocol versions: received version %r, expected version %r" % ( version , __protocol_version__ ) ) data = msg [ 'content' ] [ 'data' ] state = data ...
def sgd_entropic_regularization ( a , b , M , reg , batch_size , numItermax , lr ) : '''Compute the sgd algorithm to solve the regularized discrete measures optimal transport dual problem The function solves the following optimization problem : . . math : : \gamma = arg\min_\gamma <\gamma,M>_F + reg\cdot\Om...
n_source = np . shape ( M ) [ 0 ] n_target = np . shape ( M ) [ 1 ] cur_alpha = np . zeros ( n_source ) cur_beta = np . zeros ( n_target ) for cur_iter in range ( numItermax ) : k = np . sqrt ( cur_iter + 1 ) batch_alpha = np . random . choice ( n_source , batch_size , replace = False ) batch_beta = np . ra...
def generate_threshold_mask(hist):
    '''Build a boolean mask flagging elements equal to 0.0 or greater
    than ten times the median.

    Parameters
    ----------
    hist : array_like
        Input data.

    Returns
    -------
    numpy.ndarray
        Boolean mask array (True where the element is masked).
    '''
    threshold = 10 * np.ma.median(hist)
    masked = np.ma.masked_greater(np.ma.masked_values(hist, 0), threshold)
    logging.info('Masking %d pixel(s)', np.ma.count_masked(masked))
    return np.ma.getmaskarray(masked)
from typing import List


def collect_prefixes(word: str) -> List[str]:
    """Return every prefix of *word*, ordered from shortest to longest.

    Args:
        word (str): The string whose prefixes are generated.

    Returns:
        List[str]: All prefixes, from length 1 up to ``len(word)``.
    """
    return [word[:length] for length in range(1, len(word) + 1)]
def get_complexes ( self ) : """Extract INDRA Complex Statements ."""
qstr = "$.events.frames[@.type is 'complex-assembly']" res = self . tree . execute ( qstr ) if res is None : return for r in res : epistemics = self . _get_epistemics ( r ) if epistemics . get ( 'negated' ) : continue # Due to an issue with the REACH output serialization # ( though seemingly...
def _get_right ( cls ) : # type : ( _ MetaRule ) - > List [ object ] """Get right part of the rule . : param cls : Rule for which return the right side . : return : Symbols on the right side of the array . : raise RuleNotDefinedException : If the rule is not defined . : raise CantCreateSingleRuleException :...
if cls . _traverse : return [ cls . toSymbol ] if len ( cls . rules ) > 1 : raise CantCreateSingleRuleException ( cls ) return cls . rules [ 0 ] [ 1 ]
def get_availability_zone ( ) : """Gets the AWS Availability Zone ID for this system : return : ( str ) Availability Zone ID where this system lives"""
log = logging . getLogger ( mod_logger + '.get_availability_zone' ) # Exit if not running on AWS if not is_aws ( ) : log . info ( 'This machine is not running in AWS, exiting...' ) return availability_zone_url = metadata_url + 'placement/availability-zone' try : response = urllib . urlopen ( availability_zo...
def run_mace_smothr(x, y, bass_enhancement=0.0):  # pylint: disable=unused-argument
    """Run the FORTRAN SMOTHR routine on the (x, y) samples.

    :param x: Abscissa values.
    :param y: Ordinate values to smooth.
    :param bass_enhancement: Accepted for API symmetry; not used by SMOTHR.
    :return: numpy array of smoothed values, same length as ``x``.
    """
    n_points = len(x)
    smoothed = numpy.zeros(n_points)
    # SMOTHR fills `smoothed` in place; unit weights and a scratch flag
    # array are supplied but their contents are discarded afterwards.
    mace.smothr(1, x, y, numpy.ones(n_points), smoothed,
                numpy.zeros((n_points, 7)))
    return smoothed
def parse_key ( string ) : """Split a key into path elements : - a . b . c = > a , b , c - a . " b . c " = > a , QuotedKey ( " b . c " ) if . is any of the special characters : $ } [ ] : = + # ` ^ ? ! @ * & . - " a " = > a - a . b . " c " = > a , b , c ( special case ) : param str : : return :"""
special_characters = '$}[]:=+#`^?!@*&.' tokens = re . findall ( r'"[^"]+"|[^{special_characters}]+' . format ( special_characters = re . escape ( special_characters ) ) , string ) def contains_special_character ( token ) : return any ( ( c in special_characters ) for c in token ) return [ token if contains_special_...
def x(self, x):
    """Project a data value onto the horizontal axis (log scale).

    Returns None for ``None``/non-positive inputs or a degenerate
    (zero-width) log range.
    """
    if x is None or x <= 0:
        return None
    span = self.log10_xmax - self.log10_xmin
    if span == 0:
        return None
    return self.width * (log10(x) - self.log10_xmin) / span
def set_values(self, values):
    """Store the data points; expects a list of 2-tuples.

    Side effects: sets ``self.values``, recomputes ``self.height``
    (14 units per row) and caches the largest second element in
    ``self._max`` (zero timedelta when *values* is empty).
    """
    self.values = values
    # One 14-unit row per record.
    self.height = 14 * len(values)
    if values:
        self._max = max(record[1] for record in values)
    else:
        self._max = dt.timedelta(0)
def _set_arrayorder ( obj , arrayorder = 'C' ) : """Set the memory order of all np . ndarrays in a tofu object"""
msg = "Arg arrayorder must be in ['C','F']" assert arrayorder in [ 'C' , 'F' ] , msg d = obj . to_dict ( strip = - 1 ) account = { 'Success' : [ ] , 'Failed' : [ ] } for k , v in d . items ( ) : if type ( v ) is np . array and v . ndim > 1 : try : if arrayorder == 'C' : d [ k ] =...
def _classify_arithmetic_load ( self , regs_init , regs_fini , mem_fini , written_regs , read_regs ) : """Classify arithmetic - load gadgets ."""
matches = [ ] # Check for " dst _ reg < - dst _ reg OP mem [ src _ reg + offset ] " pattern . for op_name , op_fn in self . _binary_ops . items ( ) : for dst_reg , dst_val in regs_fini . items ( ) : # Make sure the * dst * register was read and written . if dst_reg not in written_regs or dst_reg not in read...
def _evalString ( self , datetimeString , sourceTime = None ) : """Calculate the datetime based on flags set by the L { parse ( ) } routine Examples handled : : RFC822 , W3CDTF formatted dates HH : MM [ : SS ] [ am / pm ] MM / DD / YYYY DD MMMM YYYY @ type datetimeString : string @ param datetimeStrin...
s = datetimeString . strip ( ) now = time . localtime ( ) # Given string date is a RFC822 date if sourceTime is None : sourceTime = _parse_date_rfc822 ( s ) if sourceTime is not None : ( yr , mth , dy , hr , mn , sec , wd , yd , isdst , _ ) = sourceTime self . dateFlag = 1 if ( hr != 0 )...
def copy_params_from ( self , arg_params , aux_params = None , allow_extra_params = False ) : """Copy parameters from arg _ params , aux _ params into executor ' s internal array . Parameters arg _ params : dict of str to NDArray Parameters , dict of name to NDArray of arguments . aux _ params : dict of str...
for name , array in arg_params . items ( ) : if name in self . arg_dict : dst = self . arg_dict [ name ] array . astype ( dst . dtype ) . copyto ( dst ) elif not allow_extra_params : raise ValueError ( 'Find name \"%s\" that is not in the arguments' % name ) if aux_params is None : r...
def zeroize ( ) : '''Resets the device to default factory settings CLI Example : . . code - block : : bash salt ' device _ name ' junos . zeroize'''
conn = __proxy__ [ 'junos.conn' ] ( ) ret = { } ret [ 'out' ] = True try : conn . cli ( 'request system zeroize' ) ret [ 'message' ] = 'Completed zeroize and rebooted' except Exception as exception : ret [ 'message' ] = 'Could not zeroize due to : "{0}"' . format ( exception ) ret [ 'out' ] = False retu...
def index(self):
    '''Render the site index page.'''
    # Assemble the template context in evaluation order and render.
    self.render(
        'index/index.html',
        userinfo=self.userinfo,
        catalog_info=MCategory.query_all(by_order=True),
        link=MLink.query_all(),
        cfg=CMS_CFG,
        view=MPost.query_most_pic(20),
        kwd={},
    )
def close(self):
    """Close the connection to the server, ignoring socket errors."""
    sock = self._socket
    # Best-effort goodbye, then release the descriptor; the server may
    # already be gone, so socket errors are swallowed for both steps.
    for action in (lambda: sock.sendall('quit\r\n'), sock.close):
        try:
            action()
        except socket.error:
            pass
def get(self, session, fields=None):
    '''taobao.fenxiao.productcats.get — query the product-line list.

    Fetches all product-line data for the supplier; the supplier is
    identified by the logged-in user, so no other input is required.

    :param session: API session token.
    :param fields: Optional set of result fields to request.
    :return: The populated ``productcats`` attribute.
    '''
    req = TOPRequest('taobao.fenxiao.productcats.get')
    if fields:
        req['fields'] = fields
    response = self.execute(req, session)
    self.create(response)
    return self.productcats
def overlaps(self, other: "Interval") -> bool:
    """Return True if this interval and *other* share any span.

    Touching endpoints (``self.end == other.start``) do NOT count as
    overlap.
    """
    # Overlap iff each interval starts strictly before the other ends
    # (De Morgan of the non-overlap test).
    return other.start < self.end and self.start < other.end
def find_bidi ( self , el ) : """Get directionality from element text ."""
for node in self . get_children ( el , tags = False ) : # Analyze child text nodes if self . is_tag ( node ) : # Avoid analyzing certain elements specified in the specification . direction = DIR_MAP . get ( util . lower ( self . get_attribute_by_name ( node , 'dir' , '' ) ) , None ) if ( self . get_...
def on_close(self, filename):
    """Move *filename* into the destination folder.

    :param filename: Path of the file that was just closed.
    :return: The file's new path inside ``self.destination_folder``.
    """
    destination = self.destination_folder
    shutil.move(filename, destination)
    basename = os.path.basename(filename)
    return os.path.join(destination, basename)
def get_molecule_hash(self, mol):
    """Return the InChI string of *mol*, used as its molecular hash."""
    openbabel_mol = BabelMolAdaptor(mol).openbabel_mol
    # _inchi_labels returns a tuple whose third element is the InChI.
    return self._inchi_labels(openbabel_mol)[2]
def formatchecker_factory(**checkerdict):
    """Build a ``FormatChecker`` from ``name -> checker`` keyword args.

    Each keyword name is registered as a format with its callable checker.
    """
    checker_obj = FormatChecker()
    for fmt_name, fmt_checker in checkerdict.items():
        # FormatChecker.checks(name) returns a registering decorator.
        checker_obj.checks(fmt_name)(fmt_checker)
    return checker_obj
def local_score ( self , variable , parents ) : "Computes a score that measures how much a given variable is \" influenced \" by a given list of potential parents ."
var_states = self . state_names [ variable ] var_cardinality = len ( var_states ) state_counts = self . state_counts ( variable , parents ) sample_size = len ( self . data ) num_parents_states = float ( len ( state_counts . columns ) ) score = 0 for parents_state in state_counts : # iterate over df columns ( only 1 if ...
def match_ancestor_bank_id(self, bank_id, match):
    """Match banks for which the specified bank is an ancestor.

    arg: bank_id (osid.id.Id): a bank Id
    arg: match (boolean): true for a positive match, false for a
        negative match

    NOTE(review): ``match`` is accepted for interface parity but is not
    consulted here — only a positive match is expressed. Confirm against
    the query-term conventions used elsewhere in this package.
    """
    # A bank matches when bank_id is its ancestor, i.e. when the bank is
    # among bank_id's descendants.
    descendant_ids = [
        ObjectId(catalog_id.identifier)
        for catalog_id in self._get_descendant_catalog_ids(bank_id)
    ]
    self._query_terms['_id'] = {'$in': descendant_ids}
def add_user(self, users=None, groups=None):
    """Add the specified users or groups to this project role.

    One of ``users`` or ``groups`` must be specified.

    :param users: a user or users to add to the role
    :type users: string, list or tuple
    :param groups: a group or groups to add to the role
    :type groups: string, list or tuple
    """
    if users is not None and isinstance(users, string_types):
        users = (users,)
    if groups is not None and isinstance(groups, string_types):
        groups = (groups,)
    # Bug fix: only {'user': users} was posted before, so the `groups`
    # argument was normalized and then silently discarded. Include each
    # actor type that was actually supplied.
    data = {}
    if users is not None:
        data['user'] = users
    if groups is not None:
        data['group'] = groups
    self._session.post(self.self, data=json.dumps(data))
def LastOf(*subcons):
    """Create an adapter which uses only the last construct.

    If the first argument is a string it becomes the sequence name;
    otherwise the name defaults to "seq".
    """
    if isinstance(subcons[0], six.string_types):
        name, subcons = subcons[0], subcons[1:]
    else:
        name = "seq"
    # Index -1 selects the final parsed element of the sequence.
    return IndexingAdapter(Sequence(name, *subcons), -1)
def get_media_url(request, media_id):
    """Return an HTTP response whose body is the media object's URL name."""
    media_obj = Media.objects.get(id=media_id)
    return HttpResponse(media_obj.url.name)
def _process_templatedata ( self , node , ** _ ) : """Processes a ` TemplateData ` node , this is just a bit of as - is text to be written to the output ."""
# escape double quotes value = re . sub ( '"' , r'\\"' , node . data ) # escape new lines value = re . sub ( '\n' , r'\\n' , value ) # append value to the result self . output . write ( '__result += "' + value + '";' )
def config(filename):
    """Construct ``Config`` records from a YAML file and return a list.

    :parse filename: A string containing the path to YAML file.
    :return: list of ``Config`` namedtuples, one per configuration entry.
    """
    field_names = ('git', 'lock_file', 'version', 'name',
                   'src', 'dst', 'files', 'post_commands')
    Config = collections.namedtuple('Config', list(field_names))
    return [Config(**entry) for entry in _get_config_generator(filename)]
def common_elements(list1, list2):
    """Return the elements of *list2* that also appear in *list1*.

    Order and duplicates follow *list2*.

    Examples:
        common_elements([1, 2, 3, 5, 7, 8, 9, 10], [1, 2, 4, 8, 9]) -> [1, 2, 8, 9]
    """
    return [element for element in list2 if element in list1]
def _on_mouse_moved ( self , event ) : """mouse moved callback"""
if event . modifiers ( ) & QtCore . Qt . ControlModifier : cursor = TextHelper ( self . editor ) . word_under_mouse_cursor ( ) if ( not self . _cursor or cursor . position ( ) != self . _cursor . position ( ) ) : self . _check_word_cursor ( cursor ) self . _cursor = cursor else : self . _cursor ...
def get_child_objectives(self, objective_id=None):
    """Get the children of the given objective.

    arg: objective_id (osid.id.Id): the Id to query
    return: (osid.learning.ObjectiveList) - the children of the objective
    raise: NullArgument - ``objective_id`` is None
    """
    if objective_id is None:
        raise NullArgument()
    url_path = construct_url('children',
                             bank_id=self._catalog_idstr,
                             obj_id=objective_id)
    response = self._get_request(url_path)
    return objects.ObjectiveList(response)
def __normalize_list ( self , msg ) : """Split message to list by commas and trim whitespace ."""
if isinstance ( msg , list ) : msg = "" . join ( msg ) return list ( map ( lambda x : x . strip ( ) , msg . split ( "," ) ) )
def __pull_image_info ( self , title , imageinfo , normalized ) : """Pull image INFO from API response and insert"""
for info in imageinfo : info . update ( { 'title' : title } ) # get API normalized " from " filename for matching _from = None for norm in normalized : if title == norm [ 'to' ] : _from = norm [ 'from' ] # let ' s put all " metadata " in one member info [ 'metadata' ] = { } ...
def write_line_shp ( line_list , out_shp ) : """Export ESRI Shapefile - - Line feature"""
print ( 'Write line shapefile: %s' % out_shp ) driver = ogr_GetDriverByName ( str ( 'ESRI Shapefile' ) ) if driver is None : print ( 'ESRI Shapefile driver not available.' ) sys . exit ( 1 ) if os . path . exists ( out_shp ) : driver . DeleteDataSource ( out_shp ) ds = driver . CreateDataSource ( out_shp . ...
def finalize ( self , ** kwargs ) : """Finalize the drawing by adding a title and legend , and removing the axes objects that do not convey information about TNSE ."""
self . set_title ( "TSNE Projection of {} Documents" . format ( self . n_instances_ ) ) # Remove the ticks self . ax . set_yticks ( [ ] ) self . ax . set_xticks ( [ ] ) # Add the legend outside of the figure box . if not all ( self . classes_ == np . array ( [ self . NULL_CLASS ] ) ) : box = self . ax . get_positio...
def onCallStarted(self, mid=None, caller_id=None, is_video_call=None,
                  thread_id=None, thread_type=None, ts=None,
                  metadata=None, msg=None):
    """Called when the client is listening and somebody starts a call
    in a group.

    .. todo:: Make this work with private calls.
    """
    thread_kind = thread_type.name
    log.info("{} started call in {} ({})".format(caller_id, thread_id,
                                                 thread_kind))
def create_from_json ( cls , json_str , ignore_non_defaults = True ) : """Creates a database object from a json object . The intent of this method is to allow creating a database object directly from json . Mongolia will also automatically convert any json values that are formatted using the MongoliaJSONEncod...
create_dict = json . loads ( json_str , cls = MongoliaJSONDecoder , encoding = "utf-8" ) # Remove all keys not in DEFAULTS if ignore _ non _ defaults is True if cls . DEFAULTS and ignore_non_defaults : for key in frozenset ( create_dict ) . difference ( frozenset ( cls . DEFAULTS ) ) : del create_dict [ key...
def is_value_in(constants_group, value):
    """Check whether *value* appears in a Django-like choices tuple.

    :param constants_group: iterable of ``(value, label)`` pairs.
    :param value: value to look for.
    :return: True if any pair's first element equals *value*.
    """
    return any(candidate == value for candidate, _label in constants_group)
def destroy_comment(self, access_token, comment_id):
    """Delete a comment.

    doc: http://open.youku.com/docs/doc?id=42

    :param access_token: OAuth access token.
    :param comment_id: id of the comment to delete.
    :return: id of the destroyed comment, from the API response.
    """
    url = 'https://openapi.youku.com/v2/comments/destroy.json'
    payload = {
        'client_id': self.client_id,
        'access_token': access_token,
        'comment_id': comment_id,
    }
    response = requests.post(url, data=payload)
    check_error(response)
    return response.json()['id']
def update_value(self, id_number, new_value, metadata=None):
    """Update a canned value.

    :type id_number: int
    :param id_number: canned value ID number
    :type new_value: str
    :param new_value: new canned value
    :type metadata: str
    :param metadata: optional metadata
    :rtype: dict
    :return: the response of the 'updateValue' POST.
    """
    payload = {'id': id_number, 'new_value': new_value}
    if metadata is not None:
        payload['metadata'] = metadata
    return self.post('updateValue', payload)
def partition_master ( incoming = True , outgoing = True ) : """Partition master ' s port alone . To keep DC / OS cluster running . : param incoming : Partition incoming traffic to master process . Default True . : param outgoing : Partition outgoing traffic from master process . Default True ."""
echo ( 'Partitioning master. Incoming:{} | Outgoing:{}' . format ( incoming , outgoing ) ) network . save_iptables ( shakedown . master_ip ( ) ) network . flush_all_rules ( shakedown . master_ip ( ) ) network . allow_all_traffic ( shakedown . master_ip ( ) ) if incoming and outgoing : network . run_iptables ( shake...
def _install_maya ( use_threaded_wrapper ) : """Helper function to Autodesk Maya support"""
from maya import utils , cmds def threaded_wrapper ( func , * args , ** kwargs ) : return utils . executeInMainThreadWithResult ( func , * args , ** kwargs ) sys . stdout . write ( "Setting up Pyblish QML in Maya\n" ) if cmds . about ( version = True ) == "2018" : _remove_googleapiclient ( ) _common_setup ( "Ma...
def _find_own_cgroups():
    """Yield ``(subsystem, cgroup)`` pairs describing which (sub-)cgroup
    this process is in, for every hierarchy.

    (Each process is in exactly one cgroup in each hierarchy.)
    Reads ``/proc/self/cgroup``; on IOError the error is logged and the
    generator simply ends.
    """
    try:
        with open('/proc/self/cgroup', 'rt') as own_cgroups_file:
            yield from _parse_proc_pid_cgroup(own_cgroups_file)
    except IOError:
        logging.exception('Cannot read /proc/self/cgroup')
def find_in_mailbox(cls, session, mailbox_or_id):
    """Get the users that are associated to a Mailbox.

    Args:
        session (requests.sessions.Session): Authenticated session.
        mailbox_or_id (MailboxRef or int): Mailbox, or the ID of the
            mailbox, to get the users for.

    Returns:
        RequestPaginator of users.
    """
    # Accept either a mailbox object (has .id) or a bare integer id.
    mailbox_id = getattr(mailbox_or_id, 'id', mailbox_or_id)
    return cls('/mailboxes/%d/users.json' % mailbox_id, session=session)
def matrix_rank(model):
    """Return the rank of the model's stoichiometric matrix.

    Parameters
    ----------
    model : cobra.Model
        The metabolic model under investigation.
    """
    # stoichiometry_matrix returns a 3-tuple; only the matrix is needed.
    stoichiometry = con_helpers.stoichiometry_matrix(
        model.metabolites, model.reactions
    )[0]
    return con_helpers.rank(stoichiometry)
def calc_bin(self, _bin=None):
    """Return the smallest UCSC genomic bin that will contain this feature.

    If *_bin* is already provided it is returned unchanged; otherwise it
    is computed from ``self.start``/``self.end`` (None when the
    coordinates are not usable numbers).
    """
    if _bin is not None:
        return _bin
    try:
        return bins.bins(self.start, self.end, one=True)
    except TypeError:
        # start/end not numeric (e.g. None) — no bin can be computed.
        return None
def retrieve_mime(self):
    """Check the mime-type to download.

    Reads the 'mime' config entry (default 'audio') and hands it to the
    download parser in the dict shape it expects.
    """
    mime = self.retrieve_config('mime', 'audio')
    # parse_for_download expects a {"number": <mime>} mapping.
    return aux.parse_for_download({"number": mime})
def add_group ( self , group_attribs = None , parent = None ) : """Add an empty group element to the SVG ."""
if parent is None : parent = self . tree . getroot ( ) elif not self . contains_group ( parent ) : warnings . warn ( 'The requested group {0} does not belong to ' 'this Document' . format ( parent ) ) if group_attribs is None : group_attribs = { } else : group_attribs = group_attribs . copy ( ) return S...
def import_from_grammar_into_namespace ( grammar , namespace , aliases ) : """Returns all rules and terminals of grammar , prepended with a ' namespace ' prefix , except for those which are aliased ."""
imported_terms = dict ( grammar . term_defs ) imported_rules = { n : ( n , deepcopy ( t ) , o ) for n , t , o in grammar . rule_defs } term_defs = [ ] rule_defs = [ ] def rule_dependencies ( symbol ) : if symbol . type != 'RULE' : return [ ] try : _ , tree , _ = imported_rules [ symbol ] exc...
def getid ( self , language = None , version = None ) : """Return an identification string which uniquely names a manifest . This string is a combination of the manifest ' s processorArchitecture , name , publicKeyToken , version and language . Arguments : version ( tuple or list of integers ) - If version ...
if not self . name : logger . warn ( "Assembly metadata incomplete" ) return "" id = [ ] if self . processorArchitecture : id . append ( self . processorArchitecture ) id . append ( self . name ) if self . publicKeyToken : id . append ( self . publicKeyToken ) if version or self . version : id . app...
def _check_seismogenic_depths ( self , upper_depth , lower_depth ) : '''Checks the seismic depths for physical consistency : param float upper _ depth : Upper seismogenic depth ( km ) : param float lower _ depth : Lower seismogenis depth ( km )'''
# Simple check on depths if upper_depth : if upper_depth < 0. : raise ValueError ( 'Upper seismogenic depth must be greater than' ' or equal to 0.0!' ) else : self . upper_depth = upper_depth else : self . upper_depth = 0.0 if lower_depth : if lower_depth < self . upper_depth : r...
def setReplicationPolicyResponse(self, pid, policy, serialVersion,
                                 vendorSpecific=None):
    """CNReplication.setReplicationPolicy(session, pid, policy,
    serialVersion) → boolean.

    https://releases.dataone.org/online/api-documentation-v2.0.1/apis/CN_APIs.html

    Serializes the policy as an XML multipart field and issues the PUT,
    returning the raw response.
    """
    fields = {
        'policy': ('policy.xml', policy.toxml('utf-8')),
        'serialVersion': str(serialVersion),
    }
    return self.PUT(['replicaPolicies', pid], fields=fields,
                    headers=vendorSpecific)
def getOntologyByName(self, name):
    """Return the ontology registered under *name*.

    :raise exceptions.OntologyNameNotFoundException: if no ontology with
        that name is known.
    """
    try:
        return self._ontologyNameMap[name]
    except KeyError:
        raise exceptions.OntologyNameNotFoundException(name)
def from_qs(cls, qs, **kwargs):
    """Creates a new queryset using class `cls` using `qs'` data.

    :param cls: The QuerySet subclass to produce.
    :param qs: The query set to clone.
    :keyword kwargs: The kwargs to pass to the _clone method.
    :return: A clone of *qs* whose class is *cls*.
    """
    # NOTE(review): assert is stripped under `python -O`, so these
    # checks are advisory rather than hard validation.
    assert issubclass(cls, QuerySet), "%s is not a QuerySet subclass" % cls
    assert isinstance(qs, QuerySet), "qs has to be an instance of queryset"
    # _clone(klass=...) swaps the queryset class while keeping query state.
    return qs._clone(klass=cls, **kwargs)
def convert_gru ( builder , layer , input_names , output_names , keras_layer ) : """Convert a GRU layer from keras to coreml . Parameters keras _ layer : layer A keras layer object . builder : NeuralNetworkBuilder A neural network builder object ."""
hidden_size = keras_layer . output_dim input_size = keras_layer . input_shape [ - 1 ] output_all = keras_layer . return_sequences reverse_input = keras_layer . go_backwards if keras_layer . consume_less not in [ 'cpu' , 'gpu' ] : raise ValueError ( 'Cannot convert Keras layer with consume_less = %s' % keras_layer ....
def _g_3 ( self ) : """omega3 < omega < omega4"""
# return 3 * ( 1.0 - self . _ n _ 3 ( ) ) / ( self . _ vertices _ omegas [ 3 ] - self . _ omega ) return ( 3 * self . _f ( 1 , 3 ) * self . _f ( 2 , 3 ) / ( self . _vertices_omegas [ 3 ] - self . _vertices_omegas [ 0 ] ) )
def list_accounts_add ( self , id , account_ids ) : """Add the account ( s ) given in ` account _ ids ` to the list ."""
id = self . __unpack_id ( id ) if not isinstance ( account_ids , list ) : account_ids = [ account_ids ] account_ids = list ( map ( lambda x : self . __unpack_id ( x ) , account_ids ) ) params = self . __generate_params ( locals ( ) , [ 'id' ] ) self . __api_request ( 'POST' , '/api/v1/lists/{0}/accounts' . format (...
def main ( args = None ) : """Entry point for ` fuel - convert ` script . This function can also be imported and used from Python . Parameters args : iterable , optional ( default : None ) A list of arguments that will be passed to Fuel ' s conversion utility . If this argument is not specified , ` sys . ...
built_in_datasets = dict ( converters . all_converters ) if fuel . config . extra_converters : for name in fuel . config . extra_converters : extra_datasets = dict ( importlib . import_module ( name ) . all_converters ) if any ( key in built_in_datasets for key in extra_datasets . keys ( ) ) : ...
def A ( self ) : """Spectral VAR coefficients . . . math : : \ mathbf { A } ( f ) = \ mathbf { I } - \ sum _ { k = 1 } ^ { p } \ mathbf { a } ^ { ( k ) } \mathrm{e}^{-2\pi f}"""
return fft ( np . dstack ( [ np . eye ( self . m ) , - self . b ] ) , self . nfft * 2 - 1 ) [ : , : , : self . nfft ]
def get_application_instance():
    """Return the current QApplication instance, creating one if it
    doesn't exist yet.

    :return: Application instance.
    :rtype: QApplication
    """
    app = QApplication.instance()
    return app if app else QApplication(sys.argv)
def process_input ( input , output = None , ivmlist = None , updatewcs = True , prodonly = False , wcskey = None , ** workinplace ) : """Create the full input list of filenames after verifying and converting files as needed ."""
newfilelist , ivmlist , output , oldasndict , origflist = buildFileListOrig ( input , output = output , ivmlist = ivmlist , wcskey = wcskey , updatewcs = updatewcs , ** workinplace ) if not newfilelist : buildEmptyDRZ ( input , output ) return None , None , output # run all WCS updating - - Now done in buildFil...
def authenticate ( func , c , expose_request = False ) : """A decorator that facilitates authentication per method . Setting C { expose _ request } to C { True } will set the underlying request object ( if there is one ) , usually HTTP and set it to the first argument of the authenticating callable . If there...
if not python . callable ( func ) : raise TypeError ( 'func must be callable' ) if not python . callable ( c ) : raise TypeError ( 'Authenticator must be callable' ) attr = func if isinstance ( func , types . UnboundMethodType ) : attr = func . im_func if expose_request is True : c = globals ( ) [ 'expo...
def run_migrations_online ( ) : """Run migrations in ' online ' mode . In this scenario we need to create an Engine and associate a connection with the context ."""
from uliweb . manage import make_simple_application from uliweb import orm , settings # engine = engine _ from _ config ( # config . get _ section ( config . config _ ini _ section ) , # prefix = ' sqlalchemy . ' , # poolclass = pool . NullPool ) name = config . get_main_option ( "engine_name" ) make_simple_application...
def create_node ( self , name , project_id , node_id , * args , ** kwargs ) : """Create a new node : param name : Node name : param project _ id : Project identifier : param node _ id : restore a node identifier"""
if node_id in self . _nodes : return self . _nodes [ node_id ] project = ProjectManager . instance ( ) . get_project ( project_id ) if node_id and isinstance ( node_id , int ) : # old project with ( yield from BaseManager . _convert_lock ) : node_id = yield from self . convert_old_project ( project , no...
def get_as_list(self, tag_name):
    """Return the value of a tag, always as a list.

    Absent tags give an empty list; a single (non-list) value is wrapped
    in a one-element list. The result is a copy, so modifications do not
    affect the original object.
    """
    value = self.get(tag_name, [])
    if isinstance(value, list):
        # list(...) makes the shallow copy the contract promises.
        return list(value)
    return [value]
def read_node_label_matrix ( file_path , separator , number_of_nodes ) : """Reads node - label pairs in csv format and returns a list of tuples and a node - label matrix . Inputs : - file _ path : The path where the node - label matrix is stored . - separator : The delimiter among values ( e . g . " , " , " \t ...
# Open file file_row_generator = get_file_row_generator ( file_path , separator ) # Initialize lists for row and column sparse matrix arguments row = list ( ) col = list ( ) append_row = row . append append_col = col . append # Populate the arrays for file_row in file_row_generator : node = np . int64 ( file_row [ ...
def get_label ( self , label_id , request_data , project = None ) : """GetLabel . Get a single deep label . : param str label _ id : Unique identifier of label : param : class : ` < TfvcLabelRequestData > < azure . devops . v5_0 . tfvc . models . TfvcLabelRequestData > ` request _ data : maxItemCount : para...
route_values = { } if project is not None : route_values [ 'project' ] = self . _serialize . url ( 'project' , project , 'str' ) if label_id is not None : route_values [ 'labelId' ] = self . _serialize . url ( 'label_id' , label_id , 'str' ) query_parameters = { } if request_data is not None : if request_da...
def parse_at_element(self,
                     element,  # type: ET.Element
                     state     # type: _ProcessorState
                     ):
    # type: (...) -> Any
    """Parse *element* as an aggregate value.

    The wrapped dictionary processor parses the element into a dict,
    which is then converted to the target representation by the
    configured converter.
    """
    intermediate = self._dictionary.parse_at_element(element, state)
    return self._converter.from_dict(intermediate)
def ErrorMsg():
    """Return the currently handled exception as a formatted traceback string."""
    import traceback
    exc_type, exc_value, exc_tb = sys.exc_info()
    # Stack frames first, then the one-line exception description.
    pieces = traceback.format_tb(exc_tb, None)
    pieces += traceback.format_exception_only(exc_type, exc_value)
    return "Traceback (innermost last):\n" + "%-20s %s" % (
        " ".join(pieces[:-1]), pieces[-1])
def to_string ( self , buf = None , columns = None , col_space = None , header = True , index = True , na_rep = 'NaN' , formatters = None , float_format = None , sparsify = None , index_names = True , justify = None , max_rows = None , max_cols = None , show_dimensions = False , decimal = '.' , line_width = None ) : ...
formatter = fmt . DataFrameFormatter ( self , buf = buf , columns = columns , col_space = col_space , na_rep = na_rep , formatters = formatters , float_format = float_format , sparsify = sparsify , justify = justify , index_names = index_names , header = header , index = index , max_rows = max_rows , max_cols = max_col...
def has_local_job_refs ( io_hash ) : ''': param io _ hash : input / output hash : type io _ hash : dict : returns : boolean indicating whether any job - based object references are found in * io _ hash *'''
q = [ ] for field in io_hash : if is_job_ref ( io_hash [ field ] ) : if get_job_from_jbor ( io_hash [ field ] ) . startswith ( 'localjob' ) : return True elif isinstance ( io_hash [ field ] , list ) or isinstance ( io_hash [ field ] , dict ) : q . append ( io_hash [ field ] ) while l...
def _display_status ( normalized_data , stream ) : """print status message from docker - py stream ."""
if 'Pull complete' in normalized_data [ 'status' ] or 'Download complete' in normalized_data [ 'status' ] : stream . write ( "\n" ) if 'id' in normalized_data : stream . write ( "%s - " % normalized_data [ 'id' ] ) stream . write ( "{0}\n" . format ( normalized_data [ 'status' ] ) )
def get_sqlview ( self , uid , execute = False , var = None , criteria = None , merge = False ) : """GET SQL View data : param uid : sqlView UID : param execute : materialize sqlView before downloading its data : param var : for QUERY types , a dict of variables to query the sqlView : param criteria : for V...
params = { } sqlview_type = self . get ( 'sqlViews/{}' . format ( uid ) , params = { 'fields' : 'type' } ) . json ( ) . get ( 'type' ) if sqlview_type == 'QUERY' : if not isinstance ( var , dict ) : raise ClientException ( "Use a dict to submit variables: e.g. var={'key1': 'value1', 'key2': 'value2'}" ) ...
def get_most_distinct_words(vocab, topic_word_distrib, doc_topic_distrib, doc_lengths, n=None):
    """Order the words from `vocab` by "distinctiveness score" (Chuang et al.
    2012), from most to least distinctive.

    When `n` is given, only the `n` most distinctive words are returned.

    J. Chuang, C. Manning, J. Heer 2012: "Termite: Visualization Techniques
    for Assessing Textual Topic Models"
    """
    ranked = _words_by_distinctiveness_score(vocab, topic_word_distrib,
                                             doc_topic_distrib, doc_lengths, n)
    return ranked
def configure ( self , accountID , ** kwargs ) : """Set the client - configurable portions of an Account . Args : accountID : Account Identifier alias : Client - defined alias ( name ) for the Account marginRate : The string representation of a decimal number . Returns : v20 . response . Response ...
request = Request ( 'PATCH' , '/v3/accounts/{accountID}/configuration' ) request . set_path_param ( 'accountID' , accountID ) body = EntityDict ( ) if 'alias' in kwargs : body . set ( 'alias' , kwargs [ 'alias' ] ) if 'marginRate' in kwargs : body . set ( 'marginRate' , kwargs [ 'marginRate' ] ) request . set_b...
def wallet_destroy(self, wallet):
    """Destroy **wallet** and all the accounts it contains.

    .. enable_control required

    :param wallet: Wallet to destroy
    :type wallet: str
    :raises: :py:exc:`nano.rpc.RPCException`

    Returns True when the node replied with an empty response.
    """
    payload = {"wallet": self._process_value(wallet, 'wallet')}
    response = self.call('wallet_destroy', payload)
    return response == {}
def is_comment_deleted(comid):
    """Return True if the comment is deleted, else False.

    :param comid: ID of comment to check
    """
    query = """SELECT status from "cmtRECORDCOMMENT" WHERE id=%s"""
    rows = run_sql(query, (comid,))
    # Any status other than 'ok' counts as deleted; no row means not deleted.
    return bool(rows) and rows[0][0] != 'ok'
def milestones ( self ) : '''Array of all milestones'''
if self . cache [ 'milestones' ] : return self . cache [ 'milestones' ] milestone_xml = self . bc . list_milestones ( self . id ) milestones = [ ] for node in ET . fromstring ( milestone_xml ) . findall ( "milestone" ) : milestones . append ( Milestone ( node ) ) milestones . sort ( ) milestones . reverse ( ) s...
def serial_adapters ( self , serial_adapters ) : """Sets the number of Serial adapters for this IOU VM . : param serial _ adapters : number of adapters"""
self . _serial_adapters . clear ( ) for _ in range ( 0 , serial_adapters ) : self . _serial_adapters . append ( SerialAdapter ( interfaces = 4 ) ) log . info ( 'IOU "{name}" [{id}]: number of Serial adapters changed to {adapters}' . format ( name = self . _name , id = self . _id , adapters = len ( self . _serial_ad...
def transformation_get ( node_id ) : """Get all the transformations of a node . The node id must be specified in the url . You can also pass transformation _ type ."""
exp = Experiment ( session ) # get the parameters transformation_type = request_parameter ( parameter = "transformation_type" , parameter_type = "known_class" , default = models . Transformation , ) if type ( transformation_type ) == Response : return transformation_type # check the node exists node = models . Node...
def infer_dtypes ( fit , model = None ) : """Infer dtypes from Stan model code . Function strips out generated quantities block and searchs for ` int ` dtypes after stripping out comments inside the block ."""
pattern_remove_comments = re . compile ( r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"' , re . DOTALL | re . MULTILINE ) stan_integer = r"int" stan_limits = r"(?:\<[^\>]+\>)*" # ignore group : 0 or more < . . . . > stan_param = r"([^;=\s\[]+)" # capture group : ends = " ; " , " = " , " [ " or whitespace sta...
def query(self, event, pk, ts=None):
    """Query the last update timestamp of an event pk.

    You can pass a timestamp to only look for events later than that
    within the same namespace.

    :param event: the event name.
    :param pk: the pk value for query.
    :param ts: query event pk after ts, default None.
    :returns: the timestamp as an int, or None when *pk* is absent.
    """
    key = self._keygen(event, ts)
    pk_ts = self.r.zscore(key, pk)
    # ``zscore`` returns None for a missing member.  Test identity against
    # None (not truthiness) so that a legitimate stored score of 0.0 is
    # returned as 0 instead of being misreported as "not found".
    return None if pk_ts is None else int(pk_ts)
def t_php_STRING(t):
    # NOTE: this is a PLY lexer rule -- the docstring below IS the token's
    # regular expression and must not be edited as documentation.
    r'[A-Za-z_][\w_]*'
    # Identifiers and keywords share this rule: promote the token type when
    # the upper-cased lexeme is a reserved word, otherwise keep it a STRING.
    t.type = reserved_map.get(t.value.upper(), 'STRING')
    return t
def prepare_ec(oo, sizes, M):
    """Prepare EC input, converting contig ids in *oo* to positional indices.

    Returns the identity tour over the contigs, their sizes in tour order,
    and the score matrix *M* re-indexed into tour order.
    """
    contig_sizes = np.array([sizes.sizes[contig] for contig in oo])
    reordered_M = M[oo, :][:, oo]
    return range(len(oo)), contig_sizes, reordered_M
async def reconnect(self, force=True, connmark=None):
    """Ask the scheduler to reconnect this connection.

    Can call without delegate.  When *connmark* is None the connection's
    current mark is used.
    """
    mark = self.connmark if connmark is None else connmark
    event = ConnectionControlEvent(self,
                                   ConnectionControlEvent.RECONNECT,
                                   force,
                                   mark)
    self.scheduler.emergesend(event)
def perform_permissions_check(self, user, obj, perms):
    """Perform the permissions check by delegating to the forum permission
    handler attached to the current request."""
    handler = self.request.forum_permission_handler
    return handler.can_update_topics_to_announces(obj, user)
def binaryFiles(self, path, minPartitions=None):
    """.. note:: Experimental

    Read a directory of binary files from HDFS, a local file system
    (available on all nodes), or any Hadoop-supported file system URI as a
    byte array.  Each file is read as a single record and returned as a
    key-value pair of (path, content).
    """
    # A falsy minPartitions (None or 0) falls back to the context default.
    partitions = minPartitions or self.defaultMinPartitions
    deserializer = PairDeserializer(UTF8Deserializer(), NoOpSerializer())
    return RDD(self._jsc.binaryFiles(path, partitions), self, deserializer)
def add_node_ids_as_labels(discoursegraph):
    """Copy each node ID of a discourse graph into a ``label`` attribute on
    that node.

    Nodes whose ID is not a string, or which already carry a ``label``
    attribute, are left untouched.

    Parameters
    ----------
    discoursegraph : graph whose nodes are labelled in place
    """
    for node_id, attrs in discoursegraph.nodes_iter(data=True):
        if 'label' in attrs:
            continue
        if isinstance(node_id, (str, unicode)):
            discoursegraph.node[node_id]['label'] = ensure_utf8(node_id)
def list_all_products(cls, **kwargs):
    """List Products.

    Return a list of Products.  This method makes a synchronous HTTP request
    by default.  To make an asynchronous HTTP request, pass ``async=True``:

        >>> thread = api.list_all_products(async=True)
        >>> result = thread.get()
    """
    # Only the response data is wanted, not the full HTTP info triple.
    # The sync and async branches both return the helper's result directly.
    kwargs['_return_http_data_only'] = True
    return cls._list_all_products_with_http_info(**kwargs)
def setrange(self, name, offset, value):
    """Overwrite bytes in the value of ``name`` starting at ``offset`` with
    ``value``.

    If ``offset`` plus the length of ``value`` exceeds the length of the
    original value, the new value will be larger than before.  If ``offset``
    exceeds the current length, the value is zero-padded up to ``offset``.
    """
    args = ('SETRANGE', name, offset, value)
    return self.execute_command(*args)
def ramp_up_sp(self):
    """Ramp-up setpoint in milliseconds; must be positive.

    Reading returns the current value.  When set to a non-zero value, the
    motor speed will increase from 0 to 100% of `max_speed` over roughly
    the span of this setpoint.
    """
    # get_attr_int returns (cached attribute handle, current value);
    # refresh the cache and hand back the value.
    self._ramp_up_sp, current = self.get_attr_int(self._ramp_up_sp, 'ramp_up_sp')
    return current
def get_match(self, match_id):
    """Get a multiplayer match.

    Parameters
    ----------
    match_id :
        The ID of the match to retrieve -- the ID shown in an online
        multiplayer match summary, not the in-game game ID.
    """
    params = dict(k=self.key, mp=match_id)
    return self._make_req(endpoints.MATCH, params, Match)