Columns: code (string, lengths 26–79.6k) · docstring (string, lengths 1–46.9k)
def add_media_description(self, media_description): if self.get_media_descriptions_metadata().is_read_only(): raise NoAccess() self.add_or_replace_value(, media_description)
Adds a media_description. arg: media_description (displayText): the new media_description raise: InvalidArgument - ``media_description`` is invalid raise: NoAccess - ``Metadata.isReadOnly()`` is ``true`` raise: NullArgument - ``media_description`` is ``null`` *compliance: ...
def inferObjectsWithRandomMovements(self): for objectName, objectFeatures in self.objects.iteritems(): self.reset() inferred = False prevTouchSequence = None for _ in xrange(4): while True: touchSequence = list(objectFeatures) random.shuffle(touchSequence)...
Infer each object without any location input.
def _element_to_bson(key, value, check_keys, opts): if not isinstance(key, string_type): raise InvalidDocument("documents must have only string keys, " "key was %r" % (key,)) if check_keys: if key.startswith("$"): raise InvalidDocument("key %r must ...
Encode a single key, value pair.
def get_doc(self, objtxt):
    if self._reading:
        return
    wait_loop = QEventLoop()
    self.sig_got_reply.connect(wait_loop.quit)
    self.silent_exec_method("get_ipython().kernel.get_doc('%s')" % objtxt)
    wait_loop.exec_()
    self.sig_got_reply.disconnect(wait_l...
Get object documentation dictionary
def mod_watch(name, **kwargs):
    sfun = kwargs.pop('sfun', None)
    mapfun = {'purged': purged,
              'latest': latest,
              'removed': removed,
              'installed': installed}
    if sfun in mapfun:
        return mapfun[sfun](name, **kwargs)
    return {'name': name,
            'changes': {},
            'comment': 'pkg.{0} does not work with the watch requisite'.format(sfun),
            'result': False}
Install/reinstall a package based on a watch requisite .. note:: This state exists to support special handling of the ``watch`` :ref:`requisite <requisites>`. It should not be called directly. Parameters for this function should be set by the state being triggered.
def loadNetworkbyName(self, name, callback=None, errback=None): import ns1.ipam network = ns1.ipam.Network(self.config, name=name) return network.load(callback=callback, errback=errback)
Load an existing Network by name into a high level Network object :param str name: Name of an existing Network
def _set_hw_state(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=hw_state.hw_state, is_container=, presence=False, yang_name="hw-state", rest_name="hw-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=Tru...
Setter method for hw_state, mapped from YANG variable /hw_state (container) If this variable is read-only (config: false) in the source YANG file, then _set_hw_state is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_hw_state() directly. ...
def getPotential(self, columnIndex, potential): assert(columnIndex < self._numColumns) potential[:] = self._potentialPools[columnIndex]
:param columnIndex: (int) column index to get potential for. :param potential: (list) will be overwritten with column potentials. Must match the number of inputs.
def name(self, new_name): if self.script_file: content = self.startup_script content = content.replace(self._name, new_name) escaped_name = new_name.replace(, ) content = re.sub(r"^set pcname .+$", "set pcname " + escaped_name, content, flags=re.MULTILIN...
Sets the name of this VPCS VM. :param new_name: name
def put(self, url, html, cache_info=None): key = hashlib.md5(url).hexdigest() try: self._cache_set(key, html) except: self.exception("Failed to write cache") return self.update(url, cache_info)
Put response into cache :param url: Url to cache :type url: str | unicode :param html: HTML content of url :type html: str | unicode :param cache_info: Cache Info (default: None) :type cache_info: floscraper.models.CacheInfo :rtype: None
def simulate_leapfrog(config_func: Callable, accel_func: Callable, t0: date, t1: date, steps_per_day: int): N: int = (t1 - t0).days * steps_per_day q0, v0 = config_func(t0) dims: int = q0.shape[1] dt = float(day2sec) / float(steps_per_day) ...
Simulate the earth-sun system from t0 to t1 using Leapfrog Integration. INPUTS: config_func: function taking a date or date range and returning position and velocity of bodies accel_func: function taking positions of the bodies and returning their accelerations t0: start date of the simulation; a pytho...
def print_about(self): filepath = os.path.join(self.suite_path, "bin", self.tool_name) print "Tool: %s" % self.tool_name print "Path: %s" % filepath print "Suite: %s" % self.suite_path msg = "%s (%r)" % (self.context.load_path, self.context_name) prin...
Print an info message about the tool.
def search(query, team=None): if team is None: team = _find_logged_in_team() if team is not None: session = _get_session(team) response = session.get("%s/api/search/" % get_registry_url(team), params=dict(q=query)) print("* Packages in team %s" % team) packages = re...
Search for packages
def messages(self): if self._messages is None: self._messages = MessageList(self._version, session_sid=self._solution[], ) return self._messages
Access the messages :returns: twilio.rest.messaging.v1.session.message.MessageList :rtype: twilio.rest.messaging.v1.session.message.MessageList
def dumps(number):
    if not isinstance(number, integer_types):
        raise TypeError('number must be an integer')
    if number < 0:
        return '-' + dumps(-number)
    value = ''
    while number != 0:
        number, index = divmod(number, len(alphabet))
        value = alphabet[index] + value
    return value or '0'
Dumps an integer into a base36 string. :param number: the 10-based integer. :returns: the base36 string.
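A quick usage sketch of the round-trip behaviour, assuming the module-level alphabet is the usual 0-9a-z base36 alphabet:

    # assumes alphabet = '0123456789abcdefghijklmnopqrstuvwxyz'
    dumps(0)       # -> '0'
    dumps(12345)   # -> '9ix'  (9*36**2 + 18*36 + 33 == 12345)
    dumps(-12345)  # -> '-9ix' (negative numbers get a leading '-')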
def reset_stats(self): scores = list(itertools.chain.from_iterable([v.total_scores for v in self._runners])) for v in self._runners: v.total_scores.clear() try: return np.mean(scores), np.max(scores) except Exception: logger.exception("Cannot...
Returns: mean, max: two stats of the runners, to be added to backend
async def dump_variant(self, elem, elem_type=None, params=None, obj=None): fvalue = None if isinstance(elem, x.VariantType) or elem_type.WRAPS_VALUE: try: self.tracker.push_variant(elem.variant_elem_type) fvalue = { elem.variant_el...
Dumps variant type to the writer. Supports both wrapped and raw variant. :param elem: :param elem_type: :param params: :param obj: :return:
def ParseFileObject(self, parser_mediator, file_object): file_offset = 0 file_size = file_object.get_size() record_map = self._GetDataTypeMap() while file_offset < file_size: try: pls_record, record_data_size = self._ReadStructureFromFileObject( file_object, file_offset, ...
Parses a PLSRecall.dat file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): a file-like object. Raises: UnableToParseFile: when the file cannot be parsed.
def filter_factory(global_conf, **local_conf): conf = global_conf.copy() conf.update(local_conf) def visible(app): return VisibleFilter(app, conf) return visible
Returns a WSGI filter app for use with paste.deploy.
async def echo_all(app, message): for address in app.kv.get_prefix().values(): host, port = address.decode().split() port = int(port) await tcp_echo_client(message, loop, host, port)
Send and receive a message from all running echo servers
def label(self): with self.selenium.context(self.selenium.CONTEXT_CHROME): return self.root.get_attribute("label")
Provide access to the notification label. Returns: str: The notification label
def get_setup_version(reponame): from param.version import Version return Version.setup_version(os.path.dirname(__file__),reponame,archive_commit="$Format:%h$")
Use autover to get up to date version.
def gssa(model, maxit=100, tol=1e-8, initial_dr=None, verbose=False, n_sim=10000, deg=3, damp=0.1, seed=42): if deg < 0 or deg > 5: raise ValueError("deg must be in [1, 5]") if damp < 0 or damp > 1: raise ValueError("damp must be in [0, 1]") t1 = time.time() g ...
Sketch of algorithm: 0. Choose levels for the initial states and the simulation length (n_sim) 1. Obtain an initial decision rule -- here using first order perturbation 2. Draw a sequence of innovations epsilon 3. Iterate on the following steps: - Use the epsilons, initial states, and proposed ...
def on_scenario_directory_radio_toggled(self, flag): if flag: self.output_directory.setText(self.source_directory.text()) self.output_directory_chooser.setEnabled(not flag)
Autoconnect slot activated when scenario_directory_radio is checked. :param flag: Flag indicating whether the checkbox was toggled on or off. :type flag: bool
def __build_parser_for_fileobject_and_desiredtype(self, obj_on_filesystem: PersistedObject, object_typ: Type[T], logger: Logger = None) -> Parser: object_type = get_base_generic_type(object_typ) matching, no_type_match_bu...
Builds from the registry, a parser to parse object obj_on_filesystem as an object of type object_type. To do that, it iterates through all registered parsers in the list in reverse order (last inserted first), and checks if they support the provided object format (single or multifile) and type. ...
def _t_of_e(self, a0=None, t_start=None, f0=None, ef=None, t_obs=5.0): if ef is None: ef = np.ones_like(self.e0)*0.0000001 beta = 64.0/5.0*self.m1*self.m2*(self.m1+self.m2) e_vals = np.asarray([np.linspace(ef[i], self.e0[i], self.num_points) for...
Rearranged versions of the Peters equations. This function calculates the semi-major axis and eccentricity over time.
def set_outflow_BC(self, pores, mode=): r mode = self._parse_mode(mode, allowed=[, , ], single=True) pores = self._parse_indices(pores) network = self.project.network phase = self.project.phases()[self.settings[]] throats ...
r""" Adds outflow boundary condition to the selected pores. Outflow condition simply means that the gradient of the solved quantity does not change, i.e. is 0.
def dims(x): if isinstance(x, tf.TensorShape): return x.dims r = tf.TensorShape(x).dims return None if r is None else list(map(tf.compat.dimension_value, r))
Returns a list of dimension sizes, or `None` if `rank` is unknown. For more details, see `help(tf.TensorShape.dims)`. Args: x: object representing a shape; convertible to `tf.TensorShape`. Returns: shape_as_list: list of sizes or `None` values representing each dimension's size if known. A size is...
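A minimal usage sketch, assuming TensorFlow is importable as `tf`:

    import tensorflow as tf

    # A shape-like list is converted to tf.TensorShape, then each
    # dimension is resolved to an int or None.
    dims([3, None, 5])            # -> [3, None, 5]
    # A fully unknown shape yields None.
    dims(tf.TensorShape(None))    # -> None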
def msgmerge(self, locale_file, po_string): cmd = "msgmerge -q %s -" % locale_file p = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) (msg, err) = p.communicate(input=po_string) if err: ...
Runs msgmerge on a locale_file and po_string
def build_board_checkers(): grd = Grid(8,8, ["B","W"]) for c in range(4): grd.set_tile(0,(c*2) - 1, "B") grd.set_tile(1,(c*2) - 0, "B") grd.set_tile(6,(c*2) + 1, "W") grd.set_tile(7,(c*2) - 0, "W") print(grd) return grd
builds a checkers starting board Printing Grid 0 B 0 B 0 B 0 B B 0 B 0 B 0 B 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 ...
def merge_with(self, other): other = as_shape(other) if self._dims is None: return other else: try: self.assert_same_rank(other) new_dims = [] for i, dim in enumerate(self._dims): new_dims.append...
Returns a `TensorShape` combining the information in `self` and `other`. The dimensions in `self` and `other` are merged elementwise, according to the rules defined for `Dimension.merge_with()`. Args: other: Another `TensorShape`. Returns: A `TensorShape` containin...
def _post(url, headers={}, data=None, files=None): try: response = requests.post(url, headers=headers, data=data, files=files, verify=VERIFY_SSL) return _process_response(response) except requests.exceptions.RequestException as e: _log_and_raise_exception(, e)
Tries to POST data to an endpoint
def is_entailed_by(self, other):
    for (s_key, s_val) in self:
        if s_key in other:
            if not hasattr(other[s_key], 'implies'):
                raise Exception("Cell for %s is missing implies()" % s_key)
            if not other[s_key].implies(s_val):
                return Fals...
Given two beliefstates, returns True iff the calling instance implies the other beliefstate, meaning it contains at least the same structure (for all structures) and all values (for all defined values). Inverse of `entails`. Note: this only compares the items in the DictCell, n...
def request(self, action, data={}, headers={}, method=): headers = { "Authorization": "Bearer " + self.token, "Content-Type": "application/json", "X-Version": "1", "Accept": "application/json" } return Transport.request(self, action, data...
Append the REST headers to every request
def process_inlines(parser, token): args = token.split_contents() if not len(args) in (2, 4, 6): raise template.TemplateSyntaxError("%r tag requires either 1, 3 or 5 arguments." % args[0]) var_name = args[1] ALLOWED_ARGS = [, ] kwargs = { : None } if len(args) > 2: tuple...
Searches through the provided content and applies inlines where ever they are found. Syntax:: {% process_inlines entry.body [in template_dir] [as varname] } Examples:: {% process_inlines entry.body %} {% process_inlines entry.body as body %} {% process_inlines entry.bod...
def amount(self): return sum(self.get_compound_amount(c) for c in self.material.compounds)
Determine the sum of mole amounts of all the compounds. :returns: Amount. [kmol]
def overlay_gateway_sflow_sflow_vlan_action(self, **kwargs): config = ET.Element("config") overlay_gateway = ET.SubElement(config, "overlay-gateway", xmlns="urn:brocade.com:mgmt:brocade-tunnels") name_key = ET.SubElement(overlay_gateway, "name") name_key.text = kwargs.pop() ...
Auto Generated Code
def main(): import colorama import argparse import logging import sys import os parser = argparse.ArgumentParser(prog="gulpless", description="Simple build system.") parser.add_argument("-v", "--version", action="version", ...
Entry point for command line usage.
def members(name, members_list, root=None):
    cmd = 'gpasswd --members {0} {1}'.format(members_list, name)
    retcode = __salt__['cmd.retcode'](cmd, python_shell=False)
    return not retcode
Replaces members of the group with a provided list. CLI Example: salt '*' group.members foo 'user1,user2,user3,...' Replaces a membership list for a local group 'foo'. foo:x:1234:user1,user2,user3,...
def to_dict(self, save_data=True): input_dict = super(SparseGP, self).to_dict(save_data) input_dict["class"] = "GPy.core.SparseGP" input_dict["Z"] = self.Z.tolist() return input_dict
Convert the object into a json serializable dictionary. :param boolean save_data: if true, it adds the training data self.X and self.Y to the dictionary :return dict: json serializable dictionary containing the needed information to instantiate the object
def acl_show(self, msg, args): name = args[0] if len(args) > 0 else None if name is None: return "%s: The following ACLs are defined: %s" % (msg.user, .join(self._acl.keys())) if name not in self._acl: return "Sorry, couldn%s\n%s, allow, deny']) ])
Show current allow and deny blocks for the given acl.
def extract_ipv4(roster_order, ipv4): for ip_type in roster_order: for ip_ in ipv4: if in ip_: continue if not salt.utils.validate.net.ipv4_addr(ip_): continue if ip_type == and ip_.startswith(): return ip_ ...
Extract the preferred IP address from the ipv4 grain
def assign_tip_labels_and_colors(self): "assign tip labels based on user provided kwargs" if self.style.tip_labels_colors: if self.ttree._fixed_order: if isinstance(self.style.tip_labels_colors, (list, np.ndarray)): ...
assign tip labels based on user provided kwargs
def setup_handlers():
    __grains__ = salt.loader.grains(__opts__)
    __salt__ = salt.loader.minion_mods(__opts__)
    if 'sentry_handler' not in __opts__:
        log.debug('No \'sentry_handler\' key was found in the configuration')
        return False
    options = {}
    dsn = get_config_value('dsn')
    if dsn is not None:
        try:
            from rave...
sets up the sentry handler
def delete_contacts( self, ids: List[int] ): contacts = [] for i in ids: try: input_user = self.resolve_peer(i) except PeerIdInvalid: continue else: if isinstance(input_user, types.InputPeer...
Use this method to delete contacts from your Telegram address book. Args: ids (List of ``int``): A list of unique identifiers for the target users. Can be an ID (int), a username (string) or phone number (string). Returns: True on success. ...
def detached_signature_for(plaintext_str, keys): ctx = gpg.core.Context(armor=True) ctx.signers = keys (sigblob, sign_result) = ctx.sign(plaintext_str, mode=gpg.constants.SIG_MODE_DETACH) return sign_result.signatures, sigblob
Signs the given plaintext string and returns the detached signature. A detached signature in GPG speak is a separate blob of data containing a signature for the specified plaintext. :param bytes plaintext_str: bytestring to sign :param keys: list of one or more key to sign with. :type keys: list[g...
def rename_variables(expression: Expression, renaming: Dict[str, str]) -> Expression:
    if isinstance(expression, Operation):
        if hasattr(expression, 'variable_name'):
            variable_name = renaming.get(expression.variable_name, expression.variable_name)
            return create_operation_expression( ...
Rename the variables in the expression according to the given dictionary. Args: expression: The expression in which the variables are renamed. renaming: The renaming dictionary. Maps old variable names to new ones. Variable names not occurring in the dictionary ar...
def unwrap(self):
    if self.algorithm == 'rsa':
        return self['public_key'].parsed
    key_type = self.algorithm.upper()
    a_an = 'an' if key_type == 'EC' else 'a'
    raise ValueError(unwrap(
        'Only RSA public keys may be unwrapped - this key is %s %s public key',
        a_an,
        key_type
    ))
Unwraps an RSA public key into an RSAPublicKey object. Does not support DSA or EC public keys since they do not have an unwrapped form. :return: An RSAPublicKey object
def index_all(self): self.logger.debug(, self.record_path) with self.db.connection(): for json_path in sorted(self.find_record_files()): self.index_record(json_path)
Index all records under :attr:`record_path`.
def decorate_class_method(func, classkey=None, skipmain=False): global __CLASSTYPE_ATTRIBUTES__ assert classkey is not None, __CLASSTYPE_ATTRIBUTES__[classkey].append(func) return func
Will inject all decorated functions as methods of classkey. classkey is some identifying string, tuple, or object. func can also be a tuple
def cli(): ch = logging.StreamHandler() ch.setFormatter(logging.Formatter( , datefmt="%Y-%m-%d %H:%M:%S" )) logger.addHandler(ch) import argparse parser = argparse.ArgumentParser(description="Search for hosts with a \ response to that matches ") parser.add_argumen...
Command line interface
def p_element_list(self, p): if len(p) == 3: p[0] = p[1] + [p[2]] else: p[1].extend(p[3]) p[1].append(p[4]) p[0] = p[1]
element_list : elision_opt assignment_expr | element_list COMMA elision_opt assignment_expr
def pagure_specific_project_tag_filter(config, message, tags=None, *args, **kw): if not pagure_catchall(config, message): return False tags = tags.split() if tags else [] tags = [tag.strip() for tag in tags if tag and tag.strip()] project_tags = set() project_tags.update(message.get(...
Particular pagure project tags Adding this rule allows you to get notifications for one or more `pagure.io <https://pagure.io>`_ projects having the specified tags. Specify multiple tags by separating them with a comma ','.
def createEncoder(): consumption_encoder = ScalarEncoder(21, 0.0, 100.0, n=50, name="consumption", clipInput=True) time_encoder = DateEncoder(timeOfDay=(21, 9.5), name="timestamp_timeOfDay") encoder = MultiEncoder() encoder.addEncoder("consumption", consumption_encoder) encoder.addEncoder("timestamp...
Create the encoder instance for our test and return it.
def Create(path, password, generate_default_key=True): wallet = UserWallet(path=path, passwordKey=password, create=True) if generate_default_key: wallet.CreateKey() return wallet
Create a new user wallet. Args: path (str): A path indicating where to create or open the wallet e.g. "/Wallets/mywallet". password (str): a 10 characters minimum password to secure the wallet with. Returns: UserWallet: a UserWallet instance.
def to_dict(self): input_dict = super(Add, self)._save_to_input_dict() input_dict["class"] = str("GPy.kern.Add") return input_dict
Convert the object into a json serializable dictionary. Note: It uses the private method _save_to_input_dict of the parent. :return dict: json serializable dictionary containing the needed information to instantiate the object
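A short usage sketch; assumes an Add kernel built with the standard GPy kernel constructors, shown only to illustrate that the returned dict is JSON-serializable:

    import json
    import GPy

    k = GPy.kern.RBF(1) + GPy.kern.RBF(1)   # summing kernels yields an Add kernel
    json.dumps(k.to_dict())                  # dict round-trips through json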
def prune_old_authorization_codes(): from .compat import now from .models import AuthorizationCode AuthorizationCode.objects.with_expiration_before(now()).delete()
Removes all unused and expired authorization codes from the database.
async def storm(self, text, opts=None): async for mesg in self.cell.streamstorm(text, opts, user=self.user): yield mesg
Evaluate a storm query and yield result messages. Yields: ((str,dict)): Storm messages.
def get_token_issuer(token):
    try:
        unverified = decode_token(token)
        if 'iss' not in unverified:
            raise TokenIssuerError
        return unverified.get('iss')
    except jwt.DecodeError:
        raise TokenDecodeError
The issuer of a token is the identifier used to recover the secret. We need to extract this from the token to ensure we can proceed to the signature validation stage. Does not check validity of the token. :param token: signed JWT token :return issuer: iss field of the JWT token :raises TokenIssuerError: if iss fi...
def get_verb_phrases(sentence_doc): pattern = r verb_phrases = textacy.extract.pos_regex_matches(sentence_doc, pattern) result = [] for vp in verb_phrases: word_numbers = [] first_word = vp.start x = first_word if len(vp) > 1: for verb_or_a...
Returns an object like, [(1), (5,6,7)] where this means 2 verb phrases. a single verb at index 1, another verb phrase 5,6,7. - Adverbs are not included. - Infinitive phrases (and verb phrases that are subsets of infinitive phrases) are not included
def run(path, code=None, params=None, **meta):
    if 'ignore_decorators' in params:
        ignore_decorators = params['ignore_decorators']
    else:
        ignore_decorators = None
    check_source_args = (code, path, ignore_decorators) if THIRD_ARG else (code, path)
    return [{ : e.line, ...
pydocstyle code checking. :return list: List of errors.
def hazards_for_layer(layer_geometry_key): result = [] for hazard in hazard_all: if layer_geometry_key in hazard.get(): result.append(hazard) return sorted(result, key=lambda k: k[])
Get hazard categories from layer_geometry_key. :param layer_geometry_key: The geometry id :type layer_geometry_key: str :returns: List of hazard :rtype: list
def flo(string): callers_locals = {} frame = inspect.currentframe() try: outerframe = frame.f_back callers_locals = outerframe.f_locals finally: del frame return string.format(**callers_locals)
Return the string given by param formatted with the callers locals.
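A minimal usage sketch for flo, with a hypothetical local variable just to illustrate the caller-locals lookup:

    def greet():
        # 'name' lives in greet()'s locals, so flo() can interpolate it
        name = 'world'
        return flo('Hello, {name}!')

    greet()  # -> 'Hello, world!'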
def clip_foreign(network):
    foreign_buses = network.buses[network.buses.country_code != 'DE']
    network.buses = network.buses.drop(
        network.buses.loc[foreign_buses.index].index)
    network.lines = network.lines.drop(network.lines[
        (network.lines[].isin(network.buses....
Delete all components and timelines located outside of Germany. Add transborder flows divided by country of origin as network.foreign_trade. Parameters ---------- network : :class:`pypsa.Network` Overall container of PyPSA Returns ------- network : :class:`pypsa.Network` ...
def solve(succ, orien, i, direc): assert orien[i] is not None j = succ[i][direc] if j is None: return False if j == len(orien) - 1: return True if orien[j] is None: for x in [0, 1]: orien[j] = x if solve(succ, orien, j, reflex[direc][...
Can a laser leaving mirror i in direction direc reach the exit? :param i: mirror index :param direc: direction leaving mirror i :param orien: orien[i]=orientation of mirror i :param succ: succ[i][direc]=succ mirror reached when leaving i in direction direc
def generator_checker_py2(gen, gen_type, bound_Generic, bound_typevars, bound_typevars_readonly, follow_fwd_refs, _recursion_check): initialized = False sn = None while True: a = gen.send(sn) if initialized or not a is None: if not gen_type.__args__[0] is Any and...
Builds a typechecking wrapper around a Python 2 style generator object.
def process_remote_sources(self): unpacked_sources = self.context.products.get_data(UnpackedArchives) remote_sources_targets = self.context.targets(predicate=lambda t: isinstance(t, RemoteSources)) if not remote_sources_targets: return snapshot_specs = [] filespecs = [] unpack_dirs =...
Create synthetic targets with populated sources from remote_sources targets.
def add_volume_bricks(name, bricks):
    volinfo = info()
    if name not in volinfo:
        log.error(, name)
        return False
    new_bricks = []
    cmd = 'volume add-brick {0}'.format(name)
    if isinstance(bricks, six.string_types):
        bricks = [bricks]
    volume_bricks = [x['path'] for x in volinfo[name]['bricks'].values()] ...
Add brick(s) to an existing volume name Volume name bricks List of bricks to add to the volume CLI Example: .. code-block:: bash salt '*' glusterfs.add_volume_bricks <volume> <bricks>
def flat_list_to_polymer(atom_list, atom_group_s=4): atom_labels = [, , , , ] atom_elements = [, , , , ] atoms_coords = [atom_list[x:x + atom_group_s] for x in range(0, len(atom_list), atom_group_s)] atoms = [[Atom(x[0], x[1]) for x in zip(y, atom_elements)] for y i...
Takes a flat list of atomic coordinates and converts it to a `Polymer`. Parameters ---------- atom_list : [Atom] Flat list of coordinates. atom_group_s : int, optional Size of atom groups. Returns ------- polymer : Polypeptide `Polymer` object containing atom coords...
def open_zarr(store, group=None, synchronizer=None, chunks=, decode_cf=True, mask_and_scale=True, decode_times=True, concat_characters=True, decode_coords=True, drop_variables=None, consolidated=False, overwrite_encoded_chunks=False, **kwargs): if in kwa...
Load and decode a dataset from a Zarr store. .. note:: Experimental The Zarr backend is new and experimental. Please report any unexpected behavior via github issues. The `store` object should be a valid store for a Zarr group. `store` variables must contain dimension metadata ...
def module_can_run_parallel(test_module: unittest.TestSuite) -> bool: for test_class in test_module: raise TestClassNotIterable() for test_case in test_class: return not getattr(sys.modules[test_case.__module__], "__no_parallel__", False)
Checks if a given module of tests can be run in parallel or not :param test_module: the module to run :return: True if the module can be run in parallel, False otherwise
def _generate_struct(self, struct_type, extra_parameters=None, nameOverride=None): extra_parameters = extra_parameters if extra_parameters is not None else [] self._emit_jsdoc_header(struct_type.doc) self.emit( % ( nameOverride if nameOverride else fmt_type_...
Emits a JSDoc @typedef for a struct.
def add_ms1_quant_from_top3_mzidtsv(proteins, psms, headerfields, protcol): if not protcol: protcol = mzidtsvdata.HEADER_MASTER_PROT top_ms1_psms = generate_top_psms(psms, protcol) for protein in proteins: prot_acc = protein[prottabledata.HEADER_PROTEIN] prec_area = calculate_pr...
Collects PSMs with the highest precursor quant values, adds the sum of the top 3 of these to a protein table
def print_status(self, repo): print(" {0}{1}{2}".format(repo, " " * (19 - len(repo)), self.st))
Print status
def analyze(self, output_folder=".", auto_remove=False): if auto_remove: try: shutil.rmtree(output_folder) except: pass try: mkdir(output_folder) except: pass tokens = [token for sublist in self.sent...
:param auto_remove: automatically remove previous files in the analyze folder :type auto_remove: boolean
def enable_gtk3(self, app=None): from pydev_ipython.inputhookgtk3 import create_inputhook_gtk3 self.set_inputhook(create_inputhook_gtk3(self._stdin_file)) self._current_gui = GUI_GTK
Enable event loop integration with Gtk3 (gir bindings). Parameters ---------- app : ignored Ignored, it's only a placeholder to keep the call signature of all gui activation methods consistent, which simplifies the logic of supporting magics. Notes ...
def _clear(self, pipe=None): redis = self.redis if pipe is None else pipe redis.delete(self.key)
Helper for clear operations. :param pipe: Redis pipe in case update is performed as a part of transaction. :type pipe: :class:`redis.client.StrictPipeline` or :class:`redis.client.StrictRedis`
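A sketch of how the pipe parameter might be used within a transaction; obj stands in for an instance of this collection class (hypothetical surrounding code):

    # queue the delete alongside other commands, then run them atomically
    with obj.redis.pipeline() as pipe:
        obj._clear(pipe=pipe)
        pipe.execute()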
def send_command_ack(self, device_id, action): yield from self._ready_to_send.acquire() acknowledgement = None try: self._command_ack.clear() self.send_command(device_id, action) log.debug() try: yield from asynci...
Send command, wait for gateway to respond with acknowledgment.
def _stripe_object_to_refunds(cls, target_cls, data, charge): refunds = data.get("refunds") if not refunds: return [] refund_objs = [] for refund_data in refunds.get("data", []): item, _ = target_cls._get_or_create_from_stripe_object(refund_data, refetch=False) refund_objs.append(item) return r...
Retrieves Refunds for a charge :param target_cls: The target class to instantiate per refund. :type target_cls: ``Refund`` :param data: The data dictionary received from the Stripe API. :type data: dict :param charge: The charge object that refunds are for. :type charge: ``djstripe.models.Charge`` ...
def is_valid_catalog(catalog, validator=None): catalog = readers.read_catalog(catalog) if not validator: if hasattr(catalog, "validator"): validator = catalog.validator else: validator = create_validator() jsonschema_res = validator.is_valid(catalog) custom_...
Validates that a `data.json` file complies with the defined schema. Checks that the data.json has all the required fields and that both the required and the optional fields follow the structure defined in the schema. Args: catalog (str or dict): Catalog (dict, JSON or XLSX) to be valid...
def _write_wrapper(self, name): io_attr = getattr(self._io, name) def write_wrapper(*args, **kwargs): ret_value = io_attr(*args, **kwargs) if not IS_PY2: return ret_value return write_wrapper
Wrap write() to adapt return value for Python 2. Returns: Wrapper which is described below.
def advance_job_status(namespace: str, job: Job, duration: float, err: Optional[Exception]): duration = human_duration(duration) if not err: job.status = JobStatus.SUCCEEDED logger.info(, job, duration) return if job.should_retry: job.status = Job...
Advance the status of a job depending on its execution. This function is called after a job has been executed. It calculates its next status and calls the appropriate signals.
def pipes(stream, *transformers): for transformer in transformers: stream = stream.pipe(transformer) return stream
Pipe several transformers end to end.
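A small usage sketch; the Stream class here is a hypothetical stand-in for any object exposing the .pipe() chaining protocol that pipes relies on:

    class Stream:
        def __init__(self, items):
            self.items = items

        def pipe(self, transformer):
            # apply the transformer element-wise and keep chaining
            return Stream([transformer(x) for x in self.items])

    result = pipes(Stream([1, 2, 3]), lambda x: x * 2, lambda x: x + 1)
    result.items  # -> [3, 5, 7]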
def delete(queue, items): with _conn(commit=True) as cur: if isinstance(items, dict): cmd = str().format( queue, salt.utils.json.dumps(items)) log.debug(, cmd) cur.execute(cmd) return True if isinstance(items, lis...
Delete an item or items from a queue
def aggregate(self, aggregates=None, drilldowns=None, cuts=None, order=None, page=None, page_size=None, page_max=None): def prep(cuts, drilldowns=False, aggregates=False, columns=None): q = select(columns) bindings = [] cuts, q, bindings = Cuts(sel...
Main aggregation function. This is used to compute a given set of aggregates, grouped by a given set of drilldown dimensions (i.e. dividers). The query can also be filtered and sorted.
def add_size_info (self): if self.headers and "Content-Length" in self.headers and \ "Transfer-Encoding" not in self.headers: try: self.size = int(self.getheader("Content-Length")) except (ValueError, OverflowError): ...
Get size of URL content from HTTP header.
def get_parameter_dict(self, include_frozen=False): return OrderedDict(zip( self.get_parameter_names(include_frozen=include_frozen), self.get_parameter_vector(include_frozen=include_frozen), ))
Get an ordered dictionary of the parameters Args: include_frozen (Optional[bool]): Should the frozen parameters be included in the returned value? (default: ``False``)
def _other_dpss_method(N, NW, Kmax): from scipy import linalg as la Kmax = int(Kmax) W = float(NW)/N ab = np.zeros((2,N), ) nidx = np.arange(N) ab[0,1:] = nidx[1:]*(N-nidx[1:])/2. ab[1] = ((N-1-2*nidx)/2.)*...
Returns the Discrete Prolate Spheroidal Sequences of orders [0, Kmax-1] for a given frequency-spacing multiple NW and sequence length N. See the dpss function that is the official version. This version is independent of the C code and relies on SciPy functions; however, it is slower by a factor of 3. Tridiagonal...
def attach_http_service(cls, http_service: HTTPService): if cls._http_service is None: cls._http_service = http_service cls._set_bus(http_service) else: warnings.warn()
Attaches a service for hosting :param http_service: A HTTPService instance
def open(self, url): cache = self.cache() id = self.mangle(url, ) d = cache.get(id) if d is None: d = self.fn(url, self.options) cache.put(id, d) else: d.options = self.options for imp in d.imports: imp.impo...
Open a WSDL at the specified I{url}. First, an attempt is made to retrieve the WSDL from the I{object cache}. After being unpickled from the cache, the I{options} attribute is restored. If not found, it is downloaded and instantiated using the I{fn} constructor and added to the cache for t...
def parse_sections(self, offset): self.sections = [] for i in xrange(self.FILE_HEADER.NumberOfSections): section = SectionStructure( self.__IMAGE_SECTION_HEADER_format__, pe=self ) if not section: break section_offset = offse...
Fetch the PE file sections. The sections will be readily available in the "sections" attribute. Its attributes will contain all the section information plus "data" a buffer containing the section's data. The "Characteristics" member will be processed and attributes ...
def visit(self, visitor, predicate=None, **kw): predicate = predicate or bool for n in self.walk(**kw): if predicate(n): visitor(n)
Apply a function to matching nodes in the (sub)tree rooted at self. :param visitor: A callable accepting a Node object as single argument.. :param predicate: A callable accepting a Node object as single argument and \ returning a boolean signaling whether Node matches; if `None` all nodes match...
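A short usage sketch; tree, node.name and node.is_leaf are hypothetical stand-ins for whatever the visitor and predicate need to touch:

    # print the names of all leaf nodes under `tree`
    tree.visit(
        visitor=lambda node: print(node.name),
        predicate=lambda node: node.is_leaf,
    )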
def sample_categorical(prob, rng): ret = numpy.empty(prob.shape[0], dtype=numpy.float32) for ind in range(prob.shape[0]): ret[ind] = numpy.searchsorted(numpy.cumsum(prob[ind]), rng.rand()).clip(min=0.0, max=prob.shape[ ...
Sample from independent categorical distributions Each batch is an independent categorical distribution. Parameters ---------- prob : numpy.ndarray Probability of the categorical distribution. Shape --> (batch_num, category_num) rng : numpy.random.RandomState Returns ------- ret...
def get_module_verbosity_flags(*labels): verbose_prefix_list = [, , ] veryverbose_prefix_list = [, , ] verbose_flags = tuple( [prefix + lbl for prefix, lbl in itertools.product(verbose_prefix_list, labels)]) veryverbose_flags = tuple( [prefix + lbl for prefix, lbl in ...
Checks for standard flags for enabling module-specific verbosity
def from_export(cls, endpoint): assert isinstance(endpoint, ExportEndpoint) properties = endpoint.get_properties() properties[pelix.remote.PROP_ENDPOINT_ID] = endpoint.uid properties[pelix.remote.PROP_IMPORTED_CONFIGS] = endpoint.configurations ...
Converts an ExportEndpoint bean to an EndpointDescription :param endpoint: An ExportEndpoint bean :return: An EndpointDescription bean
def addHydrogens(molecule, usedPyroles=None): for atom in molecule.atoms: if atom.has_explicit_hcount: atom.hcount = atom.explicit_hcount continue if atom.valences: for valence in atom.valences: hcount = ...
(molecule) -> add implicit hydrogens to a molecule. If the atom has specified valences and the atom must be charged then a Valence Error is raised
def _set_load_interval(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={: []}, int_size=32), restriction_dict={: [u]}), default=RestrictedClassType(base_type=long, rest...
Setter method for load_interval, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/policy/load_interval (uint32) If this variable is read-only (config: false) in the source YANG file, then _set_load_interval is considered as a private method. Backends looking to populate this variable shou...
def cd_to(path, mkdir=False): def cd_to_decorator(func): @functools.wraps(func) def _cd_and_exec(*args, **kwargs): with cd(path, mkdir): return func(*args, **kwargs) return _cd_and_exec return cd_to_decorator
make a generator like cd, but use it for function Usage:: >>> @cd_to("/") ... def say_where(): ... print(os.getcwd()) ... >>> say_where() /
def sparql_query(self, query, flush=None, limit=None):
    return self.find_statements(query, language='sparql', type='tuples',
                                flush=flush, limit=limit)
Run a Sparql query. :param query: sparql query string :rtype: list of dictionary
def _preprocess(self, struct1, struct2, niggli=True): struct1 = struct1.copy() struct2 = struct2.copy() if niggli: struct1 = struct1.get_reduced_structure(reduction_algo="niggli") struct2 = struct2.get_reduced_structure(reduction_algo="niggli") ...
Rescales, finds the reduced structures (primitive and niggli), and finds fu, the supercell size to make struct1 comparable to s2