code
stringlengths
26
79.6k
docstring
stringlengths
1
46.9k
def _get_struct_fillstyle(self, shape_number): obj = _make_object("FillStyle") obj.FillStyleType = style_type = unpack_ui8(self._src) if style_type == 0x00: if shape_number <= 2: obj.Color = self._get_struct_rgb() else: obj.Color ...
Get the values for the FILLSTYLE record.
def register_callback(self, callback): if not callable(callback): raise ValueError() self.callbacks.append(callback)
Register a new callback. Note: The callback will be executed in the AlarmTask context and for this reason it should not block, otherwise we can miss block changes.
def compute_consistency_score(returns_test, preds): returns_test_cum = cum_returns(returns_test, starting_value=1.) cum_preds = np.cumprod(preds + 1, 1) q = [sp.stats.percentileofscore(cum_preds[:, i], returns_test_cum.iloc[i], k...
Compute Bayesian consistency score. Parameters ---------- returns_test : pd.Series Observed cumulative returns. preds : numpy.array Multiple (simulated) cumulative returns. Returns ------- Consistency score Score from 100 (returns_test perfectly on the median line o...
def add_edge(self, edge): "Add edge to chart, and see if it extends or predicts another edge." start, end, lhs, found, expects = edge if edge not in self.chart[end]: self.chart[end].append(edge) if self.trace: print % (caller(2), edge) if not ...
Add edge to chart, and see if it extends or predicts another edge.
def _parse_fmt(fmt, color_key=, ls_key=, marker_key=): s _process_plot_format function.-------.-.-. None Illegal format string; two linestyle symbolsIllegal format string; two marker symbolsIllegal format string; two color symbolsUnrecognized character %c in format string' % c) return result
Modified from matplotlib's _process_plot_format function.
def writes(nb, format, **kwargs): format = unicode(format) if format == u or format == u: return writes_json(nb, **kwargs) elif format == u: return writes_py(nb, **kwargs) else: raise NBFormatError( % format)
Write a notebook to a string in a given format in the current nbformat version. This function always writes the notebook in the current nbformat version. Parameters ---------- nb : NotebookNode The notebook to write. format : (u'json', u'ipynb', u'py') The format to write the noteb...
def migrate_v0_rules(self): ideniden for iden, valu in self.core.slab.scanByFull(db=self.trigdb): ruledict = s_msgpack.un(valu) ver = ruledict.get() if ver != 0: continue user = ruledict.pop() if user is None: l...
Remove any v0 (i.e. pre-010) rules from storage and replace them with v1 rules. Notes: v0 had two differences user was a username. Replaced with iden of user as 'iden' field. Also 'iden' was storage as binary. Now it is stored as hex string.
def make_document(self, titlestring): root = etree.XML() document = etree.ElementTree(root) html = document.getroot() head = etree.SubElement(html, ) etree.SubElement(html, ) title = etree.SubElement(head, ) title.text = titlestring ...
This method may be used to create a new document for writing as xml to the OPS subdirectory of the ePub structure.
def delete(ctx, uri): http_client = get_wva(ctx).get_http_client() cli_pprint(http_client.delete(uri))
DELETE the specified URI Example: \b $ wva get files/userfs/WEB/python {'file_list': ['files/userfs/WEB/python/.ssh', 'files/userfs/WEB/python/README.md']} $ wva delete files/userfs/WEB/python/README.md '' $ wva get files/userfs/WEB/python {'file_list': ['files/userfs/WEB/p...
def unsign(wheelfile): import wheel.install vzf = wheel.install.VerifyingZipFile(wheelfile, "a") info = vzf.infolist() if not (len(info) and info[-1].filename.endswith()): raise WheelError("RECORD.jws not found at end of archive.") vzf.pop() vzf.close()
Remove RECORD.jws from a wheel by truncating the zip file. RECORD.jws must be at the end of the archive. The zip file must be an ordinary archive, with the compressed files and the directory in the same order, and without any non-zip content after the truncation point.
def serialize_data(data, compression=False, encryption=False, public_key=None): message = json.dumps(data) if compression: message = zlib.compress(message) message = binascii.b2a_base64(message) if encryption and public_key: message = encryption.encrypt(message, public_key) ...
Serializes normal Python datatypes into plaintext using json. You may also choose to enable compression and encryption when serializing data to send over the network. Enabling one or both of these options will incur additional overhead. Args: data (dict): The data to convert into plain text usin...
def get_paths(folder, ignore_endswith=ignore_endswith): folder = pathlib.Path(folder).resolve() files = folder.rglob("*") for ie in ignore_endswith: files = [ff for ff in files if not ff.name.endswith(ie)] return sorted(files)
Return hologram file paths Parameters ---------- folder: str or pathlib.Path Path to search folder ignore_endswith: list List of filename ending strings indicating which files should be ignored.
def diff_identifiers(a, b): a_ids = set(a.identifiers) b_ids = set(b.identifiers) difference = [] for i in a_ids.difference(b_ids): difference.append((i, True, False)) for i in b_ids.difference(a_ids): difference.append((i, False, True)) return difference
Return list of tuples where identifiers in datasets differ. Tuple structure: (identifier, present in a, present in b) :param a: first :class:`dtoolcore.DataSet` :param b: second :class:`dtoolcore.DataSet` :returns: list of tuples where identifiers in datasets differ
def __create_grid(self): data_sizes, min_corner, max_corner = self.__get_data_size_derscription() dimension = len(self.__data[0]) cell_sizes = [dimension_length / self.__amount_intervals for dimension_length in data_sizes] self.__cells = [clique_block() for _ in range(p...
! @brief Creates CLIQUE grid that consists of CLIQUE blocks for clustering process.
def hex_to_xy(self, h): rgb = self.color.hex_to_rgb(h) return self.rgb_to_xy(rgb[0], rgb[1], rgb[2])
Converts hexadecimal colors represented as a String to approximate CIE 1931 x and y coordinates.
def get_backend(): backend = getattr(settings, , None) if backend == : from simditor.image import pillow_backend as backend else: from simditor.image import dummy_backend as backend return backend
Get backend.
def get(msg_or_dict, key, default=_SENTINEL): key, subkey = _resolve_subkeys(key) if isinstance(msg_or_dict, message.Message): answer = getattr(msg_or_dict, key, default) elif isinstance(msg_or_dict, collections_abc.Mapping): answer = msg_or_dict.get(key, default) el...
Retrieve a key's value from a protobuf Message or dictionary. Args: mdg_or_dict (Union[~google.protobuf.message.Message, Mapping]): the object. key (str): The key to retrieve from the object. default (Any): If the key is not present on the object, and a default is se...
def declareLegacyItem(typeName, schemaVersion, attributes, dummyBases=()): if (typeName, schemaVersion) in _legacyTypes: return _legacyTypes[typeName, schemaVersion] if dummyBases: realBases = [declareLegacyItem(*A) for A in dummyBases] else: realBases = (Item,) attributes =...
Generate a dummy subclass of Item that will have the given attributes, and the base Item methods, but no methods of its own. This is for use with upgrading. @param typeName: a string, the Axiom TypeName to have attributes for. @param schemaVersion: an int, the (old) version of the schema this is a pro...
def set_coeffs(self, values, ls, ms): values = _np.array(values) ls = _np.array(ls) ms = _np.array(ms) mneg_mask = (ms < 0).astype(_np.int) self.coeffs[mneg_mask, ls, _np.abs(ms)] = values
Set spherical harmonic coefficients in-place to specified values. Usage ----- x.set_coeffs(values, ls, ms) Parameters ---------- values : float (list) The value(s) of the spherical harmonic coefficient(s). ls : int (list) The degree(s) of...
def iterate(self, iterable, element_timeout=None): self._assert_active() with self._queuelock: self._thread_loop_ids[self._thread_num] += 1 loop_id = self._thread_loop_ids[self._thread_num] return _IterableQueueIterator( ...
Iterate over an iterable. The iterator is executed in the host thread. The threads dynamically grab the elements. The iterator elements must hence be picklable to be transferred through the queue. If there is only one thread, no special operations are performed. Otherwise, effe...
def remove_network(self, action, n_name, **kwargs): c_kwargs = self.get_network_remove_kwargs(action, n_name, **kwargs) res = action.client.remove_network(**c_kwargs) del self._policy.network_names[action.client_name][n_name] return res
Removes a network. :param action: Action configuration. :type action: dockermap.map.runner.ActionConfig :param n_name: Network name or id. :type n_name: unicode | str :param kwargs: Additional keyword arguments. :type kwargs: dict
def get_partition_hash(self): if self.has_partition_hash(): return unpack_from(FMT_BE_INT, self._buffer, PARTITION_HASH_OFFSET)[0] return self.hash_code()
Returns partition hash calculated for serialized object. Partition hash is used to determine partition of a Data and is calculated using * PartitioningStrategy during serialization. * If partition hash is not set then hash_code() is used. :return: partition hash
def traverse_setter(obj, attribute, value): obj.traverse(lambda x: setattr(x, attribute, value))
Traverses the object and sets the supplied attribute on the object. Supports Dimensioned and DimensionedPlot types.
def restart_agent(self, agent_id, **kwargs): host_medium = self.get_medium() agent = host_medium.get_agent() d = host_medium.get_document(agent_id) d.addCallback( lambda desc: agent.start_agent(desc.doc_id, **kwargs)) return d
tells the host agent running in this agency to restart the agent.
def cut_psf(psf_data, psf_size): kernel = image_util.cut_edges(psf_data, psf_size) kernel = kernel_norm(kernel) return kernel
cut the psf properly :param psf_data: image of PSF :param psf_size: size of psf :return: re-sized and re-normalized PSF
def get_rows(self): possible_dataframes = [, , , , , , , , , , , , , , , , ] for df in possible_dataframes: if (df in self.__di...
Returns the name of the rows of the extension
def headers(self): headers = self.conn.issue_command("Headers") res = [] for header in headers.split("\r"): key, value = header.split(": ", 1) for line in value.split("\n"): res.append((_normalize_header(key), line)) return res
Returns a list of the last HTTP response headers. Header keys are normalized to capitalized form, as in `User-Agent`.
def noise_plot(signal, noise, normalise=False, **kwargs): import matplotlib.pyplot as plt n_traces = 0 for tr in signal: try: noise.select(id=tr.id)[0] except IndexError: continue n_traces += 1 fig, axes = plt.subplots(n_traces, 2, sharex=Tru...
Plot signal and noise fourier transforms and the difference. :type signal: `obspy.core.stream.Stream` :param signal: Stream of "signal" window :type noise: `obspy.core.stream.Stream` :param noise: Stream of the "noise" window. :type normalise: bool :param normalise: Whether to normalise the dat...
def getAnalogActionData(self, action, unActionDataSize, ulRestrictToDevice): fn = self.function_table.getAnalogActionData pActionData = InputAnalogActionData_t() result = fn(action, byref(pActionData), unActionDataSize, ulRestrictToDevice) return result, pActionData
Reads the state of an analog action given its handle. This will return VRInputError_WrongType if the type of action is something other than analog
def traverse(obj, target:str, default=nodefault, executable:bool=False, separator:str=, protect:bool=True): assert check_argument_types() value = obj remainder = target if not target: return obj while separator: name, separator, remainder = remainder.partition(separator) numeric = name.lstrip().i...
Traverse down an object, using getattr or getitem. If ``executable`` is ``True`` any executable function encountered will be, with no arguments. Traversal will continue on the result of that call. You can change the separator as desired, i.e. to a '/'. By default attributes (but not array elements) prefixed wit...
def _set_dst_vtep_ip(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={: u}), is_leaf=True, yang_name="dst-vtep-ip", rest_name="dst-vtep-ip-host", parent=self, choice=(u, u), path_helper=self._pat...
Setter method for dst_vtep_ip, mapped from YANG variable /overlay/access_list/type/vxlan/standard/seq/dst_vtep_ip (inet:ipv4-address) If this variable is read-only (config: false) in the source YANG file, then _set_dst_vtep_ip is considered as a private method. Backends looking to populate this variable sho...
def fix_e502(self, result): (line_index, _, target) = get_index_offset_contents(result, self.source) self.source[line_index] = target.rstrip() +
Remove extraneous escape of newline.
def find_revision_number(self, revision=None): self.create() revision = self.expand_branch_name(revision) output = self.context.capture(, , revision, ) if not (output and output.isdigit()): msg = "Failed to find local revision number! ( gave unexpec...
Find the local revision number of the given revision.
def task(self, _fn=None, queue=None, hard_timeout=None, unique=None, lock=None, lock_key=None, retry=None, retry_on=None, retry_method=None, schedule=None, batch=False, max_queue_size=None): def _delay(func): def _delay_inner(*args, **kwargs): ...
Function decorator that defines the behavior of the function when it is used as a task. To use the default behavior, tasks don't need to be decorated. See README.rst for an explanation of the options.
def expire(self, time=None): if time is None: time = self.__timer() root = self.__root curr = root.next links = self.__links cache_delitem = Cache.__delitem__ while curr is not root and curr.expire < time: cache_delitem(self, curr.key) ...
Remove expired items from the cache.
def load_yaml(path): with open(path, ) as f: yamldict = yaml.load(f.read(), Loader=yamlloader.ordereddict.CSafeLoader) if not yamldict: raise (LoadError( % path)) return yamldict
Load YAML file into an ordered dictionary Args: path (str): Path to YAML file Returns: OrderedDict: Ordered dictionary containing loaded YAML file
def update_privilege(self, obj, target): if in obj[]: os.chmod(target, int(obj[][], 8))
Get privileges from metadata of the source in s3, and apply them to target
def find_children(self, tag=None, namespace=None): results = [] if tag and namespace: for element in self.children: if element.tag == tag and element.namespace == namespace: results.append(element) elif tag and not namespace: ...
Searches child nodes for objects with the desired tag/namespace. Returns a list of extension elements within this object whose tag and/or namespace match those passed in. To find all children in a particular namespace, specify the namespace but not the tag name. If you specify only the ...
def samaccountname(self, base_dn, distinguished_name): mappings = self.samaccountnames(base_dn, [distinguished_name]) try: return mappings[distinguished_name] except KeyError: logging.info("%s - unable to retrieve object from AD by DistinguishedName...
Retrieve the sAMAccountName for a specific DistinguishedName :param str base_dn: The base DN to search within :param list distinguished_name: The base DN to search within :param list attributes: Object attributes to populate, defaults to all :return: A populated ADUser object :...
def avail_locations(call=None): if call == : raise SaltCloudSystemExit( ) ret = {} conn = get_conn(service=) locations = conn.getLocations(id=50) for location in locations: ret[location[]] = { : location[], : location[]...
List all available locations
def validate_file(parser, arg): if not os.path.isfile(arg): parser.error("%s is not a file." % arg) return arg
Validates that `arg` is a valid file.
def ids_sharing_same_pgn(id_x, pgn_x, id_y, pgn_y): for id_a, pgn_a in zip(id_x, pgn_x): for id_b, pgn_b in zip(id_y, pgn_y): if pgn_a == pgn_b: yield (id_a, id_b)
Yield arbitration ids which has the same pgn.
def set_wheel_mode(self, ids): self.set_control_mode(dict(zip(ids, itertools.repeat())))
Sets the specified motors to wheel mode.
def server_bind(self): TCPServer.server_bind(self) _, self.server_port = self.socket.getsockname()[:2]
Override of TCPServer.server_bind() that tracks bind-time assigned random ports.
def find_repo_by_path(i): p=i[] if p!=: p=os.path.normpath(p) found=False if p==work[]: uoa=cfg[] uid=cfg[] alias=uoa found=True elif p==work[]: uoa=cfg[] uid=cfg[] alias=uoa found=True else: r=reload_repo_cache({}) if...
Input: { path - path to repo } Output: { return - return code = 0, if successful 16, if repo not found (may be warning) > 0, if error (error) - error text if retur...
def tokenize_annotated(doc, annotation): tokens = tokenize(doc, include_hrefs=False) for tok in tokens: tok.annotation = annotation return tokens
Tokenize a document and add an annotation attribute to each token
def create_cfg_segment(filename, filecontent, description, auth, url): payload = {"confFileName": filename, "confFileType": "2", "cfgFileParent": "-1", "confFileDesc": description, "content": filecontent} f_url = url + "/imcrs/icc/confFile" re...
Takes a str into var filecontent which represents the entire content of a configuration segment, or partial configuration file. Takes a str into var description which represents the description of the configuration segment :param filename: str containing the name of the configuration segment. :param fi...
def create_new_label_by_content_id(self, content_id, label_names, callback=None): assert isinstance(label_names, list) assert all(isinstance(ln, dict) and set(ln.keys()) == {"prefix", "name"} for ln in label_names) return self._service_post_request("rest/api/content/{id}/label".format(i...
Adds a list of labels to the specified content. :param content_id (string): A string containing the id of the labels content container. :param label_names (list): A list of labels (strings) to apply to the content. :param callback: OPTIONAL: The callback to execute on the resulting data, before ...
def urlToIds(url): urlId = url.split("/")[-1] convUrl = "https://join.skype.com/api/v2/conversation/" json = SkypeConnection.externalCall("POST", convUrl, json={"shortId": urlId, "type": "wl"}).json() return {"id": json.get("Resource"), "long": json.get("Id"), ...
Resolve a ``join.skype.com`` URL and returns various identifiers for the group conversation. Args: url (str): public join URL, or identifier from it Returns: dict: related conversation's identifiers -- keys: ``id``, ``long``, ``blob``
def copyCurrentLayout(self, sourceViewSUID, targetViewSUID, body, verbose=None): response=api(url=self.___url++str(sourceViewSUID)++str(targetViewSUID)+, method="PUT", body=body, verbose=verbose) return response
Copy one network view layout onto another, setting the node location and view scale to match. This makes visually comparing networks simple. :param sourceViewSUID: Source network view SUID (or "current") :param targetViewSUID: Target network view SUID (or "current") :param body: Clone the speci...
def log_error(msg, logger="TaskLogger"): tasklogger = get_tasklogger(logger) tasklogger.error(msg) return tasklogger
Log an ERROR message Convenience function to log a message to the default Logger Parameters ---------- msg : str Message to be logged logger : str, optional (default: "TaskLogger") Unique name of the logger to retrieve Returns ------- logger : TaskLogger
def remove_duplicates(vector_tuple): array = np.column_stack(vector_tuple) a = np.ascontiguousarray(array) unique_a = np.unique(a.view([(, a.dtype)]*a.shape[1])) b = unique_a.view(a.dtype).reshape((unique_a.shape[0], a.shape[1])) return list(b.T)
Remove duplicates rows from N equally-sized arrays
def update_storage_policy(policy, policy_dict, service_instance=None): *policy name log.trace(, policy_dict) profile_manager = salt.utils.pbm.get_profile_manager(service_instance) policies = salt.utils.pbm.get_storage_policies(profile_manager, [policy]) if not policies: raise VMwareObjectRet...
Updates a storage policy. Supported capability types: scalar, set, range. policy Name of the policy to update. policy_dict Dictionary containing the changes to apply to the policy. (example in salt.states.pbm) service_instance Service instance (vim.ServiceInstance) of...
def _hash(self, obj, parent, parents_ids=EMPTY_FROZENSET): try: result = self[obj] except (TypeError, KeyError): pass else: return result result = not_hashed if self._skip_this(obj, parent): return elif obj is N...
The main diff method
def _format_firewall_stdout(cmd_ret): ret_dict = {: True, : {}} for line in cmd_ret[].splitlines(): if line.startswith(): continue if line.startswith(): continue ruleset_status = line.split() ret_dict[][ruleset_status[0]] = bool(rulese...
Helper function to format the stdout from the get_firewall_status function. cmd_ret The return dictionary that comes from a cmd.run_all call.
def _array_setitem_with_key_seq(self, array_name, index, key_seq, value): table = self.array(array_name)[index] key_so_far = tuple() for key in key_seq[:-1]: key_so_far += (key,) new_table = self._array_make_sure_table_exists(array_name, index, key_so_far) ...
Sets a the array value in the TOML file located by the given key sequence. Example: self._array_setitem(array_name, index, ('key1', 'key2', 'key3'), 'text_value') is equivalent to doing self.array(array_name)[index]['key1']['key2']['key3'] = 'text_value'
def get_turicreate_object_type(url): modelgraphsframesarray from .._connect import main as _glconnect ret = _glconnect.get_unity().get_turicreate_object_type(_make_internal_url(url)) if ret == : ret = return ret
Given url where a Turi Create object is persisted, return the Turi Create object type: 'model', 'graph', 'sframe', or 'sarray'
def unapply_patch(self, patch_name, force=False): self._check(force) patches = self.db.patches_after(Patch(patch_name)) for patch in reversed(patches): self._unapply_patch(patch) self.db.save() self.unapplied(self.db.top_patch())
Unapply patches up to patch_name. patch_name will end up as top patch
def compile_mako_files(self, app_config): for subdir_name in self.SEARCH_DIRS: subdir = subdir_name.format( app_path=app_config.path, app_name=app_config.name, ) def recurse_path(path): self.message(.format(pa...
Compiles the Mako templates within the apps of this system
def orientation(self, value): for values in self.__orientation: if value in values: self.server.jsonrpc.setOrientation(values[1]) break else: raise ValueError("Invalid orientation.")
setter of orientation property.
def drawcircle(self, x, y, r = 10, colour = None, label = None): self.checkforpilimage() colour = self.defaultcolour(colour) self.changecolourmode(colour) self.makedraw() (pilx, pily) = self.pilcoords((x,y)) pilr = self.pilscale(r) ...
Draws a circle centered on (x, y) with radius r. All these are in the coordinates of your initial image ! You give these x and y in the usual ds9 pixels, (0,0) is bottom left. I will convert this into the right PIL coordiates.
def get_context(self, data, accepted_media_type, renderer_context): view = renderer_context[] request = renderer_context[] response = renderer_context[] renderer = self.get_default_renderer(view) raw_data_post_form = self.get_raw_data_form(data, view, , request) ...
Returns the context used to render.
def lookups(self): if self._lookups is None: from twilio.rest.lookups import Lookups self._lookups = Lookups(self) return self._lookups
Access the Lookups Twilio Domain :returns: Lookups Twilio Domain :rtype: twilio.rest.lookups.Lookups
def register_validator(flag_name, checker, message=, flag_values=_flagvalues.FLAGS): v = SingleFlagValidator(flag_name, checker, message) _add_validator(flag_values, v)
Adds a constraint, which will be enforced during program execution. The constraint is validated when flags are initially parsed, and after each change of the corresponding flag's value. Args: flag_name: str, name of the flag to be checked. checker: callable, a function to validate the flag. input...
def calcDistMatchArr(matchArr, tKey, mKey): matchArrSize = listvalues(matchArr)[0].size distInfo = {: list(), : list()} _matrix = numpy.swapaxes(numpy.array([matchArr[tKey], matchArr[mKey]]), 0, 1) for pos1 in range(matchArrSize-1): for pos2 in range(pos1+1, matchArrSize): ...
Calculate the euclidean distance of all array positions in "matchArr". :param matchArr: a dictionary of ``numpy.arrays`` containing at least two entries that are treated as cartesian coordinates. :param tKey: #TODO: docstring :param mKey: #TODO: docstring :returns: #TODO: docstring ...
def scale_dataset(self, dsid, variable, info): variable = remove_empties(variable) scale = variable.attrs.get(, np.array(1)) offset = variable.attrs.get(, np.array(0)) if np.issubdtype((scale + offset).dtype, np.floating) or np.issubdtype(variable.dtype, np.floating): ...
Scale the data set, applying the attributes from the netCDF file
def afterglow(self, src=None, event=None, dst=None, **kargs): if src is None: src = lambda x: x[].src if event is None: event = lambda x: x[].dport if dst is None: dst = lambda x: x[].dst sl = {} el = {} dl = {} for i i...
Experimental clone attempt of http://sourceforge.net/projects/afterglow each datum is reduced as src -> event -> dst and the data are graphed. by default we have IP.src -> IP.dport -> IP.dst
def displayhook(value): global _displayhooks new_hooks = [] for hook_ref in _displayhooks: hook = hook_ref() if hook: hook(value) new_hooks.append(hook_ref) _displayhooks = new_hooks sys.__displayhook__(value)
Runs all of the registered display hook methods with the given value. Look at the sys.displayhook documentation for more information. :param value | <variant>
def get_joystick_buttons(joy): count_value = ctypes.c_int(0) count = ctypes.pointer(count_value) result = _glfw.glfwGetJoystickButtons(joy, count) return result, count_value.value
Returns the state of all buttons of the specified joystick. Wrapper for: const unsigned char* glfwGetJoystickButtons(int joy, int* count);
def _get_start_revision(self, graph, benchmark, entry_name): start_revision = min(six.itervalues(self.revisions)) if graph.params.get(): branch_suffix = + graph.params.get() else: branch_suffix = for regex, start_commit in six.iteritems(self.conf.regr...
Compute the first revision allowed by asv.conf.json. Revisions correspond to linearized commit history and the regression detection runs on this order --- the starting commit thus corresponds to a specific starting revision.
def unescape_LDAP(ldap_string): if ldap_string is None: return None if ESCAPE_CHARACTER not in ldap_string: return ldap_string escaped = False result = "" for character in ldap_string: if not escaped and character == ESCAPE_CHARACTER: ...
Unespaces an LDAP string :param ldap_string: The string to unescape :return: The unprotected string
def datetime(self, to_timezone=None, naive=False): if to_timezone: dt = self.datetime().astimezone(pytz.timezone(to_timezone)) else: dt = Datetime.utcfromtimestamp(self._epoch) dt.replace(tzinfo=self._tz) if naive: return dt.repla...
Returns a timezone-aware datetime... Defaulting to UTC (as it should). Keyword Arguments: to_timezone {str} -- timezone to convert to (default: None/UTC) naive {bool} -- if True, the tzinfo is simply dropped (default: False)
def get_country_by_name(self, country_name) -> : VALID_STR.validate(country_name, , exc=ValueError) if country_name not in self._countries_by_name.keys(): for country in self.countries: if country.country_name == country_name: return country ...
Gets a country in this coalition by its name Args: country_name: country name Returns: Country
def get_server_networks(self, network, public=False, private=False, key=None): return _get_server_networks(network, public=public, private=private, key=key)
Creates the dict of network UUIDs required by Cloud Servers when creating a new server with isolated networks. By default, the UUID values are returned with the key of "net-id", which is what novaclient expects. Other tools may require different values, such as 'uuid'. If that is the cas...
def headerData(self, section, orientation, role): if role == Qt.TextAlignmentRole: if orientation == Qt.Horizontal: return Qt.AlignCenter | Qt.AlignBottom else: return Qt.AlignRight | Qt.AlignVCenter if role != Qt.DisplayRole and ro...
Get the information to put in the header.
def distL2(x1,y1,x2,y2): xdiff = x2 - x1 ydiff = y2 - y1 return int(math.sqrt(xdiff*xdiff + ydiff*ydiff) + .5)
Compute the L2-norm (Euclidean) distance between two points. The distance is rounded to the closest integer, for compatibility with the TSPLIB convention. The two points are located on coordinates (x1,y1) and (x2,y2), sent as parameters
def str_display_width(s): a去 s= str(s) width = 0 len = s.__len__() for i in range(0,len): sublen = s[i].encode().__len__() sublen = int(sublen/2 + 1/2) width = width + sublen return(width)
from elist.utils import * str_display_width('a') str_display_width('去')
def command_x(self, x, to=None): if to is None: ActionChains(self.driver) \ .send_keys([Keys.COMMAND, x, Keys.COMMAND]) \ .perform() else: self.send_keys(to, [Keys.COMMAND, x, Keys.COMMAND])
Sends a character to the currently active element with Command pressed. This method takes care of pressing and releasing Command.
def get_concurrency(self): method = endpoint = .format( self.client.sauce_username) return self.client.request(method, endpoint)
Check account concurrency limits.
def parent_images(self): parents = [] for instr in self.structure: if instr[] != : continue image, _ = image_from(instr[]) if image is not None: parents.append(image) return parents
:return: list of parent images -- one image per each stage's FROM instruction
def constructRows(self, items): rows = [] for item in items: row = dict((colname, col.extractValue(self, item)) for (colname, col) in self.columns.iteritems()) link = self.linkToItem(item) if link is not None: row[u] = l...
Build row objects that are serializable using Athena for sending to the client. @param items: an iterable of objects compatible with my columns' C{extractValue} methods. @return: a list of dictionaries, where each dictionary has a string key for each column name in my list of c...
def get_timing_signal_1d(length, channels, min_timescale=1.0, max_timescale=1.0e4, start_index=0): position = tf.to_float(tf.range(length) + start_index) num_timescales = channels // 2 log_timescale_increment = ...
Gets a bunch of sinusoids of different frequencies. Each channel of the input Tensor is incremented by a sinusoid of a different frequency and phase. This allows attention to learn to use absolute and relative positions. Timing signals should be added to some precursors of both the query and the memory inpu...
def normalizeGlyphNote(value): if not isinstance(value, basestring): raise TypeError("Note must be a string, not %s." % type(value).__name__) return unicode(value)
Normalizes Glyph Note. * **value** must be a :ref:`type-string`. * Returned value is an unencoded ``unicode`` string
def crack_secret_exponent_from_k(generator, signed_value, sig, k): r, s = sig return ((s * k - signed_value) * generator.inverse(r)) % generator.order()
Given a signature of a signed_value and a known k, return the secret exponent.
def back_slash_to_front_converter(string): try: if not string or not isinstance(string, str): return string return string.replace(, ) except Exception: return string
Replacing all \ in the str to / :param string: single string to modify :type string: str
def urlize(text, trim_url_limit=None, nofollow=False): trim_url = lambda x, limit=trim_url_limit: limit is not None \ and (x[:limit] + (len(x) >=limit and or )) or x words = _word_split_re.split(unicode(escape(text))) nofollow_attr = nofollow and or ...
Converts any URLs in text into clickable links. Works on http://, https:// and www. links. Links can have trailing punctuation (periods, commas, close-parens) and leading punctuation (opening parens) and it'll still do the right thing. If trim_url_limit is not None, the URLs in link text will be limite...
def relabel(label_list, projections): unmapped_combinations = find_missing_projections(label_list, projections) if len(unmapped_combinations) > 0: raise UnmappedLabelsException(.format(unmapped_combinations)) new_labels = [] for labeled_segment in label_list.ranges(): combination =...
Relabel an entire :py:class:`~audiomate.annotations.LabelList` using user-defined projections. Labels can be renamed, removed or overlapping labels can be flattened to a single label per segment. Each entry in the dictionary of projections represents a single projection that maps a combination of labels (key) ...
def _find_server(account, servername=None): servers = servers = [s for s in account.resources() if in s.provides] if servername is not None: for server in servers: if server.name == servername: return server.connect() raise SystemExit( % servername) ...
Find and return a PlexServer object.
def upload(self, remote_path, local_path, progress=None):
    """
    Upload a resource to the given remote path on the WebDAV server.

    When *local_path* is a directory, every nested file and directory is
    uploaded via ``upload_directory``; otherwise a single file is sent via
    ``upload_file``.  See http://webdav.org/specs/rfc4918.html#METHOD_PUT

    :param remote_path: destination path on the WebDAV server.
    :param local_path: path of the local file or directory to upload.
    :param progress: optional progress callback; only forwarded for
        directory uploads (as in the original implementation).
    """
    if not os.path.isdir(local_path):
        self.upload_file(local_path=local_path, remote_path=remote_path)
        return
    self.upload_directory(local_path=local_path, remote_path=remote_path,
                          progress=progress)
Uploads resource to remote path on WebDAV server. In case resource is directory it will upload all nested files and directories. More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_PUT :param remote_path: the path for uploading resources on WebDAV server. Can be fi...
def iter_contributor_statistics(self, number=-1, etag=None):
    """Iterate over the contributor statistics for this repository.

    See also: http://developer.github.com/v3/repos/statistics/

    :param int number: (optional), number of items to return. Default -1
        will return all of them.
    :param str etag: (optional), ETag from a previous request to the same
        endpoint
    :returns: generator of ``ContributorStats`` objects
    """
    # GitHub v3 endpoint: GET /repos/:owner/:repo/stats/contributors.
    # The path segments were lost in extraction; restored to match the
    # documented statistics API.
    url = self._build_url('stats', 'contributors', base_url=self._api)
    return self._iter(int(number), url, ContributorStats, etag=etag)
Iterate over the contributors list. See also: http://developer.github.com/v3/repos/statistics/ :param int number: (optional), number of weeks to return. Default -1 will return all of the weeks. :param str etag: (optional), ETag from a previous request to the same endpoi...
def _getH2singleTrait(self, K, verbose=None): verbose = dlimix.getVerbose(verbose) varg = sp.zeros(self.P) varn = sp.zeros(self.P) fixed = sp.zeros((1,self.P)) for p in range(self.P): y = self.Y[:,p:p+1] I = sp.isnan(y[:,...
Internal function for parameter initialization estimate variance components and fixed effect using a linear mixed model with an intercept and 2 random effects (one is noise) Args: K: covariance matrix of the non-noise random effect term
def transform(self, blocks, y=None):
    """
    Predict content (1) or not-content (0) per block using the fitted
    k-means cluster model over Weninger features, returning a column vector.

    Args:
        blocks (List[Block]): as output by ``Blockifier.blockify``
        y (None): unused; present only for scikit-learn API consistency.

    Returns:
        numpy array of shape (num blocks, 1) with values in {0, 1}.
    """
    features = make_weninger_features(blocks)
    cluster_labels = self.kmeans.predict(features)
    is_content = (cluster_labels > 0).astype(int)
    return np.reshape(is_content, (-1, 1))
Computes the content to tag ratio per block, smooths the values, then predicts content (1) or not-content (0) using a fit k-means cluster model. Args: blocks (List[Block]): as output by :class:`Blockifier.blockify` y (None): This isn't used, it's only here for API consistency. ...
def job_exists(name=None):
    """
    Check whether the job exists in configured Jenkins jobs.

    :param name: The name of the job to check for existence.
    :return: True if the job exists, False if the job does not exist.

    CLI Example:

    .. code-block:: bash

        salt '*' jenkins.job_exists jobname

    :raises SaltInvocationError: when *name* is empty or not given.
    """
    if not name:
        # Original message was garbled in extraction; restored to the
        # conventional Salt validation wording.
        raise SaltInvocationError('Required parameter \'name\' is missing')
    server = _connect()
    # Normalize whatever the Jenkins client returns to a strict bool,
    # matching the original True/False branches.
    return bool(server.job_exists(name))
Check whether the job exists in configured Jenkins jobs. :param name: The name of the job to check for existence. :return: True if the job exists, False if the job does not exist. CLI Example: .. code-block:: bash salt '*' jenkins.job_exists jobname
def load_bytes(self, bytes_data, key, bucket_name=None, replace=False, encrypt=False): if not bucket_name: (bucket_name, key) = self.parse_s3_url(key) if not replace and self.check_for_ke...
Loads bytes to S3 This is provided as a convenience to drop a string in S3. It uses the boto infrastructure to ship a file to s3. :param bytes_data: bytes to set as content for the key. :type bytes_data: bytes :param key: S3 key that will point to the file :type key: st...
async def connect(url, *, apikey=None, insecure=False): url = api_url(url) url = urlparse(url) if url.username is not None: raise ConnectError( "Cannot provide user-name explicitly in URL (%r) when connecting; " "use login instead." % url.username) if url.password i...
Connect to a remote MAAS instance with `apikey`. Returns a new :class:`Profile` which has NOT been saved. To connect AND save a new profile:: profile = connect(url, apikey=apikey) profile = profile.replace(name="mad-hatter") with profiles.ProfileStore.open() as config: con...
def _sensoryComputeInferenceMode(self, anchorInput): if len(anchorInput) == 0: return overlaps = self.connections.computeActivity(anchorInput, self.connectedPermanence) activeSegments = np.where(overlaps >= self.activationThreshold)[0] sensory...
Infer the location from sensory input. Activate any cells with enough active synapses to this sensory input. Deactivate all other cells. @param anchorInput (numpy array) A sensory input. This will often come from a feature-location pair layer.
def __expand_cluster(self, index_point): cluster = None self.__visited[index_point] = True neighbors = self.__neighbor_searcher(index_point) if len(neighbors) >= self.__neighbors: cluster = [index_point] self.__b...
! @brief Expands cluster from specified point in the input data space. @param[in] index_point (list): Index of a point from the data. @return (list) Return tuple of list of indexes that belong to the same cluster and list of points that are marked as noise: (cluster, noise), or No...
def create_table(self, name, schema):
    """
    Create a new table. If the table already exists, nothing happens.

    Example:
        >>> db.create_table("foo", (("id", "integer primary key"),
        ...                         ("value", "text")))

    Arguments:
        name (str): The name of the table to create.
        schema: iterable of (column name, column type) pairs.
    """
    # Each schema entry becomes "name type"; definitions joined by commas.
    # NOTE(review): name and column text are interpolated directly into the
    # SQL, so callers must not pass untrusted input here.
    column_defs = ",".join(" ".join(column) for column in schema)
    self.execute("CREATE TABLE IF NOT EXISTS {name} ({columns})"
                 .format(name=name, columns=column_defs))
Create a new table. If the table already exists, nothing happens. Example: >>> db.create_table("foo", (("id", "integer primary key"), ("value", "text"))) Arguments: name (str): The name of the table to create. schema ...
def update_association(self, association): bad_goids = set() for goids in association.values(): parents = set() goids.update(parents) if bad_goids: sys.stdout.write("{N} GO IDs in assc. are not found in the GO-DAG: {GOs}\n".forma...
Add the GO parents of a gene's associated GO IDs to the gene's association.
def parse_query(self, query_string): if query_string == : return xapian.Query() elif query_string == : return xapian.Query() qp = xapian.QueryParser() qp.set_database(self._database()) qp.set_stemmer(xapian.Stem(self.language)) qp.set...
Given a `query_string`, will attempt to return a xapian.Query Required arguments: ``query_string`` -- A query string to parse Returns a xapian.Query