code
stringlengths
26
79.6k
docstring
stringlengths
1
46.9k
def fit(self, X, y): super(GPEiVelocity, self).fit(X, y) self.POU = 0 if len(y) >= self.r_minimum: top_y = sorted(y)[-self.N_BEST_Y:] velocities = [top_y[i + 1] - top_y[i] for i in range(len(top_y) - 1)] ...
Train a gaussian process like normal, then compute a "Probability Of Uniform selection" (POU) value.
def device_log_list(self, **kwargs): kwargs[] = True if kwargs.get(): return self.device_log_list_with_http_info(**kwargs) else: (data) = self.device_log_list_with_http_info(**kwargs) return data
DEPRECATED: List all device events. # noqa: E501 DEPRECATED: List all device events. Use `/v3/device-events/` instead. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass asynchronous=True >>> thread = api.device_log_...
def remove_local_zip(self): if self.stage_config.get(, True): try: if os.path.isfile(self.zip_path): os.remove(self.zip_path) if self.handler_path and os.path.isfile(self.handler_path): os.remove(self.handler_path) ...
Remove our local zip file.
def find_analysis(self, family, started_at, status):
    """Find a single analysis record matching the given criteria.

    :param family: family identifier to filter on
    :param started_at: start timestamp to filter on
    :param status: analysis status to filter on
    :return: the first matching record from the query (presumably a
        SQLAlchemy-style query, so ``None`` when nothing matches — confirm)
    """
    filters = dict(family=family, started_at=started_at, status=status)
    return self.Analysis.query.filter_by(**filters).first()
Find a single analysis.
def fetch_items(self, category, **kwargs): from_date = kwargs[] latest = kwargs[] logger.info("Fetching messages of channel from %s", self.channel, str(from_date)) raw_info = self.client.channel_info(self.channel) channel_info = self.parse_channel...
Fetch the messages :param category: the category of items to fetch :param kwargs: backend arguments :returns: a generator of items
def start(self): if self._packet_pipeline_mode == Kinect2PacketPipelineMode.OPENGL: self._pipeline = lf2.OpenGLPacketPipeline() elif self._packet_pipeline_mode == Kinect2PacketPipelineMode.CPU: self._pipeline = lf2.CpuPacketPipeline() self._log...
Starts the Kinect v2 sensor stream. Raises ------ IOError If the Kinect v2 is not detected.
def _extract_readnum(read_dict): pat = re.compile(r"(?P<readnum>/\d+)$") parts = pat.split(read_dict["name"]) if len(parts) == 3: name, readnum, endofline = parts read_dict["name"] = name read_dict["readnum"] = readnum else: read_dict["readnum"] = "" return read_...
Extract read numbers from old-style fastqs. Handles read 1 and 2 specifications where naming is readname/1 readname/2
def service_password_encryption(self, **kwargs): config = ET.Element("config") service = ET.SubElement(config, "service", xmlns="urn:brocade.com:mgmt:brocade-aaa") password_encryption = ET.SubElement(service, "password-encryption") callback = kwargs.pop(, self._callback) ...
Auto Generated Code
def get(self, uri, default_response=None, **kwargs): url = self.api_url + uri response = requests.get(url, params=kwargs, headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout) return self.succ...
Call GET on the Gitlab server >>> gitlab = Gitlab(host='http://localhost:10080', verify_ssl=False) >>> gitlab.login(user='root', password='5iveL!fe') >>> gitlab.get('/users/5') :param uri: String with the URI for the endpoint to GET from :param default_response: Return value if...
def get_devices(self): if self.__util is None: import tango db = tango.Database() else: db = self.__util.get_database() server = self.server_instance dev_list = db.get_device_class_list(server) class_map, dev_map = {}, {} for c...
Helper that returns a dict of devices for this server. :return: Returns a tuple of two elements: - dict<tango class name : list of device names> - dict<device names : tango class name> :rtype: tuple<dict, dict>
def _determine_doubled_obj_type(self):
    """Return the type (class) of the target object.

    Classes and modules are returned unchanged; any other object yields
    its class.

    :return: The type (class) of the target.
    :rtype: type, classobj
    """
    target = self.doubled_obj
    if isclass(target) or ismodule(target):
        return target
    return target.__class__
Returns the type (class) of the target object. :return: The type (class) of the target. :rtype: type, classobj
def set_active_current(self, settings): s pipette, depending on what model pipette it is, and what action it is performing settings Dict with axes as valies (e.g.: , , , , , or ) and floating point number for current (generally between 0.1 and 2) nows current to ...
Sets the amperage of each motor for when it is activated by driver. Values are initialized from the `robot_config.high_current` values, and can then be changed through this method by other parts of the API. For example, `Pipette` setting the active-current of it's pipette, depending on ...
def rest_call(self, url, method, data=None, sensitive=False, timeout=None, content_json=True, retry=None, max_retry=None, retry_sleep=None): if timeout is None: timeout = self.rest_call_timeout if retry is not None: sys.stderr.writ...
Generic REST call worker function **Parameters:** - **url:** URL for the REST call - **method:** METHOD for the REST call - **data:** Optional DATA for the call (for POST/PUT/etc.) - **sensitive:** Flag if content request/response should be hidden from logging functions...
def copy_from(self, location, timeout=10 * 60): cmd = .format( location=location, dir=self.DESTDIR, vrf_name=self.VRF_NAME) run = self.device.api.exec_opcmd run(cmd, msg_type=, timeout=timeout)
This method will fetch the image; the fetch will happen from the device-side using the 'copy' command. Note that the NXAPI appears to be single-threaded, so the code needs to wait until this operation has completed before attempting another API call. Therefore the :timeout: value is se...
def main(): metator_args = sys.argv[1:] entry_point = pkg_resources.resource_filename("metator", "bin/metator.sh") try: metator_process = subprocess.Popen((entry_point, *metator_args)) except PermissionError: metator_process = subprocess.Popen( (entry_point, *metator...
This module just acts as an entry point to the bulk of the pipeline. All argument parsing is delegated to metator.sh
def close_path_traces(*args): s coordinates are specified is ultimately ignored by this function--the only ordering that matters is the order in which the list of paths is given. Each path argument may alternately be a curve-spline object or coordinate matrix, so long as all paths and curves track th...
close_path_traces(pt1, pt2...) yields the path-trace formed by joining the list of path traces at their intersection points in the order given. Note that the direction in which each individual path trace's coordinates are specified is ultimately ignored by this function--the only ordering that matters...
def _func(self, volume, params):
    """Poirier-Tarantola equation of state (PRB 70, 224107).

    :param volume: cell volume
    :param params: tuple ``(e0, b0, b1, v0)`` of EOS parameters
    :return: energy at the given volume
    """
    e0, b0, b1, v0 = tuple(params)
    eta = (volume / v0) ** (1. / 3.)
    # Natural-log strain variable of the Poirier-Tarantola form.
    s = -3. * np.log(eta)
    quadratic = b0 * v0 * s ** 2 / 6.
    return e0 + quadratic * (3. + s * (b1 - 2))
Poirier-Tarantola equation of state from PRB 70, 224107
def get_all_tasks(self, subsystem): with open(os.path.join(self.per_subsystem[subsystem], ), ) as tasksFile: for line in tasksFile: yield int(line)
Return a generator of all PIDs currently in this cgroup for the given subsystem.
def get_route53_client(agent, region, cooperator=None): if cooperator is None: cooperator = task return region.get_client( _Route53Client, agent=agent, creds=region.creds, region=REGION_US_EAST_1, endpoint=AWSServiceEndpoint(_OTHER_ENDPOINT), cooperat...
Get a non-registration Route53 client.
def run(self, data, train_epochs=1000, test_epochs=1000, verbose=None, idx=0, lr=0.01, **kwargs): verbose = SETTINGS.get_default(verbose=verbose) optim = th.optim.Adam(self.parameters(), lr=lr) self.score.zero_() with trange(train_epochs + test_epochs, disable=not ve...
Run the CGNN on a given graph.
def gff3_parse_attributes(attributes_string): attributes = dict() fields = attributes_string.split() for f in fields: if in f: key, value = f.split() attributes[unquote_plus(key).strip()] = unquote_plus(value.strip()) elif len(f) > 0: ...
Parse a string of GFF3 attributes ('key=value' pairs delimited by ';') and return a dictionary.
def get(self, conn_name, default=None, **kwargs): if isinstance(conn_name, RdfwConnections): return conn_name try: return self.conns[conn_name] except KeyError: if default: return self.get(default, **kwargs) raise LookupEr...
returns the specified connection args: conn_name: the name of the connection
def build_attrs(self, extra_attrs=None, **kwargs):
    """Helper function for building an attribute dictionary.

    Delegates to the wrapped widget's ``build_attrs`` and caches the
    result on ``self.attrs``.

    :param extra_attrs: optional dict of extra HTML attributes to merge in
    :return: the combined attribute dictionary
    """
    # Bug fix: the received extra_attrs was previously discarded by
    # passing a hard-coded ``extra_attrs=None`` to the wrapped widget.
    self.attrs = self.widget.build_attrs(extra_attrs=extra_attrs, **kwargs)
    return self.attrs
Helper function for building an attribute dictionary.
def _extract_mnist_labels(filename, num_labels):
    """Extract labels from an MNIST file into integers.

    Args:
      filename: The path to a gzipped MNIST labels file.
      num_labels: The number of labels to read from the file.

    Returns:
      An int64 numpy array of shape [num_labels].
    """
    with gzip.open(filename) as stream:
        # Skip the 8-byte MNIST label-file header (magic number + count).
        stream.read(8)
        raw = stream.read(num_labels)
    return np.frombuffer(raw, dtype=np.uint8).astype(np.int64)
Extract labels from an MNIST file into integers. Args: filename: The path to an MNIST labels file. num_labels: The number of labels in the file. Returns: A int64 numpy array of shape [num_labels]
def _refresh(self): if len(self.__data) or self.__killed: return len(self.__data) if self.__id is None: self.__send_message(_Query(self.__query_flags, self.__collection.database.name, self._...
Refreshes the cursor with more data from Mongo. Returns the length of self.__data after refresh. Will exit early if self.__data is already non-empty. Raises OperationFailure when the cursor cannot be refreshed due to an error on the query.
def destroy(name, conn=None, call=None, kwargs=None): if call == : raise SaltCloudSystemExit( ) if not conn: conn = get_conn() if kwargs is None: kwargs = {} instance_data = show_instance(name, call=) service_name = instance_data[][] ...
Destroy a VM CLI Examples: .. code-block:: bash salt-cloud -d myminion salt-cloud -a destroy myminion service_name=myservice
def load(path, dtype=np.float64): _import_skimage() import skimage.io im = skimage.io.imread(path) if dtype == np.uint8: return im elif dtype in {np.float16, np.float32, np.float64}: return im.astype(dtype) / 255 else: raise ValueError()
Loads an image from file. Parameters ---------- path : str Path to image file. dtype : np.dtype Defaults to ``np.float64``, which means the image will be returned as a float with values between 0 and 1. If ``np.uint8`` is specified, the values will be between 0 and 255 a...
def _print_token_factory(col): def _helper(msg): style = style_from_dict({ Token.Color: col, }) tokens = [ (Token.Color, msg) ] print_tokens(tokens, style=style) def _helper_no_terminal(msg): print(msg) if sys.stdout.isat...
Internal helper to provide color names.
def median_grouped(data, interval=1): data = sorted(data) n = len(data) if n == 0: raise StatisticsError("no median for empty data") elif n == 1: return data[0] x = data[n//2] for obj in (x, interval): if isinstance(obj, (str, bytes)): raise Typ...
Return the 50th percentile (median) of grouped continuous data. >>> median_grouped([1, 2, 2, 3, 4, 4, 4, 4, 4, 5]) 3.7 >>> median_grouped([52, 52, 53, 54]) 52.5 This calculates the median as the 50th percentile, and should be used when your data is continuous and grouped. In the above example,...
def _init_client(): global client, path_prefix if client is not None: return etcd_kwargs = { : __opts__.get(, ), : __opts__.get(, 2379), : __opts__.get(, ), : __opts__.get(, True), : __opts__.get(, False), : __opts__.get(,...
Setup client and init datastore.
def imagetransformer_ae_imagenet():
    """Hparams for 64x64 ImageNet (~56M trainable variables).

    Starts from the CIFAR autoencoder configuration and scales it up to
    64x64x3 inputs.
    """
    hparams = imagetransformer_ae_cifar()
    hparams.img_len = 64
    hparams.max_length = int(64 * 64 * 3)
    hparams.num_heads = 4
    hparams.num_decoder_layers = 8
    hparams.num_compress_steps = 2
    return hparams
For 64x64 ImageNet. ~56M trainable variables.
def unlock(name, zk_hosts=None, identifier=None, max_concurrency=1, ephemeral_lease=False, profile=None, scheme=None, username=None, password=None, default_acl=None): ret = {: name, : {}, ...
Remove lease from semaphore.
def handle_callback_exception(self, callback):
    """Handle an exception raised by an `IOLoop` callback.

    If a custom error handler is configured, it is invoked with the
    failing callback; otherwise the exception (available via
    ``sys.exc_info``) is logged as an error.
    """
    handler = self._error_handler
    if handler:
        handler(callback)
        return
    app_log.error("Exception in callback %r", callback, exc_info=True)
This method is called whenever a callback run by the `IOLoop` throws an exception. By default simply logs the exception as an error. Subclasses may override this method to customize reporting of exceptions. The exception itself is not passed explicitly, but is available in `sy...
def verifyWriteMode(files): if not isinstance(files, list): files = [files] not_writable = [] writable = True for fname in files: try: f = open(fname,) f.close() del f except: not_writable.append(fna...
Checks whether files are writable. It is up to the calling routine to raise an Exception, if desired. This function returns True, if all files are writable and False, if any are not writable. In addition, for all files found to not be writable, it will print out the list of names of affected files.
def create_from_tables(cls, norm_type=, tab_s="SCANDATA", tab_e="EBOUNDS"): if norm_type in [, , ]: norm_vals = np.array(tab_s[] * tab_e[ % norm_type][:, np.newaxis]) elif norm_type == "norm": ...
Create a CastroData object from two tables Parameters ---------- norm_type : str Type of normalization to use. Valid options are: * norm : Normalization w.r.t. to test source * flux : Flux of the test source ( ph cm^-2 s^-1 ) * eflux: Energy Flu...
def iter_srels(self):
    """Generate a 2-tuple `(source_uri, srel)` for each relationship in
    the package, package-level relationships first, then per-part ones.
    """
    for pkg_rel in self._pkg_srels:
        yield (PACKAGE_URI, pkg_rel)
    for part in self._sparts:
        for part_rel in part.srels:
            yield (part.partname, part_rel)
Generate a 2-tuple `(source_uri, srel)` for each of the relationships in the package.
def process_shell(self, creator, entry, config): self.logger.info("Processing Bash code: start") output = [] shell = creator(entry, config) for line in shell.process(): output.append(line) self.logger.info(" | %s", line) if shell.success: ...
Processing a shell entry.
def get_ylabel(self): units = self.units if len(units) == 1 and str(units[0]) == : return if len(units) == 1 and self.usetex: return units[0].to_string() elif len(units) == 1: return units[0].to_string() elif len(units) > 1: ...
Text for y-axis label, check if channel defines it
def elink(db: str, dbfrom: str, ids=False, webenv=False, query_key=False, api_key=False, email=False, **kwargs) -> Optional[ElinkResult]: url = BASE_URL + f url = check_webenv(webenv, url) url = check_query_key(query_key, url) url = check_api_key(api_key, url) url = check_email(email,...
Get document summaries using the Entrez ESearch API. Parameters ---------- db : str Entez database to get ids from. dbfrom : str Entez database the provided ids are from. ids : list or str List of IDs to submit to the server. webenv : str An Entrez WebEnv to use ...
def run_validators(self, value):
    """Run all configured validators on the setting value.

    Collects the messages from every failing validator and raises one
    ``ValidationError`` carrying all of them; returns ``None`` when all
    validators pass.
    """
    messages = []
    for check in self.validators:
        try:
            check(value)
        except ValidationError as err:
            messages.extend(err.messages)
    if messages:
        raise ValidationError(messages)
Run the validators on the setting value.
def _select(self): for element in self.browser.find_elements_by_xpath(self.xpath): if self.filter_displayed: if not element.is_displayed(): continue if self.filter_enabled: if not element.is_enabled(): con...
Fetch the elements from the browser.
def main(): dir_path = res, ages = , 0 plot = 0 proj = results_file = ell, flip = 0, 0 lat_0, lon_0 = 90., 0. fmt = sym, size = , 8 rsym, rsize = , 8 anti = 0 fancy = 0 coord = "" if in sys.argv: ind = sys.argv.index() dir_path = sys.argv...
NAME vgpmap_magic.py DESCRIPTION makes a map of vgps and a95/dp,dm for site means in a pmag_results table SYNTAX vgpmap_magic.py [command line options] OPTIONS -h prints help and quits -eye ELAT ELON [specify eyeball location], default is 90., 0. -f FILE p...
def instance_from_str(instance_str): match = instance_str_re.match(instance_str) if not match: raise ValueError("Invalid instance string") model_string = match.group(1) try: model = apps.get_model(model_string) except (LookupError, ValueError): raise ValueError("Invalid...
Given an instance string in the form "app.Model:pk", returns a tuple of ``(model, instance)``. If the pk part is empty, ``instance`` will be ``None``. Raises ``ValueError`` on invalid model strings or missing instances.
def assert_empty(self, class_name: str):
    """Raise a ``ConfigurationError`` if ``self.params`` is not empty.

    ``class_name`` names the calling class so the error message shows
    where the extra parameters were passed.
    """
    if not self.params:
        return
    raise ConfigurationError(
        "Extra parameters passed to {}: {}".format(class_name, self.params))
Raises a ``ConfigurationError`` if ``self.params`` is not empty. We take ``class_name`` as an argument so that the error message gives some idea of where an error happened, if there was one. ``class_name`` should be the name of the `calling` class, the one that got extra parameters (if there a...
def to_XML(self, xml_declaration=True, xmlns=True): root_node = self._to_DOM() if xmlns: xmlutils.annotate_with_XMLNS(root_node, UVINDEX_XMLNS_PREFIX, UVINDEX_XMLNS_URL) return xmlutils.DOM_nod...
Dumps object fields to an XML-formatted string. The 'xml_declaration' switch enables printing of a leading standard XML line containing XML version and encoding. The 'xmlns' switch enables printing of qualified XMLNS prefixes. :param XML_declaration: if ``True`` (default) prints a lead...
def __make_response(self, data, default_renderer=None): status = headers = None if isinstance(data, tuple): data, status, headers = unpack(data) if data is None: data = self.__app.response_class(status=204) elif not isinstance(data, self.__app.response_...
Creates a Flask response object from the specified data. The appropriated encoder is taken based on the request header Accept. If there is not data to be serialized the response status code is 204. :param data: The Python object to be serialized. :return: A Flask response object.
def element(self):
    """Attempt to return the atomic symbol based on the VRHFIN keyword."""
    vrhfin = self.keywords["VRHFIN"].split(":")[0].strip()
    try:
        return Element(vrhfin).symbol
    except ValueError:
        # "X" is mapped to xenon here; otherwise fall back to the element
        # encoded in the symbol prefix (e.g. "Fe_pv" -> "Fe").
        if vrhfin == "X":
            return "Xe"
        return Element(self.symbol.split("_")[0]).symbol
Attempt to return the atomic symbol based on the VRHFIN keyword.
def load_metadata_csv(input_filepath): with open(input_filepath) as f: csv_in = csv.reader(f) header = next(csv_in) if in header: tags_idx = header.index() else: raise ValueError() if header[0] == : if header[1] == : m...
Return dict of metadata. Format is either dict (filenames are keys) or dict-of-dicts (project member IDs as top level keys, then filenames as keys). :param input_filepath: This field is the filepath of the csv file.
def preconnect(self, size=-1): if size == -1 and self.max_size == -1: raise ClientError("size=-1 not allowed with pool max_size=-1") limit = min(size, self.max_size) if size != -1 else self.max_size clients = yield [self.get_connected_client() for _ in range(0, limit)] ...
(pre)Connects some or all redis clients inside the pool. Args: size (int): number of redis clients to build and to connect (-1 means all clients if pool max_size > -1) Raises: ClientError: when size == -1 and pool max_size == -1
def escape_queue(s): if isinstance(s, PosixPath): s = unicode_(s) elif isinstance(s, bytes): s = s.decode() if s.startswith(): return + shell_escape(s[2:]) else: return shell_escape(s)
Escapes the path to a queue, e.g. preserves ~ at the beginning.
def construct_came_from(environ):
    """Reconstruct the URL the user requested when the process was
    interrupted for single-sign-on processing.

    :param environ: WSGI environ mapping
    :return: PATH_INFO, plus "?QUERY_STRING" when a query string exists
    """
    query = environ.get("QUERY_STRING", "")
    came_from = environ.get("PATH_INFO")
    if query:
        came_from = came_from + "?" + query
    return came_from
The URL that the user used when the process was interrupted for single-sign-on processing.
def appendcsv(table, source=None, encoding=None, errors=, write_header=False, **csvargs): source = write_source_from_arg(source) csvargs.setdefault(, ) appendcsv_impl(table, source=source, encoding=encoding, errors=errors, write_header=write_header, **csvargs)
Append data rows to an existing CSV file. As :func:`petl.io.csv.tocsv` but the file is opened in append mode and the table header is not written by default. Note that no attempt is made to check that the fields or row lengths are consistent with the existing data, the data rows from the table are simpl...
async def get_key_metadata(wallet_handle: int, verkey: str) -> str: logger = logging.getLogger(__name__) logger.debug("get_key_metadata: >>> wallet_handle: %r, verkey: %r", wallet_handle, verkey) if not hasattr(get_key_metadata, "cb"): ...
Retrieves the meta information for the giving key in the wallet. :param wallet_handle: Wallet handle (created by open_wallet). :param verkey: The key (verkey, key id) to retrieve metadata. :return: metadata: The meta information stored with the key; Can be null if no metadata was saved for this key.
def max_sparse_hyperplane_size(tree):
    """Determine the maximum number of non-zeros in a hyperplane entry.

    Leaves contribute 0; an internal node contributes its hyperplane
    width (``shape[1]``) and recurses into both children.
    """
    if tree.is_leaf:
        return 0
    return max(
        tree.hyperplane.shape[1],
        max_sparse_hyperplane_size(tree.left_child),
        max_sparse_hyperplane_size(tree.right_child),
    )
Determine the maximum number of non-zeros in a hyperplane entry
def _register_numpy_extensions(self): import numpy as np numpy_floating_types = (np.float16, np.float32, np.float64) if hasattr(np, ): numpy_floating_types = numpy_floating_types + (np.float128,) @self.add_iterable_check def is_object_ndarray(data...
Numpy extensions are builtin
def __reset_parser(self):
    """Reset the parser's internal variables.

    Restores the parser to an initial state; useful when creating a new
    parser or reusing an existing one.
    """
    self.result, self.hash_comments = [], []

    self.__cstate = self.__curcommand = self.__curstringlist = self.__expected = None
    self.__opened_blocks = 0

    # Extensions loaded by a previous parse must not leak into the next one.
    RequireCommand.loaded_extensions = []
Reset parser's internal variables Restore the parser to an initial state. Useful when creating a new parser or reusing an existing one.
def retrieveVals(self): opcinfo = OPCinfo(self._host, self._port, self._user, self._password, self._monpath, self._ssl) stats = opcinfo.getAllStats() if self.hasGraph() and stats: mem = stats[] keys = (, , ) map(lam...
Retrieve values for graphs.
def _copy_old_features(new_eopatch, old_eopatch, copy_features): if copy_features: existing_features = set(new_eopatch.get_feature_list()) for copy_feature_type, copy_feature_name, copy_new_feature_name in copy_features: new_feature = copy_feature_type, copy_new...
Copy features from old EOPatch :param new_eopatch: New EOPatch container where the old features will be copied to :type new_eopatch: EOPatch :param old_eopatch: Old EOPatch container where the old features are located :type old_eopatch: EOPatch :param copy_features: List of tupl...
def configure_settings(): if not settings.configured: db_config = { : , : , } settings.configure( TEST_RUNNER=, NOSE_ARGS=[, , ], DATABASES={ : db_config, }, INSTALLED_APPS=( ...
Configures settings for manage.py and for run_tests.py.
def color_table(color, N=1, sort=False, sort_values=False, inline=False, as_html=False): if isinstance(color, list): c_ = rgb_tup = [normalize(c) for c in color] if sort: rgb_tup.sort() elif isinstance(color, dict): c_ = items = [(k, normalize(v), hex_t...
Generates a colour table Parameters: ----------- color : string | list | dict Color representation in rgba|rgb|hex If a list of colors is passed then these are displayed in a table N : int number of colou...
def update(self, settings):
    """Update object attributes from given settings.

    The incoming settings are cleaned, merged into the saved settings,
    and pushed to ``set_settings``.

    Args:
        settings (dict): Dictionary of elements to update settings.

    Returns:
        dict: Dictionary of all current saved settings.
    """
    cleaned = self.clean(settings)
    self._settings.update(cleaned)
    self.set_settings(cleaned)
    return self._settings
Update object attributes from given settings Args: settings (dict): Dictionary of elements to update settings. Returns: dict: Dictionary of all current saved settings.
def plot(self): plt.ion() plt.show() x = numpy.linspace(0, 15, 100) y = numpy.zeros(x.shape) y = self.excite(y, x) plt.plot(x, y) plt.xlabel() plt.ylabel() plt.title()
plot the activation function
def _model_abilities_two_components(self,beta): parm = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(beta.shape[0])]) scale, shape, skewness = self._get_scale_and_shape(parm) state_vectors = np.zeros(shape=(self.max_team+1)) state_vectors_2 =...
Creates the structure of the model - store abilities Parameters ---------- beta : np.array Contains untransformed starting values for latent variables Returns ---------- theta : np.array Contains the predicted values for the time series ...
def findsource(object): file = getsourcefile(object) or getfile(object) candidates = [] for i in range(len(lines)): match = pat.match(lines[i]) if match: if lines[i][0] == : return lines, i ...
Return the entire source file and starting line number for an object. The argument may be a module, class, method, function, traceback, frame, or code object. The source code is returned as a list of all the lines in the file and the line number indexes a line in that list. An IOError is raised if th...
def get_hashtags(tweet):
    """Get a list of hashtag texts in the Tweet.

    For a quote-tweet, this does not include the hashtags of the quoted
    status.

    Args:
        tweet (Tweet or dict): A Tweet object or dictionary

    Returns:
        list of str: all of the hashtag texts in the Tweet
    """
    tags = get_entities(tweet).get("hashtags")
    if not tags:
        return []
    return [entry["text"] for entry in tags]
Get a list of hashtags in the Tweet Note that in the case of a quote-tweet, this does not return the hashtags in the quoted status. Args: tweet (Tweet or dict): A Tweet object or dictionary Returns: list (a list of strings): list of all of the hashtags in the Tweet Example: ...
def resolve_memory_access(self, tb, x86_mem_operand): size = self.__get_memory_access_size(x86_mem_operand) addr = None if x86_mem_operand.base: addr = ReilRegisterOperand(x86_mem_operand.base, size) if x86_mem_operand.index and x86_mem_operand.scale != 0x0: ...
Return operand memory access translation.
def _seconds_have_elapsed(token, num_seconds):
    """Test whether 'num_seconds' have passed since 'token' was last requested.

    Always returns True on the first call for a given token; when it
    returns True the current time is recorded for that token. Not
    strictly thread-safe; accuracy depends on ``timeit.default_timer()``.

    Args:
        token: hashable key identifying the call site.
        num_seconds: minimum interval between True results.

    Returns:
        True if at least num_seconds elapsed (or first call), else False.
    """
    now = timeit.default_timer()
    last = _log_timer_per_token.get(token)
    # De Morgan of the original condition: elapsed unless we have a
    # recorded time and it is too recent.
    if last is not None and (now - last) < num_seconds:
        return False
    _log_timer_per_token[token] = now
    return True
Tests if 'num_seconds' have passed since 'token' was requested. Not strictly thread-safe - may log with the wrong frequency if called concurrently from multiple threads. Accuracy depends on resolution of 'timeit.default_timer()'. Always returns True on the first call for a given 'token'. Args: token: T...
def parse_headers(lines, offset=0): headers = {} for header_line in lines[offset:]: header_match = HEADER_LINE_RE.match(header_line) if header_match: key = header_match.group() key = re.sub(r, _unescape_header, key) if key not in headers: ...
Parse the headers in a STOMP response :param list(str) lines: the lines received in the message response :param int offset: the starting line number :rtype: dict(str,str)
def rebuild_cmaps(self): self.logger.info("building color maps image") ht, wd, sep = self._cmht, self._cmwd, self._cmsep viewer = self.p_view canvas = viewer.get_canvas() canvas.delete_all_objects() cm_names = self.cm_names num_cmaps =...
Builds a color RGB image containing color bars of all the possible color maps and their labels.
def create_branch(profile, name, branch_off):
    """Create a branch.

    Args:
        profile: A profile generated from ``simplygithub.authentication.profile``,
            giving the repo to connect to and the token to connect with.
        name: The name of the new branch.
        branch_off: The branch to fork from.

    Returns:
        The response data from the ref-creation call.
    """
    base_sha = get_branch_sha(profile, branch_off)
    return refs.create_ref(profile, "heads/" + name, base_sha)
Create a branch. Args: profile A profile generated from ``simplygithub.authentication.profile``. Such profiles tell this module (i) the ``repo`` to connect to, and (ii) the ``token`` to connect with. name The name of the new branch. branch_...
def get_slave_managers(self, as_coro=False):
    """Return all slave environment manager addresses.

    :param bool as_coro: If ``True`` returns an awaitable coroutine,
        otherwise the calls to the slave managers run asynchronously in
        the event loop.
    """
    async def _query(addr):
        # Connect to the remote environment and ask it for its managers.
        remote = await self.env.connect(addr)
        return await remote.get_slave_managers()

    return run_or_coro(create_tasks(_query, self.addrs), as_coro)
Return all slave environment manager addresses. :param bool as_coro: If ``True`` returns awaitable coroutine, otherwise runs the calls to the slave managers asynchoronously in the event loop. This method returns the addresses of the true slave environment managers, i.e....
def _do_submit_problems(self): try: while True: item = self._submission_queue.get() if item is None: break ready_problems = [item] while ...
Pull problems from the submission queue and submit them. Note: This method is always run inside of a daemon thread.
def _validate_user_inputs(self, attributes=None, event_tags=None): if attributes and not validator.are_attributes_valid(attributes): self.logger.error() self.error_handler.handle_error(exceptions.InvalidAttributeException(enums.Errors.INVALID_ATTRIBUTE_FORMAT)) return False if event_tag...
Helper method to validate user inputs. Args: attributes: Dict representing user attributes. event_tags: Dict representing metadata associated with an event. Returns: Boolean True if inputs are valid. False otherwise.
def _assert_keys_match(keys1, keys2): if set(keys1) != set(keys2): raise ValueError(.format(list(keys1), list(keys2)))
Ensure the two list of keys matches.
def relocated_grid_from_grid_jit(grid, border_grid): border_origin = np.zeros(2) border_origin[0] = np.mean(border_grid[:, 0]) border_origin[1] = np.mean(border_grid[:, 1]) border_grid_radii = np.sqrt(np.add(np.square(np.subtract(border_grid[:, 0], border_origin[0])), ...
Relocate the coordinates of a grid to its border if they are outside the border. This is performed as \ follows: 1) Use the mean value of the grid's y and x coordinates to determine the origin of the grid. 2) Compute the radial distance of every grid coordinate from the origin. 3) For e...
def AAA(cpu):
    """ASCII adjust after addition.

    Adjusts the sum of two unpacked BCD values (implied operand AL) to
    create an unpacked BCD result, updating AF/CF and incrementing AH
    when an adjustment is needed.
    """
    # Adjust when the low nibble of AL exceeds 9, or AF was already set.
    cpu.AF = Operators.OR((cpu.AL & 0x0F) > 9, cpu.AF)
    cpu.CF = cpu.AF
    # Symbolic if-then-else on 8-bit values: AH += 1 and AL += 6 when adjusting.
    cpu.AH = Operators.ITEBV(8, cpu.AF, cpu.AH + 1, cpu.AH)
    cpu.AL = Operators.ITEBV(8, cpu.AF, cpu.AL + 6, cpu.AL)
    # The result is an unpacked BCD digit: keep only the low nibble.
    cpu.AL = cpu.AL & 0x0f
ASCII adjust after addition. Adjusts the sum of two unpacked BCD values to create an unpacked BCD result. The AL register is the implied source and destination operand for this instruction. The AAA instruction is only useful when it follows an ADD instruction that adds (binary addition)...
def _update(self, rect, delta_y, force_update_margins=False): helper = TextHelper(self.editor) if not self: return for zones_id, zone in self._panels.items(): if zones_id == Panel.Position.TOP or \ zones_id == Panel.Position.BOTTOM: ...
Updates panels
def embedManifestDllCheck(target, source, env): if env.get(, 0): manifestSrc = target[0].get_abspath() + if os.path.exists(manifestSrc): ret = (embedManifestDllAction) ([target[0]],None,env) if ret: raise SCons.Errors.UserError("Unable to embed m...
Function run by embedManifestDllCheckAction to check for existence of manifest and other conditions, and embed the manifest by calling embedManifestDllAction if so.
def authenticate(self, code: str) -> : headers = self._get_authorization_headers() data = { : , : code } r = self.session.post(self.TOKEN_URL, headers=headers, data=data) if not r.status_code == 200: raise Exception(f) new_kwar...
Authenticates using the code from the EVE SSO. A new Preston object is returned; this object is not modified. The intended usage is: auth = preston.authenticate('some_code_here') Args: code: SSO code Returns: new Preston, authenticated
def send(self, subname, delta): ms = delta * 1000 if ms > self.min_send_threshold: name = self._get_name(self.name, subname) self.logger.info(, name, ms) return statsd.Client._send(self, {name: % ms}) else: return True
Send the data to statsd via self.connection :keyword subname: The subname to report the data to (appended to the client name) :type subname: str :keyword delta: The time delta (time.time() - time.time()) to report :type delta: float
def run_tree_inference(self, nexus, idx): tmpdir = tempfile.tempdir tmpfile = os.path.join(tempfile.NamedTemporaryFile( delete=False, prefix=str(idx), dir=tmpdir, )) tmpfile.write(nexus) tmpfile.flush() ...
Write nexus to tmpfile, runs phyml tree inference, and parses and returns the resulting tree.
def get(self, key, default=None):
    """Get item from object for given key (DataFrame column, Panel
    slice, etc.). Returns default value if not found.

    Parameters
    ----------
    key : object

    Returns
    -------
    value : same type as items contained in object
    """
    lookup_errors = (KeyError, ValueError, IndexError)
    try:
        return self[key]
    except lookup_errors:
        return default
Get item from object for given key (DataFrame column, Panel slice, etc.). Returns default value if not found. Parameters ---------- key : object Returns ------- value : same type as items contained in object
def open(self): if self._table_exists(): self.mode = "open" self._get_table_info() self.types = dict([ (f[0],self.conv_func[f[1].upper()]) for f in self.fields if f[1].upper() in self.conv_func ]) return self ...
Open an existing database
def _get_bgzip_version(exe): p = subprocess.Popen([exe, "-h"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) output = p.communicate() version_line = output[0].splitlines()[1] version = re.match(r"(?:Version:|bgzip \(htslib\))\s+(\d+\.\d+(\.\d+)?)", version_line).group(1...
return bgzip version as string
def p_debug(self, message): "Format and print debug messages" print("{}{} `{}`".format(self._debug_indent * " ", message, repr(self.p_suffix(10))))
Format and print debug messages
def set_override_rendered(self): if in self.request.accept: self.request.override_renderer = self._default_renderer elif in self.request.accept: self.request.override_renderer = elif in self.request.accept: self.request.override_renderer =
Set self.request.override_renderer if needed.
def breaks(self, frame, no_remove=False): for breakpoint in set(self.breakpoints): if breakpoint.breaks(frame): if breakpoint.temporary and not no_remove: self.breakpoints.remove(breakpoint) return True return False
Return True if there's a breakpoint at frame
def process_fastq_plain(fastq, **kwargs): logging.info("Nanoget: Starting to collect statistics from plain fastq file.") inputfastq = handle_compressed_input(fastq) return ut.reduce_memory_usage(pd.DataFrame( data=[res for res in extract_from_fastq(inputfastq) if res], columns=["quals",...
Combine metrics extracted from a fastq file.
def pause(self) -> None: with self.__state_lock: if self.__state == DataChannelBuffer.State.started: self.__state = DataChannelBuffer.State.paused
Pause recording. Thread safe and UI safe.
def get_product_target_mappings_for_targets(self, targets): product_target_mappings = [] for target in targets: for product in self._products_by_target[target]: product_target_mappings.append((product, target)) return product_target_mappings
Gets the product-target associations for the given targets, preserving the input order. :API: public :param targets: The targets to lookup products for. :returns: The ordered (product, target) tuples.
def precision(y, y_pred): tp = true_positives(y, y_pred) fp = false_positives(y, y_pred) return tp / (tp + fp)
Precision score precision = true_positives / (true_positives + false_positives) Parameters: ----------- y : vector, shape (n_samples,) The target labels. y_pred : vector, shape (n_samples,) The predicted labels. Returns: -------- precision : float
def _prepare_variables(self): self._moving_averager = tf.train.ExponentialMovingAverage( decay=self._beta, zero_debias=self._zero_debias) prepare_variables_op = [] self._grad_squared = [] self._grad_norm_squared = [] for v, g in zip(self._vars, self._grad): i...
Prepare Variables for YellowFin. Returns: Grad**2, Norm, Norm**2, Mean(Norm**2) ops
def get_user_brief(): client = get_user_api() with catch_raise_api_exception(): data, _, headers = client.user_self_with_http_info() ratelimits.maybe_rate_limit(client, headers) return data.authenticated, data.slug, data.email, data.name
Retrieve brief for current user (if any).
def remove_likely_non_central(self, candidates): if len(candidates) > 1: for unlikely in [ "test", "tests", "example", "examples", "demo", "demos", "test_files", ...
Stuff that is likely to be in find_packages(exclude...) :param candidates: :return:
async def get_cred_infos_by_filter(self, filt: dict = None) -> str: LOGGER.debug(, filt) if not self.wallet.handle: LOGGER.debug(, self.name) raise WalletState(.format(self.name)) rv_json = await anoncreds.prover_get_credentials(self.wallet.handle, json.dumps(...
Return cred-info (json list) from wallet by input filter for schema identifier and/or credential definition identifier components; return info of all credentials for no filter. Raise WalletState if the wallet is closed. :param filt: indy-sdk filter for credentials; i.e., :: ...
def update_command(self, command, args=None): if command is None: self._command = lambda: None else: if args is None: self._command = command else: self._command = utils.with_args(command, *args)
Updates the callback command which is called when the ButtonGroup changes. Setting to `None` stops the callback. :param callback command: The callback function to call. :param callback args: A list of arguments to pass to the widgets `command`, defaults to ...
def read(self, istream, kmip_version=enums.KMIPVersion.KMIP_1_0): super(RevokeResponsePayload, self).read( istream, kmip_version=kmip_version ) tstream = BytearrayStream(istream.read(self.length)) self.unique_identifier = attributes.UniqueIdentifier() ...
Read the data encoding the RevokeResponsePayload object and decode it into its constituent parts. Args: istream (Stream): A data stream containing encoded object data, supporting a read method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enume...
def pks(self): if self._primary_keys is None: self._primary_keys = list( self.queryset.values_list(, flat=True)) return self._primary_keys
Lazy-load the primary keys.
def is_entailed_by(self, other): other = self.coerce(other) to_i = self.to_i return to_i(other.low) >= to_i(self.low) and \ to_i(other.high) <= to_i(self.high)
Other is more specific than self. Other is bounded within self.
def run(self): while True: to_send = self._queue.get() if to_send is _SHUTDOWN: break dest = (self._gateway.addr, self._gateway.port) sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) ...
Process all outgoing packets, until `stop()` is called. Intended to run in its own thread.