code
stringlengths
26
79.6k
docstring
stringlengths
1
46.9k
def add_disk_encryption_passwords(self, ids, passwords, clear_on_suspend): if not isinstance(ids, list): raise TypeError("ids can only be an instance of type list") for a in ids[:10]: if not isinstance(a, basestring): raise TypeError( ...
Adds a password used for hard disk encryption/decryption. in ids of type str List of identifiers for the passwords. Must match the identifier used when the encrypted medium was created. in passwords of type str List of passwords. in clear_on_suspend of type...
def cli(ctx, env): env.out("Welcome to the SoftLayer shell.") env.out("") formatter = formatting.HelpFormatter() commands = [] shell_commands = [] for name in cli_core.cli.list_commands(ctx): command = cli_core.cli.get_command(ctx, name) if command.short_help is None: ...
Print shell help text.
def find_repositories_with_locate(path): command = [b, b] for dotdir in DOTDIRS: command.append(br % (escape(path), escape(dotdir))) command.append(br % (escape(path), escape(dotdir))) try: paths = check_output(command).strip(b).split(b) e...
Use locate to return a sequence of (directory, dotdir) pairs.
def div(self, key, value=2): return uwsgi.cache_mul(key, value, self.timeout, self.name)
Divides the specified key value by the specified value. :param str|unicode key: :param int value: :rtype: bool
def post(self, path, data=None, json=None, headers=None, **kwargs): if headers is not None: merger = jsonmerge.Merger(SCHEMA) kwargs["headers"] = merger.merge(self.defaultHeaders, headers) else: kwargs["headers"] = self.defaultHeaders url = combine_...
Sends a POST request to host/path. :param path: String, resource path on server :param data: Dictionary, bytes or file-like object to send in the body of the request :param json: JSON formatted data to send in the body of the request :param headers: Dictionary of HTTP headers to be sent...
def _get_server(self): with self._lock: inactive_server_count = len(self._inactive_servers) for i in range(inactive_server_count): try: ts, server, message = heapq.heappop(self._inactive_servers) except IndexError: ...
Get server to use for request. Also process inactive server list, re-add them after given interval.
def pyobj_role(make_node, name, rawtext, text, lineno, inliner, options=None, content=None): if options is None: options = {} if content is None: content = [] try: prefixed_name, obj, parent, modname = import_by_name(text) except ImportError: msg = inliner.reporter...
Include Python object value, rendering it to text using str. Returns 2 part tuple containing list of nodes to insert into the document and a list of system messages. Both are allowed to be empty. :param make_node: A callable which accepts (rawtext, app, prefixed_name, obj, parent, modname, options) a...
def read_csv(self, file: str, table: str = , libref: str = , results: str = , opts: dict = None) -> : opts = opts if opts is not None else {} if results == : results = self.results self._io.read_csv(file, table, libref, self.nosub, opts) if self.e...
:param file: either the OS filesystem path of the file, or HTTP://... for a url accessible file :param table: the name of the SAS Data Set to create :param libref: the libref for the SAS Data Set being created. Defaults to WORK, or USER if assigned :param results: format of results, SASsession.r...
def get_sentence_xpath_tuples(filename_url_or_filelike, xpath_to_text=TEXT_FINDER_XPATH): parsed_html = get_html_tree(filename_url_or_filelike) try: xpath_finder = parsed_html.getroot().getroottree().getpath except(AttributeError): xpath_finder =...
Given a url and xpath, this function will download, parse, then iterate though queried text-nodes. From the resulting text-nodes, extract a list of (text, exact-xpath) tuples.
def validate(self): if not isinstance(self.fold_scope_location, FoldScopeLocation): raise TypeError(u u.format(type(self.fold_scope_location), self.fold_scope_location))
Ensure the Fold block is valid.
def close_session(self): if not self._session.closed: if self._session._connector_owner: self._session._connector.close() self._session._connector = None
Close current session.
def update_filenames(self): self.sky_file = os.path.abspath(os.path.join(os.path.join(self.input_path, ), + self.sky_state + + str( self.sky_zenith) + + str( ...
Does nothing currently. May not need this method
def detranslify(text): try: res = translit.detranslify(text) except Exception as err: res = default_value % {: err, : text} return res
Detranslify russian text
def score(self): "The total score for the words found, according to the rules." return sum([self.scores[len(w)] for w in self.words()])
The total score for the words found, according to the rules.
def eval_option_value(self, option): try: value = eval(option, {}, {}) except (SyntaxError, NameError, TypeError): return option if type(value) in (str, bool, int, float): return value elif type(value) in (list, tuple): for v in va...
Evaluates an option :param option: a string :return: an object of type str, bool, int, float or list
def login(self, **kwargs): payload = { : self.username, : self.password, } headers = kwargs.setdefault(, {}) headers.setdefault( , ) url = response = self.request(url, , json=payload, **kwargs) r_j...
登录
def object(self, object): if object is None: raise ValueError("Invalid value for `object`, must not be `None`") allowed_values = ["service-package-quota-history"] if object not in allowed_values: raise ValueError( "Invalid value for `object` ({0})...
Sets the object of this ServicePackageQuotaHistoryResponse. Always set to 'service-package-quota-history'. :param object: The object of this ServicePackageQuotaHistoryResponse. :type: str
def _train_model( self, train_data, loss_fn, valid_data=None, log_writer=None, restore_state={} ): self.train() train_config = self.config["train_config"] train_loader = self._create_data_loader(train_data) valid_loader = self._create_data_loader(v...
The internal training routine called by train_model() after setup Args: train_data: a tuple of Tensors (X,Y), a Dataset, or a DataLoader of X (data) and Y (labels) for the train split loss_fn: the loss function to minimize (maps *data -> loss) valid_data: a t...
def get_object_reference(obj: Object) -> str: resource_name = obj.title if resource_name is None: class_name = obj.__name__ resource_name = class_name_to_resource_name(class_name) ALL_RESOURCES[resource_name] = obj return .format( .join(resource_name.split()).lower().strip()...
Gets an object reference string from the obj instance. This adds the object type to ALL_RESOURCES so that it gets documented and returns a str which contains a sphinx reference to the documented object. :param obj: The Object instance. :returns: A sphinx docs reference str.
def get_mouse_pos(self, window_pos=None): window_pos = window_pos or pygame.mouse.get_pos() window_pt = point.Point(*window_pos) + 0.5 for surf in reversed(self._surfaces): if (surf.surf_type != SurfType.CHROME and surf.surf_rect.contains_point(window_pt)): surf_rel_pt = wi...
Return a MousePos filled with the world position and surf it hit.
def write_numeric_array(fd, header, array): bd = BytesIO() write_var_header(bd, header) if not isinstance(array, basestring) and header[][0] > 1: array = list(chain.from_iterable(izip(*array))) write_elements(bd, header[], array) data = bd.getvalue() ...
Write the numeric array
def require_at_least_one_query_parameter(*query_parameter_names): def outer_wrapper(view): @wraps(view) def wrapper(request, *args, **kwargs): requirement_satisfied = False for query_parameter_name in query_parameter_names: query_par...
Ensure at least one of the specified query parameters are included in the request. This decorator checks for the existence of at least one of the specified query parameters and passes the values as function parameters to the decorated view. If none of the specified query parameters are included in the requ...
def count_(self): try: num = len(self.df.index) except Exception as e: self.err(e, "Can not count data") return return num
Returns the number of rows of the main dataframe
def attach_volume_to_device(self, volume_id, device_id): try: volume = self.manager.get_volume(volume_id) volume.attach(device_id) except packet.baseapi.Error as msg: raise PacketManagerException(msg) return volume
Attaches the created Volume to a Device.
def mock_attr(self, *args, **kwargs): self.path.extend(args) self.qs.update(kwargs) return self
Empty method to call to slurp up args and kwargs. `args` get pushed onto the url path. `kwargs` are converted to a query string and appended to the URL.
def open_handle(self, dwDesiredAccess = win32.THREAD_ALL_ACCESS): hThread = win32.OpenThread(dwDesiredAccess, win32.FALSE, self.dwThreadId) self.close_handle() self.hThread = hThread
Opens a new handle to the thread, closing the previous one. The new handle is stored in the L{hThread} property. @warn: Normally you should call L{get_handle} instead, since it's much "smarter" and tries to reuse handles and merge access rights. @type dwDesiredAccess: int ...
async def nextset(self): conn = self._get_db() current_result = self._result if current_result is None or current_result is not conn._result: return if not current_result.has_next: return self._result = None self._clear_result() aw...
Get the next query set
def _set_output_arguments(self): group = self.parser.add_argument_group() group.add_argument(, , type=argparse.FileType(), dest=, default=sys.stdout, help="output file") group.add_argument(, dest=, action=, ...
Activate output arguments parsing
def savorSessionCookie(self, request): cookieValue = request.getSession().uid request.addCookie( self.cookieKey, cookieValue, path=, max_age=PERSISTENT_SESSION_LIFETIME, domain=self.cookieDomainForRequest(request))
Make the session cookie last as long as the persistent session. @type request: L{nevow.inevow.IRequest} @param request: The HTTP request object for the guard login URL.
def ContextTupleToDict(context): d = {} if not context: return d for k, v in zip(ExceptionWithContext.CONTEXT_PARTS, context): if v != and v != None: d[k] = v return d
Convert a tuple representing a context into a dict of (key, value) pairs
def churn_rate(user, summary=, **kwargs): if len(user.records) == 0: return statistics([], summary=summary) query = { : , : OrderedDict([ (, []), (, []) ]), : , : True, : True } rv = grouping_query(user, query) we...
Computes the frequency spent at every towers each week, and returns the distribution of the cosine similarity between two consecutives week. .. note:: The churn rate is always computed between pairs of weeks.
def get_branch_info(self): branch_info = None if os.path.exists(constants.cached_branch_info): logger.debug(u) ctime = datetime.utcfromtimestamp( os.path.getctime(constants.cached_branch_info)) if datetime.utcnow() < ...
Retrieve branch_info from Satellite Server
def apply_perturbations(i, j, X, increase, theta, clip_min, clip_max): warnings.warn( "This function is dead code and will be removed on or after 2019-07-18") if increase: X[0, i] = np.minimum(clip_max, X[0, i] + theta) X[0, j] = np.minimum(clip_max, X[0, j] + theta) else: X[0, i] = np.ma...
TensorFlow implementation for apply perturbations to input features based on salency maps :param i: index of first selected feature :param j: index of second selected feature :param X: a matrix containing our input features for our sample :param increase: boolean; true if we are increasing pixels, false other...
def _get_media(media_types): get_mapped_media = (lambda x: maps.VIRTUAL_MEDIA_TYPES_MAP[x] if x in maps.VIRTUAL_MEDIA_TYPES_MAP else None) return list(map(get_mapped_media, media_types))
Helper method to map the media types.
def __get_zero_seq_indexes(self, message: str, following_zeros: int): result = [] if following_zeros > len(message): return result zero_counter = 0 for i in range(0, len(message)): if message[i] == "0": zero_counter += 1 else...
:rtype: list[tuple of int]
def inner(tensor0: BKTensor, tensor1: BKTensor) -> BKTensor: return np.vdot(tensor0, tensor1)
Return the inner product between two tensors
def get_volume_options(volumes): if not isinstance(volumes, list): volumes = [volumes] volumes = [Volume.create_from_tuple(v) for v in volumes] result = [] for v in volumes: result += ["-v", str(v)] return result
Generates volume options to run methods. :param volumes: tuple or list of tuples in form target x source,target x source,target,mode. :return: list of the form ["-v", "/source:/target", "-v", "/other/source:/destination:z", ...]
def compute_elementary_effects(model_inputs, model_outputs, trajectory_size, delta): num_vars = model_inputs.shape[1] num_rows = model_inputs.shape[0] num_trajectories = int(num_rows / trajectory_size) ee = np.zeros((num_trajectories, num_vars), dtype=np.float) ...
Arguments --------- model_inputs : matrix of inputs to the model under analysis. x-by-r where x is the number of variables and r is the number of rows (a function of x and num_trajectories) model_outputs an r-length vector of model outputs trajectory_size a scalar indicat...
def mergeGraphs(targetGraph, addedGraph, incrementedNodeVal = , incrementedEdgeVal = ): for addedNode, attribs in addedGraph.nodes(data = True): if incrementedNodeVal: try: targetGraph.node[addedNode][incrementedNodeVal] += attribs[incrementedNodeVal] except Key...
A quick way of merging graphs, this is meant to be quick and is only intended for graphs generated by metaknowledge. This does not check anything and as such may cause unexpected results if the source and target were not generated by the same method. **mergeGraphs**() will **modify** _targetGraph_ in place by addi...
def deliver_tx(self, raw_transaction): self.abort_if_abci_chain_is_not_synced() logger.debug(, raw_transaction) transaction = self.bigchaindb.is_valid_transaction( decode_transaction(raw_transaction), self.block_transactions) if not transaction: logger...
Validate the transaction before mutating the state. Args: raw_tx: a raw string (in bytes) transaction.
def deepgetattr(obj, attr, default=AttributeError): try: return reduce(getattr, attr.split(), obj) except AttributeError: if default is not AttributeError: return default raise
Recurses through an attribute chain to get the ultimate value (obj/data/member/value) from: http://pingfive.typepad.com/blog/2010/04/deep-getattr-python-function.html >>> class Universe(object): ... def __init__(self, galaxy): ... self.galaxy = galaxy ... >>> class Galaxy(objec...
def run_solr_text_on(solrInstance, category, q, qf, fields, optionals): if optionals == None: optionals = "" query = solrInstance.value + "select?q=" + q + "&qf=" + qf + "&fq=document_category:\"" + category.value + "\"&fl=" + fields + "&wt=json&indent=on" + optionals response = requests....
Return the result of a solr query on the given solrInstance (Enum ESOLR), for a certain document_category (ESOLRDoc) and id
def radec2azel(ra_deg: float, dec_deg: float, lat_deg: float, lon_deg: float, time: datetime, usevallado: bool = False) -> Tuple[float, float]: if usevallado or Time is None: return vradec2azel(ra_deg, dec_deg, lat_deg, lon_deg, time) lat = np.atleast_1d(lat_deg) ...
sky coordinates (ra, dec) to viewing angle (az, el) Parameters ---------- ra_deg : float or numpy.ndarray of float ecliptic right ascension (degress) dec_deg : float or numpy.ndarray of float ecliptic declination (degrees) lat_deg : float observer latitude [-90, 90] ...
def _simulate_coef_from_bootstraps( self, n_draws, coef_bootstraps, cov_bootstraps): random_bootstrap_indices = np.random.choice( np.arange(len(coef_bootstraps)), size=n_draws, replace=True) ...
Simulate coefficients using bootstrap samples.
def run_shell_command(commands, **kwargs): p = subprocess.Popen(commands, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs) output, error = p.communicate() return p.returncode, output, error
Run a shell command.
def db_create(name, user=None, host=None, port=None, maintenance_db=None, password=None, tablespace=None, encoding=None, lc_collate=None, lc_ctype=None, owner=None, t...
Adds a databases to the Postgres server. CLI Example: .. code-block:: bash salt '*' postgres.db_create 'dbname' salt '*' postgres.db_create 'dbname' template=template_postgis
def addcomment(self, comment, private=False): vals = self.bugzilla.build_update(comment=comment, comment_private=private) log.debug("addcomment: update=%s", vals) return self.bugzilla.update_bugs(self.bug_id, vals)
Add the given comment to this bug. Set private to True to mark this comment as private.
def _safe_dump(data): custom_dumper = __utils__[]() def boto_listelement_presenter(dumper, data): return dumper.represent_list(list(data)) yaml.add_representer(boto.ec2.cloudwatch.listelement.ListElement, boto_listelement_presenter, Dumper=cus...
this presenter magic makes yaml.safe_dump work with the objects returned from boto.describe_alarms()
def remove_tmp_prefix_from_filename(filename): if not filename.startswith(dju_settings.DJU_IMG_UPLOAD_TMP_PREFIX): raise RuntimeError(ERROR_MESSAGES[] % {: filename}) return filename[len(dju_settings.DJU_IMG_UPLOAD_TMP_PREFIX):]
Remove tmp prefix from filename.
def from_ZNM(cls, Z, N, M, name=): df = pd.DataFrame.from_dict({: Z, : N, : M}).set_index([, ])[] df.name = name return cls(df=df, name=name)
Creates a table from arrays Z, N and M Example: ________ >>> Z = [82, 82, 83] >>> N = [126, 127, 130] >>> M = [-21.34, -18.0, -14.45] >>> Table.from_ZNM(Z, N, M, name='Custom Table') Z N 82 126 -21.34 127 -18.00 83 130 -14.4...
def p_plus_assignment(self, t): self.accu.add(Term(, [self.name,"gen(\""+t[1]+"\")","1"]))
plus_assignment : IDENT EQ PLUS
def url(self, pattern, method=None, type_cast=None): if not type_cast: type_cast = {} def decorator(function): self.add(pattern, function, method, type_cast) return function return decorator
Decorator for registering a path pattern. Args: pattern (str): Regex pattern to match a certain path method (str, optional): Usually used to define one of GET, POST, PUT, DELETE. You may use whatever fits your situation though. Defaults to None. ...
def load_table_from_config(input_dir, config): path = pathlib.Path(input_dir).joinpath(config[]) kwargs = config[] return pd.read_csv(path, **kwargs)
Load table from table config dict Args: input_dir (path-like): directory containing input files config (dict): mapping with keys 'name', 'path', and 'pd_read_kwargs'. Returns: pd.DataFrame
def _get_k(self): if not self.ready: self.k.create() self.ready = True return self.k
Accessing self.k indirectly allows for creating the kvstore table if necessary.
def has_project_permissions(user: , project: , request_method: str) -> bool: if user.is_staff or user.is_superuser or project.user == user: return True return request_method in permissions.SAFE_METHODS and project.is_public
This logic is extracted here to be used also with Sanic api.
def divide(self, phi1, inplace=True): phi = self if inplace else self.copy() phi1 = phi1.copy() if set(phi1.variables) - set(phi.variables): raise ValueError("Scope of divisor should be a subset of dividend") extra_vars = set(phi.variables) - set(phi1.vari...
DiscreteFactor division by `phi1`. Parameters ---------- phi1 : `DiscreteFactor` instance The denominator for division. inplace: boolean If inplace=True it will modify the factor itself, else would return a new factor. Returns ------...
def get_archive(self, archive_name, default_version=None): auth, archive_name = self._normalize_archive_name(archive_name) res = self.manager.get_archive(archive_name) if default_version is None: default_version = self._default_versions.get(archive_name, None) if...
Retrieve a data archive Parameters ---------- archive_name: str Name of the archive to retrieve default_version: version str or :py:class:`~distutils.StrictVersion` giving the default version number to be used on read operations Returns ...
def walk(self): if self.verbose > 1: print_( + self._id + ) phi = self.phi theta = self.walk_theta u = random(len(phi)) z = (theta / (1 + theta)) * (theta * u ** 2 + 2 * u - 1) if self._prime: xp, x = self.values else...
Walk proposal kernel
def from_string(cls, string, relpath=None, encoding=None, is_sass=None): if isinstance(string, six.text_type): if encoding is None: encoding = determine_encoding(string) byte_contents = string.encode(encoding) ...
Read Sass source from the contents of a string. The origin is always None. `relpath` defaults to "string:...".
def abbreviations(text): return PreProcessorRegex( search_args=symbols.ABBREVIATIONS, search_func=lambda x: r"(?<={})(?=\.).".format(x), repl=, flags=re.IGNORECASE).run(text)
Remove periods after an abbreviation from a list of known abbrevations that can be spoken the same without that period. This prevents having to handle tokenization of that period. Note: Could potentially remove the ending period of a sentence. Note: Abbreviations that Google Translate ...
def isDiurnal(self): sun = self.getObject(const.SUN) mc = self.getAngle(const.MC) lat = self.pos.lat sunRA, sunDecl = utils.eqCoords(sun.lon, sun.lat) mcRA, mcDecl = utils.eqCoords(mc.lon, 0) return utils.isAboveHorizon(sunRA, sunDecl, ...
Returns true if this chart is diurnal.
def service_highstate(requires=True): ret = {} running = running_service_owners() for service in running: ret[service] = {: []} if requires: ret[service][].append( {: {: running[service]}} ) enabled = enabled_service_owners() for service...
Return running and enabled services in a highstate structure. By default also returns package dependencies for those services, which means that package definitions must be created outside this function. To drop the package dependencies, set ``requires`` to False. CLI Example: salt myminion int...
def get_filters(cls, raw_filters, num_cols, columns_dict): filters = None for flt in raw_filters: col = flt.get() op = flt.get() eq = flt.get() if ( not col or not op or (eq is None and...
Given Superset filter data structure, returns pydruid Filter(s)
def get_model_from_path_string(root_model, path): for path_section in path.split(): if path_section: try: field, model, direct, m2m = _get_field_by_name(root_model, path_section) except FieldDoesNotExist: return root_model if direct: ...
Return a model class for a related model root_model is the class of the initial model path is like foo__bar where bar is related to foo
def calc_steady_state_dist(R): w, v = np.linalg.eig(R) for i in range(4): if np.abs(w[i] - 1) < 1e-8: return np.real(v[:, i] / np.sum(v[:, i])) return -1
Calculate the steady state dist of a 4 state markov transition matrix. Parameters ---------- R : ndarray Markov transition matrix Returns ------- p_ss : ndarray Steady state probability distribution
def logger(name=None, save=False): logger = logging.getLogger(name) if save: logformat = log_file_path = open(log_file_path, ).write() logger.setLevel(logging.DEBUG) logger_handler = logging.FileHandler(log_file_path) logger_handler.setFormatter(loggin...
Init and configure logger.
def find_usage(self): logger.debug("Checking usage for service %s", self.service_name) self.connect() for lim in self.limits.values(): lim._reset_usage() self._find_usage_nodes() self._find_usage_subnet_groups() self._find_usage_parameter_groups() ...
Determine the current usage for each limit of this service, and update corresponding Limit via :py:meth:`~.AwsLimit._add_current_usage`.
def parse_raxml(handle): s = .join(handle.readlines()) result = {} try_set_fields(result, r, s) try_set_fields(result, r, s) result[] = ( result[] != or re.search(, s, re.IGNORECASE) is not None) try_set_fields(result, r, s) rates = {} if result[] != : try_s...
Parse RAxML's summary output. *handle* should be an open file handle containing the RAxML output. It is parsed and a dictionary returned.
def current_changed(self, i): m = self.model() ri = self.rootModelIndex() index = m.index(i, 0, ri) self.new_root.emit(index)
Slot for when the current index changes. Emits the :data:`AbstractLevel.new_root` signal. :param index: the new current index :type index: int :returns: None :rtype: None :raises: None
def line(self, serie, rescale=False): serie_node = self.svg.serie(serie) if rescale and self.secondary_series: points = self._rescale(serie.points) else: points = serie.points view_values = list(map(self.view, points)) if serie.show_dots: ...
Draw the line serie
def as_dict(self, cache=None, fetch=True): if not self._fetched and fetch: info = self.fetch(cache) elif self._use_cache(cache): info = self._attrs.copy() else: info = {} info.update(url=self.url) return info
Return torrent properties as a dictionary. Set the cache flag to False to disable the cache. On the other hand, set the fetch flag to False to avoid fetching data if it's not cached.
def create_ui(self): builder = gtk.Builder() glade_str = pkgutil.get_data(__name__, ) builder.add_from_string(glade_str) self.window = builder.get_object() self.vbox_form = builder.get_object() if self.title...
.. versionchanged:: 0.21.2 Load the builder configuration file using :func:`pkgutil.getdata`, which supports loading from `.zip` archives (e.g., in an app packaged with Py2Exe).
def which(cmd): def is_exe(fp): return os.path.isfile(fp) and os.access(fp, os.X_OK) fpath, fname = os.path.split(cmd) if fpath: if is_exe(cmd): return cmd else: for path in os.environ["PATH"].split(os.pathsep): exe_file = os.path.join(path, cmd) ...
Returns full path to a executable. Args: cmd (str): Executable command to search for. Returns: (str) Full path to command. None if it is not found. Example:: full_path_to_python = which("python")
def find_n50(contig_lengths_dict, genome_length_dict): n50_dict = dict() for file_name, contig_lengths in contig_lengths_dict.items(): currentlength = 0 for contig_length in contig_lengths: currentlength += contig_length if cur...
Calculate the N50 for each strain. N50 is defined as the largest contig such that at least half of the total genome size is contained in contigs equal to or larger than this contig :param contig_lengths_dict: dictionary of strain name: reverse-sorted list of all contig lengths :param genome_length_dict: dic...
def transform(self, maps): out = {} out[parameters.mass1] = conversions.mass1_from_mchirp_eta( maps[parameters.mchirp], maps[parameters.eta]) out[parameters.mass2] = conversions.mass2_from_mc...
This function transforms from chirp mass and symmetric mass ratio to component masses. Parameters ---------- maps : a mapping object Examples -------- Convert a dict of numpy.array: >>> import numpy >>> from pycbc import transforms >>> t...
def splitterfields(data, commdct): objkey = "Connector:Splitter".upper() fieldlists = splittermixerfieldlists(data, commdct, objkey) return extractfields(data, commdct, objkey, fieldlists)
get splitter fields to diagram it
def report(*arrays, **kwargs): name = kwargs.pop("name",None) grouped = len(arrays) > 1 if grouped: arr = N.concatenate(arrays) components = [PCAOrientation(a) for a in arrays] else: arr = arrays[0] components = [] pca = PCAOrientation(arr) ...
Outputs a standalone HTML 'report card' for a measurement (or several grouped measurements), including relevant statistical information.
def get_timer(self, name=None): return self.get_client(name=name, class_=statsd.Timer)
Shortcut for getting a :class:`~statsd.timer.Timer` instance :keyword name: See :func:`~statsd.client.Client.get_client` :type name: str
def write_to_conll_eval_file(prediction_file: TextIO, gold_file: TextIO, verb_index: Optional[int], sentence: List[str], prediction: List[str], gold_labels: List[str]): ...
Prints predicate argument predictions and gold labels for a single verbal predicate in a sentence to two provided file references. Parameters ---------- prediction_file : TextIO, required. A file reference to print predictions to. gold_file : TextIO, required. A file reference to pr...
def _extract_properties(config): general, options, sets = {}, {}, {} for line in config.splitlines(): if not line or not line[-1:] == and not in line: continue line = line[:-1].lstrip() if line[:6] == : key, value = _extract_pr...
Parse a line within a lease block The line should basically match the expression: >>> r"\s+(?P<key>(?:option|set)\s+\S+|\S+) (?P<value>[\s\S]+?);" For easier seperation of the cases and faster parsing this is done using substrings etc.. :param config: :return: tuple of properties dict, options dict ...
def find(self, name): result = None for t in self.array: if str(t) == name: result = Tag(t.jobject) break return result
Returns the Tag that matches the name. :param name: the string representation of the tag :type name: str :return: the tag, None if not found :rtype: Tag
def find_kernel_specs(self): specs = self.find_kernel_specs_for_envs() specs.update(super(EnvironmentKernelSpecManager, self).find_kernel_specs()) return specs
Returns a dict mapping kernel names to resource directories.
def import_module(name, package=None): if name.startswith(): if not package: raise TypeError("relative imports require the argument") level = 0 for character in name: if character != : break level += 1 name = _resolve_name(nam...
Import a module. The 'package' argument is required when performing a relative import. It specifies the package to use as the anchor point from which to resolve the relative import to an absolute import.
def url(self): scheme = self.scheme host = self.host path = self.path query = self.query port = self.port host_domain, host_port = Url.split_hostname_from_port(host) if host_port: port = host_port controller_path = "" ...
return the full request url as an Url() instance
def converter_pm_log10(data): indices_gt_zero = np.where(data > 0) indices_lt_zero = np.where(data < 0) data_converted = np.zeros(data.shape) data_converted[indices_gt_zero] = np.log10(data[indices_gt_zero]) data_converted[indices_lt_zero] = -np.log10(-data[indices_lt_zero]) return in...
Convert the given data to: log10(subdata) for subdata > 0 log10(-subdata') for subdata' < 0 0 for subdata'' == 0 Parameters ---------- data: array input data Returns ------- array_converted: array converted data
def is_uniform_join_units(join_units): return ( all(type(ju.block) is type(join_units[0].block) for ju in join_units) and all(not ju.is_na or ju.block.is_extension for ju in join_units) and all(not ju.indexers for ju in join_units) and ...
Check if the join units consist of blocks of uniform type that can be concatenated using Block.concat_same_type instead of the generic concatenate_join_units (which uses `_concat._concat_compat`).
def get_mutation_rates(transcripts, mut_dict, ensembl): rates = {: 0, : 0, : 0, : 0, : 0} combined = None for tx_id in transcripts: try: tx = construct_gene_object(ensembl, tx_id) except ValueError: continue if len(tx.get_cds_se...
determines mutation rates per functional category for transcripts Args: transcripts: list of transcript IDs for a gene mut_dict: dictionary of local sequence context mutation rates ensembl: EnsemblRequest object, to retrieve information from Ensembl. Returns: tuple of (...
def run(self): try: self.job_state = time.sleep(1) image = cmd = name = .format(self.job_name, self.job_id) self.job_state = time.sleep(1) self.job_state = container = self.d...
Run the container. Logic is as follows: Generate container info (eventually from task definition) Start container Loop whilst not asked to stop and the container is running. Get all logs from container between the last time I checked and now. Convert logs into cloudwat...
def _acquire_lock(self, identifier, atime=30, ltime=5): conn = redis.Redis(connection_pool=self.pool) end = time.time() + atime while end > time.time(): if conn.set(self._lock_name, identifier, ex=ltime, nx=True): return identifier ...
Acquire a lock for a given identifier. If the lock cannot be obtained immediately, keep trying at random intervals, up to 3 seconds, until `atime` has passed. Once the lock has been obtained, continue to hold it for `ltime`. :param str identifier: lock token to write :param in...
def set_active_scalar(self, name, preference=): _, field = get_scalar(self, name, preference=preference, info=True) if field == POINT_DATA_FIELD: self.GetPointData().SetActiveScalars(name) elif field == CELL_DATA_FIELD: self.GetCellData().SetActiveScalars(name) ...
Finds the scalar by name and appropriately sets it as active
def create(cls, name, members): NewEnum = type(name, (cls,), {}) if isinstance(members, dict): members = members.items() for member in members: if isinstance(member, tuple): name, value = member setattr(NewEnum, name, value) ...
Creates a new enum type based on this one (cls) and adds newly passed members to the newly created subclass of cls. This method helps to create enums having the same member values as values of other enum(s). :param name: name of the newly created type :param members: 1) a dict ...
def long_description(): changes = latest_changes() changes[0] = "`Changes for v{}".format(changes[0][1:]) changes[1] = * len(changes[0]) return "\n\n\n".join([ read_file(), .join(changes), "`Full changelog <{}/en/develop/changelog.html DOCUMENTATION_URL)])
Collates project README and latest changes.
def __set_basic_auth_string(self, username, password):
    """Build and store the HTTP Basic Auth string for the Handle Server.

    Delegates credential encoding to
    ``b2handle.utilhandle.create_authentication_string`` and stores the
    result on the instance; there is no return value.

    :param username: Username handle with index (``index:prefix/suffix``).
    :param password: The password contained at that index of the handle.
    """
    self.__basic_authentication_string = (
        b2handle.utilhandle.create_authentication_string(username, password)
    )
Creates and sets the authentication string for (write-)accessing the Handle Server. No return, the string is set as an attribute to the client instance. :param username: Username handle with index: index:prefix/suffix. :param password: The password contained in the index of the ...
def magic_set(obj): def decorator(func): is_class = isinstance(obj, six.class_types) args, varargs, varkw, defaults = inspect.getargspec(func) if not args or args[0] not in (, , ): if is_class: replacement = staticmethod(func) else: ...
Adds a function/method to an object. Uses the name of the first argument as a hint about whether it is a method (``self``), class method (``cls`` or ``klass``), or static method (anything else). Works on both instances and classes. >>> class color: ... def __init__(self, r, g, b): ... self.r, self.g, self.b = r, g, b ...
def get_ntp_peers(self):
    """Return the NTP peers known to the device, keyed by remote address.

    Built from ``self.get_ntp_stats()``: every stats entry whose
    ``"remote"`` field is truthy contributes a key mapping to an empty
    dict (this driver exposes no per-peer detail here).
    """
    peers = {}
    for entry in self.get_ntp_stats():
        remote = entry.get("remote")
        if remote:
            peers[remote] = {}
    return peers
Implementation of get_ntp_peers for IOS.
def attendee(request, form, user_id=None): if user_id is None and form.cleaned_data["user"] is not None: user_id = form.cleaned_data["user"] if user_id is None: return attendee_list(request) attendee = people.Attendee.objects.get(user__id=user_id) name = attendee.attendeeprofileb...
Returns a list of all manifested attendees if no attendee is specified, else displays the attendee manifest.
def _QueryHash(self, digest): if not self._url: self._url = .format( self._protocol, self._host, self._port) request_data = {self.lookup_hash: digest} try: json_response = self.MakeRequestAndDecodeJSON( self._url, , data=request_data) except errors.ConnectionError...
Queries the Viper Server for a specific hash. Args: digest (str): hash to look up. Returns: dict[str, object]: JSON response or None on error.
def xAxisIsMajor(self):
    """Return True if the major axis is parallel to the X axis.

    Ties (equal radii, i.e. a circle) report True, matching the
    ``max(rx, ry) == rx`` formulation.
    """
    rx = self.radius.x
    ry = self.radius.y
    return max(rx, ry) == rx
Returns True if the major axis is parallel to the X axis, boolean.
def which(program, environ=None): def is_exe(path): return isfile(path) and os.access(path, os.X_OK) if program is None: raise CommandException("Invalid program name passed") fpath, fname = split(program) if fpath: if is_exe(program): return program ...
Find out if an executable exists in the supplied PATH. If so, the absolute path to the executable is returned. If not, an exception is raised. :type string :param program: Executable to be checked for :param dict :param environ: Any additional ENV variables required, specifically PATH :re...
def get_newest_app_version() -> Version: with urllib3.PoolManager(cert_reqs=, ca_certs=certifi.where()) as p_man: pypi_json = p_man.urlopen(, static_data.PYPI_JSON_URL).data.decode() releases = json.loads(pypi_json).get(, []) online_version = Version() for release in releases: cur_v...
Download the version tag from remote. :return: version from remote :rtype: ~packaging.version.Version