code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|
def output(self, pin, value):
if pin < 0 or pin > 15:
raise ValueError()
self._output_pin(pin, value)
self.mpsse_write_gpio() | Set the specified pin the provided high/low value. Value should be
either HIGH/LOW or a boolean (true = high). |
def group_and_sort_statements(stmt_list, ev_totals=None):
def _count(stmt):
if ev_totals is None:
return len(stmt.evidence)
else:
return ev_totals[stmt.get_hash()]
stmt_rows = defaultdict(list)
stmt_counts = defaultdict(lambda: 0)
arg_counts = defaultdict(la... | Group statements by type and arguments, and sort by prevalence.
Parameters
----------
stmt_list : list[Statement]
A list of INDRA statements.
ev_totals : dict{int: int}
A dictionary, keyed by statement hash (shallow) with counts of total
evidence as the values. Including this wi... |
def great_circle_distance(self, other):
distance_latitude = math.radians(abs(self.latitude - other.latitude))
distance_longitude = math.radians(abs(self.longitude - other.longitude))
a = math.sin(distance_latitude / 2) * math.sin(distance_latitude / 2) \
+ math.cos(math.radi... | Return the great-circle distance, in meters, from this geographic
coordinates to the specified other point, i.e., the shortest distance
over the earth’s surface, ‘as-the-crow-flies’ distance between the
points, ignoring any natural elevations of the ground.
Haversine formula::
... |
def get_eval_metrics(logits, labels, params):
metrics = {
"accuracy": _convert_to_eval_metric(padded_accuracy)(logits, labels),
"accuracy_top5": _convert_to_eval_metric(padded_accuracy_top5)(
logits, labels),
"accuracy_per_sequence": _convert_to_eval_metric(
padded_sequence_ac... | Return dictionary of model evaluation metrics. |
def enbw(data):
r
N = len(data)
return N * np.sum(data**2) / np.sum(data)**2 | r"""Computes the equivalent noise bandwidth
.. math:: ENBW = N \frac{\sum_{n=1}^{N} w_n^2}{\left(\sum_{n=1}^{N} w_n \right)^2}
.. doctest::
>>> from spectrum import create_window, enbw
>>> w = create_window(64, 'rectangular')
>>> enbw(w)
1.0
The following table contains t... |
def spherical_histogram(data=None, radial_bins="numpy", theta_bins=16, phi_bins=16, transformed=False, *args, **kwargs):
dropna = kwargs.pop("dropna", True)
data = _prepare_data(data, transformed=transformed, klass=SphericalHistogram, dropna=dropna)
if isinstance(theta_bins, int):
theta_range... | Facade construction function for the SphericalHistogram. |
def _convert_operator(self, node_name, op_name, attrs, inputs):
if op_name in convert_map:
op_name, new_attrs, inputs = convert_map[op_name](attrs, inputs, self)
else:
raise NotImplementedError("Operator {} not implemented.".format(op_name))
if isinstance(op_name... | Convert from onnx operator to mxnet operator.
The converter must specify conversions explicitly for incompatible name, and
apply handlers to operator attributes.
Parameters
----------
:param node_name : str
name of the node to be translated.
:param op_name : ... |
def default_software_reset_type(self, reset_type):
assert isinstance(reset_type, Target.ResetType)
assert reset_type in (Target.ResetType.SW_SYSRESETREQ, Target.ResetType.SW_VECTRESET,
Target.ResetType.SW_EMULATED)
self._default_software_reset_type = rese... | ! @brief Modify the default software reset method.
@param self
@param reset_type Must be one of the software reset types: Target.ResetType.SW_SYSRESETREQ,
Target.ResetType.SW_VECTRESET, or Target.ResetType.SW_EMULATED. |
def get_choices_for(self, field):
choices = self._fields[field].choices
if isinstance(choices, six.string_types):
return [(d[], d[]) for d in self._choices_manager.get_all(choices)]
else:
return choices | Get the choices for the given fields.
Args:
field (str): Name of field.
Returns:
List of tuples. [(name, value),...] |
def set_time(self, vfy_time):
param = _lib.X509_VERIFY_PARAM_new()
param = _ffi.gc(param, _lib.X509_VERIFY_PARAM_free)
_lib.X509_VERIFY_PARAM_set_time(param, int(vfy_time.strftime()))
_openssl_assert(_lib.X509_STORE_set1_param(self._store, param) != 0) | Set the time against which the certificates are verified.
Normally the current time is used.
.. note::
For example, you can determine if a certificate was valid at a given
time.
.. versionadded:: 17.0.0
:param datetime vfy_time: The verification time to set on th... |
def default_username_algo(email):
return smart_text(username) | Generate username for the Django user.
:arg str/unicode email: the email address to use to generate a username
:returns: str/unicode |
def push_new_themes(catalog, portal_url, apikey):
ckan_portal = RemoteCKAN(portal_url, apikey=apikey)
existing_themes = ckan_portal.call_action()
new_themes = [theme[] for theme in catalog[
] if theme[] not in existing_themes]
pushed_names = []
for new_theme in new_themes:
name ... | Toma un catálogo y escribe los temas de la taxonomía que no están
presentes.
Args:
catalog (DataJson): El catálogo de origen que contiene la
taxonomía.
portal_url (str): La URL del portal CKAN de destino.
apikey (str): La apikey de un usuario con los perm... |
def vecs_to_datmesh(x, y):
x, y = meshgrid(x, y)
out = zeros(x.shape + (2,), dtype=float)
out[:, :, 0] = x
out[:, :, 1] = y
return out | Converts input arguments x and y to a 2d meshgrid,
suitable for calling Means, Covariances and Realizations. |
def get_nonoauth_parameters(self):
return dict([(k, v) for k, v in self.items()
if not k.startswith()]) | Get any non-OAuth parameters. |
def _row_to_str(self, row):
_row_text =
for col, width in self.col_widths.items():
_row_text += self.COLUMN_SEP
l_pad, r_pad = self._split_int(width - len(row[col]))
_row_text += .format( * (l_pad + self.PADDING),
... | Converts a list of strings to a correctly spaced and formatted
row string.
e.g.
['some', 'foo', 'bar'] --> '| some | foo | bar |'
:param row: list
:return: str |
def qos(self, prefetch_size=0, prefetch_count=0, is_global=False):
args = Writer()
args.write_long(prefetch_size).\
write_short(prefetch_count).\
write_bit(is_global)
self.send_frame(MethodFrame(self.channel_id, 60, 10, args))
self.channel.add_synchronou... | Set QoS on this channel. |
def kw_changelist_view(self, request: HttpRequest, extra_context=None, **kw):
return self.changelist_view(request, extra_context) | Changelist view which allow key-value arguments.
:param request: HttpRequest
:param extra_context: Extra context dict
:param kw: Key-value dict
:return: See changelist_view() |
def setCurrentProfile(self, prof):
if prof is None:
self.clearActive()
return
profile = None
blocked = self.signalsBlocked()
self.blockSignals(True)
for act in self._profileGroup.actions():
if prof in (act.p... | Sets the current profile for this toolbar to the inputed profile.
:param prof | <projexui.widgets.xviewwidget.XViewProfile> || <str> |
def clear_annotation_data(self):
self.genes = set()
self.annotations = []
self.term_annotations = {}
self.gene_annotations = {} | Clear annotation data.
Parameters
----------
Returns
-------
None |
def fast_maxwell_boltzmann(mass, file_name=None,
return_code=False):
r
code = ""
code = "def maxwell_boltzmann(v, T):\n"
code +=
code += " if hasattr(v, ):\n"
code += " d = 1\n"
code += " m = %s\n" % mass
code += " f = np.sqrt(m/2/... | r"""Return a function that returns values of a Maxwell-Boltzmann
distribution.
>>> from fast import Atom
>>> mass = Atom("Rb", 87).mass
>>> f = fast_maxwell_boltzmann(mass)
>>> print f(0, 273.15+20)
0.00238221482739
>>> import numpy as np
>>> v = np.linspace(-600, 600, 101)
>>> dis... |
def deserialize(self, xml_input, *args, **kwargs):
return xmltodict.parse(xml_input, *args, **kwargs) | Convert XML to dict object |
def cmd_join(self, connection, sender, target, payload):
if payload:
connection.join(payload)
else:
raise ValueError("No channel given") | Asks the bot to join a channel |
def sqliteRowsToDicts(sqliteRows):
return map(lambda r: dict(zip(r.keys(), r)), sqliteRows) | Unpacks sqlite rows as returned by fetchall
into an array of simple dicts.
:param sqliteRows: array of rows returned from fetchall DB call
:return: array of dicts, keyed by the column names. |
def make_strain_from_inj_object(self, inj, delta_t, detector_name,
distance_scale=1):
detector = Detector(detector_name)
hp, hc = ringdown_td_approximants[inj[]](
inj, delta_t=delta_t, **self.extra_args)
hp._epoch += inj[]
... | Make a h(t) strain time-series from an injection object as read from
an hdf file.
Parameters
-----------
inj : injection object
The injection object to turn into a strain h(t).
delta_t : float
Sample rate to make injection at.
detector_name : stri... |
def acoustic_similarity_directories(directories, analysis_function, distance_function, stop_check=None, call_back=None, multiprocessing=True):
files = []
if call_back is not None:
call_back()
call_back(0, len(directories))
cur = 0
for d in directories:
if not os.path.i... | Analyze many directories.
Parameters
----------
directories : list of str
List of fully specified paths to the directories to be analyzed |
def clear(self):
self.prop_dt_map = dict()
self.prop_data = dict()
self.rev_lookup = defaultdict(set) | convinience function to empty this fastrun container |
def flush_all(self, conn):
command = b
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException(, response) | Its effect is to invalidate all existing items immediately |
def spearmanr(x, y):
from scipy import stats
if not x or not y:
return 0
corr, pvalue = stats.spearmanr(x, y)
return corr | Michiel de Hoon's library (available in BioPython or standalone as
PyCluster) returns Spearman rsb which does include a tie correction.
>>> x = [5.05, 6.75, 3.21, 2.66]
>>> y = [1.65, 26.5, -5.93, 7.96]
>>> z = [1.65, 2.64, 2.64, 6.95]
>>> round(spearmanr(x, y), 4)
0.4
>>> round(spearmanr(x... |
def get_profiles(self):
out = set(x.profile for x in self.requires if x.profile)
out.update(x.profile for x in self.removes if x.profile)
return out | Returns set of profile names referenced in this Feature
:returns: set of profile names |
def group_dashboard(request, group_slug):
groups = get_user_groups(request.user)
group = get_object_or_404(groups, slug=group_slug)
tenants = get_user_tenants(request.user, group)
can_edit_group = request.user.has_perm(, group)
count = len(tenants)
if count == 1:
return red... | Dashboard for managing a TenantGroup. |
def broadcast(self, event):
try:
if event.broadcasttype == "users":
if len(self._users) > 0:
self.log("Broadcasting to all users:",
event.content, lvl=network)
for useruuid in self._users.keys():
... | Broadcasts an event either to all users or clients, depending on
event flag |
def BTC(cpu, dest, src):
if dest.type == :
value = dest.read()
pos = src.read() % dest.size
cpu.CF = value & (1 << pos) == 1 << pos
dest.write(value ^ (1 << pos))
elif dest.type == :
addr, pos = cpu._getMemoryBit(dest, src)
... | Bit test and complement.
Selects the bit in a bit string (specified with the first operand, called
the bit base) at the bit-position designated by the bit offset operand
(second operand), stores the value of the bit in the CF flag, and complements
the selected bit in the bit string.
... |
def get(self, date=datetime.date.today(), country=None):
if not country:
country = self.country
if country == "all":
raise ValueError("You need to specify a country")
if not isinstance(date, str) and not isinstance(date, int):
date = date.year
... | Get the CPI value for a specific time. Defaults to today. This uses
the closest method internally but sets limit to one day. |
def check(f):
if hasattr(f, ):
return f
else:
@wraps(f)
def decorated(*args, **kwargs):
return check_conditions(f, args, kwargs)
decorated.wrapped_fn = f
return decorated | Wraps the function with a decorator that runs all of the
pre/post conditions. |
def upload(self, file_path, timeout=-1):
return self._client.upload(file_path, timeout=timeout) | Upload an SPP ISO image file or a hotfix file to the appliance.
The API supports upload of one hotfix at a time into the system.
For the successful upload of a hotfix, ensure its original name and extension are not altered.
Args:
file_path: Full path to firmware.
timeout... |
def _load_wm_map(exclude_auto=None):
exclude_auto = [] if not exclude_auto else exclude_auto
path_here = os.path.dirname(os.path.abspath(__file__))
ontomap_file = os.path.join(path_here, )
mappings = {}
def make_hume_prefix_map():
hume_ont = os.path.join(path_here, )
graph = rd... | Load an ontology map for world models.
exclude_auto : None or list[tuple]
A list of ontology mappings for which automated mappings should be
excluded, e.g. [(HUME, UN)] would result in not using mappings
from HUME to UN. |
def unregister_transform(self, node_class, transform, predicate=None):
self.transforms[node_class].remove((transform, predicate)) | Unregister the given transform. |
def cwd_filt2(depth):
full_cwd = os.getcwdu()
cwd = full_cwd.replace(HOME,"~").split(os.sep)
if in cwd and len(cwd) == depth+1:
depth += 1
drivepart =
if sys.platform == and len(cwd) > depth:
drivepart = os.path.splitdrive(full_cwd)[0]
out = drivepart + .join(cwd[-depth:... | Return the last depth elements of the current working directory.
$HOME is always replaced with '~'.
If depth==0, the full path is returned. |
def cancel(self):
if not self.id:
raise TypeError(u"You cant been created yet.")
self.refresh_change_key()
self.service.send(soap_request.delete_event(self))
return None | Cancels an event in Exchange. ::
event = service.calendar().get_event(id='KEY HERE')
event.cancel()
This will send notifications to anyone who has not declined the meeting. |
def recovery(self, using=None, **kwargs):
return self._get_connection(using).indices.recovery(index=self._name, **kwargs) | The indices recovery API provides insight into on-going shard
recoveries for the index.
Any additional keyword arguments will be passed to
``Elasticsearch.indices.recovery`` unchanged. |
def getLabelByName(self, name):
name = name.lower()
if name in self.stimLabels:
return self.stimLabels[name]
else:
return None | Gets a label widget by it component name
:param name: name of the AbstractStimulusComponent which this label is named after
:type name: str
:returns: :class:`DragLabel<sparkle.gui.drag_label.DragLabel>` |
def _gen_doc(cls, summary, full_name, identifier, example_call, doc_str,
show_examples):
example_call = .join(map(str.strip, example_call.split()[1:]))
ret = docstrings.dedents( % (summary, full_name, identifier, example_call, doc_str))
if show_examples:
... | Generate the documentation docstring for a PlotMethod |
def parse_name_altree(record):
name_tuple = split_name(record.value)
if name_tuple[1] == :
name_tuple = (name_tuple[0], , name_tuple[2])
maiden = record.sub_tag_value("SURN")
if maiden:
ending = + maiden +
surname = name_tuple[1]
if surname.endswith(ending... | Parse NAME structure assuming ALTREE dialect.
In ALTREE dialect maiden name (if present) is saved as SURN sub-record
and is also appended to family name in parens. Given name is saved in
GIVN sub-record. Few examples:
No maiden name:
1 NAME John /Smith/
2 GIVN John
With maiden na... |
def _coerce_json_to_collection(self, json_repr):
if isinstance(json_repr, dict):
collection = json_repr
else:
try:
collection = anyjson.loads(json_repr)
except:
_LOG.warn()
return None
return collection | Use to ensure that a JSON string (if found) is parsed to the equivalent dict in python.
If the incoming value is already parsed, do nothing. If a string fails to parse, return None. |
def getDirectory(*args):
result = QtGui.QFileDialog.getDirectory(*args)
if type(result) is not tuple:
return result, bool(result)
else:
return result | Normalizes the getDirectory method between the different Qt
wrappers.
:return (<str> filename, <bool> accepted) |
def get_configuration_set_by_id(self, id):
for cs in self.configuration_sets:
if cs.id == id:
return cs
return None | Finds a configuration set in the component by its ID.
@param id The ID of the configuration set to search for.
@return The ConfigurationSet object for the set, or None if it was not
found. |
def onBatchRejected(self, ledger_id):
if ledger_id == POOL_LEDGER_ID:
if isinstance(self.poolManager, TxnPoolManager):
self.get_req_handler(POOL_LEDGER_ID).onBatchRejected()
elif self.get_req_handler(ledger_id):
self.get_req_handler(ledger_id).onBatchReje... | A batch of requests has been rejected, if stateRoot is None, reject
the current batch.
:param ledger_id:
:param stateRoot: state root after the batch was created
:return: |
def getR(self, i=5, j=6):
if self.refresh is True:
self.getMatrix()
return self.transM[i - 1, j - 1] | return transport matrix element, indexed by i, j,
be default, return dispersion value, i.e. getR(5,6) in [m]
:param i: row index, with initial index of 1
:param j: col indx, with initial index of 1
:return: transport matrix element |
def dynamic_content_item_variant_delete(self, item_id, id, **kwargs):
"https://developer.zendesk.com/rest_api/docs/core/dynamic_content
api_path = "/api/v2/dynamic_content/items/{item_id}/variants/{id}.json"
api_path = api_path.format(item_id=item_id, id=id)
return self.call(api_path, me... | https://developer.zendesk.com/rest_api/docs/core/dynamic_content#delete-variant |
def _read_linguas_from_files(env, linguas_files=None):
import SCons.Util
import SCons.Environment
global _re_comment
global _re_lang
if not SCons.Util.is_List(linguas_files) \
and not SCons.Util.is_String(linguas_files) \
and not isinstance(linguas_files, SCons.Node.FS.B... | Parse `LINGUAS` file and return list of extracted languages |
def winsorize(x, axis=0, limits=0.01):
x = x.copy()
if isinstance(x, pd.DataFrame):
return x.apply(_winsorize_wrapper, axis=axis, args=(limits, ))
else:
return pd.Series(_winsorize_wrapper(x, limits).values,
index=x.index) | `Winsorize <https://en.wikipedia.org/wiki/Winsorizing>`_ values based on limits |
def AFF4Path(self, client_urn):
if not self.HasField("pathtype"):
raise ValueError("Can't determine AFF4 path without a valid pathtype.")
first_component = self[0]
dev = first_component.path
if first_component.HasField("offset"):
... | Returns the AFF4 URN this pathspec will be stored under.
Args:
client_urn: A ClientURN.
Returns:
A urn that corresponds to this pathspec.
Raises:
ValueError: If pathspec is not of the correct type. |
def absent(
name,
force=False,
region=None,
key=None,
keyid=None,
profile=None,
remove_lc=False):
ret = {: name, : True, : , : {}}
asg = __salt__[](name, region, key, keyid, profile)
if asg is None:
ret[] = False
ret[] =
elif ... | Ensure the named autoscale group is deleted.
name
Name of the autoscale group.
force
Force deletion of autoscale group.
remove_lc
Delete the launch config as well.
region
The region to connect to.
key
Secret key to be used.
keyid
Access key t... |
def getBody(self, url, method=, headers={}, data=None, socket=None):
if not in headers:
headers[] = []
return self.request(url, method, headers, data, socket) | Make an HTTP request and return the body |
def requiv_contact_min(b, component, solve_for=None, **kwargs):
hier = b.get_hierarchy()
if not len(hier.get_value()):
raise NotImplementedError("constraint for requiv_contact_min requires hierarchy")
component_ps = _get_system_ps(b, component)
parentorbit = hier.get_pa... | Create a constraint to determine the critical (at L1) value of
requiv at which a constact will underflow. This will only be used
for contacts for requiv_min
:parameter b: the :class:`phoebe.frontend.bundle.Bundle`
:parameter str component: the label of the star in which this
constraint should ... |
def authenticate(self):
log.info("Authenticating to HP Cloud...")
creds = self.creds
access_key_id = creds.get(, )
secret_access_key = creds.get(, )
if access_key_id and secret_access_key:
self.nova_client.client.os_access_key_id = access_key_id
... | Authenticate against the HP Cloud Identity Service. This is the first
step in any hpcloud.com session, although this method is automatically
called when accessing higher-level methods/attributes.
**Examples of Credentials Configuration**
- Bare minimum for authentication using HP API ... |
def frombed(args):
from jcvi.formats.fasta import Fasta
from jcvi.formats.bed import Bed
from jcvi.utils.cbook import fill
p = OptionParser(frombed.__doc__)
opts, args = p.parse_args(args)
if len(args) != 3:
sys.exit(not p.print_help())
bedfile, contigfasta, readfasta = args
... | %prog frombed bedfile contigfasta readfasta
Convert read placement to contig format. This is useful before running BAMBUS. |
def is_instance_of(self, some_class):
try:
if not isinstance(self.val, some_class):
if hasattr(self.val, ):
t = self.val.__name__
elif hasattr(self.val, ):
t = self.val.__class__.__name__
else:
... | Asserts that val is an instance of the given class. |
def _set_autobw_threshold_table_summary(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=autobw_threshold_table_summary.autobw_threshold_table_summary, is_container=, presence=False, yang_name="autobw-threshold-table-summary", rest_name="autobw-thresho... | Setter method for autobw_threshold_table_summary, mapped from YANG variable /mpls_state/autobw_threshold_table_summary (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_autobw_threshold_table_summary is considered as a private
method. Backends looking to populate t... |
def get_token(self, hash):
tokens_snapshot = self.db.prefixed_db(NotificationPrefix.PREFIX_TOKEN).snapshot()
try:
val = tokens_snapshot.get(hash.ToBytes())
if val:
event = SmartContractEvent.FromByteArray(val)
return event
except ... | Looks up a token by hash
Args:
hash (UInt160): The token to look up
Returns:
SmartContractEvent: A smart contract event with a contract that is an NEP5 Token |
def _AddEvent(self, event):
if hasattr(event, ):
event_data_identifier = identifiers.SQLTableIdentifier(
self._CONTAINER_TYPE_EVENT_DATA,
event.event_data_row_identifier)
lookup_key = event_data_identifier.CopyToString()
event_data_identifier = self._event_data_identifier... | Adds an event.
Args:
event (EventObject): event. |
async def traverse(self, func):
async_executor = self
if inspect.isasyncgenfunction(func):
async for result in func(*async_executor.args):
yield result
else:
yield await func(*async_executor.args) | Traverses an async function or generator, yielding each result.
This function is private. The class should be used as an iterator instead of using this method. |
def load_texture(self, texture_version):
import numpy as np
lowres_tex_template = % texture_version
highres_tex_template = % texture_version
from lace.mesh import Mesh
from lace.cache import sc
mesh_with_texture = Mesh(filename=sc(lowres_tex_template))
... | Expect a texture version number as an integer, load the texture version from /is/ps/shared/data/body/template/texture_coordinates/.
Currently there are versions [0, 1, 2, 3] availiable. |
def list(args):
jm = setup(args)
jm.list(job_ids=get_ids(args.job_ids), print_array_jobs=args.print_array_jobs, print_dependencies=args.print_dependencies, status=args.status, long=args.long, print_times=args.print_times, ids_only=args.ids_only, names=args.names) | Lists the jobs in the given database. |
def do_lzop_get(creds, url, path, decrypt, do_retry):
assert url.endswith(),
with files.DeleteOnError(path) as decomp_out:
key = _uri_to_key(creds, url)
with get_download_pipeline(PIPE, decomp_out.f, decrypt) as pl:
g = gevent.spawn(write_and_return_error, key, pl.stdin)
... | Get and decompress a URL
This streams the content directly to lzop; the compressed version
is never stored on disk. |
def split_by(self, layer, sep=):
if not self.is_tagged(layer):
self.tag(layer)
return self.split_given_spans(self.spans(layer), sep=sep) | Split the text into multiple instances defined by elements of given layer.
The spans for layer elements are extracted and feed to :py:meth:`~estnltk.text.Text.split_given_spans`
method.
Parameters
----------
layer: str
String determining the layer that is used to de... |
def locate_profile(profile=):
from IPython.core.profiledir import ProfileDir, ProfileDirError
try:
pd = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile)
except ProfileDirError:
raise IOError("Couldn't find profile %r" % profile)
return pd.location | Find the path to the folder associated with a given profile.
I.e. find $IPYTHONDIR/profile_whatever. |
def zone_helper(zone):
if zone is None:
return None
elif isinstance(zone, Zone):
return zone.href
elif zone.startswith():
return zone
return Zone.get_or_create(name=zone).href | Zone finder by name. If zone doesn't exist, create it and
return the href
:param str zone: name of zone (if href, will be returned as is)
:return str href: href of zone |
def stop_scan(self):
try:
self.bable.stop_scan(sync=True)
except bable_interface.BaBLEException:
pass
self.scanning = False | Stop to scan. |
def build_requirements(docs_path, package_name="yacms"):
mezz_string = "yacms=="
project_path = os.path.join(docs_path, "..")
requirements_file = os.path.join(project_path, package_name,
"project_template", "requirements.txt")
with open(requirements_file, "r") a... | Updates the requirements file with yacms's version number. |
def forward(self, X):
s = X[:-2]
f = X[-2]
w = X[-1]
batch_size = len(f)
x_idx = self._cuda(
torch.as_tensor(np.arange(1, self.settings["lstm_dim"] + 1)).repeat(
batch_size, 1
)
)
outputs = self._cuda(t... | Forward function.
:param X: The input (batch) of the model contains word sequences for lstm,
features and feature weights.
:type X: For word sequences: a list of torch.Tensor pair (word sequence
and word mask) of shape (batch_size, sequence_length).
For features: tor... |
def info(self):
print("\n--- File Info ---")
for key, val in self.file_header.items():
if key == :
val = val.to_string(unit=u.hour, sep=)
if key == :
val = val.to_string(unit=u.deg, sep=)
print("%16s : %32s" % (key, val))
... | Print header information and other derived information. |
def get_page_of_iterator(iterator, page_size, page_number):
try:
page_number = validate_page_number(page_number)
except (PageNotAnInteger, EmptyPage):
page_number = 1
start = (page_number - 1) * page_size
end = (page_number * page_size) + 1
skipped_items = list(islice(iter... | Get a page from an interator, handling invalid input from the page number
by defaulting to the first page. |
def mm_top1(
n_items, data, initial_params=None, alpha=0.0,
max_iter=10000, tol=1e-8):
return _mm(n_items, data, initial_params, alpha, max_iter, tol, _mm_top1) | Compute the ML estimate of model parameters using the MM algorithm.
This function computes the maximum-likelihood (ML) estimate of model
parameters given top-1 data (see :ref:`data-top1`), using the
minorization-maximization (MM) algorithm [Hun04]_, [CD12]_.
If ``alpha > 0``, the function returns the ... |
def update_issue_remote_link_by_id(self, issue_key, link_id, url, title, global_id=None, relationship=None):
data = {: {: url, : title}}
if global_id:
data[] = global_id
if relationship:
data[] = relationship
url = .format(issue_key=issue_key, link_id=lin... | Update existing Remote Link on Issue
:param issue_key: str
:param link_id: str
:param url: str
:param title: str
:param global_id: str, OPTIONAL:
:param relationship: str, Optional. Default by built-in method: 'Web Link' |
def update_reach_number_data(self):
if not self.rapid_connect_file:
log("Missing rapid_connect_file. "
"Please set before running this function ...",
"ERROR")
if not self.riv_bas_id_file:
log("Missing riv_bas_id_file. "
"P... | Update the reach number data for the namelist based on input files.
.. warning:: You need to make sure you set *rapid_connect_file*
and *riv_bas_id_file* before running this function.
Example:
.. code:: python
from RAPIDpy import RAPID
rapid_man... |
def expand(self, url):
url = self.clean_url(url)
expand_url = f
payload = {
: getattr(self, , ),
: getattr(self, , ),
: getattr(self, , None),
: self.api_key,
: self.user_id,
: url,
}
response = self... | Expand implementation for Adf.ly
Args:
url: the URL you want to expand
Returns:
A string containing the expanded URL
Raises:
BadAPIResponseException: If the data is malformed or we got a bad
status code on API response
ShorteningError... |
def get_anchor_point(self, anchor_name):
if anchor_name in self._possible_anchors:
return TikZNodeAnchor(self.handle, anchor_name)
else:
try:
anchor = int(anchor_name.split()[1])
except:
anchor = None
if anchor is... | Return an anchor point of the node, if it exists. |
def correlation(T, obs1, obs2=None, times=(1), maxtime=None, k=None, ncv=None, return_times=False):
r
T = _types.ensure_ndarray_or_sparse(T, ndim=2, uniform=True, kind=)
n = T.shape[0]
obs1 = _types.ensure_ndarray(obs1, ndim=1, size=n, kind=)
obs2 = _types.ensure_ndarray_or_None(obs2, ndim=1, s... | r"""Time-correlation for equilibrium experiment.
Parameters
----------
T : (M, M) ndarray or scipy.sparse matrix
Transition matrix
obs1 : (M,) ndarray
Observable, represented as vector on state space
obs2 : (M,) ndarray (optional)
Second observable, for cross-correlations
... |
def setup(self, phase, entry_pressure=, pore_volume=, throat_volume=):
r
self.settings[] = phase.name
if pore_volume:
self.settings[] = pore_volume
if throat_volume:
self.settings[] = throat_volume
if entry_pressure:
self.settings[] = entry_pre... | r"""
Set up the required parameters for the algorithm
Parameters
----------
phase : OpenPNM Phase object
The phase to be injected into the Network. The Phase must have the
capillary entry pressure values for the system.
entry_pressure : string
... |
def add_metadata(self, metadata_matrix, meta_index_store):
assert isinstance(meta_index_store, IndexStore)
assert len(metadata_matrix.shape) == 2
assert metadata_matrix.shape[0] == self.get_num_docs()
return self._make_new_term_doc_matrix(new_X=self._X,
... | Returns a new corpus with a the metadata matrix and index store integrated.
:param metadata_matrix: scipy.sparse matrix (# docs, # metadata)
:param meta_index_store: IndexStore of metadata values
:return: TermDocMatrixWithoutCategories |
def permission_set(self, name, func=None):
if func is None:
return functools.partial(self.predicate, name)
self.permission_sets[name] = func
return func | Define a new permission set (directly, or as a decorator).
E.g.::
@authz.permission_set('HTTP')
def is_http_perm(perm):
return perm.startswith('http.') |
def clean(self):
errors = {}
cleaned = {}
for name, validator in self.validate_schema.items():
val = getattr(self, name, None)
try:
cleaned[name] = validator.to_python(val)
except formencode.api.Invalid, err:
errors[na... | Cleans the data and throws ValidationError on failure |
def folderitem(self, obj, item, index):
obj = api.get_object(obj)
uid = api.get_uid(obj)
url = api.get_url(obj)
title = api.get_title(obj)
if self.show_categories_enabled():
category = obj.getCategoryTitle()
if category not in s... | Service triggered each time an item is iterated in folderitems.
The use of this service prevents the extra-loops in child objects.
:obj: the instance of the class to be foldered
:item: dict containing the properties of the object to be used by
the template
:index: current i... |
def inplace_filter(func, sequence):
target = 0
for source in xrange(len(sequence)):
if func(sequence[source]):
sequence[target] = sequence[source]
target += 1
del sequence[target:] | Like Python's filter() builtin, but modifies the sequence in place.
Example:
>>> l = range(10)
>>> inplace_filter(lambda x: x > 5, l)
>>> l
[6, 7, 8, 9]
Performance considerations: the function iterates over the
sequence, shuffling surviving members down and deleting whatever
top part of the sequence is lef... |
def is_all_field_none(self):
if self._BillingInvoice is not None:
return False
if self._DraftPayment is not None:
return False
if self._MasterCardAction is not None:
return False
if self._Payment is not None:
return False
... | :rtype: bool |
def biclique(self, xmin, xmax, ymin, ymax):
    """Compute a maximum-sized complete bipartite graph contained in the
    rectangle bounded by ``xmin, xmax, ymin, ymax`` (inclusive), where
    each chain of qubits is either a horizontal or a vertical line.

    Returns the two sides of the biclique as ``(a_side, b_side)``:
    horizontal-line chains collected row by row, and vertical-line
    chains collected column by column.
    """
    a_side = [
        chain
        for y in range(ymin, ymax + 1)
        for chain in self.maximum_hline_bundle(y, xmin, xmax)
    ]
    b_side = [
        chain
        for x in range(xmin, xmax + 1)
        for chain in self.maximum_vline_bundle(x, ymin, ymax)
    ]
    return a_side, b_side
rectangle defined by ``xmin, xmax, ymin, ymax`` where each chain of
qubits is either a vertical line or a horizontal line.
INPUTS:
xmin,xmax,ymin,ymax: integers defining the bounds of a rectangle
where we ... |
def _make_cmap(colors, position=None, bit=False):
bit_rgb = np.linspace(0,1,256)
if position == None:
position = np.linspace(0,1,len(colors))
else:
if len(position) != len(colors):
sys.exit("position length must be the same as colors")
elif position[0] != 0 or positi... | _make_cmap takes a list of tuples which contain RGB values. The RGB
values may either be in 8-bit [0 to 255] (in which bit must be set to
True when called) or arithmetic [0 to 1] (default). _make_cmap returns
a cmap with equally spaced colors.
Arrange your tuples so that the first color is the lowest va... |
def fromOPEndpointURL(cls, op_endpoint_url):
    """Construct an OP-Identifier OpenIDServiceEndpoint object for
    a given OP Endpoint URL.

    @param op_endpoint_url: The URL of the endpoint
    @rtype: OpenIDServiceEndpoint
    """
    endpoint = cls()
    endpoint.type_uris = [OPENID_IDP_2_0_TYPE]
    endpoint.server_url = op_endpoint_url
    return endpoint
a given OP Endpoint URL
@param op_endpoint_url: The URL of the endpoint
@rtype: OpenIDServiceEndpoint |
def get_field_mappings(self, field):
retdict = {}
retdict[] = False
retdict[] = False
for (key, val) in iteritems(field):
if key in self.mappings:
if (key == and
(val == "long" or
val == "integer" or
... | Converts ES field mappings to .kibana field mappings |
def bind(self, server, net=None, address=None):
if _debug: NetworkServiceAccessPoint._debug("bind %r net=%r address=%r", server, net, address)
if net in self.adapters:
raise RuntimeError("already bound")
adapter = NetworkAdapter(self, net)
self.ad... | Create a network adapter object and bind. |
def extract_ast_species(ast):
species_id = "None"
species_label = "None"
species = [
(species_id, species_label) for (species_id, species_label) in ast.species if species_id
]
if len(species) == 1:
(species_id, species_label) = species[0]
if not species_id:
specie... | Extract species from ast.species set of tuples (id, label) |
def accepts(*argtypes, **kwargtypes):
theseargtypes = [T.TypeFactory(a) for a in argtypes]
thesekwargtypes = {k : T.TypeFactory(a) for k,a in kwargtypes.items()}
def _decorator(func):
f = func.__wrapped__ if hasattr(func, "__wrapped__") else func
try:
argtypes = in... | A function decorator to specify argument types of the function.
Types may be specified either in the order that they appear in the
function or via keyword arguments (just as if you were calling the
function).
Example usage:
| @accepts(Positive0)
| def square_root(x):
| ... |
def add_at(self, moment: float, fn_process: Callable, *args: Any, **kwargs: Any) -> :
delay = moment - self.now()
if delay < 0.0:
raise ValueError(
f"The given moment to start the process ({moment:f}) is in the past (now is {self.now():f})."
)
ret... | Adds a process to the simulation, which is made to start at the given exact time on the simulated clock. Note
that times in the past when compared to the current moment on the simulated clock are forbidden.
See method add() for more details. |
def m2i(self, pkt, s):
diff_tag, s = BER_tagging_dec(s, hidden_tag=self.ASN1_tag,
implicit_tag=self.implicit_tag,
explicit_tag=self.explicit_tag,
safe=self.flexible_tag)
if diff_tag... | The good thing about safedec is that it may still decode ASN1
even if there is a mismatch between the expected tag (self.ASN1_tag)
and the actual tag; the decoded ASN1 object will simply be put
into an ASN1_BADTAG object. However, safedec prevents the raising of
exceptions needed for ASN... |
def regex(pattern, flags: int = 0):
    """Filter messages that match a given RegEx pattern.

    The pattern is compiled once; the resulting filter applies it to a
    message's text (falling back to its caption), stores every match
    object on the message as ``m.matches``, and passes when at least one
    match was found.
    """
    def f(_, m):
        m.matches = list(_.p.finditer(m.text or m.caption or ""))
        return bool(m.matches)

    return create("Regex", f, p=re.compile(pattern, flags))
Args:
pattern (``str``):
The RegEx pattern as string, it will be applied to the text of a message. When a pattern matches,
all the `Match Objects <https://docs.python.org/3/library/re.html#match-objects>`_
... |
def create_aggregator(self, subordinates):
if not isinstance(subordinates, list):
raise TypeError("subordinates can only be an instance of type list")
for a in subordinates[:10]:
if not isinstance(a, IEventSource):
raise TypeError(
... | Creates an aggregator event source, collecting events from multiple sources.
This way a single listener can listen for events coming from multiple sources,
using a single blocking :py:func:`get_event` on the returned aggregator.
in subordinates of type :class:`IEventSource`
Subordi... |
def write_json(json_obj, filename, mode="w", print_pretty=True):
    """write_json will (optionally, pretty print) a json object to file.

    Parameters
    ==========
    json_obj: the dict to print to json
    filename: the output file to write to (returned unchanged)
    mode: the mode handed to open(), "w" by default
    print_pretty: if True, format the output via print_json()

    Returns
    =======
    filename, so calls can be chained or logged.
    """
    with open(filename, mode) as filey:
        if print_pretty:
            # print_json presumably returns the formatted text (string or
            # lines) — TODO confirm; writelines handles either.
            filey.writelines(print_json(json_obj))
        else:
            # json.dumps returns a single string; write() avoids the
            # character-by-character iteration that writelines(str) does.
            filey.write(json.dumps(json_obj))
    return filename
Parameters
==========
json_obj: the dict to print to json
filename: the output file to write to
print_pretty: if True, will use nicer formatting |
def make_mujoco_env(env_id, seed, reward_scale=1.0):
rank = MPI.COMM_WORLD.Get_rank()
myseed = seed + 1000 * rank if seed is not None else None
set_global_seeds(myseed)
env = gym.make(env_id)
logger_path = None if logger.get_dir() is None else os.path.join(logger.get_dir(), str(rank))
env ... | Create a wrapped, monitored gym.Env for MuJoCo. |
def is_finished(self):
if self._total_time > self._global_time_limit:
logger.warning("Exceeded global time limit {} / {}".format(
self._total_time, self._global_time_limit))
return True
trials_done = all(trial.is_finished() for trial in self._trials)
... | Returns whether all trials have finished running. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.