code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|
def setupWorkerTransition():
default_warn_method = getWarningMethod()
def custom_warn_method(message, category, stacklevel):
if stacklevel is not None:
stacklevel += 1
if _WORKER_WARNING_MARK in message:
default_warn_method(message, category, stackleve... | Hook Twisted deprecation machinery to use custom warning class
for Worker API deprecation warnings. |
def get_string_from_view(self, request, view_name, url_kwargs,
render_type=):
response = ""
try:
view, name = self.get_initialized_view_and_name(view_name,
render_type=render_type,
... | Returns a string that is a rendering of the view given a
request, view_name, and the original url_kwargs. Makes the
following changes the view before rendering:
* Sets can_submit to False.
* Adds action_url to the context. This is the url where \
this view actually lives.
... |
def teff(cluster):
b_vs, _ = cluster.stars()
teffs = []
for b_v in b_vs:
b_v -= cluster.eb_v
if b_v > -0.04:
x = (14.551 - b_v) / 3.684
else:
x = (3.402 - math.sqrt(0.515 + 1.376 * b_v)) / 0.688
teffs.append(math.pow(10, x))
return teffs | Calculate Teff for main sequence stars ranging from Teff 3500K - 8000K. Use
[Fe/H] of the cluster, if available.
Returns a list of Teff values. |
def loadSignal(self, name, start=None, end=None):
entry = self._getCacheEntry(name)
if entry is not None:
from analyser.common.signal import loadSignalFromWav
return loadSignalFromWav(entry[], start=start, end=end)
else:
return None | Loads the named entry from the upload cache as a signal.
:param name: the name.
:param start: the time to start from in HH:mm:ss.SSS format
:param end: the time to end at in HH:mm:ss.SSS format.
:return: the signal if the named upload exists. |
def get_obj(app_label, model_name, object_id):
try:
model = apps.get_model("{}.{}".format(app_label, model_name))
assert is_valid_django_model(model), ("Model {}.{} do not exist.").format(
app_label, model_name
)
obj = get_Object_or_None(model, pk=object_id)
... | Function used to get a object
:param app_label: A valid Django Model or a string with format: <app_label>.<model_name>
:param model_name: Key into kwargs that contains de data: new_person
:param object_id:
:return: instance |
def annealing_cos(start:Number, end:Number, pct:float)->Number:
"Cosine anneal from `start` to `end` as pct goes from 0.0 to 1.0."
cos_out = np.cos(np.pi * pct) + 1
return end + (start-end)/2 * cos_out | Cosine anneal from `start` to `end` as pct goes from 0.0 to 1.0. |
def run(self, executable_input, project=None, folder=None, name=None, tags=None, properties=None, details=None,
instance_type=None, stage_instance_types=None, stage_folders=None, rerun_stages=None, cluster_spec=None,
depends_on=None, allow_ssh=None, debug=None, delay_workspace_destruction=None, ... | :param executable_input: Hash of the executable's input arguments
:type executable_input: dict
:param project: Project ID of the project context
:type project: string
:param folder: Folder in which executable's outputs will be placed in *project*
:type folder: string
:par... |
def append_string(t, string):
node = t.tree
if string:
if len(node) == 0:
if node.text is not None:
node.text += string
else:
node.text = string
else:
child = list(node)[-1]
if child.tail is not None:
... | Append a string to a node, as text or tail of last child. |
def getNorthSouthClone(self, i):
north = self.getAdjacentClone(i, south=False)
south = self.getAdjacentClone(i)
return north, south | Returns the adjacent clone name from both sides. |
def in_virtual_env():
import sys
has_venv = False
if hasattr(sys, ):
has_venv = True
elif hasattr(sys, ):
has_venv = sys.base_prefix != sys.prefix
return has_venv | returns True if you are running inside a python virtual environment.
(DOES NOT WORK IF IN IPYTHON AND USING A VIRTUALENV)
sys.prefix gives the location of the virtualenv
Notes:
It seems IPython does not respect virtual environments properly.
TODO: find a solution
http://stackoverfl... |
def get_string():
keycodes = _get_keycodes()
initial_code, codes = keycodes[0], keycodes[1:]
initial_char = chr(initial_code)
if initial_code == 27:
initial_char =
elif not ascii.isgraph(initial_char):
initial_char = % initial_code
chars = .join([chr(c) for c in codes])
... | A better str(_get_keycodes()) method |
def ConsumeRange(self, start, end):
old = self.CurrentRange()
if old is None:
return
if old.start > start:
if old.start < end:
raise RuntimeError()
return
if old.start < start:
raise RuntimeError()
if old.end == end:
del self.ranges[0]
elif old.end > en... | Consumes an entire range, or part thereof.
If the finger has no ranges left, or the curent range start is higher
than the end of the consumed block, nothing happens. Otherwise,
the current range is adjusted for the consumed block, or removed,
if the entire block is consumed. For things to work, the con... |
def get_query(self, show=True, proxy=None, timeout=0):
if not self.params.get() and not self.params.get():
raise ValueError("get_query needs title or pageid")
self._get(, show, proxy, timeout)
while self.data.get():
self._get(, show, proxy, timeout)
re... | GET MediaWiki:API action=query selected data
https://en.wikipedia.org/w/api.php?action=help&modules=query
Required {params}: title OR pageid
- title: <str> article title
- pageid: <int> Wikipedia database ID
Optional arguments:
- [show]: <bool> echo page data if true
... |
def _update(qs):
try:
with transaction.atomic():
qs.update(sort_order=models.F() + 1)
except IntegrityError:
for obj in qs.order_by():
qs.filter(pk=obj.pk).update(sort_order=models.F() + 1) | Increment the sort_order in a queryset.
Handle IntegrityErrors caused by unique constraints. |
def initialize_openstack(func):
async def wrap(self, *args, **kwargs):
if not hasattr(self, ) or not self.auth.is_token_valid():
self.auth = AuthPassword(auth_url=self.config[],
username=self.config[],
... | Initialize and refresh openstack connection |
def run(self):
if self.init_sec:
sleep(self.init_sec)
self._functime = time()
while self._running:
start = time()
self._func()
self._functime += self.interval_sec
if self._functime - start > 0:
sleep(self._funct... | Start the recurring task. |
def uniqueId(self, prefix=""):
_IdCounter.count += 1
id = _IdCounter.count
if prefix:
return self._wrap(prefix + str(id))
else:
return self._wrap(id) | Generate a unique integer id (unique within the entire client session).
Useful for temporary DOM ids. |
def _write_arg_to_bytes(builder, arg, args, name=None):
if arg.generic_definition:
return
if name is None:
name = .format(arg.name)
if arg.is_flag:
if arg.type == :
return
elif arg.is_vector:
... | Writes the .__bytes__() code for the given argument
:param builder: The source code builder
:param arg: The argument to write
:param args: All the other arguments in TLObject same __bytes__.
This is required to determine the flags value
:param name: The name of the argument. Defaults to... |
def extended_help_option(extended_help=None, *param_decls, **attrs):
def decorator(f):
def callback(ctx, param, value):
if value and not ctx.resilient_parsing:
if not extended_help:
ctx.command.help = ctx.command.callback.__doc__
clic... | Based on the click.help_option code.
Adds a ``--extended-help`` option which immediately ends the program
printing out the extended extended-help page. Defaults to using the
callback's doc string, but can be given an explicit value as well.
This is intended for use as a decorator on a command to provi... |
def find_cycle(graph):
if (isinstance(graph, graph_class)):
directed = False
elif (isinstance(graph, digraph_class)):
directed = True
else:
raise InvalidGraphType
def find_cycle_to_ancestor(node, ancestor):
path = []
while (node != ancestor):
... | Find a cycle in the given graph.
This function will return a list of nodes which form a cycle in the graph or an empty list if
no cycle exists.
@type graph: graph, digraph
@param graph: Graph.
@rtype: list
@return: List of nodes. |
def parse_string(xml):
string = ""
dom = XML(xml)
for sentence in dom(XML_SENTENCE):
_anchors.clear()
_attachments.clear()
language = sentence.get(XML_LANGUAGE, "en")
format = sentence.get(XML_TOKEN, [WORD, POS, CHUNK, PNP, ... | Returns a slash-formatted string from the given XML representation.
The return value is a TokenString (for MBSP) or TaggedString (for Pattern). |
def _GetNextLogCountPerToken(token):
global _log_counter_per_token
_log_counter_per_token[token] = 1 + _log_counter_per_token.get(token, -1)
return _log_counter_per_token[token] | Wrapper for _log_counter_per_token.
Args:
token: The token for which to look up the count.
Returns:
The number of times this function has been called with
*token* as an argument (starting at 0) |
def set_perplexity(self, new_perplexity):
if new_perplexity == self.perplexity:
return
new_perplexity = self.check_perplexity(new_perplexity)
k_neighbors = min(self.n_samples - 1, int(3 * new_perplexity))
if k_neighbors > self.__neighbors.s... | Change the perplexity of the affinity matrix.
Note that we only allow lowering the perplexity or restoring it to its
original value. This restriction exists because setting a higher
perplexity value requires recomputing all the nearest neighbors, which
can take a long time. To avoid pot... |
def getValue(self, key):
LOG.debug("HMGeneric.getValue: address = , key = " % (self._ADDRESS, key))
try:
returnvalue = self._proxy.getValue(self._ADDRESS, key)
self._VALUES[key] = returnvalue
return returnvalue
except Exception as err:
LOG... | Some devices allow to directly get values for specific parameters. |
def validate_email_domain(email):
try:
domain = email.split(, 1)[1].lower().strip()
except IndexError:
return
if domain in dju_settings.DJU_EMAIL_DOMAIN_BLACK_LIST:
raise ValidationError(_(u),
code=, params={: domain}) | Validates email domain by blacklist. |
def setCollectors(self, collectors):
self.__collectors = {}
for name, collector in collectors.items():
self.__collectors[name] = collector
collector.setSchema(self) | Sets the collector methods that will be used for this schema.
:param collectors | [<orb.Collectors>, ..] |
def get_domain(self):
points = ([poly.points for poly in self]+
[holes.points for holes in self.holes])
points = np.concatenate(points, axis=0)
return np.array([points.min(axis=0), points.max(axis=0)]) | :returns: opposite vertices of the bounding prism for this
object in the form of ndarray([min], [max])
.. note:: This method automatically stores the solution in order
to do not repeat calculations if the user needs to call it
more than once. |
def compute_distance(x_ori, x_pert, constraint=):
if constraint == :
dist = np.linalg.norm(x_ori - x_pert)
elif constraint == :
dist = np.max(abs(x_ori - x_pert))
return dist | Compute the distance between two images. |
def sortByTotal(requestContext, seriesList):
return list(sorted(seriesList, key=safeSum, reverse=True)) | Takes one metric or a wildcard seriesList.
Sorts the list of metrics by the sum of values across the time period
specified. |
def seek_end(fileobj, offset):
if offset < 0:
raise ValueError
if get_size(fileobj) < offset:
fileobj.seek(0, 0)
else:
fileobj.seek(-offset, 2) | Like fileobj.seek(-offset, 2), but will not try to go beyond the start
Needed since file objects from BytesIO will not raise IOError and
file objects from open() will raise IOError if going to a negative offset.
To make things easier for custom implementations, instead of allowing
both behaviors, we ju... |
def prepare_bam(bam_in, precursors):
a = pybedtools.BedTool(bam_in)
b = pybedtools.BedTool(precursors)
c = a.intersect(b, u=True)
out_file = utils.splitext_plus(op.basename(bam_in))[0] + "_clean.bam"
c.saveas(out_file)
return op.abspath(out_file) | Clean BAM file to keep only position inside the bigger cluster |
async def main(self):
try:
lastkeys = set()
dataupdate = FlowUpdaterNotification.createMatcher(self, FlowUpdaterNotification.DATAUPDATED)
startwalk = FlowUpdaterNotification.createMatcher(self, FlowUpdaterNotification.STARTWALK)
self.subroutine(self._flow... | Main coroutine |
async def get_proxies(self):
log.debug( % self.domain)
async with aiohttp.ClientSession(
headers=get_headers(), cookies=self._cookies, loop=self._loop
) as self._session:
await self._pipe()
log.debug(
% (len(self.proxies), self.... | Receive proxies from the provider and return them.
:return: :attr:`.proxies` |
def catch_error(response):
status = response.status_code
if status == 401 or status == 403:
raise EnvironmentError("Forbidden")
elif status == 417 or status == 404:
raise EnvironmentError("NotFound") | Checks for Errors in a Response.
401 or 403 - Security Rules Violation.
404 or 417 - Firebase NOT Found.
response - (Request.Response) - response from a request. |
def extendedMeasurementOrder():
a = L2PseudoLength(l2pLength=0x12)
b = TpPd(pd=0x6)
c = MessageType(mesType=0x37)
d = ExtendedMeasurementFrequencyList()
packet = a / b / c / d
return packet | EXTENDED MEASUREMENT ORDER Section 9.1.51 |
def off(self, event):
try:
self._once_events.remove(event)
except KeyError:
pass
self._callback_by_event.pop(event, None) | Remove an event handler |
def compile(stream_spec, cmd=, overwrite_output=False):
if isinstance(cmd, basestring):
cmd = [cmd]
elif type(cmd) != list:
cmd = list(cmd)
return cmd + get_args(stream_spec, overwrite_output=overwrite_output) | Build command-line for invoking ffmpeg.
The :meth:`run` function uses this to build the commnad line
arguments and should work in most cases, but calling this function
directly is useful for debugging or if you need to invoke ffmpeg
manually for whatever reason.
This is the same as calling :meth:`... |
def modprobe(state, host, name, present=True, force=False):
modules = host.fact.kernel_modules
is_present = name in modules
args =
if force:
args =
elif present and not is_present:
yield .format(args, name) | Load/unload kernel modules.
+ name: name of the module to manage
+ present: whether the module should be loaded or not
+ force: whether to force any add/remove modules |
def walk(self, action, user_data=None):
action(self.index_file, self.__root, 0, user_data)
self.__do_walk(self.__root, 1, action, user_data) | Walk the hierarchy, applying action to each filename.
Args:
action: callable, the callable to invoke for each filename,
will be invoked with the filename, the subfiles, and
the level in the sitemap. |
def get_gnupg_components(sp=subprocess):
args = [util.which(), ]
output = check_output(args=args, sp=sp)
components = dict(re.findall(, output.decode()))
log.debug(, components)
return components | Parse GnuPG components' paths. |
def load_settings(self, settings):
with open(settings) as settings_file:
settings_dict = simplejson.load(settings_file)
for key, value in settings_dict.items():
self.__setattr__(key, value) | Load settings from file |
def get_full_durable_object(arn, event_time, durable_model):
LOG.debug(f)
item = list(durable_model.query(arn, durable_model.eventTime == event_time))
if not item:
LOG.error(f
f)
raise DurableItemIsMissingException({"item_arn": arn, "event_time": event_time})
... | Utility method to fetch items from the Durable table if they are too big for SNS/SQS.
:param record:
:param durable_model:
:return: |
def find_keyword_in_context(tokens, keyword, contextsize=1):
if isinstance(keyword,tuple) and isinstance(keyword,list):
l = len(keyword)
else:
keyword = (keyword,)
l = 1
n = l + contextsize*2
focuspos = contextsize + 1
for ngram in Windower(tokens,n,None,None):
i... | Find a keyword in a particular sequence of tokens, and return the local context. Contextsize is the number of words to the left and right. The keyword may have multiple word, in which case it should to passed as a tuple or list |
def delete_nic(self, instance_id, port_id):
self.client.servers.interface_detach(instance_id, port_id)
return True | Delete a Network Interface Controller |
def init_glance_consumer(self, mq):
if not self.enable_component_notification(Openstack.Glance):
log.debug("disable listening glance notification")
return
for i in range(self.config.glance_mq_consumer_count):
mq.create_consumer(self.config.glance_mq_exchange... | Init openstack glance mq
1. Check if enable listening glance notification
2. Create consumer
:param mq: class ternya.mq.MQ |
def _new_DatetimeIndex(cls, d):
if "data" in d and not isinstance(d["data"], DatetimeIndex):
data = d.pop("data")
result = cls._simple_new(data, **d)
else:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
... | This is called upon unpickling, rather than the default which doesn't
have arguments and breaks __new__ |
def regression():
Command("touch", DIR.project.joinpath("pathquery", "__init__.py").abspath()).run()
storybook = _storybook({}).only_uninherited()
Command("touch", DIR.project.joinpath("pathquery", "__init__.py").abspath()).run()
storybook.with_params(**{"python version": "3... | Run regression testing - lint and then run all tests. |
def parse_header_line(self, line):
self.header = line[1:].rstrip().split()
if len(self.header) < 9:
self.header = line[1:].rstrip().split()
self.individuals = self.header[9:] | docstring for parse_header_line |
def fetch_suvi_l1b(self, product, correct=True, median_kernel=5):
if self.date < datetime(2018, 5, 23) and not (self.date >= datetime(2017, 9, 6) \
and self.date <= datetime(2017, 9, 10, 23, 59)):
print("SUVI data is only available after 2018-5-23")
return produc... | Given a product keyword, downloads the SUVI l1b image into the current directory.
NOTE: the suvi_l1b_url must be properly set for the Fetcher object
:param product: the keyword for the product, e.g. suvi-l1b-fe094
:param correct: remove nans and negatives
:return: tuple of product name, ... |
def current(sam=False):
*
try:
if sam:
user_name = win32api.GetUserNameEx(win32con.NameSamCompatible)
else:
user_name = win32api.GetUserName()
except pywintypes.error as exc:
log.error()
log.error(, exc.winerror)
log.error(, exc.funcname)
... | Get the username that salt-minion is running under. If salt-minion is
running as a service it should return the Local System account. If salt is
running from a command prompt it should return the username that started the
command prompt.
.. versionadded:: 2015.5.6
Args:
sam (bool, optional... |
def json(self):
data = {}
for item in self._data:
if isinstance(self._data[item], filetree):
data[item] = self._data[item].json()
else:
data[item] = self._data[item]
return data | Return JSON representation of object. |
def submit(self):
futures = []
while self.submitted < 4 and not self.done():
part = self.parts.pop(0)
part_number = part[]
part_read_offset = part[]
part_read_limit = part[]
self.fp.seek(part_read_offset)
part_data = self... | Partitions the file into chunks and submits them into group of 4
for upload on the api upload pool.
:return: Futures |
def install_dir(self):
max_len = 500
directory = self._get_str(self._iface.get_install_dir, [self.app_id], max_len=max_len)
if not directory:
directory = self._get_str(self._iface_list.get_install_dir, [self.app_id], max_len=max_len)
return directory | Returns application installation path.
.. note::
If fails this falls back to a restricted interface, which can only be used by approved apps.
:rtype: str |
def startall(self, wait=False, **kwdargs):
self.logger.debug("startall called")
with self.regcond:
while self.status != :
if self.status in (, ) or self.ev_quit.is_set():
self.logger.error("ignoring duplicate request to start ... | Start all of the threads in the thread pool. If _wait_ is True
then don't return until all threads are up and running. Any extra
keyword arguments are passed to the worker thread constructor. |
def input_validate_aead(aead, name=, expected_len=None, max_aead_len = pyhsm.defines.YSM_AEAD_MAX_SIZE):
if isinstance(aead, pyhsm.aead_cmd.YHSM_GeneratedAEAD):
aead = aead.data
if expected_len != None:
return input_validate_str(aead, name, exact_len = expected_len)
else:
return... | Input validation for YHSM_GeneratedAEAD or string. |
def compute_cost(A2, Y):
m = Y.shape[1]
logprobs = np.multiply(np.log(A2), Y) + np.multiply(np.log(1 - A2), (1 - Y))
cost = -np.sum(logprobs) / m
cost = np.squeeze(cost)
assert (isinstance(cost, float))
return cost | Computes the cross-entropy cost given in equation (13)
Arguments:
A2 -- The sigmoid output of the second activation, of shape (1, number of examples)
Y -- "true" labels vector of shape (1, number of examples)
parameters -- python dictionary containing your parameters W1, b1, W2 and b2
Returns:
... |
def jsonarrlen(self, name, path=Path.rootPath()):
return self.execute_command(, name, str_path(path)) | Returns the length of the array JSON value under ``path`` at key
``name`` |
def _model_to_sbml(cobra_model, f_replace=None, units=True):
if f_replace is None:
f_replace = {}
sbml_ns = libsbml.SBMLNamespaces(3, 1)
sbml_ns.addPackageNamespace("fbc", 2)
doc = libsbml.SBMLDocument(sbml_ns)
doc.setPackageRequired("fbc", False)
doc.setSBOTerm(SBO_FBA_FRAM... | Convert Cobra model to SBMLDocument.
Parameters
----------
cobra_model : cobra.core.Model
Cobra model instance
f_replace : dict of replacement functions
Replacement to apply on identifiers.
units : boolean
Should the FLUX_UNITS be written in the SBMLDocument.
Returns
... |
def p_file_comments_on_lics(self, f_term, predicate):
try:
for _, _, comment in self.graph.triples((f_term, predicate, None)):
self.builder.set_file_license_comment(self.doc, six.text_type(comment))
except CardinalityError:
self.more_than_one_error() | Sets file license comment. |
def pseudotime(starting_node, edges, fitted_vals):
distances = np.array([[sum((x - y)**2) for x in fitted_vals.T] for y in fitted_vals.T])
distance_dict = graph_distances(starting_node, edges, distances)
output = []
for i in range(fitted_vals.shape[1]):
output.append(distance... | Args:
starting_node (int): index of the starting node
edges (list): list of tuples (node1, node2)
fitted_vals (array): output of lineage (2 x cells)
Returns:
A 1d array containing the pseudotime value of each cell. |
def get_probs_for_labels(labels, prediction_results):
probs = []
if in prediction_results:
for i, r in prediction_results.iterrows():
probs_one = [0.0] * len(labels)
for k, v in six.iteritems(r):
if v in labels and k.startswith():
if k == :
prob_name =
... | Given ML Workbench prediction results, get probs of each label for each instance.
The prediction results are like:
[
{'predicted': 'daisy', 'probability': 0.8, 'predicted_2': 'rose', 'probability_2': 0.1},
{'predicted': 'sunflower', 'probability': 0.9, 'predicted_2': 'daisy', 'probability_2': 0.01},
..... |
def last_string(self):
cb = self.code_builder
len_cb = len(cb)
if len_cb > 0:
return cb[len_cb - 1]
else:
return None | The last entry in code_builder, or ``None`` if none so far. |
def _todo_do_update(self, line):
"update [:tablename] {hashkey[,rangekey]} [!fieldname:expectedvalue] [-add|-delete] [+ALL_OLD|ALL_NEW|UPDATED_OLD|UPDATED_NEW] {attributes}"
table, line = self.get_table_params(line)
hkey, line = line.split(" ", 1)
expected, attr = self.get_expected(line)... | update [:tablename] {hashkey[,rangekey]} [!fieldname:expectedvalue] [-add|-delete] [+ALL_OLD|ALL_NEW|UPDATED_OLD|UPDATED_NEW] {attributes} |
def replace_namespaced_service_account(self, name, namespace, body, **kwargs):
kwargs[] = True
if kwargs.get():
return self.replace_namespaced_service_account_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.replace_namespaced_service_accou... | replace_namespaced_service_account # noqa: E501
replace the specified ServiceAccount # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_namespaced_service_account(name, namespa... |
def list_sessions(logged_in_users_only=False):
*
ret = list()
server = win32ts.WTS_CURRENT_SERVER_HANDLE
protocols = {win32ts.WTS_PROTOCOL_TYPE_CONSOLE: ,
win32ts.WTS_PROTOCOL_TYPE_ICA: ,
win32ts.WTS_PROTOCOL_TYPE_RDP: }
statuses = {win32ts.WTSActive: , win32ts.WTSC... | List information about the sessions.
.. versionadded:: 2016.11.0
:param logged_in_users_only: If True, only return sessions with users logged in.
:return: A list containing dictionaries of session information.
CLI Example:
.. code-block:: bash
salt '*' rdp.list_sessions |
def bulkWrite(self, endpoint, buffer, timeout = 100):
r
return self.dev.write(endpoint, buffer, timeout) | r"""Perform a bulk write request to the endpoint specified.
Arguments:
endpoint: endpoint number.
buffer: sequence data buffer to write.
This parameter can be any sequence type.
timeout: operation timeout in milliseconds. (default: 100... |
def _deliver_message(self, msg):
my_subscribed_actions = self.subscriptions.get(msg.sender, [])
for action in my_subscribed_actions:
if Global.CONFIG_MANAGER.tracing_mode:
Global.LOGGER.debug(f"delivering message to {action.name}")
action.on_input_receive... | Deliver the message to the subscripted actions |
def save_model(self, request, obj, form, change):
if change:
obj.tree = self.tree
obj.save() | Saves TreeItem model under certain Tree.
Handles item's parent assignment exception. |
def text_pieces(self, method, uplaces=2, use_exponent=True):
md, hi, lo = self.repvals(method)
if hi == lo:
return % lo, None, None, None
if not np.isfinite([lo, md, hi]).all():
raise ValueError()
from numpy import abs, ceil, floor, log10
... | Return (main, dhigh, dlow, sharedexponent), all as strings. The
delta terms do not have sign indicators. Any item except the first
may be None.
`method` is passed to Uval.repvals() to compute representative
statistical limits. |
def get_precision(self):
config_str = self.raw_sensor_strings[1].split()[4]
bit_base = int(config_str, 16) >> 5
return bit_base + 9 | Get the current precision from the sensor.
:returns: sensor resolution from 9-12 bits
:rtype: int |
def uniform_unit_scaling(tensor: torch.Tensor, nonlinearity: str = "linear"):
size = 1.
for dimension in list(tensor.size())[:-1]:
size *= dimension
activation_scaling = torch.nn.init.calculate_gain(nonlinearity, tensor)
max_value = math.sqrt(3 / size) * activation... | An initaliser which preserves output variance for approximately gaussian
distributed inputs. This boils down to initialising layers using a uniform
distribution in the range ``(-sqrt(3/dim[0]) * scale, sqrt(3 / dim[0]) * scale)``, where
``dim[0]`` is equal to the input dimension of the parameter and the ``s... |
def task_view_generator(job_descriptor):
for task_descriptor in job_descriptor.task_descriptors:
jd = JobDescriptor(job_descriptor.job_metadata, job_descriptor.job_params,
job_descriptor.job_resources, [task_descriptor])
yield jd | Generator that yields a task-specific view of the job.
This generator exists to make it easy for callers to iterate over the tasks
in a JobDescriptor. Each pass yields a new JobDescriptor with a single task.
Args:
job_descriptor: A JobDescriptor with 1 or more tasks.
Yields:
A JobDescriptor with a si... |
def add(self, si):
if self.o_chunk is None:
if os.path.exists(self.t_path):
os.remove(self.t_path)
self.o_chunk = streamcorpus.Chunk(self.t_path, mode=)
self.o_chunk.add(si)
logger.debug(, len(self.o_chunk))
if len(self.o_chunk) == self.ch... | puts `si` into the currently open chunk, which it creates if
necessary. If this item causes the chunk to cross chunk_max,
then the chunk closed after adding. |
def QA_SU_save_future_min(client=DATABASE, ui_log=None, ui_progress=None):
future_list = [
item for item in QA_fetch_get_future_list().code.unique().tolist()
if str(item)[-2:] in [,
]
]
coll = client.future_min
coll.create_index(
[
... | save future_min
Keyword Arguments:
client {[type]} -- [description] (default: {DATABASE}) |
def _read_mode_tsopt(self, size, kind):
temp = struct.unpack(, self._read_fileng(size))
data = dict(
kind=kind,
length=size,
val=temp[0],
ecr=temp[1],
)
return data | Read Timestamps option.
Positional arguments:
* size - int, length of option
* kind - int, 8 (Timestamps)
Returns:
* dict -- extracted Timestamps (TS) option
Structure of TCP TSopt [RFC 7323]:
+-------+-------+---------------------+-------------... |
def send(self, api_key=None, secret=None, list_data=None, auth=False, **kwargs):
if auth:
nonce = str(int(time.time() * 10000000))
auth_string = + nonce
auth_sig = hmac.new(secret.encode(), auth_string.encode(),
hashlib.sha384).hexdig... | Sends the given Payload to the API via the websocket connection.
:param kwargs: payload paarameters as key=value pairs
:return: |
def start(self):
if self._status == TransferState.PREPARING:
self._running.set()
super(Download, self).start()
self._status = TransferState.RUNNING
self._time_started = time.time()
else:
raise SbgError(
) | Starts the download.
:raises SbgError: If download is not in PREPARING state. |
def _generate_name(self, space, service_name, plan_name):
return str.join(, [space, service_name, plan_name]).lower() | Can generate a name based on the space, service name and plan. |
def decode_solution(self, encoded_solution):
return self._decode_function(encoded_solution, *self._decode_args,
**self._decode_kwargs) | Return solution from an encoded representation. |
def solve_with_sdpa(sdp, solverparameters=None):
solverexecutable = detect_sdpa(solverparameters)
if solverexecutable is None:
raise OSError("SDPA is not in the path or the executable provided is" +
" not correct")
primal, dual = 0, 0
tempfile_ = tempfile.NamedTemporar... | Helper function to write out the SDP problem to a temporary
file, call the solver, and parse the output.
:param sdp: The SDP relaxation to be solved.
:type sdp: :class:`ncpol2sdpa.sdp`.
:param solverparameters: Optional parameters to SDPA.
:type solverparameters: dict of str.
:returns: tuple of... |
def decode(self, bytes, raw=False):
return struct.unpack(self.format, buffer(bytes))[0] | decode(bytearray, raw=False) -> value
Decodes the given bytearray according to this PrimitiveType
definition.
NOTE: The parameter ``raw`` is present to adhere to the
``decode()`` inteface, but has no effect for PrimitiveType
definitions. |
def open_conn(host, db, user, password, retries=0, sleep=0.5):
assert retries >= 0
try:
return MySQLdb.connect(host=host, user=user, passwd=password, db=db)
except Exception:
if retries > 0:
time.sleep(sleep)
return open_conn(host, db, user, password, retries - ... | Return an open mysql db connection using the given credentials. Use
`retries` and `sleep` to be robust to the occassional transient connection
failure.
retries: if an exception when getting the connection, try again at most this many times.
sleep: pause between retries for this many seconds. a float ... |
def hide_routemap_holder_route_map_content_match_ip_route_source_prefix_list_rmrs(self, **kwargs):
config = ET.Element("config")
hide_routemap_holder = ET.SubElement(config, "hide-routemap-holder", xmlns="urn:brocade.com:mgmt:brocade-ip-policy")
route_map = ET.SubElement(hide_routemap_h... | Auto Generated Code |
def send(self, command, tab_key, params=None):
s associated response.
%s'", navcom)
try:
self.soclist[tab_key].send(navcom)
except (socket.timeout, websocket.WebSocketTimeoutException):
raise cr_exceptions.ChromeCommunicationsError("Failure sending command to chromium.")
except websocket.WebSocketConnec... | Send command `command` with optional parameters `params` to the
remote chrome instance.
The command `id` is automatically added to the outgoing message.
return value is the command id, which can be used to match a command
to it's associated response. |
def get_or_create_node(self, graph: BELGraph, node: BaseEntity) -> Optional[Node]:
sha512 = node.as_sha512()
if sha512 in self.object_cache_node:
return self.object_cache_node[sha512]
node_model = self.get_node_by_hash(sha512)
if node_model is not None:
... | Create an entry and object for given node if it does not exist. |
def draw_marked_line(self, data, coordinates, linestyle, markerstyle,
label, mplobj=None):
if linestyle is not None:
self.draw_line(data, coordinates, linestyle, label, mplobj)
if markerstyle is not None:
self.draw_markers(data, coordinates, mark... | Draw a line that also has markers.
If this isn't reimplemented by a renderer object, by default, it will
make a call to BOTH draw_line and draw_markers when both markerstyle
and linestyle are not None in the same Line2D object. |
def lock_multi(self, keys, ttl=0):
    """Lock multiple keys. Multi variant of :meth:`lock`.

    :param keys: the keys to lock
    :type keys: :ref:`iterable<argtypes>`
    :param int ttl: The lock timeout for all keys
    :return: a :class:`~.MultiResult` object

    .. seealso:: :meth:`lock`
    """
    # Delegate straight to the base implementation; this override exists
    # only so documentation can be attached at this level.
    results = _Base.lock_multi(self, keys, ttl=ttl)
    return results
def _print(self, char):
assert len(char) == 1
try:
try:
char = self.decoder(bytes(char, self.encoding))[0]
except TypeError:
char = self.decoder(char)[0]
except UnicodeDecodeError:
... | Print a character at the current cursor position and advance the
cursor. |
def list_all_braintree_gateways(cls, **kwargs):
kwargs[] = True
if kwargs.get():
return cls._list_all_braintree_gateways_with_http_info(**kwargs)
else:
(data) = cls._list_all_braintree_gateways_with_http_info(**kwargs)
return data | List BraintreeGateways
Return a list of BraintreeGateways
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_all_braintree_gateways(async=True)
>>> result = thread.get()
:param asyn... |
def model_fn(features, labels, mode, params):
policy_output, value_output, logits = model_inference_fn(
features, mode == tf.estimator.ModeKeys.TRAIN, params)
policy_cost = tf.reduce_mean(
tf.nn.softmax_cross_entropy_with_logits_v2(
logits=logits, labels=tf.stop_gradient(... | Create the model for estimator api
Args:
features: tensor with shape
[BATCH_SIZE, go.N, go.N, features_lib.NEW_FEATURES_PLANES]
labels: dict from string to tensor with shape
'pi_tensor': [BATCH_SIZE, go.N * go.N + 1]
'value_tensor': [BATCH_SIZE]
mode: a t... |
def lowpass(data, filterSize=None):
    """Minimal-complexity low-pass filtering.

    Filter size is how "wide" the filter will be; sigma will be 1/10 of
    this filter width.  If filter size isn't given, it will be 1/10 of
    the data size.

    :param data: sequence of samples to smooth
    :param filterSize: width of the Gaussian kernel; defaults to
        ``len(data) // 10``
    :return: the filtered data
    """
    if filterSize is None:
        # Bug fix: use integer division.  ``len(data) / 10`` yields a
        # float under Python 3, which is not a valid kernel width.
        filterSize = len(data) // 10
    kernel = kernel_gaussian(size=filterSize)
    # NOTE(review): presumably ``convolve`` is same-length ("same" mode)
    # convolution from the surrounding module -- confirm against its def.
    data = convolve(data, kernel)
    return data
async def send_message():
jar = aiohttp.CookieJar(unsafe=True)
websession = aiohttp.ClientSession(cookie_jar=jar)
modem = eternalegypt.Modem(hostname=sys.argv[1], websession=websession)
await modem.login(password=sys.argv[2])
await modem.sms(phone=sys.argv[3], message=sys.argv[4])
await ... | Example of sending a message. |
def list_hosted_zones(self, page_chunks=100):
return self._do_autopaginating_api_call(
path=,
params={: page_chunks},
method=,
parser_func=xml_parsers.list_hosted_zones_parser,
next_marker_xpath="./{*}NextMarker",
next_marker_par... | List all hosted zones associated with this connection's account. Since
this method returns a generator, you can pull as many or as few
entries as you'd like, without having to query and receive every
hosted zone you may have.
:keyword int page_chunks: This API call is "paginated" behind... |
def get_matched_token(self, match):
    """Find which token has been matched by compound regex.

    Walks the named groups registered in ``self.groups`` and returns a
    ``(token, match_type, group_name)`` tuple for the first group that
    actually captured text; returns ``None`` if no group matched.
    """
    captured = match.groupdict()
    for name, (token, match_type) in self.groups.items():
        if captured[name] is not None:
            return (token, match_type, name)
def send_packet(self, pk, expected_reply=(), resend=False, timeout=0.2):
self._send_lock.acquire()
if self.link is not None:
if len(expected_reply) > 0 and not resend and \
self.link.needs_resending:
pattern = (pk.header,) + expected_reply
... | Send a packet through the link interface.
pk -- Packet to send
expect_answer -- True if a packet from the Crazyflie is expected to
be sent back, otherwise false |
def setIsolateHidden(self, state):
    """Sets whether or not this item is hidden due to isolation.

    :param state | <bool>
    """
    self._isolatedHidden = state
    # Re-apply the current visibility so the new isolation flag takes
    # effect (presumably isVisible/setVisible consult the flag just
    # set -- confirm against the XNode base class).
    currently_visible = self.isVisible()
    super(XNode, self).setVisible(currently_visible)
def select_address_family(host, port):
    """Return ``AF_INET``, ``AF_INET6``, or ``AF_UNIX`` depending on
    the host and port.
    """
    if host.startswith("unix://"):
        return socket.AF_UNIX
    # Colons only appear in IPv6 literals; fall back to IPv4 when the
    # platform lacks IPv6 support.
    looks_ipv6 = ":" in host and hasattr(socket, "AF_INET6")
    return socket.AF_INET6 if looks_ipv6 else socket.AF_INET
def tag(self, tag: str, overwrite: bool = False) -> None:
LOGGER.info(, tag)
try:
self.repo.create_tag(tag)
except GitCommandError as exc:
if in exc.stderr and overwrite:
LOGGER.info()
self.remove_tag(tag)
self.rep... | Tags the current commit
:param tag: tag
:type tag: str
:param overwrite: overwrite existing tag
:type overwrite: bool |
def save(self, *args, **kwargs):
if isinstance(self.geometry, GeometryCollection) and 0 < len(self.geometry) < 2:
self.geometry = self.geometry[0]
if not self.status and not self.status_id:
try:
self.status = Status.objects.filter(is_def... | Custom save method does the following things:
* converts geometry collections of just 1 item to that item (eg: a collection of 1 Point becomes a Point)
* intercepts changes to status and fires node_status_changed signal
* set default status |
def is_already_running(self):
    """Return True if lock exists and has not timed out."""
    lock_key = self.CELERY_LOCK.format(task_id=self.task_identifier)
    redis_client = self.celery_self.backend.client
    return redis_client.exists(lock_key)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.