| code (strings, 26–79.6k chars) | docstring (strings, 1–46.9k chars) |
|---|---|
def calculate_check_digit(gtin):
    """Given a GTIN (8-14) or SSCC, calculate its appropriate check digit.

    The payload is scanned right-to-left; digits at even offsets from the
    right are weighted 3 and the rest 1, per the GS1 algorithm.
    """
    total = 0
    for offset, char in enumerate(gtin[::-1]):
        weight = 3 if offset % 2 == 0 else 1
        total += int(char) * weight
    # The check digit is the distance up to the next multiple of ten.
    nearest_multiple_of_ten = int(math.ceil(total / 10.0) * 10)
    return nearest_multiple_of_ten - total
def _combined_regex(regexes, flags=re.IGNORECASE, use_re2=False, max_mem=None):
joined_regexes = "|".join(r for r in regexes if r)
if not joined_regexes:
return None
if use_re2:
import re2
return re2.compile(joined_regexes, flags=flags, max_mem=max_mem)
return re.compile(joined_regexes, flags=flags) | Return a compiled regex combined (using OR) from a list of ``regexes``.
If there is nothing to combine, None is returned.
re2 library (https://github.com/axiak/pyre2) often can match and compile
large regexes much faster than stdlib re module (10x is not uncommon),
but there are some gotchas:
* in case of "DFA out of memory" errors use ``max_mem`` argument
to increase the amount of memory re2 is allowed to use. |
def validate_instance_size(self, size):
    """Validate that ``size`` is an integer between 5-1024 (inclusive).

    Returns True when valid, None otherwise; callers rely on the
    truthy/falsy contract, so None-for-invalid is preserved.
    """
    try:
        value = int(size)
    except (ValueError, TypeError):
        # Non-numeric input (including None) is invalid rather than an
        # error; the original let TypeError escape and called int() three
        # times.
        return
    if value < 5 or value > 1024:
        return
    return True
def get_gdb_response(
    self, timeout_sec=DEFAULT_GDB_TIMEOUT_SEC, raise_error_on_timeout=True
):
    """Block until GDB responds (or ``timeout_sec`` elapses) and return the
    parsed responses.

    Args:
        timeout_sec (float): Maximum time to wait; negative values are
            clamped to 0.
        raise_error_on_timeout (bool): Raise instead of returning an empty
            result when nothing arrived in time.

    Returns:
        List of parsed GDB responses.

    Raises:
        GdbTimeoutError: no response within ``timeout_sec`` and
            ``raise_error_on_timeout`` is set.
    """
    self.verify_valid_gdb_subprocess()
    if timeout_sec < 0:
        self.logger.warning("timeout_sec was negative, replacing with 0")
        timeout_sec = 0
    # Platform-specific polling of the subprocess pipes.
    if USING_WINDOWS:
        responses = self._get_responses_windows(timeout_sec)
    else:
        responses = self._get_responses_unix(timeout_sec)
    if responses or not raise_error_on_timeout:
        return responses
    raise GdbTimeoutError(
        "Did not get response from gdb after %s seconds" % timeout_sec
    )
by timeout_sec, an exception is raised.
Args:
timeout_sec (float): Maximum time to wait for reponse. Must be >= 0. Will return after
raise_error_on_timeout (bool): Whether an exception should be raised if no response was found
after timeout_sec
Returns:
List of parsed GDB responses, returned from gdbmiparser.parse_response, with the
additional key 'stream' which is either 'stdout' or 'stderr'
Raises:
GdbTimeoutError if response is not received within timeout_sec
ValueError if select returned unexpected file number
NoGdbProcessError if there is no gdb subprocess running |
def are_flags_valid(packet_type, flags):
    """True when ``flags`` comply with [MQTT-2.2.2-1] requirements for
    ``packet_type``; False otherwise.

    Parameters
    ----------
    packet_type: MqttControlPacketType
    flags: int
        Integer form of the 4-bit MQTT header flags field; values outside
        [0, 15] certainly yield False.

    Returns
    -------
    bool
    """
    if packet_type == MqttControlPacketType.publish:
        # Publish packets may use the full 4-bit range (dup/qos/retain).
        return 0 <= flags <= 15
    if packet_type in (MqttControlPacketType.pubrel,
                       MqttControlPacketType.subscribe,
                       MqttControlPacketType.unsubscribe):
        return flags == 2
    if packet_type in (MqttControlPacketType.connect,
                       MqttControlPacketType.connack,
                       MqttControlPacketType.puback,
                       MqttControlPacketType.pubrec,
                       MqttControlPacketType.pubcomp,
                       MqttControlPacketType.suback,
                       MqttControlPacketType.unsuback,
                       MqttControlPacketType.pingreq,
                       MqttControlPacketType.pingresp,
                       MqttControlPacketType.disconnect):
        return flags == 0
    raise NotImplementedError(packet_type)
packet_type; False otherwise.
Parameters
----------
packet_type: MqttControlPacketType
flags: int
Integer representation of 4-bit MQTT header flags field.
Values outside of the range [0, 15] will certainly cause the
function to return False.
Returns
-------
bool |
def parse_declaration_expressn_memberaccess(self, lhsAST, rhsAST, es):
    """Translate a member access ("lhs.rhs") into the expression string ``es``.

    Per the original docs: instead of "Class.variablename", emit
    "Class.rv('variablename')".

    NOTE(review): a string literal was stripped from the bare ``es = es +``
    line below (presumably the ".rv('"-style separator between the two
    sides); that line is syntactically incomplete and must be restored.
    """
    if isinstance(lhsAST, wdl_parser.Terminal):
        es = es + lhsAST.source_string
    elif isinstance(lhsAST, wdl_parser.Ast):
        raise NotImplementedError
    elif isinstance(lhsAST, wdl_parser.AstList):
        raise NotImplementedError
    es = es +
    if isinstance(rhsAST, wdl_parser.Terminal):
        es = es + rhsAST.source_string
    elif isinstance(rhsAST, wdl_parser.Ast):
        raise NotImplementedError
    elif isinstance(rhsAST, wdl_parser.AstList):
        raise NotImplementedError
    return es | Instead of "Class.variablename", use "Class.rv('variablename')".
:param lhsAST:
:param rhsAST:
:param es:
:return: |
def envvar_constructor(loader, node):
    """YAML tag constructor that resolves environment variables.

    Usage: ``!TAG VARIABLE`` (KeyError while loading if unset) or
    ``!TAG VARIABLE:=DEFAULT_VALUE`` (fall back to the default).

    NOTE(review): the separator literal in ``value.split(, 1)`` was
    stripped; judging from the documented ``VARIABLE:=DEFAULT_VALUE``
    syntax it was presumably ':=' -- confirm before restoring.
    """
    value = loader.construct_python_unicode(node)
    data = value.split(, 1)
    if len(data) == 2:
        # Variable with an explicit default.
        var, default = data
        return os.environ.get(var, default)
    else:
        # Bare variable: raise while loading if it does not exist.
        return os.environ[value] | Tag constructor to use environment variables in YAML files. Usage:
- !TAG VARIABLE
raise while loading the document if variable does not exists
- !TAG VARIABLE:=DEFAULT_VALUE
For instance:
credentials:
user: !env USER:=root
group: !env GROUP:= root |
def on_close(self, stats, previous_stats):
    """Print the extended JSON report to the reporter's output.

    :param dict stats: Metrics for the current pylint run
    :param dict previous_stats: Metrics for the previous pylint run

    NOTE(review): the three dictionary-key string literals below were
    stripped upstream (likely names like 'messages'/'stats'/'previous');
    they must be restored before this can run.
    """
    reports = {
        : self._messages,
        : stats,
        : previous_stats,
    }
    print(json.dumps(reports, cls=JSONSetEncoder, indent=4), file=self.out) | Print the extended JSON report to reporter's output.
:param dict stats: Metrics for the current pylint run
:param dict previous_stats: Metrics for the previous pylint run |
def skew_y(self, y):
    """Skew the element along the y-axis by ``y`` degrees by appending a
    ``skewY`` entry to the SVG ``transform`` attribute; returns self for
    chaining.

    NOTE(review): the fallback literal after ``or`` was stripped --
    presumably the empty string '' for a missing transform attribute.
    """
    self.root.set("transform", "%s skewY(%f)" %
                  (self.root.get("transform") or , y))
    return self | Skew element along the y-axis by the given angle.
Parameters
----------
y : float
y-axis skew angle in degrees |
def update(self, id, **kwargs):
    """Update an existing License (synchronous by default; pass a
    ``callback`` kwarg for an asynchronous request returning the thread).

    NOTE(review): both the ``kwargs[]`` and ``kwargs.get()`` key literals
    were stripped upstream -- in similar swagger-generated clients these
    are typically '_return_http_data_only' and 'callback'; confirm against
    the upstream source before restoring.
    """
    kwargs[] = True
    if kwargs.get():
        # Asynchronous path: returns the request thread.
        return self.update_with_http_info(id, **kwargs)
    else:
        (data) = self.update_with_http_info(id, **kwargs)
        return data | Updates an existing License
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: License id (required)
:param LicenseRest body:
:return: None
If the method is called asynchronously,
returns the request thread. |
def calc_nfalse(d):
    """Calculate the number of thermal-noise false positives per segment.

    NOTE(review): every ``d[]`` subscript literal was stripped upstream
    (state-dict keys such as the dt list, trial counts and the sigma
    threshold); they must be restored before this can run.
    """
    dtfactor = n.sum([1./i for i in d[]])
    ntrials = d[] * dtfactor * len(d[]) * d[] * d[]
    # Gaussian tail fraction beyond the sigma threshold.
    qfrac = 1 - (erf(d[]/n.sqrt(2)) + 1)/2.
    nfalse = int(qfrac*ntrials)
    return nfalse | Calculate the number of thermal-noise false positives per segment. |
def toDict(self):
    """Return a dict representation of the title alignment.

    NOTE(review): the two stripped dict-key literals were presumably
    'hsps' and 'read'; restore before use.
    """
    return {
        : [hsp.toDict() for hsp in self.hsps],
        : self.read.toDict(),
    } | Get information about a title alignment as a dictionary.
@return: A C{dict} representation of the title aligment. |
def cluster_path(cls, project, instance, cluster):
    """Return a fully-qualified cluster string."""
    template = "projects/{project}/instances/{instance}/clusters/{cluster}"
    return google.api_core.path_template.expand(
        template,
        project=project,
        instance=instance,
        cluster=cluster,
    )
def listing(
    source: list,
    ordered: bool = False,
    expand_full: bool = False
):
    """Display *source* as an unordered (default) or ordered list, each
    element rendered via its string representation.

    :param source: the iterable to display as a list
    :param ordered: use a numbered list instead of bullets
    :param expand_full: stretch the list across the full screen width
        instead of the centered text-flow area
    """
    report = _get_report()
    rendered = render.listing(
        source=source,
        ordered=ordered,
        expand_full=expand_full
    )
    report.append_body(rendered)
    # NOTE(review): write_source() is invoked with no arguments here;
    # confirm the upstream signature.
    report.stdout_interceptor.write_source()
each element is converted to a string representation for display.
:param source:
The iterable to display as a list.
:param ordered:
Whether or not the list should be ordered. If False, which is the
default, an unordered bulleted list is created.
:param expand_full:
Whether or not the list should expand to fill the screen horizontally.
When defaulted to False, the list is constrained to the center view
area of the screen along with other text. This can be useful to keep
lists aligned with the text flow. |
def titled_box(self, titles, contents, tdir=, cdir=):
    """Build a box of ``contents`` (already Tag instances) with a title
    above/below (tdir=='v') or left/right (tdir=='h'); ``cdir`` controls
    how the contents themselves are stacked.

    NOTE(review): the defaults for ``tdir``/``cdir`` and the (f-)string
    class-name literals in the ``H.div[...]`` subscripts were stripped
    upstream; the original presumably interpolated tdir/cdir into CSS
    class names.  This cannot run until they are restored.
    """
    H = self.H
    def wrapt(x):
        # Wrap a title element in its styling div.
        return H.div[](x)
    rval = H.div[f]
    contents = H.div[f].fill(contents)
    if isinstance(titles, tuple) and len(titles) == 2:
        open, close = titles
    else:
        open, close = titles, None
    if open:
        rval = rval(wrapt(open))
    rval = rval(contents)
    if close:
        rval = rval(wrapt(close))
    return rval | Helper function to build a box containing a list of elements,
with a title above and/or below, or left and/or right of the
box. (e.g. a class name on top, or brackets on both sides.)
The elements given must already have been transformed into
Tag instances.
Arguments:
titles: A pair of strings to display on top and bottom
(if tdir=='v') or left and right (if tdir=='h').
If either or both titles are None, they will be
omitted.
contents: A list of Tags.
tdir: tdir=='h' (default) means the titles will be on
the left and right. tdir=='v' means they will be
on top and bottom.
cdir: cdir=='h' (default) means the contents will be
stacked horizontally. cdir=='v' means they will
be stacked vertically. |
def speak(self, message):
    """Post a message to the room.

    Args:
        message (:class:`Message` or string): Message

    Returns:
        Message built from the response on success, otherwise the falsy
        success flag.
    """
    campfire = self.get_campfire()
    if not isinstance(message, Message):
        # Promote plain strings to Message objects.
        message = Message(campfire, message)
    result = self._connection.post(
        "room/%s/speak" % self.id,
        {"message": message.get_data()},
        parse_data=True,
        key="message"
    )
    if not result["success"]:
        return result["success"]
    return Message(campfire, result["data"])
Args:
message (:class:`Message` or string): Message
Returns:
bool. Success |
def showEvent(self, event):
    """Emit selection/visibility signals when this widget becomes visible:
    restore the previous parameter selection, else select the first row,
    else signal that the model is empty."""
    selected = self.paramList.selectedIndexes()
    model = self.paramList.model()
    self.visibilityChanged.emit(1)
    if selected:
        self.paramList.parameterChanged.emit(model.selection(selected[0]))
    elif model.rowCount() > 0:
        self.paramList.selectRow(0)
        first_index = model.index(0, 0)
        self.paramList.parameterChanged.emit(model.selection(first_index))
    else:
        model.emptied.emit(True)
    # Every branch ends with a hint request, so emit it once here.
    self.hintRequested.emit()
other widgets in the parent widget into different editing modes, emits
signal to notify other widgets. Restores the previous selection the last
time this widget was visible |
def get_syslog(self, service_id, version_number, name):
    """Get the Syslog for a particular service and version."""
    path = "/service/%s/version/%d/syslog/%s" % (service_id, version_number, name)
    return FastlySyslog(self, self._fetch(path))
def Join(self):
    """Wait (up to JOIN_TIMEOUT_DECISECONDS tenths of a second) until all
    outstanding tasks complete, else raise ValueError."""
    remaining = self.JOIN_TIMEOUT_DECISECONDS
    while remaining > 0:
        if self._queue.empty() and not self.busy_threads:
            return
        time.sleep(0.1)
        remaining -= 1
    raise ValueError("Timeout during Join() for threadpool %s." % self.name)
def select_seqs(ol, seqs):
    """Pick the elements of ``ol`` at the positions listed in ``seqs``.

    Example (from elist.elist):
        ol = ['a', 'b', 'c', 'd']
        select_seqs(ol, [1, 2])  ->  ['b', 'c']

    :param ol: source list
    :param seqs: indices to select
    :return: list of selected elements (always a list, possibly empty)
    """
    # Fixes two defects in the original: a stray bare ``abcd`` expression
    # (NameError at runtime) and calling itemgetter(*seqs) before the
    # empty-``seqs`` check (TypeError for zero arguments).  The dead
    # deepcopy whose result was immediately overwritten is dropped too.
    if not seqs:
        return []
    if len(seqs) == 1:
        return [ol[seqs[0]]]
    return list(itemgetter(*seqs)(ol))
ol = ['a','b','c','d']
select_seqs(ol,[1,2]) |
def apply_customization(self, serializer, customization):
    """Apply a fields customization to a nested or embedded
    DocumentSerializer's Meta and validate methods."""
    meta = serializer.Meta
    if customization.fields is not None:
        # An explicit empty field list means "all fields".
        meta.fields = customization.fields if customization.fields else ALL_FIELDS
    if customization.exclude is not None:
        meta.exclude = customization.exclude
    if customization.extra_kwargs is not None:
        meta.extra_kwargs = customization.extra_kwargs
    for method_name, method in customization.validate_methods.items():
        setattr(serializer, method_name, method)
def _match_dfs_expr(lo_meta, expr, tt):
    """Collect csv filenames of tables matching ``expr`` ("measurement" or
    "model") from a LiPD metadata dict.

    :param dict lo_meta: Lipd object metadata
    :param str expr: Search expression
    :param str tt: Table type (chron or paleo)
    :return list: All filenames that match the expression
    """
    logger_dataframes.info("enter match_dfs_expr")
    filenames = []
    s = "{}Data".format(tt)  # NOTE(review): unused local; kept as-is.
    for k, v in lo_meta["{}Data".format(tt)].items():
        if "measurement" in expr:
            for k1, v1 in v["{}MeasurementTable".format(tt)].items():
                try:
                    f = v1["filename"]
                    if f.endswith(".csv"):
                        filenames.append(f)
                except KeyError:
                    logger_dataframes.info("match_dfs_expr: KeyError: filename not found in: {} {}".format(tt, "ensemble"))
        elif "model" in expr:
            for k1, v1 in v["{}Model".format(tt)].items():
                try:
                    f = v1["{}ModelTable".format(tt)]["filename"]
                    if f.endswith(".csv"):
                        filenames.append(f)
                except KeyError:
                    logger_dataframes.info(
                        "match_dfs_expr: KeyError: filename not found in: {} {}".format(tt, "dist"))
    logger_dataframes.info("exit match_dfs_expr")
    return filenames | Use the given expression to get all data frames that match the criteria (i.e. "paleo measurement tables")
:param dict lo_meta: Lipd object metadata
:param str expr: Search expression
:param str tt: Table type (chron or paleo)
:return list: All filenames that match the expression |
def _serialize_lnk(lnk):
    """Serialize a predication lnk to surface form into the SimpleMRS
    encoding.

    NOTE(review): every string literal in this function was stripped
    upstream (the '<'/'>' delimiters and the ':'/'#'/' '/'@' joiners of
    SimpleMRS lnk syntax); restore them before use.
    """
    s = ""
    if lnk is not None:
        s =
        if lnk.type == Lnk.CHARSPAN:
            cfrom, cto = lnk.data
            s += .join([str(cfrom), , str(cto)])
        elif lnk.type == Lnk.CHARTSPAN:
            cfrom, cto = lnk.data
            s += .join([str(cfrom), , str(cto)])
        elif lnk.type == Lnk.TOKENS:
            s += .join([str(t) for t in lnk.data])
        elif lnk.type == Lnk.EDGE:
            s += .join([, str(lnk.data)])
        s +=
    return s | Serialize a predication lnk to surface form into the SimpleMRS
encoding. |
def _set_pspf_timer(self, v, load=False):
    """Setter for pspf_timer (YANG container
    /isis_state/router_isis_config/pspf_timer, IS-IS partial SPF timer);
    backends should call thisObj._set_pspf_timer() directly.

    NOTE(review): numerous string literals (YANG type/namespace names,
    extension dict keys, the error-message dict entries and the
    change-callback attribute name) were stripped upstream; this
    pyangbind-generated setter cannot run until it is regenerated.
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=pspf_timer.pspf_timer, is_container=, presence=False, yang_name="pspf-timer", rest_name="pspf-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u: {u: u, u: None}}, namespace=, defining_module=, yang_type=, is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            : ,
            : "container",
            : ,
        })
    self.__pspf_timer = t
    if hasattr(self, ):
        self._set() | Setter method for pspf_timer, mapped from YANG variable /isis_state/router_isis_config/pspf_timer (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_pspf_timer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_pspf_timer() directly.
YANG Description: Timer for IS-IS Partial SPF calculation for IPv4 |
def process_sentence(sentence, start_word="<S>", end_word="</S>"):
    """Tokenize ``sentence`` into a list of lower-cased word strings,
    optionally wrapped with ``start_word`` and ``end_word`` (pass None to
    omit either).

    Example: "how are you?" -> ['<S>', 'how', 'are', 'you', '?', '</S>']
    Requires NLTK and its tokenizer data to be installed.
    """
    words = [] if start_word is None else [start_word]
    words.extend(nltk.tokenize.word_tokenize(sentence.lower()))
    if end_word is not None:
        words.append(end_word)
    return words
see ``create_vocab()`` and ``tutorial_tfrecord3.py``.
Parameters
----------
sentence : str
A sentence.
start_word : str or None
The start word. If None, no start word will be appended.
end_word : str or None
The end word. If None, no end word will be appended.
Returns
---------
list of str
A list of strings that separated into words.
Examples
-----------
>>> c = "how are you?"
>>> c = tl.nlp.process_sentence(c)
>>> print(c)
['<S>', 'how', 'are', 'you', '?', '</S>']
Notes
-------
- You have to install the following package.
- `Installing NLTK <http://www.nltk.org/install.html>`__
- `Installing NLTK data <http://www.nltk.org/data.html>`__ |
def _recover_shape_information(self, inputs, outputs):
batch_size_value = inputs.get_shape()[0]
if self._data_format.startswith("NC"):
output_shape_value = ((batch_size_value, self.output_channels) +
self.output_shape)
elif self._data_format.startswith("N") and self._data_format.endswith("C"):
output_shape_value = ((batch_size_value,) + self.output_shape +
(self.output_channels,))
outputs.set_shape(output_shape_value)
return outputs | Recover output tensor shape value to enable shape inference.
The batch size of `inputs` isn't preserved by the convolution op. Calculate
what the proper output shape will be for `outputs`.
Args:
inputs: A Tensor of shape `data_format` and of type `tf.float16`,
`tf.bfloat16` or `tf.float32`.
outputs: A Tensor of shape `data_format` and of type `tf.float16`,
`tf.bfloat16` or `tf.float32`. The output of `inputs` from a transpose
convolution op.
Returns:
outputs: The passed-in `outputs` with all shape information filled in. |
def status(self):
    """Get server status via GET to the /status interface.

    :Returns: (dict) Server status.
    """
    resp = self._get(url.status)
    self._check_response(resp, 200)
    return self._create_response(resp)
:Returns: (dict) Server status as described `here <https://cloud.knuverse.com/docs/api/#api-General-Status>`_. |
def _fulfill(self, bits, ignore_nonpromised_bits=False):
    """Supply this promise with the bits from its primitive's execution,
    then propagate fulfillment backwards up the promise chain.

    Args:
        bits: bitarray (or compatible) of data read from the jtag
            controller's TDO pin; None when called recursively by a
            sub-promise (the value is then assembled from components).
        ignore_nonpromised_bits: when True, slice with the "selective"
            offsets because only promised bits were returned.
    """
    # Only resolve once every sub-promise has its value.
    if self._allsubsfulfilled():
        if not self._components:
            if ignore_nonpromised_bits:
                self._value = bits[self._bitstartselective:
                                   self._bitstartselective +
                                   self._bitlength]
            else:
                self._value = bits[self._bitstart:self._bitend]
        else:
            # Concatenate the component promises' values in order.
            self._value = self._components[0][0]._value
            for sub, offset in self._components[1:]:
                self._value += sub._value
        if self._parent is not None:
            # Walk the chain backwards toward the original promise.
            self._parent._fulfill(None)
The fulfillment process must walk the promise chain backwards
until it reaches the original promise and can supply the final
value.
The data that comes in can either be all a bit read for every
bit written by the associated primitive, or (if the primitive
supports it), only the bits that are used by promises. The
ignore_nonpromised_bits flag specifies which format the
incoming data is in.
Args:
bits: A bitarray (or compatible) containing the data read from the jtag controller's TDO pin.
ignore_nonpromised_bits: A boolean specifying if only promised bits are being returned (and thus the 2nd index of the promise must be used for slicing the incoming data). |
def coupling(self, source_y, target_y, weight):
    """Default fallback coupling: drive every variable of the target
    subsystem with the mean of the source subsystem's variables, scaled by
    the connection ``weight``.

    Arguments:
        source_y (array of shape (d,)): State of the source subsystem.
        target_y (array of shape (d,)): State of the target subsystem.
        weight (float): connection strength for this connection.

    Returns:
        array shaped like ``target_y`` with the drive values.
    """
    drive = np.mean(source_y) * weight
    return np.ones_like(target_y) * drive
This is a fallback default coupling function that should usually be
replaced with your own.
This example coupling function takes the mean of all variables of the
source subsystem and uses that value weighted by the connection
strength to drive all variables of the target subsystem.
Arguments:
source_y (array of shape (d,)): State of the source subsystem.
target_y (array of shape (d,)): State of target subsystem.
weight (float): the connection strength for this connection.
Returns:
input (array of shape (d,)): Values to drive each variable of the
target system. |
def add_virtual_columns_cartesian_velocities_to_polar(self, x="x", y="y", vx="vx", radius_polar=None, vy="vy", vr_out="vr_polar", vazimuth_out="vphi_polar",
                                                      propagate_uncertainties=False,):
    """Convert cartesian velocities to polar (radial / azimuthal) virtual
    columns.

    :param radius_polar: optional expression for the radius; supplying it
        may improve performance.
    :param propagate_uncertainties: also propagate uncertainties to the
        new columns.
    """
    x, y = self._expr(x), self._expr(y)
    vx, vy = self._expr(vx), self._expr(vy)
    if radius_polar is None:
        radius_polar = np.sqrt(x**2 + y**2)
    radius_polar = self._expr(radius_polar)
    # Project (vx, vy) onto the radial and tangential directions.
    self[vr_out] = (x*vx + y*vy) / radius_polar
    self[vazimuth_out] = (x*vy - y*vx) / radius_polar
    if propagate_uncertainties:
        self.propagate_uncertainties([self[vr_out], self[vazimuth_out]])
:param x:
:param y:
:param vx:
:param radius_polar: Optional expression for the radius, may lead to a better performance when given.
:param vy:
:param vr_out:
:param vazimuth_out:
:param propagate_uncertainties: {propagate_uncertainties}
:return: |
def system(session, py):
    """Run the system test suite under the given python version (nox-style
    session).

    NOTE(review): several literals were stripped upstream: the environment
    variable name/default in ``os.environ.get(, )``, the interpreter
    format string, the virtualenv dirname prefix, and the arguments to
    ``session.install(, )``.  The dangling ``*session.posargs`` / ``)``
    lines are also missing their ``session.run(...`` opener.
    """
    if not os.environ.get(, ):
        session.skip()
    session.interpreter = .format(py)
    session.virtualenv_dirname = + py
    session.install(, )
    *session.posargs
    ) | Run the system test suite. |
def formatTime(self, record, datefmt=None):
    """Format the log record's timestamp (UTC-based), appending a
    millisecond suffix on the default path.

    NOTE(review): the default strftime format literal was stripped from
    ``_created.strftime()`` below (commonly '%Y-%m-%d %H:%M:%S'); restore
    it before use.  The nesting of the msecs line under ``else`` is
    inferred from the stripped indentation -- confirm upstream.
    """
    _seconds_fraction = record.created - int(record.created)
    _datetime_utc = time.mktime(time.gmtime(record.created))
    _datetime_utc += _seconds_fraction
    _created = self.converter(_datetime_utc)
    if datefmt:
        time_string = _created.strftime(datefmt)
    else:
        time_string = _created.strftime()
        # Append milliseconds only on the default-format path.
        time_string = "%s,%03d" % (time_string, record.msecs)
    return time_string | Format the log timestamp. |
def quadratic_forms(h1, h2):
    r"""Quadratic forms metric between two histograms (UNDER DEVELOPMENT).

    Computes sqrt((h1-h2)^T A (h1-h2)) with A the euclidean
    quadratic-forms matrix.  Note: for some inputs the quadratic form is
    negative, which math.sqrt cannot handle -- see the module notes.
    """
    # Fixes a stray bare ``r`` expression left over from a mangled raw
    # docstring prefix, which raised NameError at runtime.
    h1, h2 = __prepare_histogram(h1, h2)
    A = __quadratic_forms_matrix_euclidean(h1, h2)
    return math.sqrt((h1 - h2).dot(A.dot(h1 - h2)))
Quadrativ forms metric.
Notes
-----
UNDER DEVELOPMENT
This distance measure shows very strange behaviour. The expression
transpose(h1-h2) * A * (h1-h2) yields egative values that can not be processed by the
square root. Some examples::
h1 h2 transpose(h1-h2) * A * (h1-h2)
[1, 0] to [0.0, 1.0] : -2.0
[1, 0] to [0.5, 0.5] : 0.0
[1, 0] to [0.6666666666666667, 0.3333333333333333] : 0.111111111111
[1, 0] to [0.75, 0.25] : 0.0833333333333
[1, 0] to [0.8, 0.2] : 0.06
[1, 0] to [0.8333333333333334, 0.16666666666666666] : 0.0444444444444
[1, 0] to [0.8571428571428572, 0.14285714285714285] : 0.0340136054422
[1, 0] to [0.875, 0.125] : 0.0267857142857
[1, 0] to [0.8888888888888888, 0.1111111111111111] : 0.0216049382716
[1, 0] to [0.9, 0.1] : 0.0177777777778
[1, 0] to [1, 0]: 0.0
It is clearly undesireable to recieve negative values and even worse to get a value
of zero for other cases than the same histograms. |
def get_all_instances(include_fastboot=False):
    """Create AndroidDevice instances for all attached android devices.

    Args:
        include_fastboot: also include devices in bootloader mode.

    Returns:
        A list of AndroidDevice objects, one per attached device.
    """
    serials = list_adb_devices()
    if include_fastboot:
        serials = serials + list_fastboot_devices()
    return get_instances(serials)
Args:
include_fastboot: Whether to include devices in bootloader mode or not.
Returns:
A list of AndroidDevice objects each representing an android device
attached to the computer. |
def gist(self, id_num):
    """Fetch the gist with the given id number; returns a Gist, or None if
    the response carried no body.

    NOTE(review): the first argument to ``self._build_url(, ...)`` was
    stripped -- presumably the literal 'gists' path segment.
    """
    url = self._build_url(, str(id_num))
    json = self._json(self._get(url), 200)
    return Gist(json, self) if json else None | Gets the gist using the specified id number.
:param int id_num: (required), unique id of the gist
:returns: :class:`Gist <github3.gists.Gist>` |
def _get_segmentation_id(self, netid, segid, source):
return self.seg_drvr.allocate_segmentation_id(netid, seg_id=segid,
source=source) | Allocate segmentation id. |
def ack(self):
    """Acknowledge this message on its channel using its delivery tag.

    Raises AMQPMessageError when the message lacks method metadata (i.e.
    it was not consumed from a channel).
    """
    if not self._method:
        # NOTE(review): the error-message literal was lost upstream.
        raise AMQPMessageError(
        )
    tag = self.delivery_tag
    self._channel.basic.ack(delivery_tag=tag)
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
encountered an error.
:return: |
def read_alignment(out_sam, loci, seqs, out_file):
    """Read which sequences map to which loci from ``out_sam`` and write a
    space-separated summary line per best-scoring hit to ``out_file``.

    Returns ``out_file``.
    """
    hits = defaultdict(list)
    with open(out_file, "w") as out_handle:
        samfile = pysam.Samfile(out_sam, "r")
        for aligned in samfile.fetch():
            if aligned.is_unmapped:
                continue
            # Edit distance from the NM tag ranks competing hits.
            nm = int([t[1] for t in aligned.tags if t[0] == "NM"][0])
            bed = makeBED(aligned)
            if not bed:
                continue
            ref, locus = get_loci(samfile.getrname(int(bed.chr)), loci)
            line = "%s %s %s %s %s %s" % (
                bed.name, bed.name.split("-")[0], locus, ref, bed.start, bed.end)
            hits[bed.name].append((nm, line))
        for hit in hits.values():
            best_nm = hit[0][0]
            # Keep every hit tying the first hit's edit distance.
            for nm_value, line in hit:
                if nm_value == best_nm:
                    print(line, file=out_handle)
    return out_file
return a tab separated file |
def take_screen_shot_to_array(self, screen_id, width, height, bitmap_format):
    """Take a guest screen shot of the requested size and format and
    return it as an array of bytes.

    in screen_id of type int: guest monitor to capture.
    in width / height of type int: desired image size.
    in bitmap_format of type :class:`BitmapFormat`: requested format.
    return screen_data of type str: resulting screen data.
    Raises TypeError when an argument has the wrong type.
    """
    if not isinstance(screen_id, baseinteger):
        raise TypeError("screen_id can only be an instance of type baseinteger")
    if not isinstance(width, baseinteger):
        raise TypeError("width can only be an instance of type baseinteger")
    if not isinstance(height, baseinteger):
        raise TypeError("height can only be an instance of type baseinteger")
    if not isinstance(bitmap_format, BitmapFormat):
        raise TypeError("bitmap_format can only be an instance of type BitmapFormat")
    in_params = [screen_id, width, height, bitmap_format]
    return self._call("takeScreenShotToArray", in_p=in_params)
and returns it as an array of bytes.
in screen_id of type int
The guest monitor to take screenshot from.
in width of type int
Desired image width.
in height of type int
Desired image height.
in bitmap_format of type :class:`BitmapFormat`
The requested format.
return screen_data of type str
Array with resulting screen data. |
def get_scene(self):
    """Return (x, y, hsml, m, kview): particle positions, smoothing
    lengths, the m array and the indices of particles active in the
    scene.  Mostly for internal use."""
    scene = (self._x, self._y, self._hsml, self._m, self._kview)
    return scene
of the particles and the index of the particles that are active in
the scene. In principle this is an internal function and you don't
need this data. |
def fromxml(node):
    """Create a Parameter instance (of any class derived from
    AbstractParameter) from its XML description; ``node`` may be an XML
    string or an lxml element.

    NOTE(review): nearly every attribute-name and sentinel string literal
    in this parser was stripped upstream (the ``==`` comparison targets,
    default values and dict keys); it cannot run until they are restored
    from the original source.  The nesting below is reconstructed from
    the stripped indentation -- confirm upstream.
    """
    if not isinstance(node,ElementTree._Element):
        # Accept a raw XML string as well as a parsed element.
        node = ElementTree.parse(StringIO(node)).getroot()
    if node.tag in globals():
        id =
        paramflag =
        name =
        description =
        kwargs = {}
        error = None
        for attrib, value in node.attrib.items():
            if attrib == :
                id = value
            elif attrib == :
                paramflag = value
            elif attrib == :
                name = value
            elif attrib == :
                description = value
            elif attrib == :
                error = value
            else:
                kwargs[attrib] = value
        if in kwargs and (kwargs[] == or kwargs[] == or kwargs[] == ):
            kwargs[] = []
        for subtag in node:
            if subtag.tag == :
                if not in kwargs: kwargs[] = {}
                kwargs[][subtag.attrib[]] = subtag.text
                if in subtag.attrib and (subtag.attrib[] == or subtag.attrib[] == ):
                    if in kwargs and (kwargs[] == or kwargs[] == or kwargs[] == ):
                        kwargs[].append(subtag.attrib[])
                    else:
                        kwargs[] = subtag.attrib[]
        # The tag name selects the concrete Parameter subclass.
        parameter = globals()[node.tag](id, name, description, **kwargs)
        if error:
            parameter.error = error
        return parameter
    else:
        raise Exception("No such parameter exists: " + node.tag) | Create a Parameter instance (of any class derived from AbstractParameter!) given its XML description. Node can be a string containing XML or an lxml _Element
def cyclic(self):
    """Returns True if any option value in kwargs is a Cycle, otherwise
    False."""
    for option_value in self.kwargs.values():
        if isinstance(option_value, Cycle):
            return True
    return False
def nodePop(ctxt):
    """Pop the top element node from the parser context's node stack."""
    context_obj = None if ctxt is None else ctxt._o
    ret = libxml2mod.nodePop(context_obj)
    if ret is None:
        # NOTE(review): the original raised treeError() with no message.
        raise treeError()
    return xmlNode(_obj=ret)
def json_loads(cls, s, **kwargs):
    """Wrap json.loads for one reason: inject this class's custom decoder
    as the ``cls`` kwarg when the caller did not supply one.

    :rtype: dict

    NOTE(review): the stripped membership-test / subscript literal below
    is presumably 'cls'.
    """
    if not in kwargs:
        kwargs[] = cls.json_decoder
    return json.loads(s, **kwargs) | A rewrap of json.loads done for one reason - to inject a custom `cls` kwarg
:param s:
:param kwargs:
:return:
:rtype: dict |
def _get_nets_radb(self, *args, **kwargs):
    """Deprecated alias for get_nets_radb(); will be removed in a future
    release.

    NOTE(review): the deprecation-message literal passed to ``warn`` was
    stripped upstream.
    """
    from warnings import warn
    warn(
    )
    return self.get_nets_radb(*args, **kwargs) | Deprecated. This will be removed in a future release. |
def stSpectralEntropy(X, n_short_blocks=10):
    """Compute the spectral entropy of frame ``X`` over ``n_short_blocks``
    sub-windows.

    NOTE(review): the ``order=`` literal in the reshape call was stripped
    upstream (the pyAudioAnalysis source uses order='F' -- confirm).
    """
    L = len(X)
    # Total spectral energy of the frame.
    Eol = numpy.sum(X ** 2)
    sub_win_len = int(numpy.floor(L / n_short_blocks))
    if L != sub_win_len * n_short_blocks:
        # Trim so the frame divides evenly into sub-windows.
        X = X[0:sub_win_len * n_short_blocks]
    sub_wins = X.reshape(sub_win_len, n_short_blocks, order=).copy()
    # Normalized sub-window energies, then Shannon entropy over them.
    s = numpy.sum(sub_wins ** 2, axis=0) / (Eol + eps)
    En = -numpy.sum(s*numpy.log2(s + eps))
    return En | Computes the spectral entropy |
def _resolve_by_callback(request, url, urlconf=None):
    """Resolve ``url`` to a view via urlconf and build a Crumb from the
    view's breadcrumb attribute (a callable following the view API, or an
    object with ``__unicode__``).

    NOTE(review): the attribute-name literals in the ``getattr``/``hasattr``
    calls, the deprecation-warning message and the assertion message were
    all stripped upstream; the nesting of the warning block is inferred
    from the stripped indentation -- confirm upstream before restoring.
    """
    try:
        callback, args, kwargs = _resolve_url(url, request, urlconf=urlconf)
    except urlresolvers.Resolver404:
        return None
    bc = getattr(callback, , None)
    if bc is None:
        # Fall back to a legacy attribute name (deprecated).
        bc = getattr(callback, , None)
        if bc is not None:
            import warnings
            warnings.warn(
            )
    if bc is None:
        return None
    if hasattr(bc, ):
        try:
            title = bc(request, *args, **kwargs)
        except http.Http404:
            return None
        assert isinstance(title, basestring), (
            % title)
    else:
        title = unicode(bc)
    return Crumb(url, title) | Finds a view function by urlconf. If the function has attribute
'navigation', it is used as breadcrumb title. Such title can be either a
callable or an object with `__unicode__` attribute. If it is callable, it
must follow the views API (i.e. the only required argument is request
object). It is also expected to return a `unicode` value. |
def correct_bounding_box_list_for_nonzero_origin(bbox_list, full_box_list):
    """Translate each bounding box by the lower-left corner of its full
    (page) box, correcting for a MediaBox whose origin is not (0, 0),
    e.g. when cropping a previously cropped document."""
    corrected = []
    for bbox, full_box in zip(bbox_list, full_box_list):
        dx, dy = full_box[0], full_box[1]
        corrected.append([bbox[0] + dx, bbox[1] + dy,
                          bbox[2] + dx, bbox[3] + dy])
    return corrected
lower-left point in the PDF being at zero. Similarly, Ghostscript reports a
bounding box relative to a zero lower-left point. If the MediaBox (or full
page box) has been shifted, like when cropping a previously cropped
document, then we need to correct the bounding box by an additive
translation on all the points. |
def config_to_string(config):
    """Render the nested config mapping as readable INI-style text.

    Args:
        config: mapping of section name -> {option: value} (a nested
            defaultdict in practice).

    Returns:
        str: human-readable dump of the config contents.
    """
    lines = []
    for section_name, options in config.items():
        lines.append("[{}]".format(section_name))
        for option_name, option_value in options.items():
            lines.append("{} = {}".format(option_name, option_value))
    return "\n".join(lines)
Args:
config (defaultdict(defaultdict)): The configuration information.
Returns:
str: A human-readable output string detailing the contents of the config. |
def zthread_fork(ctx, func, *args, **kwargs):
    """Create an attached thread: the thread gets ``ctx`` and the ``b`` end
    of a PAIR pipe; the ``a`` end is returned to the caller.  The thread
    must monitor its pipe and exit if it becomes unreadable.

    BUGFIX: the original configured RCVTIMEO on socket ``a`` twice and
    never on ``b``; the duplicated line clearly belonged to ``b``.
    """
    def _configure(sock):
        # Identical option set for both ends of the pipe.
        sock.setsockopt(zmq.LINGER, 0)
        sock.setsockopt(zmq.RCVHWM, 100)
        sock.setsockopt(zmq.SNDHWM, 100)
        sock.setsockopt(zmq.SNDTIMEO, 5000)
        sock.setsockopt(zmq.RCVTIMEO, 5000)

    a = ctx.socket(zmq.PAIR)
    _configure(a)
    b = ctx.socket(zmq.PAIR)
    _configure(b)
    # Random in-process endpoint to wire the two ends together.
    iface = "inproc://%s" % binascii.hexlify(os.urandom(8))
    a.bind(iface)
    b.connect(iface)
    thread = threading.Thread(target=func, args=((ctx, b) + args), kwargs=kwargs)
    thread.daemon = False
    thread.start()
    return a
pipe back to its parent. It must monitor its pipe, and exit if the
pipe becomes unreadable. Returns pipe, or NULL if there was an error. |
def set_header(self, header):
    """Set the header of the object and prepend its node to the root.

    @type header: L{CHeader}
    @param header: the header object
    """
    self.header = header
    header_node = header.get_node()
    self.root.insert(0, header_node)
@type header: L{CHeader}
@param header: the header object |
def requiv_to_pot_contact(requiv, q, sma, compno=1):
    """Convert an equivalent radius to a contact-binary potential.

    :param requiv: user-provided equivalent radius
    :param q: mass ratio
    :param sma: semi-major axis (d = sma: circular orbits are assumed for
        contacts)
    :param compno: 1 for primary, 2 for secondary
    :return: the potential
    """
    logger.debug("requiv_to_pot_contact(requiv={}, q={}, sma={}, compno={})".format(requiv, q, sma, compno))
    # Work in units of the semi-major axis.
    scaled_requiv = requiv / sma
    equiv_volume = 4. / 3 * np.pi * scaled_requiv ** 3
    d = 1.
    logger.debug("libphoebe.roche_contact_Omega_at_partial_vol(vol={}, phi=pi/2, q={}, d={}, choice={})".format(equiv_volume, q, d, compno - 1))
    return libphoebe.roche_contact_Omega_at_partial_vol(equiv_volume, np.pi / 2, q, d, choice=compno - 1)
:param q: mass ratio
:param sma: semi-major axis (d = sma because we explicitly assume circular orbits for contacts)
:param compno: 1 for primary, 2 for secondary
:return: potential and fillout factor |
def check_keepalive(self):
    """Send a keepalive/PING if the connection has been idle for at least
    ``keep_alive`` seconds; close the socket when not connected."""
    if self.sock == NC.INVALID_SOCKET:
        return
    if time.time() - self.last_msg_out < self.keep_alive:
        return
    if self.state == NC.CS_CONNECTED:
        self.send_pingreq()
    else:
        self.socket_close()
def query_bypass(self, query, raw_output=True):
    """Execute ``query`` directly through pymongo, skipping field checks
    and validation.

    :param raw_output: when True (default) skip the ORM layer and return
        the raw cursor; otherwise store the query and return self.
    """
    if not isinstance(query, dict):
        raise BadQueryException()
    self.__query = query
    if not raw_output:
        return self
    self._raw_output = True
    return self.__get_query_result().cursor
:param raw_output: Skip OmMongo ORM layer (default: True) |
def _load(self, scale=1.0):
    """Load the SLSTR relative spectral responses from the requested band
    netCDF file into ``self.rsr``.

    NOTE(review): the Dataset open-mode literal, the two variable-name
    subscripts and the result dict keys were stripped upstream
    (presumably 'r' plus wavelength/response names); restore before use.
    """
    LOG.debug("File: %s", str(self.requested_band_filename))
    ncf = Dataset(self.requested_band_filename, )
    wvl = ncf.variables[][:] * scale
    resp = ncf.variables[][:]
    self.rsr = {: wvl, : resp} | Load the SLSTR relative spectral responses |
def proba2onehot(proba: [list, np.ndarray], confident_threshold: float, classes: [list, np.ndarray]) -> np.ndarray:
    """Convert vectors of class probabilities to one-hot representations,
    thresholding membership at ``confident_threshold``.

    Args:
        proba: samples, each a vector of per-class probabilities
        confident_threshold: probability boundary for class membership
        classes: array of class names

    Returns:
        2d array of one-hot rows, one per sample.
    """
    labels = proba2labels(proba, confident_threshold, classes)
    return labels2onehot(labels, classes)
Args:
proba: samples where each sample is a vector of probabilities to belong with given classes
confident_threshold: boundary of probability to belong with a class
classes: array of classes' names
Returns:
2d array with one-hot representation of given samples |
def update(self, ell, k):
    """Update the Beta posterior for stratum ``k`` after sampling binary
    label ``ell`` (0 or 1), then refresh the theta estimate (and its
    variance when ``store_variance`` is set)."""
    self.alpha_[k] = self.alpha_[k] + ell
    self.beta_[k] = self.beta_[k] + (1 - ell)
    self._calc_theta()
    if self.store_variance:
        self._calc_var_theta()
Parameters
----------
ell : int
sampled label: 0 or 1
k : int
index of stratum where label was sampled |
def get_metric_group_definitions(self):
    """Return the faked metric group definitions for this context object.

    If a metric-groups property was specified for this context, only the
    manager's definitions named there are returned; otherwise all of the
    manager's definitions are, in the order they were added.

    NOTE(review): the stripped ``self.properties.get(, None)`` key is
    presumably the 'metric-groups' literal mentioned in the docs.
    """
    group_names = self.properties.get(, None)
    if not group_names:
        group_names = self.manager.get_metric_group_definition_names()
    mg_defs = []
    for group_name in group_names:
        try:
            mg_def = self.manager.get_metric_group_definition(group_name)
            mg_defs.append(mg_def)
        except ValueError:
            # Silently skip names the manager does not know.
            pass
    return mg_defs | Get the faked metric group definitions for this context object
that are to be returned from its create operation.
If a 'metric-groups' property had been specified for this context,
only those faked metric group definitions of its manager object that
are in that list, are included in the result. Otherwise, all metric
group definitions of its manager are included in the result.
Returns:
iterable of :class:~zhmcclient.FakedMetricGroupDefinition`: The faked
metric group definitions, in the order they had been added. |
def restore_instances(self, instances):
    """Restore a set of instances into the CLIPS data base.

    The Python equivalent of the CLIPS restore-instances command.

    Instances can be passed as a set of strings or as a file.
    """
    encoded = instances.encode()
    # Dispatch on whether the argument names an existing file or is the
    # instance data itself.
    if os.path.exists(encoded):
        outcome = lib.EnvRestoreInstances(self._env, encoded)
    else:
        outcome = lib.EnvRestoreInstancesFromString(self._env, encoded, -1)
    if outcome == -1:
        raise CLIPSError(self._env)
    return outcome
def sanitize_args(cmd: List[str]) -> List[str]:
    # Return a copy of ``cmd`` with any value that follows a password-like
    # flag replaced by a placeholder.
    # NOTE(review): the marker string literals were stripped by the export
    # (bare ``in cmdstr`` tests), so this block is not runnable as-is.
    sanitized = []
    for idx, fieldname in enumerate(cmd):
        def _is_password(cmdstr):
            # True when the given token looks like a password flag.
            return in cmdstr\
                or in cmdstr.lower()
        if idx > 0 and _is_password(cmd[idx-1]):
            # Replace the secret value itself, not the flag.
            sanitized.append()
        else:
            sanitized.append(fieldname)
    return sanitized | Filter the command so that it no longer contains passwords
def get_spectrum(self, nr_id=None, abmn=None, plot_filename=None):
    # Look up one spectrum (and its reciprocal, if any) either by
    # normal-reciprocal id or by electrode configuration.
    # NOTE(review): query strings and column-name literals were stripped by
    # the export (empty ``query()``/``[]`` arguments), so this block is not
    # runnable as-is.
    assert nr_id is None or abmn is None
    if abmn is not None:
        # Resolve the electrode configuration to a normal-reciprocal id.
        subdata = self.data.query(
            .format(*abmn)
        ).sort_values()
        if subdata.shape[0] == 0:
            return None, None
        nr_id = subdata[].iloc[0]
    subdata_nor = self.data.query(
        .format(nr_id)
    ).sort_values()
    subdata_rec = self.data.query(
        .format(nr_id)
    ).sort_values()
    spectrum_nor = None
    spectrum_rec = None
    if subdata_nor.shape[0] > 0:
        spectrum_nor = eis_plot.sip_response(
            frequencies=subdata_nor[].values,
            rmag=subdata_nor[],
            rpha=subdata_nor[],
        )
    if subdata_rec.shape[0] > 0:
        spectrum_rec = eis_plot.sip_response(
            frequencies=subdata_rec[].values,
            rmag=subdata_rec[],
            rpha=subdata_rec[],
        )
    if plot_filename is not None:
        # Only plot when a normal spectrum exists; the reciprocal is overlaid.
        if spectrum_nor is not None:
            fig = spectrum_nor.plot(
                plot_filename,
                reciprocal=spectrum_rec,
                return_fig=True,
                title=.format(
                    *subdata_nor[[, , , ]].values[0, :]
                )
            )
            return spectrum_nor, spectrum_rec, fig
    return spectrum_nor, spectrum_rec | Return a spectrum and its reciprocal counter part, if present in the
dataset. Optimally, refer to the spectrum by its normal-reciprocal id.
Returns
-------
spectrum_nor : :py:class:`reda.eis.plots.sip_response`
Normal spectrum. None if no normal spectrum is available
spectrum_rec : :py:class:`reda.eis.plots.sip_response` or None
Reciprocal spectrum. None if no reciprocal spectrum is available
fig : :py:class:`matplotlib.Figure.Figure` , optional
Figure object (only if plot_filename is set) |
def create_project(self, project_path):
    # Copy the project template into ``project_path`` and fill in the
    # version and a freshly generated secret key.
    # NOTE(review): the target-filename and key literals were stripped by
    # the export, so this block is not runnable as-is.
    shutil.copytree(self.project_path, project_path)
    self.update_file(project_path, , {
        : trionyx.__version__
    })
    self.update_file(project_path, , {
        : utils.random_string(32)
    }) | Create Trionyx project in given path
:param str path: path to create project in.
:raises FileExistsError: |
def query_one(self, *args, **kwargs):
    """Return first document from :meth:`query`, with same parameters.

    Returns ``None`` when the query yields no documents.
    """
    return next(iter(self.query(*args, **kwargs)), None)
def recalculate_current_specimen_interpreatations(self):
    # Recompute the PCA parameters of every fit on the current specimen for
    # each coordinate system that has data.
    # NOTE(review): coordinate-system key literals were stripped by the
    # export (empty ``[]``/``get()`` arguments), so this block is not
    # runnable as-is.
    self.initialize_CART_rot(self.s)
    if str(self.s) in self.pmag_results_data[]:
        for fit in self.pmag_results_data[][self.s]:
            if fit.get() and in fit.get():
                fit.put(self.s, , self.get_PCA_parameters(
                    self.s, fit, fit.tmin, fit.tmax, , fit.get()[]))
            if len(self.Data[self.s][]) > 0 and fit.get() and in fit.get():
                fit.put(self.s, , self.get_PCA_parameters(
                    self.s, fit, fit.tmin, fit.tmax, , fit.get()[]))
            if len(self.Data[self.s][]) > 0 and fit.get() and in fit.get():
                fit.put(self.s, , self.get_PCA_parameters(self.s, fit, fit.tmin,
                        fit.tmax, , fit.get()[])) | recalculates all interpretations on all specimens for all coordinate
systems. Does not display recalculated data. |
def sync_entities_watching(instance):
    """Syncs entities watching changes of a model instance."""
    watchers = entity_registry.entity_watching[instance.__class__]
    for _entity_model, getter in watchers:
        # Materialize the watched objects; skip the sync when there are none.
        watched = list(getter(instance))
        if watched:
            sync_entities(*watched)
def _set_ipv6_interface(self, v, load=False):
    # Generated pyangbind setter for the YANG list
    # /rbridge_id/.../ipv6_track/ipv6_interface.
    # NOTE(review): many keyword-argument string literals were stripped by
    # the export (``is_container=``, bare ``u:`` keys), so this block is not
    # runnable as-is.
    if hasattr(v, "_utype"):
        # Normalize unions to their underlying type before validation.
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=YANGListType("ipv6_interface_type ipv6_interface_name",ipv6_interface.ipv6_interface, yang_name="ipv6-interface", rest_name="interface", parent=self, is_container=, user_ordered=False, path_helper=self._path_helper, yang_keys=, extensions={u: {u: u, u: None, u: None, u: u, u: None, u: None, u: None}}), is_container=, yang_name="ipv6-interface", rest_name="interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u: {u: u, u: None, u: None, u: u, u: None, u: None, u: None}}, namespace=, defining_module=, yang_type=, is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            : ,
            : "list",
            : ,
        })
    self.__ipv6_interface = t
    if hasattr(self, ):
        self._set() | Setter method for ipv6_interface, mapped from YANG variable /rbridge_id/interface/ve/ipv6/ipv6_local_anycast_gateway/ipv6_track/ipv6_interface (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ipv6_interface is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ipv6_interface() directly. |
def stop():
    # Stop Riak via the riak admin command; returns a result dict with a
    # success flag and the command output.
    # NOTE(review): the command and dict-key literals were stripped by the
    # export (stray ``*`` line, empty keys), so this block is not runnable
    # as-is.
    *
    ret = {: , : False}
    cmd = __execute_cmd(, )
    if cmd[] != 0:
        # Non-zero return code: surface the error output, success stays False.
        ret[] = cmd[]
    else:
        ret[] = cmd[]
        ret[] = True
    return ret | Stop Riak
.. versionchanged:: 2015.8.0
CLI Example:
.. code-block:: bash
salt '*' riak.stop |
def user_admin_view(model, login_view="Login", template_dir=None):
    # Factory building a ``UserAdmin`` view class bound to the given user
    # model and login view.
    # NOTE(review): this block was mangled by the export — several handler
    # bodies were truncated and merged (unreachable code after early
    # ``return``s, an ``except`` without a ``try``), so it is not runnable
    # as-is; reconstruct from the original project before relying on it.
    Pylot.context_(COMPONENT_USER_ADMIN=True)
    User = model.UserStruct.User
    LoginView = login_view
    if not template_dir:
        template_dir = "Pylot/UserAdmin"
    template_page = template_dir + "/%s.html"
    class UserAdmin(object):
        route_base = "user-admin"
        @classmethod
        def _options(cls):
            # Select-box options for the admin search form.
            return {
                "user_role": [("Rol", "Role")],
                "user_status": [("Sta", "Stat")]
            }
        @classmethod
        def search_handler(cls, per_page=20):
            # Paginated user search filtered by name/email query params.
            page = request.args.get("page", 1)
            show_deleted = True if request.args.get("show-deleted") else False
            name = request.args.get("name")
            email = request.args.get("email")
            users = User.all(include_deleted=show_deleted)
            users = users.order_by(User.name.asc())
            if name:
                users = users.filter(User.name.contains(name))
            if email:
                users = users.filter(User.email.contains(email))
            users = users.paginate(page=page, per_page=per_page)
            cls.__(user_admin=dict(
                options=cls._options(),
                users=users,
                search_query={
                    "excluded_deleted": request.args.get("show-deleted"),
                    "role": request.args.get("role"),
                    "status": request.args.get("status"),
                    "name": request.args.get("name"),
                    "email": request.args.get("email")
                }
            ))
            return users
        @classmethod
        def get_user_handler(cls, id):
            # NOTE(review): everything after the first ``return`` below is
            # unreachable — remnants of a separate POST handler fused in by
            # the export.
            user = User.get(id, include_deleted=True)
            if not user:
                abort(404, "User doesnt change user info. Invalid user")
            return redirect(url_for("UserAdmin:index"))
            delete_entry = True if request.form.get("delete-entry") else False
            if delete_entry:
                user.update(status=user.STATUS_SUSPENDED)
                user.delete()
                self.success_("User DELETED Successfully!")
                return redirect(url_for("UserAdmin:get", id=id))
            email = request.form.get("email")
            password = request.form.get("password")
            password2 = request.form.get("password2")
            name = request.form.get("name")
            role = request.form.get("user_role")
            status = request.form.get("user_status")
            upd = {}
            if email and email != user.email:
                LoginView.change_login_handler(user_context=user)
            if password and password2:
                LoginView.change_password_handler(user_context=user)
            if name != user.name:
                upd.update({"name": name})
            if role and role != user.role:
                upd.update({"role": role})
            if status and status != user.status:
                if user.is_deleted and status == user.STATUS_ACTIVE:
                    user.delete(False)
                upd.update({"status": status})
            if upd:
                user.update(**upd)
            self.success_("Usert reset password. Invalid user")
            return redirect(url_for("User:index"))
            password = LoginView.reset_password_handler(user_context=user)
            self.success_("User's password reset successfully!")
        except Exception as ex:
            self.error_("Error: %s " % ex.message)
        return redirect(url_for("UserAdmin:get", id=id))
        @route("create", methods=["POST"])
        def create(self):
            # Create a user through the login view's signup handler and
            # assign the requested role (defaults to USER).
            try:
                account = LoginView.signup_handler()
                account.set_role(request.form.get("role", "USER"))
                self.success_("User created successfully!")
                return redirect(url_for("UserAdmin:get", id=account.id))
            except Exception as ex:
                self.error_("Error: %s" % ex.message)
                return redirect(url_for("UserAdmin:index"))
    return UserAdmin | :param UserStruct: The User model structure containing other classes
:param login_view: The login view interface
:param template_dir: The directory containing the view pages
:return: UserAdmin
Doc:
User Admin is a view that allows you to admin users.
You must create a Pylot view called `UserAdmin` to activate it
UserAdmin = app.views.user_admin(User, Login)
class UserAdmin(UserAdmin, Pylot):
pass
The user admin create some global available vars under '__.user_admin'
It's also best to add some security access on it
class UserAdmin(UserAdmin, Pylot):
decorators = [login_required]
You can customize the user info page (::get) by creating the directory in your
templates dir, and include the get.html inside of it
ie:
>/admin/templates/UserAdmin/get.html
<div>
{% include "Pylot/UserAdmin/get.html" %}
<div>
<div>Hello {{ __.user_admin.user.name }}<div> |
def minimum_enclosing_circle(labels, indexes = None,
                             hull_and_point_count = None):
    # Compute the minimum enclosing circle (center + radius) per labelled
    # object, from the objects' convex hulls.
    # NOTE(review): the iterative part of Chrystal's algorithm was cut out
    # by the export — ``v_indexes``, ``min_position``, ``angle_vs0s1``,
    # ``angle_vs1s0`` and ``within_label_indexes`` are referenced below but
    # never defined here, so the ``point_count > 2`` branch cannot run.
    if indexes is None:
        if hull_and_point_count is not None:
            indexes = np.array(np.unique(hull_and_point_count[0][:,0]),dtype=np.int32)
        else:
            max_label = np.max(labels)
            indexes = np.array(list(range(1,max_label+1)),dtype=np.int32)
    else:
        indexes = np.array(indexes,dtype=np.int32)
    if indexes.shape[0] == 0:
        # Nothing to process: empty centers and radii.
        return np.zeros((0,2)),np.zeros((0,))
    if hull_and_point_count is None:
        hull, point_count = convex_hull(labels, indexes)
    else:
        hull, point_count = hull_and_point_count
    centers = np.zeros((len(indexes),2))
    radii = np.zeros((len(indexes),))
    # point_index[i] = offset of object i's first hull point in ``hull``.
    point_index = np.zeros((indexes.shape[0],),int)
    point_index[1:] = np.cumsum(point_count[:-1])
    # anti_indexes maps a label value back to its position in ``indexes``.
    anti_indexes=np.zeros((np.max(indexes)+1,),int)
    anti_indexes[indexes] = list(range(indexes.shape[0]))
    anti_indexes_per_point = anti_indexes[hull[:,0]]
    # Degenerate cases: no points -> NaN center; 1 point -> that point,
    # radius 0; 2 points -> midpoint, radius = half the separation.
    centers[point_count==0,:]= np.NaN
    if np.all(point_count == 0):
        return centers,radii
    centers[point_count==1,:]=hull[point_index[point_count==1],1:]
    radii[point_count < 2]=0
    centers[point_count==2,:]=(hull[point_index[point_count==2],1:]+
                               hull[point_index[point_count==2]+1,1:])/2
    distance = centers[point_count==2,:] - hull[point_index[point_count==2],1:]
    radii[point_count==2]=np.sqrt(distance[:,0]**2+distance[:,1]**2)
    keep_me = point_count > 2
    s0_idx = point_index.copy()
    s1_idx = s0_idx+1
    if np.any(keep_me):
        labels_to_consider = indexes[keep_me]
        indexes_to_consider = anti_indexes[labels_to_consider]
        v_obtuse_indexes = v_indexes[min_position[keep_me]]
        angle_vs0s1_to_consider = angle_vs0s1[min_position[keep_me]]
        angle_vs1s0_to_consider = angle_vs1s0[min_position[keep_me]]
        s0_is_obtuse = angle_vs0s1_to_consider > np.pi/2
        if np.any(s0_is_obtuse):
            # Swap the obtuse vertex in for s0 and recycle s0's slot.
            v_obtuse_s0_indexes = v_obtuse_indexes[s0_is_obtuse]
            obtuse_s0_idx = s0_idx[indexes_to_consider[s0_is_obtuse]]
            within_label_indexes[obtuse_s0_idx] = \
                within_label_indexes[v_obtuse_s0_indexes]
            s0_idx[indexes_to_consider[s0_is_obtuse]] = v_obtuse_s0_indexes
            within_label_indexes[v_obtuse_s0_indexes] = 0
        s1_is_obtuse = np.logical_not(s0_is_obtuse)
        if np.any(s1_is_obtuse):
            # Mirror image of the s0 case for the other support point.
            v_obtuse_s1_indexes = v_obtuse_indexes[s1_is_obtuse]
            obtuse_s1_idx = s1_idx[indexes_to_consider[s1_is_obtuse]]
            within_label_indexes[obtuse_s1_idx] = \
                within_label_indexes[v_obtuse_s1_indexes]
            s1_idx[indexes_to_consider[s1_is_obtuse]] = v_obtuse_s1_indexes
            within_label_indexes[v_obtuse_s1_indexes] = 1
    return centers, radii | Find the location of the minimum enclosing circle and its radius
labels - a labels matrix
indexes - an array giving the label indexes to be processed
hull_and_point_count - convex_hull output if already done. None = calculate
returns an Nx3 array organized as i,j of the center and radius
Algorithm from
http://www.personal.kent.edu/~rmuhamma/Compgeometry/MyCG/CG-Applets/Center/centercli.htm
who calls it the Applet's Algorithm and ascribes it to Pr. Chrystal
The original citation is Professor Chrystal, "On the problem to construct
the minimum circle enclosing n given points in a plane", Proceedings of
the Edinburgh Mathematical Society, vol 3, 1884 |
def unplug(self):
    # Remove this actor's bound methods from the global callback registry.
    # NOTE(review): the line below is the truncated tail of the original
    # docstring ("Remove the actor's methods from the callback registry.")
    # left behind by the export; it is a SyntaxError as written.
    s methods from the callback registry.'
    if not self.__plugged:
        # Idempotent: already unplugged.
        return
    members = set([method for _, method
                   in inspect.getmembers(self, predicate=inspect.ismethod)])
    for message in global_callbacks:
        # Set subtraction drops only this actor's methods, leaving others.
        global_callbacks[message] -= members
    self.__plugged = False | Remove the actor's methods from the callback registry.
def read_analogy_file(filename):
    # Generator yielding (section, words) pairs from an analogy test file.
    # NOTE(review): the file-mode and marker literals were stripped by the
    # export (empty ``open(filename, )``/``startswith()`` arguments), so
    # this block is not runnable as-is.
    section = None
    with open(filename, ) as questions_file:
        for line in questions_file:
            if line.startswith():
                # Section header line: remember the section name.
                section = line[2:].replace(, )
                continue
            else:
                words = line.replace(, ).split()
                yield section, words | Read the analogy task test set from a file.
def dump_t_coords(dataset_dir, data_dir, dataset, root=None, compress=True):
    # Dump the dataset's texture coordinates into the vtkjs structure rooted
    # at ``root``.
    # NOTE(review): the key literals were stripped by the export (empty
    # ``root[][]`` subscripts), so this block is not runnable as-is.
    if root is None:
        root = {}
    tcoords = dataset.GetPointData().GetTCoords()
    if tcoords:
        dumped_array = dump_data_array(dataset_dir, data_dir, tcoords, {}, compress)
        root[][] = len(root[][])
        root[][].append({: dumped_array}) | dump vtkjs texture coordinates
def versions(self):
    """Return all version changes.

    Collapses consecutive duplicate versions found in ``self.restarts``,
    preserving the order in which each run of versions first appears.
    """
    changes = []
    for version, _ in self.restarts:
        if not changes or changes[-1] != version:
            changes.append(version)
    return changes
def get_ties(G):
    # Find groups of targets that share a dependency ("ties"): running one
    # would mask the other's need to run.
    # NOTE(review): the node-attribute key literal was stripped by the
    # export (bare ``in node[1]`` tests), so this block is not runnable
    # as-is.
    ties = []
    dep_dict = {}
    for node in G.nodes(data=True):
        if in node[1]:
            for item in node[1][]:
                # Map each dependency to every target that lists it.
                if item not in dep_dict:
                    dep_dict[item] = []
                dep_dict[item].append(node[0])
    for item in dep_dict:
        # A dependency claimed by more than one distinct target is a tie.
        if len(list(set(dep_dict[item]))) > 1:
            ties.append(list(set(dep_dict[item])))
    return ties | If you specify a target that shares a dependency with another target,
both targets need to be updated. This is because running one will resolve
the sha mismatch and sake will think that the other one doesn't have to
run. This is called a "tie". This function will find such ties. |
def join(self, _id):
    """Join a room, creating the room's member set on first join.

    :param _id: room identifier; the key is namespaced per handler class
        via ``self._gcls()``.
    """
    key = self._gcls() + _id
    # BUG FIX: ``dict.has_key`` was removed in Python 3; ``setdefault``
    # creates the member set on first access and adds this handler to it.
    SockJSRoomHandler._room.setdefault(key, set()).add(self)
def addDEX(self, filename, data, dx=None):
    """Add a DEX file to the Session and run analysis.

    :param filename: the (file)name of the DEX file
    :param data: binary data of the dex file
    :param dx: an existing Analysis Object (optional)
    :return: A tuple of SHA256 Hash, DalvikVMFormat Object and Analysis object
    """
    digest = hashlib.sha256(data).hexdigest()
    log.debug("add DEX:%s" % digest)
    log.debug("Parsing format ...")
    d = DalvikVMFormat(data)
    log.debug("added DEX:%s" % digest)
    # Book-keeping: remember which file this digest came from.
    self.analyzed_files[filename].append(digest)
    self.analyzed_digest[digest] = filename
    self.analyzed_dex[digest] = d
    if dx is None:
        dx = Analysis()
    dx.add(d)
    dx.create_xref()
    # BUG FIX: the original reused ``d`` as the loop variable here, so the
    # returned "DalvikVMFormat Object" was silently replaced by the last VM
    # in ``dx.vms``; use a distinct name to keep the parsed object intact.
    for vm in dx.vms:
        vm.set_decompiler(DecompilerDAD(vm, dx))
        vm.set_vmanalysis(dx)
    self.analyzed_vms[digest] = dx
    if self.export_ipython:
        log.debug("Exporting in ipython")
        d.create_python_export()
    return digest, d, dx
def add(setname=None, entry=None, family=, **kwargs):
    # Append an entry to an existing ipset set, validating that the set's
    # type supports the requested options before shelling out to ipset.
    # NOTE(review): almost every string literal in this block was stripped
    # by the export (empty defaults, bare ``in kwargs`` tests, empty
    # ``.format()`` patterns), so it is not runnable as-is.
    **
    if not setname:
        return
    if not entry:
        return
    setinfo = _find_set_info(setname)
    if not setinfo:
        # Unknown set: report rather than raise.
        return .format(setname)
    settype = setinfo[]
    cmd = .format(entry)
    if in kwargs:
        if not in setinfo[]:
            return .format(setname)
    if in kwargs or in kwargs:
        if not in setinfo[]:
            return .format(setname)
    if in kwargs:
        if not in setinfo[]:
            return .format(setname)
        if not in entry:
            cmd = .format(cmd, kwargs[])
    if set([, , ]) & set(kwargs):
        if not in setinfo[]:
            return .format(setname)
    for item in _ADD_OPTIONS[settype]:
        # Append each supported per-type option present in kwargs.
        if item in kwargs:
            cmd = .format(cmd, item, kwargs[item])
    current_members = _find_set_members(setname)
    if cmd in current_members:
        # Entry already present: nothing to do.
        return .format(cmd, setname)
    cmd = .format(_ipset_cmd(), setname, cmd)
    out = __salt__[](cmd, python_shell=False)
    if not out:
        return
    return .format(out) | Append an entry to the specified set.
CLI Example:
.. code-block:: bash
salt '*' ipset.add setname 192.168.1.26
salt '*' ipset.add setname 192.168.0.3,AA:BB:CC:DD:EE:FF |
def _create_binary_trigger(trigger):
    # Encode a trigger as an 8-bit descriptor: comparison op code in the
    # upper bits, source selector (value vs. count) in bit 0.
    # NOTE(review): the export stripped the value of key 5 and the
    # ``op_codes[]`` lookup literal, so this block is not runnable as-is.
    ops = {
        0: ">",
        1: "<",
        2: ">=",
        3: "<=",
        4: "==",
        5:
    }
    # Invert the table so comparison strings map back to op codes.
    op_codes = {y: x for x, y in ops.items()}
    source = 0
    if isinstance(trigger, TrueTrigger):
        op_code = op_codes[]
    elif isinstance(trigger, FalseTrigger):
        # A never-firing trigger has no binary representation.
        raise ArgumentError("Cannot express a never trigger in binary descriptor", trigger=trigger)
    else:
        op_code = op_codes[trigger.comp_string]
        if trigger.use_count:
            source = 1
    return (op_code << 1) | source | Create an 8-bit binary trigger from an InputTrigger, TrueTrigger, FalseTrigger.
def build_dummy_request(newsitem):
    # Build a WSGIRequest that approximates how this news item would be
    # served, for previews/moderation outside normal routing.
    # NOTE(review): the WSGI environ key literals and fallbacks were
    # stripped by the export (empty dict keys/values), so this block is not
    # runnable as-is.
    url = newsitem.full_url
    if url:
        url_info = urlparse(url)
        hostname = url_info.hostname
        path = url_info.path
        port = url_info.port or 80
    else:
        # No URL on the item: fall back to the first allowed host.
        try:
            hostname = settings.ALLOWED_HOSTS[0]
        except IndexError:
            hostname =
        path =
        port = 80
    request = WSGIRequest({
        : ,
        : path,
        : hostname,
        : port,
        : hostname,
        : StringIO(),
    })
    handler = BaseHandler()
    handler.load_middleware()
    # Older Django exposes _request_middleware; newer versions run the
    # middleware chain through get_response instead.
    if hasattr(handler, ):
        for middleware_method in handler._request_middleware:
            middleware_method(request)
    else:
        handler.get_response(request)
    return request | Construct a HttpRequest object that is, as far as possible,
representative of ones that would receive this page as a response. Used
for previewing / moderation and any other place where we want to
display a view of this page in the admin interface without going
through the regular page routing logic. |
def tzinfo_eq(tzinfo1, tzinfo2, startYear = 2000, endYear=2020):
    # Heuristic timezone equality: equal offsets at a reference date and
    # equal DST transitions for every year in [startYear, endYear).
    # NOTE(review): the transition-direction literals were stripped by the
    # export (``for transitionTo in , :``), so this block is not runnable
    # as-is; presumably 'standard'/'daylight' — confirm upstream.
    if tzinfo1 == tzinfo2:
        return True
    elif tzinfo1 is None or tzinfo2 is None:
        return False
    def dt_test(dt):
        # Two zones agree at ``dt`` when their UTC offsets match; a None
        # transition trivially passes.
        if dt is None:
            return True
        return tzinfo1.utcoffset(dt) == tzinfo2.utcoffset(dt)
    if not dt_test(datetime.datetime(startYear, 1, 1)):
        return False
    for year in range(startYear, endYear):
        for transitionTo in , :
            t1=getTransition(transitionTo, year, tzinfo1)
            t2=getTransition(transitionTo, year, tzinfo2)
            if t1 != t2 or not dt_test(t1):
                return False
    return True | Compare offsets and DST transitions from startYear to endYear.
def resolve_metric_as_tuple(metric):
    """Resolve metric key to a given target.

    :param metric: the metric name; a dotted name of the form
        ``"<namespace>.<metric>"`` is reduced to its metric part first.
    :type metric: ``str``
    :rtype: :class:`Metric`
    :raises ValueError: if the metric name is not found in ``ALL_METRICS``.
    """
    if "." in metric:
        _, metric = metric.split(".")
    matches = [
        (operator, match) for operator, match in ALL_METRICS if match[0] == metric
    ]
    # Simplified: ``not matches`` already covers the empty-list case, so the
    # original's redundant ``len(r) == 0`` test was dropped.
    if not matches:
        raise ValueError(f"Metric {metric} not recognised.")
    return matches[0]
def versions_information(include_salt_cloud=False):
    # Collect salt, dependency-library and system version info into one
    # dict keyed per category.
    # NOTE(review): the three result-dict key literals were stripped by the
    # export, so this block is not runnable as-is.
    salt_info = list(salt_information())
    lib_info = list(dependency_information(include_salt_cloud))
    sys_info = list(system_information())
    return {: dict(salt_info),
            : dict(lib_info),
            : dict(sys_info)} | Report the versions of dependent software.
def from_hsv(cls, h, s, v):
    """Constructs a :class:`Colour` from an HSV tuple."""
    red, green, blue = colorsys.hsv_to_rgb(h, s, v)
    # Scale the unit-interval channels to 0-255 integers for from_rgb.
    return cls.from_rgb(int(red * 255), int(green * 255), int(blue * 255))
def generate_name(length=15, not_in=None):
    """Generates a random string of lowercase letters with the given length.

    Parameters:
        length (int): Length of the string to output.
        not_in (list): Only return a string not in the given iterator.

    Returns:
        str: A new name that's not in the given list.
    """
    # FIX: the empty-string join literal had been stripped (``.join`` was a
    # SyntaxError); restored to ``"".join``.
    value = "".join(random.choice(string.ascii_lowercase) for _ in range(length))
    # Re-roll until the candidate is absent from ``not_in`` (when given).
    while (not_in is not None) and (value in not_in):
        value = "".join(random.choice(string.ascii_lowercase) for _ in range(length))
    return value
def telemetry_client(self, value: BotTelemetryClient) -> None:
    """Sets the telemetry client for logging events."""
    # Substitute a no-op client for None so callers can log unconditionally.
    self._telemetry_client = NullTelemetryClient() if value is None else value
def wrap_callable(cls, uri, methods, callable_obj):
    """Wraps function-based callable_obj into a `Route` instance, else
    proxies a `bottle_neck.handlers.BaseHandler` subclass instance.

    Args:
        uri (str): The uri relative path.
        methods (tuple): A tuple of valid method strings.
        callable_obj (instance): The callable object.

    Returns:
        A route instance.

    Raises:
        RouteError for invalid callable object type.
    """
    # Handler classes are configured in place and proxied back unchanged.
    if isinstance(callable_obj, HandlerMeta):
        callable_obj.base_endpoint = uri
        callable_obj.is_valid = True
        return callable_obj
    # Plain functions get wrapped in a fresh Route instance.
    if isinstance(callable_obj, types.FunctionType):
        return cls(uri=uri, methods=methods, callable_obj=callable_obj)
    raise RouteError("Invalid handler type.")
def modules_directory():
    """Get the core modules directory."""
    here = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(here, "modules")
def resource_path(relative_path=None, expect=None):
    # Resolve a path inside iotile-build's config folder, optionally
    # validating that it is a file or a folder.
    # NOTE(review): the 'file'/'folder' literals and config path components
    # were stripped by the export (empty tuple members/arguments), so this
    # block is not runnable as-is.
    if expect not in (None, , ):
        raise ArgumentError("Invalid expect parameter, must be None, or ",
                            expect=expect)
    this_dir = os.path.dirname(__file__)
    _resource_path = os.path.join(this_dir, , )
    if relative_path is not None:
        # Normalize so '/'-separated inputs work on every OS.
        path = os.path.normpath(relative_path)
        _resource_path = os.path.join(_resource_path, path)
    if expect == and not os.path.isfile(_resource_path):
        raise DataError("Expected resource %s to be a file and it wasnfoldert" % _resource_path)
    return os.path.abspath(_resource_path) | Return the absolute path to a resource in iotile-build.
This method finds the path to the `config` folder inside
iotile-build, appends `relative_path` to it and then
checks to make sure the desired file or directory exists.
You can specify expect=(None, 'file', or 'folder') for
what you expect to find at the given path.
Args:
relative_path (str): The relative_path from the config
folder to the resource in question. This path can
be specified using / characters on all operating
systems since it will be normalized before usage.
If None is passed, the based config folder will
be returned.
expect (str): What the path should resolve to, which is
checked before returning, raising a DataError if
the check fails. You can pass None for no checking,
file for checking `os.path.isfile`, or folder for
checking `os.path.isdir`. Default: None
Returns:
str: The normalized absolute path to the resource. |
def _get_not_annotated(func, annotations=None):
    """Return non-optional parameters that are not annotated.

    Parameters with defaults (positional or keyword-only) are considered
    optional and skipped; the implicit first parameter (``self``/``cls``)
    of classes and bound methods is skipped as well.

    :param func: callable (or class) to inspect.
    :param annotations: optional mapping used instead of ``func``'s own
        annotations when truthy.
    :return: list of unannotated required parameter names, positional
        parameters first, keyword-only parameters after.
    """
    argspec = inspect.getfullargspec(func)
    args = argspec.args
    if argspec.defaults is not None:
        # Positional defaults always cover the trailing parameters.
        args = args[:-len(argspec.defaults)]
    if inspect.isclass(func) or inspect.ismethod(func):
        args = args[1:]
    # BUG FIX: ``kwonlydefaults`` is a dict keyed by name, and defaulted
    # keyword-only parameters need not be the last ones declared (e.g.
    # ``def f(*, a=1, b)``), so slicing from the end could keep a defaulted
    # name and drop a required one.  Filter by name instead.
    kwonly_defaults = argspec.kwonlydefaults or {}
    kwonlyargs = [a for a in argspec.kwonlyargs if a not in kwonly_defaults]
    annotations = annotations or argspec.annotations
    return [arg for arg in args + kwonlyargs if arg not in annotations]
def parse_motion_state(val):
    """Convert motion state byte to seconds.

    The low six bits carry a count; the top two bits select its unit:
    0 = seconds, 1 = minutes, 2 = hours, 3 = days when the count is below
    32, otherwise weeks with the count offset by 32.
    """
    count = val & 0x3F
    scale = (val >> 6) & 0x03
    if scale == 1:
        count *= 60
    elif scale == 2:
        count *= 60 * 60
    elif scale == 3:
        if count < 32:
            count *= 60 * 60 * 24
        else:
            count = (count - 32) * 60 * 60 * 24 * 7
    return count
def old(self):
    # Access the state value(s) at the beginning of the most recently
    # processed time step, as stored on ``fastaccess_old``.
    # NOTE(review): the error-message literal was stripped by the export
    # (bare ``%`` line below), so this block is not runnable as-is.
    value = getattr(self.fastaccess_old, self.name, None)
    if value is None:
        raise RuntimeError(
            % objecttools.elementphrase(self))
    else:
        if self.NDIM:
            # Multi-dimensional sequences are returned as numpy arrays.
            value = numpy.asarray(value)
    return value | Assess to the state value(s) at beginning of the time step, which
has been processed most recently. When using *HydPy* in the
normal manner. But it can be helpful for demonstration and debugging
purposes. |
def parse(self, p_todo):
    # Substitute every placeholder in the preprocessed format list with
    # content from ``p_todo`` and post-process (truncation, right-align).
    # NOTE(review): several string literals were stripped by the export
    # (the ``else`` fallback value, ``re.sub`` patterns), so this block is
    # not runnable as-is.
    parsed_list = []
    repl_trunc = None
    for substr, placeholder, getter in self.format_list:
        repl = getter(p_todo) if getter else
        pattern = MAIN_PATTERN.format(ph=placeholder)
        if placeholder == :
            # Remember the truncation placeholder's replacement for later.
            repl_trunc = repl
        try:
            if repl == :
                # Empty replacement: drop the placeholder entirely.
                substr = re.sub(pattern, , substr)
            else:
                substr = re.sub(pattern, _strip_placeholder_braces, substr)
                substr = re.sub(r.format(ph=placeholder), repl, substr)
        except re.error:
            raise ListFormatError
        parsed_list.append(substr)
    parsed_str = _unescape_percent_sign(.join(parsed_list))
    parsed_str = _remove_redundant_spaces(parsed_str)
    # Truncate single-line output that would exceed the terminal width.
    if self.one_line and len(escape_ansi(parsed_str)) >= _columns():
        parsed_str = _truncate(parsed_str, repl_trunc)
    if re.search(, parsed_str):
        parsed_str = _right_align(parsed_str)
    return parsed_str.rstrip() | Returns fully parsed string from 'format_string' attribute with all
placeholders properly substituted by content obtained from p_todo.
It uses preprocessed form of 'format_string' (result of
ListFormatParser._preprocess_format) stored in 'format_list'
attribute. |
def session_info(consul_url=None, token=None, session=None, **kwargs):
    # Query Consul for information about a session id.
    # NOTE(review): the export stripped the URL/key literals (stray UUID
    # line below, empty subscripts), so this block is not runnable as-is.
    *c1c4d223-91cb-3d1f-1ee8-f2af9e7b6716
    ret = {}
    if not consul_url:
        consul_url = _get_config()
        if not consul_url:
            # No server configured: return an error result rather than raise.
            log.error()
            ret[] =
            ret[] = False
            return ret
    if not session:
        raise SaltInvocationError()
    query_params = {}
    if in kwargs:
        # Optional datacenter override.
        query_params[] = kwargs[]
    function = .format(session)
    ret = _query(consul_url=consul_url,
                 function=function,
                 token=token,
                 query_params=query_params)
    return ret | Information about a session
:param consul_url: The Consul server URL.
:param session: The ID of the session to return information about.
:param dc: By default, the datacenter of the agent is queried;
however, the dc can be provided using the "dc" parameter.
:return: Boolean & message of success or failure.
CLI Example:
.. code-block:: bash
salt '*' consul.session_info session='c1c4d223-91cb-3d1f-1ee8-f2af9e7b6716' |
def add_and_get(self, delta):
    """Adds the given value to the current value and returns the updated value.

    :raises NoDataMemberInClusterError: if the cluster does not contain any data members.
    :raises UnsupportedOperationError: if the cluster version is less than 3.10.
    :raises ConsistencyLostError: if the session guarantees have been lost.
    :param delta: (int), the value to add.
    :return: (int), the updated value.
    """
    return self._invoke_internal(
        pn_counter_add_codec,
        delta=delta,
        get_before_update=False,
    )
def getHeaders(self):
    # Build the common WebDAV request headers, including HTTP Basic auth.
    # NOTE(review): the ':' separator literals were stripped by the export
    # (leaving unary ``+`` on strings), so this block fails at runtime
    # as-is; the credentials should be joined as ``user + ':' + password``.
    basicauth = base64.encodestring(b(self.user + + self.password)).strip()
    return {
        "Depth": "1",
        "Authorization": + _decode_utf8(basicauth),
        "Accept": "*/*"
    } | Get common headers
:return: |
def place_instruction(order_type, selection_id, side, handicap=None, limit_order=None, limit_on_close_order=None,
                      market_on_close_order=None, customer_order_ref=None):
    """Create order instructions to place an order at exchange.

    :param str order_type: define type of order to place.
    :param int selection_id: selection on which to place order
    :param float handicap: handicap if placing order on asianhandicap type market
    :param str side: side of order
    :param resources.LimitOrder limit_order: if orderType is a limitOrder structure details of the order.
    :param resources.LimitOnCloseOrder limit_on_close_order: if orderType is a
        limitOnCloseOrder structure details of the order.
    :param resources.MarketOnCloseOrder market_on_close_order: if orderType is
        a marketOnCloseOrder structure details of the order.
    :param str customer_order_ref: an optional reference customers can set to identify instructions.
    :return: orders to place.
    :rtype: dict
    """
    # Snapshot the parameters before any other locals are introduced.
    provided = locals()
    instruction = {}
    for name, value in provided.items():
        # Omit unset optionals; rename keys to the API's camelCase form.
        if value is not None:
            instruction[to_camel_case(name)] = value
    return instruction
def patch_custom_resource_definition_status(self, name, body, **kwargs):
    # Generated client wrapper: dispatch to the _with_http_info variant,
    # asynchronously when the async kwarg is set.
    # NOTE(review): the kwarg-name literals were stripped by the export
    # (empty ``kwargs[]``/``kwargs.get()``), so this block is not runnable
    # as-is.
    kwargs[] = True
    if kwargs.get():
        return self.patch_custom_resource_definition_status_with_http_info(name, body, **kwargs)
    else:
        (data) = self.patch_custom_resource_definition_status_with_http_info(name, body, **kwargs)
        return data | patch_custom_resource_definition_status # noqa: E501
partially update status of the specified CustomResourceDefinition # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_custom_resource_definition_status(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the CustomResourceDefinition (required)
:param UNKNOWN_BASE_TYPE body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1beta1CustomResourceDefinition
If the method is called asynchronously,
returns the request thread. |
def parse_bismark_mbias(self, f):
    # Parse one Bismark M-bias report into per-context/per-read data keyed
    # by sample name.
    # NOTE(review): all the context/read key literals were stripped by the
    # export (empty ``[]`` subscripts and bare ``in l`` tests), so this
    # block is not runnable as-is.
    s = f[]
    # Initialize one dict per (metric, context+read, sample) combination.
    self.bismark_mbias_data[][][s] = {}
    self.bismark_mbias_data[][][s] = {}
    self.bismark_mbias_data[][][s] = {}
    self.bismark_mbias_data[][][s] = {}
    self.bismark_mbias_data[][][s] = {}
    self.bismark_mbias_data[][][s] = {}
    self.bismark_mbias_data[][][s] = {}
    self.bismark_mbias_data[][][s] = {}
    self.bismark_mbias_data[][][s] = {}
    self.bismark_mbias_data[][][s] = {}
    self.bismark_mbias_data[][][s] = {}
    self.bismark_mbias_data[][][s] = {}
    key = None
    for l in f[]:
        if in l:
            # Section header: derive the context (CpG/CHG/CHH) ...
            if in l:
                key =
            elif in l:
                key =
            elif in l:
                key =
            # ... and the read (R1/R2) part of the key.
            if in l:
                key +=
            elif in l:
                key +=
            else:
                key +=
        if key is not None:
            sections = l.split()
            try:
                pos = int(sections[0])
                self.bismark_mbias_data[][key][s][pos] = float(sections[3])
                self.bismark_mbias_data[][key][s][pos] = int(sections[4])
            except (IndexError, ValueError):
                # Non-data lines within a section are skipped.
                continue
    # Drop sample entries that collected no data points.
    for t in self.bismark_mbias_data:
        for k in self.bismark_mbias_data[t]:
            self.bismark_mbias_data[t][k] = {
                s_name: self.bismark_mbias_data[t][k][s_name]
                for s_name in self.bismark_mbias_data[t][k]
                if len(self.bismark_mbias_data[t][k][s_name]) > 0
            } | Parse the Bismark M-Bias plot data
def wrap_http_for_jwt_access(credentials, http):
    # Wrap http.request so JWT access credentials are refreshed and applied
    # automatically; falls back to the original request method otherwise.
    # NOTE(review): a kwarg-key literal and a default were stripped by the
    # export (``method=``, bare ``in credentials._kwargs``), so this block
    # is not runnable as-is.
    orig_request_method = http.request
    wrap_http_for_auth(credentials, http)
    # wrap_http_for_auth replaced http.request; keep both variants around.
    authenticated_request_method = http.request
    def new_request(uri, method=, body=None, headers=None,
                    redirections=httplib2.DEFAULT_MAX_REDIRECTS,
                    connection_type=None):
        if in credentials._kwargs:
            # Audience-bound path: refresh the token if missing/expired and
            # use the auth-wrapped request method.
            if (credentials.access_token is None or
                    credentials.access_token_expired):
                credentials.refresh(None)
            return request(authenticated_request_method, uri,
                           method, body, headers, redirections,
                           connection_type)
        else:
            # No audience: issue the request unauthenticated with cleaned
            # headers through the original method.
            return request(orig_request_method, uri, method, body,
                           clean_headers(headers),
                           redirections, connection_type)
    http.request = new_request
    http.request.credentials = credentials | Prepares an HTTP object's request method for JWT access.
Wraps HTTP requests with logic to catch auth failures (typically
identified via a 401 status code). In the event of failure, tries
to refresh the token used and then retry the original request.
Args:
credentials: _JWTAccessCredentials, the credentials used to identify
a service account that uses JWT access tokens.
http: httplib2.Http, an http object to be used to make
auth requests. |
def pairwise_distances(x, y=None, **kwargs):
    r"""
    pairwise_distances(x, y=None, *, exponent=1)

    Pairwise distance between points.

    Return the pairwise distance between points in two sets, or
    in the same set if only one set is passed.

    Parameters
    ----------
    x: array_like
        An :math:`n \times m` array of :math:`n` observations in
        a :math:`m`-dimensional space.
    y: array_like
        An :math:`l \times m` array of :math:`l` observations in
        a :math:`m`-dimensional space. If None, the distances will
        be computed between the points in :math:`x`.
    exponent: float
        Exponent of the Euclidean distance.

    Returns
    -------
    numpy ndarray
        A :math:`n \times l` matrix where the :math:`(i, j)`-th entry is the
        distance between :math:`x[i]` and :math:`y[j]`.

    Examples
    --------
    >>> import numpy as np
    >>> import dcor
    >>> a = np.array([[1, 2, 3, 4],
    ...               [5, 6, 7, 8],
    ...               [9, 10, 11, 12],
    ...               [13, 14, 15, 16]])
    >>> b = np.array([[16, 15, 14, 13],
    ...               [12, 11, 10, 9],
    ...               [8, 7, 6, 5],
    ...               [4, 3, 2, 1]])
    >>> dcor.distances.pairwise_distances(a)
    array([[ 0.,  8., 16., 24.],
           [ 8.,  0.,  8., 16.],
           [16.,  8.,  0.,  8.],
           [24., 16.,  8.,  0.]])
    >>> dcor.distances.pairwise_distances(a, b)
    array([[24.41311123, 16.61324773,  9.16515139,  4.47213595],
           [16.61324773,  9.16515139,  4.47213595,  9.16515139],
           [ 9.16515139,  4.47213595,  9.16515139, 16.61324773],
           [ 4.47213595,  9.16515139, 16.61324773, 24.41311123]])
    """
    # BUG FIX: the body previously began with a stray bare ``r`` expression
    # (residue of the raw-docstring marker), which raised NameError on every
    # call; removed and the docstring restored in its proper position.
    x = _transform_to_2d(x)
    if y is None or y is x:
        # Same-set case: use the symmetric pairwise computation.
        return _pdist(x, **kwargs)
    y = _transform_to_2d(y)
    return _cdist(x, y, **kwargs)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.