code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|
def is_data_dependent(fmto, data):
if callable(fmto.data_dependent):
return fmto.data_dependent(data)
return fmto.data_dependent | Check whether a formatoption is data dependent
Parameters
----------
fmto: Formatoption
The :class:`Formatoption` instance to check
data: xarray.DataArray
The data array to use if the :attr:`~Formatoption.data_dependent`
attribute is a callable
Returns
-------
bool
... |
def metadata(self, run_id=None):
try:
version = subprocess.check_output(
["git", "describe", "--tags", "--always"]).decode()
except:
version = None
if self.config[][] == :
data_version = self.config[][]
... | Provide metadata on a Ding0 run
Parameters
----------
run_id: str, (defaults to current date)
Distinguish multiple versions of Ding0 data by a `run_id`. If not
set it defaults to current date in the format YYYYMMDDhhmmss
Returns
-------
dict
... |
def getAllData(self, temp = True, accel = True, gyro = True):
allData = {}
if temp:
allData["temp"] = self.getTemp()
if accel:
allData["accel"] = self.getAccelData( raw = False )
if gyro:
allData["gyro"] = self.getGyroData()
return ... | !
Get all the available data.
@param temp: True - Allow to return Temperature data
@param accel: True - Allow to return Accelerometer data
@param gyro: True - Allow to return Gyroscope data
@return a dictionary data
@retval {} Did not read any data
@retv... |
def file_detector_context(self, file_detector_class, *args, **kwargs):
last_detector = None
if not isinstance(self.file_detector, file_detector_class):
last_detector = self.file_detector
self.file_detector = file_detector_class(*args, **kwargs)
try:
y... | Overrides the current file detector (if necessary) in limited context.
Ensures the original file detector is set afterwards.
Example:
with webdriver.file_detector_context(UselessFileDetector):
someinput.send_keys('/etc/hosts')
:Args:
- file_detector_class - Class ... |
def cmdloop(self):
while True:
cmdline = input(self.prompt)
tokens = shlex.split(cmdline)
if not tokens:
if self.last_cmd:
tokens = self.last_cmd
else:
print()
continue
... | Start CLI REPL. |
def isTemporal(inferenceType):
if InferenceType.__temporalInferenceTypes is None:
InferenceType.__temporalInferenceTypes = \
set([InferenceType.TemporalNextStep,
InferenceType.TemporalClassification,
... | Returns True if the inference type is 'temporal', i.e. requires a
temporal memory in the network. |
def is_instance_factory(_type):
if isinstance(_type, (tuple, list)):
_type = tuple(_type)
type_repr = "|".join(map(str, _type))
else:
type_repr = "" % _type
def inner(x):
if not isinstance(x, _type):
raise ValueError("Value must be an instance of %s" % type_... | Parameters
----------
`_type` - the type to be checked against
Returns
-------
validator - a function of a single argument x , which returns the
True if x is an instance of `_type` |
def _get_filename(class_name, language):
name = str(class_name).strip()
lang = str(language)
if language in [, ]:
name = "".join([name[0].upper() + name[1:]])
suffix = {
: , : , : ,
: , : , :
}
suffix = suf... | Generate the specific filename.
Parameters
----------
:param class_name : str
The used class name.
:param language : {'c', 'go', 'java', 'js', 'php', 'ruby'}
The target programming language.
Returns
-------
filename : str
... |
def __check_mem(self):
mem_free = psutil.virtual_memory().available / 2**20
self.log.debug("Memory free: %s/%s", mem_free, self.mem_limit)
if mem_free < self.mem_limit:
raise RuntimeError(
"Not enough resources: free memory less "
"than %sMB: ... | raise exception on RAM exceeded |
def as_tree(self, visitor=None, children=None):
_parameters = {"node": self}
if visitor is not None:
_parameters["visitor"] = visitor
if children is not None:
_parameters["children"] = children
return self.__class__.objects.node_as_tree(**_parameters) | Recursively traverses each tree (starting from each root) in order
to generate a dictionary-based tree structure of the entire forest.
Each level of the forest/tree is a list of nodes, and each node
consists of a dictionary representation, where the entry
``children`` (by... |
def convenience_calc_fisher_approx(self, params):
shapes, intercepts, betas = self.convenience_split_params(params)
args = [betas,
self.design,
self.alt_id_vector,
self.rows_to_obs,
self.rows_to_alts,
self.choice_v... | Calculates the BHHH approximation of the Fisher Information Matrix for
this model / dataset. |
def enver(*args):
from optparse import OptionParser
parser = OptionParser(usage=trim(enver.__doc__))
parser.add_option(
, ,
action=, const=UserRegisteredEnvironment,
default=MachineRegisteredEnvironment,
dest=,
help="Use the current user-a--appendstore_true-r--replacestore_true--remove-valuestore_true-e-... | %prog [<name>=[value]]
To show all environment variables, call with no parameters:
%prog
To Add/Modify/Delete environment variable:
%prog <name>=[value]
If <name> is PATH or PATHEXT, %prog will by default append the value using
a semicolon as a separator. Use -r to disable this behavior or -a to force
it for... |
def save_tip_length(labware: Labware, length: float):
calibration_path = CONFIG[]
if not calibration_path.exists():
calibration_path.mkdir(parents=True, exist_ok=True)
labware_offset_path = calibration_path/.format(labware._id)
calibration_data = _helper_tip_length_data_format(
str(... | Function to be used whenever an updated tip length is found for
of a given tip rack. If an offset file does not exist, create the file
using labware id as the filename. If the file does exist, load it and
modify the length and the lastModified fields under the "tipLength" key. |
def step2(expnums, ccd, version, prefix=None, dry_run=False, default="WCS"):
jmp_trans = []
jmp_args = []
matt_args = []
idx = 0
for expnum in expnums:
jmp_args.append(
storage.get_file(expnum, ccd=ccd, version=version, ext=, prefix=prefix)[0:-8]
)
jmp_tran... | run the actual step2 on the given exp/ccd combo |
def broadcast(self, command, *args, **kwargs):
criterion = kwargs.pop(, self.BROADCAST_FILTER_ALL)
for index, user in items(self.users()):
if criterion(user, command, *args, **kwargs):
self.notify(user, command, *args, **kwargs) | Notifies each user with a specified command. |
def page(title=None, pageid=None, auto_suggest=True, redirect=True, preload=False):
if title is not None:
if auto_suggest:
results, suggestion = search(title, results=1, suggestion=True)
try:
title = suggestion or results[0]
except IndexError:
raise PageError(title)
... | Get a WikipediaPage object for the page with title `title` or the pageid
`pageid` (mutually exclusive).
Keyword arguments:
* title - the title of the page to load
* pageid - the numeric pageid of the page to load
* auto_suggest - let Wikipedia find a valid page title for the query
* redirect - allow redir... |
def _lookup_vpc_allocs(query_type, session=None, order=None, **bfilter):
if session is None:
session = bc.get_reader_session()
if order:
query_method = getattr(session.query(
nexus_models_v2.NexusVPCAlloc).filter_by(**bfilter).order_by(
order),
quer... | Look up 'query_type' Nexus VPC Allocs matching the filter.
:param query_type: 'all', 'one' or 'first'
:param session: db session
:param order: select what field to order data
:param bfilter: filter for mappings query
:returns: VPCs if query gave a result, else
raise NexusVPCAllocNotFou... |
def system_find_affiliates(input_params={}, always_retry=True, **kwargs):
return DXHTTPRequest(, input_params, always_retry=always_retry, **kwargs) | Invokes the /system/findAffiliates API method. |
def echo(text, **kwargs):
if shakedown.cli.quiet:
return
if not in kwargs:
kwargs[] = True
if in kwargs:
text = decorate(text, kwargs[])
if in os.environ and os.environ[] == :
if text:
print(text, end="", flush=True)
if kwargs.get():
... | Print results to the console
:param text: the text string to print
:type text: str
:return: a string
:rtype: str |
def _base_type(self):
type_class = self._dimension_dict["type"]["class"]
if type_class == "categorical":
return "categorical"
if type_class == "enum":
subclass = self._dimension_dict["type"]["subtype"]["class"]
return "enum.%s" % subclass
rais... | Return str like 'enum.numeric' representing dimension type.
This string is a 'type.subclass' concatenation of the str keys
used to identify the dimension type in the cube response JSON.
The '.subclass' suffix only appears where a subtype is present. |
def tpx(mt, x, t):
return mt.lx[x + t] / mt.lx[x] | tpx : Returns the probability that x will survive within t years |
def parse_field(field: str) -> Tuple[str, Optional[str]]:
_field = field.split()
_field = [f.strip() for f in _field]
if len(_field) == 1 and _field[0]:
return _field[0], None
elif len(_field) == 2 and _field[0] and _field[1]:
return _field[0], _field[1]
raise QueryParserExcepti... | Parses fields with underscores, and return field and suffix.
Example:
foo => foo, None
metric.foo => metric, foo |
def _pwl_gen_costs(self, generators, base_mva):
ng = len(generators)
gpwl = [g for g in generators if g.pcost_model == PW_LINEAR]
if self.dc:
pgbas = 0
nq = 0
ybas = ng
else:
pgbas = 0
nq =... | Returns the basin constraints for piece-wise linear gen cost
variables. CCV cost formulation expressed as Ay * x <= by.
Based on makeAy.m from MATPOWER by C. E. Murillo-Sanchez, developed at
PSERC Cornell. See U{http://www.pserc.cornell.edu/matpower/} for more
information. |
def index_of(self, name):
result = -1
for index, actor in enumerate(self.actors):
if actor.name == name:
result = index
break
return result | Returns the index of the actor with the given name.
:param name: the name of the Actor to find
:type name: str
:return: the index, -1 if not found
:rtype: int |
def from_grpc_error(rpc_exc):
if isinstance(rpc_exc, grpc.Call):
return from_grpc_status(
rpc_exc.code(), rpc_exc.details(), errors=(rpc_exc,), response=rpc_exc
)
else:
return GoogleAPICallError(str(rpc_exc), errors=(rpc_exc,), response=rpc_exc) | Create a :class:`GoogleAPICallError` from a :class:`grpc.RpcError`.
Args:
rpc_exc (grpc.RpcError): The gRPC error.
Returns:
GoogleAPICallError: An instance of the appropriate subclass of
:class:`GoogleAPICallError`. |
def rpcexec(self, payload):
log.debug(json.dumps(payload))
self.ws.send(json.dumps(payload, ensure_ascii=False).encode("utf8")) | Execute a call by sending the payload
:param dict payload: Payload data
:raises ValueError: if the server does not respond in proper JSON format
:raises RPCError: if the server returns an error |
def process_event(self, event, ipmicmd, seldata):
event[] = None
evdata = event[]
if evdata[0] & 0b11000000 == 0b10000000:
event[] = evdata[1]
if evdata[0] & 0b110000 == 0b100000:
event[] = evdata[2] | Modify an event according with OEM understanding.
Given an event, allow an OEM module to augment it. For example,
event data fields can have OEM bytes. Other times an OEM may wish
to apply some transform to some field to suit their conventions. |
def set_pump_status(self, status):
self.pump_status = status
_logger.info("%r partition %r", status, self.lease.partition_id) | Updates pump status and logs update to console. |
async def connect(channel: discord.VoiceChannel):
node_ = node.get_node(channel.guild.id)
p = await node_.player_manager.create_player(channel)
return p | Connects to a discord voice channel.
This is the publicly exposed way to connect to a discord voice channel.
The :py:func:`initialize` function must be called first!
Parameters
----------
channel
Returns
-------
Player
The created Player object.
Raises
------
Inde... |
def loop_read(self, max_packets=1):
if self._sock is None and self._ssl is None:
return MQTT_ERR_NO_CONN
max_packets = len(self._out_messages) + len(self._in_messages)
if max_packets < 1:
max_packets = 1
for i in range(0, max_packets):
rc = ... | Process read network events. Use in place of calling loop() if you
wish to handle your client reads as part of your own application.
Use socket() to obtain the client socket to call select() or equivalent
on.
Do not use if you are using the threaded interface loop_start(). |
def occult(target1, shape1, frame1, target2, shape2, frame2, abcorr, observer,
et):
target1 = stypes.stringToCharP(target1)
shape1 = stypes.stringToCharP(shape1)
frame1 = stypes.stringToCharP(frame1)
target2 = stypes.stringToCharP(target2)
shape2 = stypes.stringToCharP(shape2)
fr... | Determines the occultation condition (not occulted, partially,
etc.) of one target relative to another target as seen by
an observer at a given time.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/occult_c.html
:param target1: Name or ID of first target.
:type target1: str
:param shap... |
def get_default_config(self):
config = super(KafkaConsumerLagCollector, self).get_default_config()
config.update({
: ,
: ,
:
})
return config | Returns the default collector settings |
def from_span(cls, inputs, window_length, span, **kwargs):
if span <= 1:
raise ValueError(
"`span` must be a positive number. %s was passed." % span
)
decay_rate = (1.0 - (2.0 / (1.0 + span)))
assert 0.0 < decay_rate <= 1.0
return cls(
... | Convenience constructor for passing `decay_rate` in terms of `span`.
Forwards `decay_rate` as `1 - (2.0 / (1 + span))`. This provides the
behavior equivalent to passing `span` to pandas.ewma.
Examples
--------
.. code-block:: python
# Equivalent to:
# ... |
def add_filter(self, filter_, frequencies=None, dB=True,
analog=False, sample_rate=None, **kwargs):
if not analog:
if not sample_rate:
raise ValueError("Must give sample_rate frequency to display "
"digital (analog=False) f... | Add a linear time-invariant filter to this BodePlot
Parameters
----------
filter_ : `~scipy.signal.lti`, `tuple`
the filter to plot, either as a `~scipy.signal.lti`, or a
`tuple` with the following number and meaning of elements
- 2: (numerator, denomin... |
def __update_offsets(self, fileobj, atoms, delta, offset):
if delta == 0:
return
moov = atoms[b"moov"]
for atom in moov.findall(b, True):
self.__update_offset_table(fileobj, ">%dI", atom, delta, offset)
for atom in moov.findall(b, True):
self.... | Update offset tables in all 'stco' and 'co64' atoms. |
def from_hdf5_path(cls, hdf5_path):
from keras.models import load_model
hdf5_local_path = BCommon.get_local_file(hdf5_path)
kmodel = load_model(hdf5_local_path)
return kmodel, DefinitionLoader.from_kmodel(kmodel) | :param hdf5_path: hdf5 path which can be stored in a local file system, HDFS, S3, or any Hadoop-supported file system.
:return: BigDL Model |
def data(self):
clone = copy.deepcopy(self)
clone._cfg[] = ReturnType.Object
return clone | return (data_dict, key) tuple instead of models instances |
def save(self):
try:
response = requests.post(self._upload_url,
auth=self.jss.session.auth,
verify=self.jss.session.verify,
files=self.resource)
except JSSPostError as erro... | POST the object to the JSS. |
def get_permission_request(parser, token):
return PermissionForObjectNode.handle_token(
parser, token, approved=False, name=) | Performs a permission request check with the given signature, user and objects
and assigns the result to a context variable.
Syntax::
{% get_permission_request PERMISSION_LABEL.CHECK_NAME for USER and *OBJS [as VARNAME] %}
{% get_permission_request "poll_permission.change_poll"
fo... |
def keyphrases_table(keyphrases, texts, similarity_measure=None, synonimizer=None,
language=consts.Language.ENGLISH):
similarity_measure = similarity_measure or relevance.ASTRelevanceMeasure()
text_titles = texts.keys()
text_collection = texts.values()
similarity_measure.set_... | Constructs the keyphrases table, containing their matching scores in a set of texts.
The resulting table is stored as a dictionary of dictionaries,
where the entry table["keyphrase"]["text"] corresponds
to the matching score (0 <= score <= 1) of keyphrase "keyphrase"
in the text named "text".
... |
def update(self, *fields):
from mongoframes.queries import to_refs
assert in self._document, "Canupdate_id_id$setupdated').send(self.__class__, frames=[self]) | Update this document. Optionally a specific list of fields to update can
be specified. |
def get_client():
return InfluxDBClient(
settings.INFLUXDB_HOST,
settings.INFLUXDB_PORT,
settings.INFLUXDB_USER,
settings.INFLUXDB_PASSWORD,
settings.INFLUXDB_DATABASE,
timeout=settings.INFLUXDB_TIMEOUT,
ssl=getattr(settings, , False),
verify_ssl=... | Returns an ``InfluxDBClient`` instance. |
def parse_clubs(self, clubs_page):
character_info = self.parse_sidebar(clubs_page)
second_col = clubs_page.find(u, {: }).find(u).find(u).find_all(u, recursive=False)[1]
try:
clubs_header = second_col.find(u, text=u)
character_info[u] = []
if clubs_header:
curr_elt = clubs_hea... | Parses the DOM and returns character clubs attributes.
:type clubs_page: :class:`bs4.BeautifulSoup`
:param clubs_page: MAL character clubs page's DOM
:rtype: dict
:return: character clubs attributes. |
def find_l50(contig_lengths_dict, genome_length_dict):
l50_dict = dict()
for file_name, contig_lengths in contig_lengths_dict.items():
currentlength = 0
currentcontig = 0
for contig_length in contig_lengths:
currentlength += contig_length
... | Calculate the L50 for each strain. L50 is defined as the number of contigs required to achieve the N50
:param contig_lengths_dict: dictionary of strain name: reverse-sorted list of all contig lengths
:param genome_length_dict: dictionary of strain name: total genome length
:return: l50_dict: dictionary of s... |
def load_all(path, include_core=True, subfolders=None, path_in_arc=None):
def clean(varStr):
return re.sub(, , str(varStr))
path = Path(path)
if zipfile.is_zipfile(str(path)):
with zipfile.ZipFile(file=str(path), mode=) as zz:
zipcontent = zz.namelist()
if... | Loads a full IO system with all extension in path
Parameters
----------
path : pathlib.Path or string
Path or path with para file name for the data to load.
This must either point to the directory containing the uncompressed
data or the location of a compressed zip file with the dat... |
def decrypt(ciphertext, secret, inital_vector, checksum=True, lazy=True):
secret = _lazysecret(secret) if lazy else secret
encobj = AES.new(secret, AES.MODE_CFB, inital_vector)
try:
padded = ciphertext + ( * (len(ciphertext) % 4))
decoded = base64.urlsafe_b64decode(str(padded))
... | Decrypts ciphertext with secret
ciphertext - encrypted content to decrypt
secret - secret to decrypt ciphertext
inital_vector - initial vector
lazy - pad secret if less than legal blocksize (default: True)
checksum - verify crc32 byte encoded checksum (default: True)
... |
def redfearn(lat, lon, false_easting=None, false_northing=None,
zone=None, central_meridian=None, scale_factor=None):
from math import pi, sqrt, sin, cos, tan
a = 6378137.0
inverse_flattening = 298.257222101
if scale_factor is None:
K0 = 0.999... | Compute UTM projection using Redfearn's formula
lat, lon is latitude and longitude in decimal degrees
If false easting and northing are specified they will override
the standard
If zone is specified reproject lat and long to specified zone instead of
standard zone
If meridian is specified, r... |
def post(self, url, data, params=None):
r = self.session.post(url, data=data, params=params)
return self._response_parser(r, expect_json=False) | Initiate a POST request |
def callgrind(self, out, filename=None, commandline=None, relative_path=False):
print(u, file=out)
print(u, file=out)
print(u, file=out)
print(u, file=out)
print(u, file=out)
if commandline is not None:
print(u, commandline, file=out)
file_dic... | Dump statistics in callgrind format.
Contains:
- per-line hit count, time and time-per-hit
- call associations (call tree)
Note: hit count is not inclusive, in that it is not the sum of all
hits inside that call.
Time unit: microsecond (1e-6 second).
out (file... |
def _prm_get_longest_stringsize(string_list):
maxlength = 1
for stringar in string_list:
if isinstance(stringar, np.ndarray):
if stringar.ndim > 0:
for string in stringar.ravel():
maxlength = max(len(string), maxlength)
... | Returns the longest string size for a string entry across data. |
def line_spacing(self):
pPr = self._element.pPr
if pPr is None:
return None
return self._line_spacing(pPr.spacing_line, pPr.spacing_lineRule) | |float| or |Length| value specifying the space between baselines in
successive lines of the paragraph. A value of |None| indicates line
spacing is inherited from the style hierarchy. A float value, e.g.
``2.0`` or ``1.75``, indicates spacing is applied in multiples of
line heights. A |Le... |
def info(self, msg):
self._execActions(, msg)
msg = self._execFilters(, msg)
self._processMsg(, msg)
self._sendMsg(, msg) | Log Info Messages |
def imagetransformerpp_base_8l_8h_big_cond_dr03_dan():
hparams = imagetransformerpp_sep_channels_8l_8h()
hparams.hidden_size = 512
hparams.num_heads = 8
hparams.filter_size = 2048
hparams.batch_size = 4
hparams.max_length = 3075
hparams.layer_prepostprocess_dropout = 0.3
hparams.layer_preprocess_sequ... | big 1d model for conditional image generation.2.99 on cifar10. |
def _add_secondary_if_exists(secondary, out, get_retriever):
secondary = [_file_local_or_remote(y, get_retriever) for y in secondary]
secondary = [z for z in secondary if z]
if secondary:
out["secondaryFiles"] = [{"class": "File", "path": f} for f in secondary]
return out | Add secondary files only if present locally or remotely. |
def _produceIt(self, segments, thunk):
if not self.prefixURL:
needle = ()
else:
needle = tuple(self.prefixURL.split())
S = len(needle)
if segments[:S] == needle:
if segments == JUST_SLASH:
subsegments = segment... | Underlying implmeentation of L{PrefixURLMixin.produceResource} and
L{PrefixURLMixin.sessionlessProduceResource}.
@param segments: the URL segments to dispatch.
@param thunk: a 0-argument callable which returns an L{IResource}
provider, or None.
@return: a 2-tuple of C{(resourc... |
def generate_csr(self, basename=):
csr = BytesIO()
crypto.create_csr(
self.key.file,
self.name,
.format(basename, int(datetime.now().timestamp())),
.format(self.cuit),
csr,
)
csr.seek(0)
return csr | Creates a CSR for this TaxPayer's key
Creates a file-like object that contains the CSR which can be used to
request a new certificate from AFIP. |
def set_qos(self, prefetch_size=0, prefetch_count=0, apply_globally=False):
self.sender.send_BasicQos(prefetch_size, prefetch_count, apply_globally)
yield from self.synchroniser.wait(spec.BasicQosOK)
self.reader.ready() | Specify quality of service by requesting that messages be pre-fetched
from the server. Pre-fetching means that the server will deliver messages
to the client while the client is still processing unacknowledged messages.
This method is a :ref:`coroutine <coroutine>`.
:param int prefetch... |
def find_channel_groups(chan):
labels = chan.return_label()
group_names = {match(, label).group(1) for label in labels}
groups = {}
for group_name in group_names:
groups[group_name] = [label for label in labels if label.startswith(group_name)]
return groups | Channels are often organized in groups (different grids / strips or
channels in different brain locations), so we use a simple heuristic to
get these channel groups.
Parameters
----------
chan : instance of Channels
channels to group
Returns
-------
groups : dict
channe... |
def _update_remote_children(remote_parent, children):
name_to_child = _name_to_child_map(children)
for remote_child in remote_parent.children:
local_child = name_to_child.get(remote_child.name)
if local_child:
local_child.update_remote_ids(remote_child) | Update remote_ids based on on parent matching up the names of children.
:param remote_parent: RemoteProject/RemoteFolder who has children
:param children: [LocalFolder,LocalFile] children to set remote_ids based on remote children |
def from_string(cls, address, case_sensitive=False):
assert isinstance(address, str),
username, domainname = address.split()
return cls(username, domainname, case_sensitive=case_sensitive) | Alternate constructor for building from a string.
:param str address: An email address in <user>@<domain> form
:param bool case_sensitive: passed directly to the constructor argument
of the same name.
:returns: An account from the given arguments
:rtype: :class:`Account` |
def _xysxy2(date):
planets = _planets(date)
x_tab, y_tab, s_tab = _tab(), _tab(), _tab()
ttt = date.change_scale().julian_century
X = -16616.99 + 2004191742.88 * ttt - 427219.05 * ttt ** 2 - 198620.54 * ttt ** 3\
- 46.05 * ttt ** 4 + 5.98 * ttt ** 5
Y = -6950.78 - 25381.99 * tt... | Here we deviate from what has been done everywhere else. Instead of taking the formulas
available in the Vallado, we take those described in the files tab5.2{a,b,d}.txt.
The result should be equivalent, but they are the last iteration of the IAU2000A as of June 2016
Args:
date (Date)
Return:
... |
def get_factors_iterative2(n):
ans, stack, x = [], [], 2
while True:
if x > n // x:
if not stack:
return ans
ans.append(stack + [n])
x = stack.pop()
n *= x
x += 1
elif n % x == 0:
stack.append(x)
... | [summary]
analog as above
Arguments:
n {[int]} -- [description]
Returns:
[list of lists] -- [all factors of n] |
def incremental_a_value(bval, min_mag, mag_inc):
a_cum = 10. ** (bval * min_mag)
a_inc = a_cum + np.log10((10. ** (bval * mag_inc)) -
(10. ** (-bval * mag_inc)))
return a_inc | Incremental a-value from cumulative - using the version of the
Hermann (1979) formula described in Wesson et al. (2003)
:param float bval:
Gutenberg & Richter (1944) b-value
:param np.ndarray min_mag:
Minimum magnitude of completeness table
:param float mag_inc:
Magnitude incr... |
def _make_input(self, action, old_quat):
return {
"dpos": action[:3],
"rotation": T.quat2mat(T.quat_multiply(old_quat, action[3:7])),
} | Helper function that returns a dictionary with keys dpos, rotation from a raw input
array. The first three elements are taken to be displacement in position, and a
quaternion indicating the change in rotation with respect to @old_quat. |
def _elements(cls):
if not cls.__is_selector():
raise Exception("Invalid selector[%s]." %cls.__control["by"])
driver = Web.driver
try:
elements = WebDriverWait(driver, cls.__control["timeout"]).until(lambda driver: getattr(driver,"find... | find the elements with controls |
def hide_me(tb, g=globals()):
base_tb = tb
try:
while tb and tb.tb_frame.f_globals is not g:
tb = tb.tb_next
while tb and tb.tb_frame.f_globals is g:
tb = tb.tb_next
except Exception as e:
logging.exception(e)
tb = base_tb
if not tb:
t... | Hide stack traceback of given stack |
def namelist(self):
names = []
for member in self.filelist:
names.append(member.filename)
return names | Return a list of file names in the archive. |
def namespace(self):
if self.prefix is None:
return self.defaultNamespace()
return self.resolvePrefix(self.prefix) | Get the element's namespace.
@return: The element's namespace by resolving the prefix, the explicit
namespace or the inherited namespace.
@rtype: (I{prefix}, I{name}) |
async def delayNdefProps(self):
async with self.getTempSlab() as slab:
seqn = s_slabseqn.SlabSeqn(slab, )
self.ndefdelay = seqn
yield
self.ndefdelay = None
logger.info(f)
for i, (oldv, newv) in seqn.iter(0):
... | Hold this during a series of renames to delay ndef
secondary property processing until the end.... |
def p_FuncDef(p):
p[0] = FuncDef(p[2], p[3], p[5], p[8], p[9], p[10]) | FuncDef : DEF RefModifier INDENTIFIER LPARENT ParamList RPARENT COLON ReturnTypeModifier Terminator Block |
def get(self, rid, data_callback=None, raise_on_error=True):
cached_data = None
ds_data = self.ds.get(rid, raise_on_error=False)
if ds_data is not None:
expired = True
if ds_data.get() is True:
if self.ttl < int(ds_data.get(, {}).get(, 0)):
... | Get cached data from the data store.
Args:
rid (str): The record identifier.
data_callback (callable): A method that will return the data.
raise_on_error (bool): If True and not r.ok this method will raise a RunTimeError.
Returns:
object : Python request... |
def get_region_from_metadata():
global __Location__
if __Location__ == :
log.debug()
return None
if __Location__ != :
return __Location__
try:
result = requests.get(
"http://169.254.169.254/latest/dynamic/instance-identity/document",
... | Try to get region from instance identity document and cache it
.. versionadded:: 2015.5.6 |
def _create_doc(self):
root = etree.Element()
root.set(, )
root.set(, self.name)
return root | Create document.
:return: |
def licenses(self):
return {self._acronym_lic(l): l for l in self.resp_text.split()
if l.startswith(self.prefix_lic)} | OSI Approved license. |
def as_proto(self):
if self._dims is None:
return tensor_shape_pb2.TensorShapeProto(unknown_rank=True)
else:
return tensor_shape_pb2.TensorShapeProto(
dim=[
tensor_shape_pb2.TensorShapeProto.Dim(
size=-1 if d.va... | Returns this shape as a `TensorShapeProto`. |
def cmd_rollback(self, name):
from peewee_migrate.router import Router, LOGGER
LOGGER.setLevel()
LOGGER.propagate = 0
router = Router(self.database,
migrate_dir=self.app.config[],
migrate_table=self.app.config[])
router.... | Rollback migrations. |
def getPropAllSupers(self, aURI):
aURI = aURI
try:
qres = self.rdflib_graph.query( % (aURI))
except:
printDebug(
"... warning: the query failed (maybe missing SPARQL 1.1 support?)"
)
qres = []
return list(qres) | note: requires SPARQL 1.1
2015-06-04: currenlty not used, inferred from above |
def _value_format(self, value):
return % (
self.area_names.get(self.adapt_code(value[0]), ),
self._y_format(value[1])
) | Format value for map value display. |
def delete(self):
if self in self._parent.vlan_interface:
self._parent.data[] = [
v for v in self._parent.vlan_interface
if v != self]
self.update()
for route in self._parent._engine.routing:
if route.to_delete:
... | Delete this Vlan interface from the parent interface.
This will also remove stale routes if the interface has
networks associated with it.
:return: None |
def connect(self, deleteOldVersions=False, recreate=False):
with ConnectionFactory.get() as conn:
self._initTables(cursor=conn.cursor, deleteOldVersions=deleteOldVersions,
recreate=recreate)
conn.cursor.execute()
self._connectionID = conn.cursor.fet... | Locate the current version of the jobs DB or create a new one, and
optionally delete old versions laying around. If desired, this method
can be called at any time to re-create the tables from scratch, delete
old versions of the database, etc.
Parameters:
--------------------------------------------... |
def iter_insert_items(tree):
if tree.list_values:
keys = tree.attrs
for values in tree.list_values:
if len(keys) != len(values):
raise SyntaxError(
"Values do not match attributes " "" % (values, keys)
)
yield dict(zip... | Iterate over the items to insert from an INSERT statement |
def sha256(message, encoder=nacl.encoding.HexEncoder):
return encoder.encode(nacl.bindings.crypto_hash_sha256(message)) | Hashes ``message`` with SHA256.
:param message: The message to hash.
:type message: bytes
:param encoder: A class that is able to encode the hashed message.
:returns: The hashed message.
:rtype: bytes |
def _remove_list_item(self, beacon_config, label):
index = self._get_index(beacon_config, label)
del beacon_config[index] | Remove an item from a beacon config list |
def extended_arg_patterns(self):
for arg in self._arg_iterator(self.args):
if isinstance(arg, Pattern):
if arg.mode > self.single:
while True:
yield arg
else:
yield arg
else:
... | Iterator over patterns for positional arguments to be matched
This yields the elements of :attr:`args`, extended by their `mode`
value |
def get_still_seg_belonged(dt_str, seg_duration, fmt=):
dt = time_util.str_to_datetime(dt_str, fmt)
minutes_of_day = time_util.get_minutes_of_day(dt)
return time_util.minutes_to_time_str(
minutes_of_day - minutes_of_day % seg_duration) | 获取该时刻所属的非滑动时间片
:param dt_str: datetime string, eg: 2016-10-31 12:22:11
:param seg_duration: 时间片长度, unit: minute
:param fmt: datetime string format
:return: |
def check_calendar(self, ds):
valid_calendars = [
,
,
,
,
,
,
,
,
,
]
ret_val = []
for time_var in ds.get_variables_by_attributes(calendar... | Check the calendar attribute for variables defining time and ensure it
is a valid calendar prescribed by CF.
CF §4.4.1 In order to calculate a new date and time given a base date, base
time and a time increment one must know what calendar to use.
The values currently defined for calend... |
def asserts(self, *args, **kwargs):
    """Run ``match`` and place its result under an assertion via ``expect``.

    Override this for higher-level control, such as returning a custom
    object that supports additional validation.
    """
    outcome = self.match(*args, **kwargs)
    self.expect(outcome)
    return outcome
such as returning a custom object for additional validation (e.g. expect().to.change()) |
def tasks(self):
    # Return a list of all task names known to the external task engine,
    # executed in this object's working directory.
    # NOTE(review): the string keys in this dump were stripped by
    # extraction — ``{: }`` and ``output[][]`` originally held literal key
    # names; recover them from the upstream source before running.
    task_input = {: }
    output = taskengine.execute(task_input, self._engine_name, cwd=self._cwd)
    return output[][] | Returns a list of all tasks known to the engine.
:return: A list of task names. |
def unpitched_low(dur, idx):
    """Synthesize a non-harmonic bass/low-frequency note as a list.

    Parameters
    ----------
    dur :
        Duration, in samples.
    idx :
        Zero or one (integer); kept so each variant is memoized separately.

    Returns
    -------
    A list with the synthesized note.
    """
    # Squared sinusoid used as the amplitude envelope, limited to ``dur``.
    envelope = sinusoid(lag2freq(dur * 2)).limit(dur) ** 2
    # Fundamental wandering around 40-60 Hz with a random starting phase.
    freq = 40 + 20 * sinusoid(1000 * Hz, phase=uniform(-pi, pi))
    # Two slightly detuned table lookups summed, shaped by the envelope.
    note = (low_table(freq * Hz) + low_table(freq * 1.1 * Hz)) * envelope * .5
    return list(note)
return list(result) | Non-harmonic bass/lower frequency sound as a list (due to memoization).
Parameters
----------
dur:
Duration, in samples.
idx:
Zero or one (integer), for a small difference to the sound played.
Returns
-------
A list with the synthesized note. |
def p_case_list(p):
    # PLY parser action for the ``case_list`` rule of a switch statement.
    # (Deliberately no docstring here: PLY interprets a docstring on a
    # ``p_*`` function as the grammar specification itself.)
    n = len(p)
    if n == 6:
        # case_list CASE expr case_separator inner_statement_list
        p[0] = p[1] + [ast.Case(p[3], p[5], lineno=p.lineno(2))]
    elif n == 5:
        # case_list DEFAULT case_separator inner_statement_list
        p[0] = p[1] + [ast.Default(p[4], lineno=p.lineno(2))]
    else:
        # empty production: start with no cases
        p[0] = []
| case_list CASE expr case_separator inner_statement_list
| case_list DEFAULT case_separator inner_statement_list |
def create_lv(self, name, length, units):
if units != "%":
size = size_units[units] * length
else:
if not (0 < length <= 100) or type(length) is float:
raise ValueError("Length not supported.")
size = (self.size("B") / 100) * length
se... | Creates a logical volume and returns the LogicalVolume instance associated with
the lv_t handle::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg", "w")
lv = vg.create_lv("mylv", 40, "MiB")
*Args:*
* name (str): The des... |
def clean(bundle, before, after, keep_last):
    """Clean up data downloaded with the ingest command.

    Thin pass-through to the bundles module implementation; all filtering
    (by bundle, date bounds, and number of most recent ingestions to keep)
    happens there.
    """
    bundles_module.clean(bundle, before, after, keep_last)
def parse_child_elements(self, element):
    """Dispatch every direct child of *element* to its tag-specific parser.

    Each child is looked up in ``self.parsers`` by its tag and the matching
    handler is invoked with the child element.
    """
    for subelement in element.iterchildren():
        handler = self.parsers[subelement.tag]
        handler(subelement)
self.parsers[child.tag](child) | parses all children of an etree element |
def make_dependent(self, source, target, action):
sourcetargetactiononetwotwo
if not self._generators:
return
src_permuter, src = self._resolve_child(source)
dest = self._resolve_child(target)[1]
container = src_permuter._generators
idx = container.... | Create a dependency between path 'source' and path 'target' via the
callable 'action'.
>>> permuter._generators
[IterValueGenerator(one), IterValueGenerator(two)]
>>> permuter.make_dependent('one', 'two', lambda x: x + 1)
Going forward, 'two' will only contain values that are (... |
def _update_dPrxy(self):
super(ExpCM_fitprefs, self)._update_dPrxy()
if in self.freeparams:
tildeFrxyQxy = self.tildeFrxy * self.Qxy
j = 0
zetaxterm = scipy.ndarray((self.nsites, N_CODON, N_CODON), dtype=)
zetayterm = scipy.ndarray((self.nsites,... | Update `dPrxy`. |
def _param_callback(self, name, value):
print(.format(name, value))
self._param_check_list.remove(name)
if len(self._param_check_list) == 0:
print()
for g in self._param_groups:
self._cf.param.remove_update_callbac... | Generic callback registered for all the groups |
def StringIO(*args, **kw):
    """Thunk that loads the real StringIO implementation on demand.

    On first call, imports the fastest available implementation —
    ``cStringIO`` (Python 2 C version), then the pure-Python ``StringIO``
    module (Python 2), then ``io.StringIO`` (Python 3) — rebinds the
    module-global name to that class so later calls skip the import, and
    returns an instance constructed with the given arguments.

    Fix: the original only tried the two Python-2 modules, so it raised
    ``ImportError`` on Python 3; ``io.StringIO`` is the final fallback.
    """
    global StringIO  # rebind so subsequent calls hit the class directly
    try:
        from cStringIO import StringIO
    except ImportError:
        try:
            from StringIO import StringIO
        except ImportError:
            # Python 3: both legacy modules are gone.
            from io import StringIO
    return StringIO(*args, **kw)
def _check_neg(self, level, *tokens):
for record in self.records:
if level is not None and record.levelno != level:
continue
if all(token in record.message for token in tokens):
break
else:
return
msg = "Token... | Check that the different tokens were NOT logged in one record, assert by level. |
def _set_get_vnetwork_hosts(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=get_vnetwork_hosts.get_vnetwork_hosts, is_leaf=True, yang_name="get-vnetwork-hosts", rest_name="get-vnetwork-hosts", parent=self, path_helper=self._path_helper, extmethods=sel... | Setter method for get_vnetwork_hosts, mapped from YANG variable /brocade_vswitch_rpc/get_vnetwork_hosts (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_get_vnetwork_hosts is considered as a private
method. Backends looking to populate this variable should
do so via... |
def assert_reset(self, asserted):
    """Assert or de-assert the target reset line.

    Cached register state is invalidated before the line is toggled.
    Link-level ``DAPAccess.Error`` failures are translated into this
    layer's exception type, preserving the original as the cause.
    """
    try:
        self._invalidate_cached_registers()
        self._link.assert_reset(asserted)
    except DAPAccess.Error as error:
        six.raise_from(self._convert_exception(error), error)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.