code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|
def run(self, file, updateconfig=True, clean=False, path=None):
if updateconfig:
self.update_config()
self.program, self.version = self.setup(path)
commandline = (
self.program + " -c " + self.config[] + " " + file)
rcode =... | Run SExtractor.
If updateconfig is True (default), the configuration
files will be updated before running SExtractor.
If clean is True (default: False), configuration files
(if any) will be deleted after SExtractor terminates. |
def params(self):
if self._params is None:
self._params = list(filter(lambda attr: isinstance(attr, Param),
[getattr(self, x) for x in dir(self) if x != "params" and
not isinstance(getattr(type(self), x, None), p... | Returns all params ordered by name. The default implementation
uses :py:func:`dir` to get all attributes of type
:py:class:`Param`. |
def get_node_ip_address(address="8.8.8.8:53"):
ip_address, port = address.split(":")
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
s.connect((ip_address, int(port)))
node_ip_address = s.getsockname()[0]
except Exception as e:
node_ip_address = "... | Determine the IP address of the local node.
Args:
address (str): The IP address and port of any known live service on the
network you care about.
Returns:
The IP address of the current node. |
def __rmfile(path):
logger.info("rmfile: %s" % path)
try:
os.remove(path)
return True
except Exception as e:
logger.error("rmfile: %s failed! Error: %s" % (path, e))
return False | Delete a file.
Args:
path (str): Path to the file that needs to be deleted.
Returns:
bool: True if the operation is successful, False otherwise. |
def remove_existing_fpaths(fpath_list, verbose=VERBOSE, quiet=QUIET,
strict=False, print_caller=PRINT_CALLER,
lbl=):
import utool as ut
if print_caller:
print(util_dbg.get_caller_name(range(1, 4)) + )
fpath_list_ = ut.filter_Nones(fpath_list... | checks existance before removing. then tries to remove exisint paths |
def read_rtt(jlink):
try:
while jlink.connected():
terminal_bytes = jlink.rtt_read(0, 1024)
if terminal_bytes:
sys.stdout.write("".join(map(chr, terminal_bytes)))
sys.stdout.flush()
time.sleep(0.1)
except Exception:
print("... | Reads the JLink RTT buffer #0 at 10Hz and prints to stdout.
This method is a polling loop against the connected JLink unit. If
the JLink is disconnected, it will exit. Additionally, if any exceptions
are raised, they will be caught and re-raised after interrupting the
main thread.
sys.stdout.write... |
def set_reps(self, reps):
self.stimulus.setRepCount(reps)
self.refstim.setRepCount(reps) | set the number of repetitions for the stimuli (reference tone and cal stim)
:param reps: number of times to present the same stimulus
:type reps: int |
def git_clone_job_list(job_list):
queue = Queue()
for job in job_list:
queue.put(job)
if len(job_list) < 20:
thread_num = len(job_list)
else:
thread_num = 20
threads = []
for _ in range(thread_num):
thread = Thread(target=git_clone_worker, args=(queue, ))
... | Deal with all git clone jobs in $job_list. |
def xgroup_setid(self, stream, group_name, latest_id=):
fut = self.execute(b, b, stream, group_name, latest_id)
return wait_ok(fut) | Set the latest ID for a consumer group |
def dmsToDeg(sign, deg, min, sec):
return sign * (deg + min * degPerDmsMin + sec * degPerDmsSec) | Convert dec sign, degrees, minutes, seconds into a signed angle in
degrees. |
def mask_and_dates_for_term(self,
term,
root_mask_term,
workspace,
all_dates):
mask = term.mask
mask_offset = self.extra_rows[mask] - self.extra_rows[term]
... | Load mask and mask row labels for term.
Parameters
----------
term : Term
The term to load the mask and labels for.
root_mask_term : Term
The term that represents the root asset exists mask.
workspace : dict[Term, any]
The values that have bee... |
def get_timestamp(self, cycle=None, dataset_number=None,
in_minutes=False, full=True):
dataset_number = self._validate_dataset_number(dataset_number)
if dataset_number is None:
self._report_empty_dataset()
return
cycle_index_header = self.h... | Returns timestamps (in sec or minutes (if in_minutes==True)).
Args:
cycle: cycle number (all if None)
dataset_number: first dataset if None
in_minutes: return values in minutes instead of seconds if True
full: valid only for cycle=None (i.e. all cycles), returns ... |
def draw_line(self, img, pixmapper, pt1, pt2, colour, linewidth):
pix1 = pixmapper(pt1)
pix2 = pixmapper(pt2)
(width, height) = image_shape(img)
(ret, pix1, pix2) = cv2.clipLine((0, 0, width, height), pix1, pix2)
if ret is False:
return
cv2.line(img, ... | draw a line on the image |
def decode(self, value):
if self.encoding:
value = value.decode(self.encoding)
return self.deserialize(value) | Decode value. |
def parse(readDataInstance):
boundEntry = ImageBoundImportDescriptorEntry()
boundEntry.timeDateStamp.value = readDataInstance.readDword()
boundEntry.offsetModuleName.value = readDataInstance.readWord()
boundEntry.numberOfModuleForwarderRefs.value = readDataInstance.readWord()
... | Returns a new L{ImageBoundImportDescriptorEntry} object.
@type readDataInstance: L{ReadData}
@param readDataInstance: A L{ReadData} object containing data to create a new L{ImageBoundImportDescriptorEntry}.
@rtype: L{ImageBoundImportDescriptorEntry}
@return: A new {Imag... |
def _federation_indicators(catalog, central_catalog,
identifier_search=False):
result = {
: None,
: None,
: None,
: None,
: None,
: [],
: [],
: [],
}
try:
central_catalog = readers.read_catalog(central_c... | Cuenta la cantidad de datasets incluídos tanto en la lista
'catalogs' como en el catálogo central, y genera indicadores a partir
de esa información.
Args:
catalog (dict): catálogo ya parseado
central_catalog (str o dict): ruta a catálogo central, o un dict
con el catálogo ya par... |
def parseExtensionArgs(self, args, strict=False):
policies_str = args.get()
if policies_str and policies_str != :
self.auth_policies = policies_str.split()
nist_level_str = args.get()
if nist_level_str:
try:
nist_level = int(nist_level_st... | Parse the provider authentication policy arguments into the
internal state of this object
@param args: unqualified provider authentication policy
arguments
@param strict: Whether to raise an exception when bad data is
encountered
@returns: None. The data is par... |
def step(self, actions):
if self._store_rollouts and \
self._rollouts_by_epoch_and_split[self.current_epoch]:
raise ValueError(
"Data for current epoch has already been loaded from disk."
)
(obs, unclipped_rewards, dones) = self._step(actions)
obs = self._preprocess_observ... | Makes a step in all environments.
Does any preprocessing and records frames.
Args:
actions: Batch of actions.
Returns:
(obs, rewards, dones) - batches of observations, rewards and done flags
respectively.
Raises:
ValueError: when the data for current epoch has already been lo... |
def make(ctx, check, version, initial_release, skip_sign, sign_only):
from ..signing import update_link_metadata, YubikeyException
releasing_all = check ==
valid_checks = get_valid_checks()
if not releasing_all and check not in valid_checks:
abort(.format(check))
| Perform a set of operations needed to release a single check:
\b
* update the version in __about__.py
* update the changelog
* update the requirements-agent-release.txt file
* update in-toto metadata
* commit the above changes
You can release everything at once by setting the che... |
def cast(self, dtype):
for child in self._children.values():
child.cast(dtype)
for _, param in self.params.items():
param.cast(dtype) | Cast this Block to use another data type.
Parameters
----------
dtype : str or numpy.dtype
The new data type. |
def radianceSpectrum(Omegas,AbsorptionCoefficient,Environment={:100.,:296.},
File=None, Format=, Wavenumber=None):
if Wavenumber: Omegas=Wavenumber
l = Environment[]
T = Environment[]
Alw = 1-exp(-AbsorptionCoefficient*l)
LBBTw = 2*hh*cc**2*Omegas**3 / (exp(hh*cc*Omega... | INPUT PARAMETERS:
Wavenumber/Omegas: wavenumber grid (required)
AbsorptionCoefficient: absorption coefficient on grid (required)
Environment: dictionary containing path length in cm.
and temperature in Kelvin.
Default={'l':100.,'... |
def get_app_model_voice(self, app_model_item):
if app_model_item.get(, None) is None:
raise ImproperlyConfigured()
if app_model_item.get(, None) is None:
raise ImproperlyConfigured()
return self.get_model_voice(app_model_item.get(), app_model_item) | App Model voice
Returns the js menu compatible voice dict if the user
can see it, None otherwise |
def create(self, *args, **kwargs):
if kwargs.has_key():
kwargs[] = kwargs[]
kwargs[] = kwargs[]
del kwargs[]
return super(CMSPageManager, self).create(*args, **kwargs) | Allow an 'author' kwarg to automatically fill in the created_by and last_modified_by fields. |
def parse(self, text):
self.expr = text
try:
out = ast.parse(text)
except SyntaxError:
self.raise_exception(None, msg=, expr=text)
except:
self.raise_exception(None, msg=, expr=text)
return out | Parse statement/expression to Ast representation. |
def start(self):
if self.is_started:
raise ConnectionError("Client has already been started")
if self.BOT_TOKEN_RE.match(self.session_name):
self.is_bot = True
self.bot_token = self.session_name
self.session_name = self.session_name.split(":")[0]... | Use this method to start the Client after creating it.
Requires no parameters.
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error.
``ConnectionError`` in case you try to start an already started Client. |
def note_update(self, note_id, coor_x=None, coor_y=None, width=None,
height=None, body=None):
params = {
: coor_x,
: coor_y,
: width,
: height,
: body
}
return self._get(.format(note_id), params, method=... | Function to update a note (Requires login) (UNTESTED).
Parameters:
note_id (int): Where note_id is the note id.
coor_x (int): The x coordinates of the note in pixels,
with respect to the top-left corner of the image.
coor_y (int): The y coordinates ... |
def set_pid():
global pid
lines = .join([, ])
try:
msg_id = send(lines, silent=True, user_variables=[])
except TypeError:
msg_id = send(lines, silent=True, user_expressions={:})
try:
child = get_child_msg(msg_id)
except Empty:
echo("no reply from IPyt... | Explicitly ask the ipython kernel for its pid |
def delete(self, query_id=None, **kwargs):
path = "/logging-service/v1/queries/{}".format(query_id)
r = self._httpclient.request(
method="DELETE",
url=self.url,
path=path,
**kwargs
)
return r | Delete a query job.
Uses the DELETE HTTP method to delete a query job. After calling
this endpoint, it is an error to poll for query results using
the queryId specified here.
Args:
query_id (str): Specifies the ID of the query job.
**kwargs: Supported :meth:`~pa... |
def check_categories(lines):
rcat_line = lines[0].split()
num_rc = 0
found_end = False
for inst_string in rcat_line[1:]:
if inst_string == :
if found_end is False:
num_rc = num_rc + 1
else:
found_end = True
max_rcat = 15
if max_rcat > len(lines):
max_rcat = len... | find out how many row and col categories are available |
def get_expire_time(cache_duration=None, valid_until=None):
expire_time = None
if cache_duration is not None:
expire_time = OneLogin_Saml2_Utils.parse_duration(cache_duration)
if valid_until is not None:
if isinstance(valid_until, int):
valid_un... | Compares 2 dates and returns the earliest.
:param cache_duration: The duration, as a string.
:type: string
:param valid_until: The valid until date, as a string or as a timestamp
:type: string
:return: The expiration time.
:rtype: int |
def named_eq_relations(self, name, neg=False):
if self.eqLinks and not neg:
if isinstance(name, six.string_types):
return filter(lambda x: x.relation.name == name,
self.eqLinks)
elif isinstance(name, list):
return f... | Returns list of named eqLinks.
<name> may be string or list. |
def fetch_git_package(self, config):
from git import Repo
ref = self.determine_git_ref(config)
dir_name = self.sanitize_git_path(uri=config[], ref=ref)
cached_dir_path = os.path.join(self.package_cache_dir, dir_name)
cached_d... | Make a remote git repository available for local use.
Args:
config (dict): git config dictionary |
def as_txt(self):
s = "IIIF Image Server Error\n\n"
s += self.text if (self.text) else
s += "\n\n"
if (self.parameter):
s += "parameter=%s\n" % self.parameter
if (self.code):
s += "code=%d\n\n" % self.code
for header in sorted(self.header... | Text rendering of error response.
Designed for use with Image API version 1.1 and above where the
error response is suggested to be text or html but not otherwise
specified. Intended to provide useful information for debugging. |
def sessions_info(self, hosts):
info_by_id = {}
for server_endpoint, dump in self.dump_by_server(hosts).items():
server_ip, server_port = server_endpoint
for line in dump.split("\n"):
mat = self.IP_PORT_REGEX.match(line)
if mat is None:
... | Returns ClientInfo per session.
:param hosts: comma separated lists of members of the ZK ensemble.
:returns: A dictionary of (session_id, ClientInfo). |
def json_integrity(baseline, suspect):
try:
for k,v in baseline.items():
for ks, vs in suspect.items():
keys_baseline = set(v.keys())
keys_suspect = set(vs.keys())
intersect_keys = keys_baseline.intersection(keys_suspect)
added... | Summary:
Validates baseline dict against suspect dict to ensure contain USERNAME
k,v parameters.
Args:
baseline (dict): baseline json structure
suspect (dict): json object validated against baseline structure
Returns:
Success (matches baseline) | Failure (no match), TYPE:... |
def spawn_spark_cluster(job,
numWorkers,
cores=None,
memory=None,
disk=None,
overrideLeaderIP=None):
if numWorkers < 1:
raise ValueError("Must have more than one worker. %d given." %... | :param numWorkers: The number of worker nodes to have in the cluster. \
Must be greater than or equal to 1.
:param cores: Optional parameter to set the number of cores per node. \
If not provided, we use the number of cores on the node that launches \
the service.
:param memory: Optional parameter t... |
def keys(self, pattern=None):
logger.debug(, pattern)
if pattern is None:
pattern =
return self._redis.keys(pattern=pattern) | Returns a list of keys matching ``pattern``.
By default return all keys.
>>> dc = Dictator()
>>> dc['l0'] = [1, 2, 3, 4]
>>> dc['s0'] = 'string value'
>>> dc.keys()
['l0', 's0']
>>> dc.keys('h*')
[]
>>> dc.clear()
:param pattern: key patt... |
def planfn(*args, **kwargs):
if len(args) > 2: raise ValueError()
elif len(args) == 0: (params,result) = (None,None)
elif len(args) == 2: (params,result) = args
elif is_str(args[0]): (params,result) = (None,args[0])
elif is_vector(args[0], str): (param... | planfn(val1=fn1, val2=fn2...) uses the pimms plan mechanism to yield a function f that produces
an immutable imap when called with the correct paramters. Unlike in plan(), planfn() does not
care about the names of the calculation units; instead the val1, val2, etc. are names of the
efferent values whi... |
def to_iso_time_string(self) -> str:
short_time = self.to_short_time_string()
second = self.time.second
return f"{short_time}:{second:02}" | Return the iso time string only |
def set_log_level(log_level):
if not LOGBOOK_INSTALLED:
return
logbook.get_level_name(log_level)
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_lev... | Set logging level of this module. Using
`logbook <https://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of
`logbook <https://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if ``log_level`` is ``logbook.NOTSET``... |
def pull_image(self, image_name, stream=None):
stream_writer = stream or StreamWriter(sys.stderr)
try:
result_itr = self.docker_client.api.pull(image_name, stream=True, decode=True)
except docker.errors.APIError as ex:
LOG.debug("Failed to download image with na... | Ask Docker to pull the container image with given name.
Parameters
----------
image_name str
Name of the image
stream samcli.lib.utils.stream_writer.StreamWriter
Optional stream writer to output to. Defaults to stderr
Raises
------
Docker... |
def range_daily(start=None, stop=None, timezone=, count=None):
return stops(start=start, stop=stop, freq=DAILY, timezone=timezone, count=count) | This an alternative way to generating sets of Delorean objects with
DAILY stops |
def cmd_list(unused_conf: Config):
for name, builder in sorted(Plugin.builders.items()):
if builder.func:
print(
.format(name, builder.func))
else:
print(.format(name))
for hook_name, hook_func in sorted(Plugin.get_hooks_for_builder(name)):
... | Print out information on loaded builders and hooks. |
def output(self, name):
m = WorkUnit._valid_name_re.match(name)
if not m or m.group(0) != name:
raise Exception(.format(name))
if name not in self._outputs:
workunit_name = re.sub(r, , self.name)
path = os.path.join(self.run_info_dir,
,
... | Returns the output buffer for the specified output name (e.g., 'stdout'), creating it if
necessary.
:API: public |
def boll(self, n, dev, array=False):
mid = self.sma(n, array)
std = self.std(n, array)
up = mid + std * dev
down = mid - std * dev
return up, down | 布林通道 |
def get_actions(self, request):
actions = super(CertificateMixin, self).get_actions(request)
actions.pop(, )
return actions | Disable the "delete selected" admin action.
Otherwise the action is present even though has_delete_permission is False, it just doesn't
work. |
def cmd_full_return(
self,
tgt,
fun,
arg=(),
timeout=None,
tgt_type=,
ret=,
verbose=False,
kwarg=None,
**kwargs):
was_listening = self.event.cpub
try:
pub_data = ... | Execute a salt command and return |
def detect_events(self, data, method, params, label):
if self.annot is None:
self.parent.statusBar().showMessage()
return
lg.info( + label)
self.annot.add_event_type(label)
self.display_eventtype()
n_eventtype = self.idx_eventtype.count()
... | Detect events and display on signal.
Parameters
----------
data : instance of ChanTime
one segment with all channels of interest
method : str
Method used for detection.
params : dict
Parameters used for detection.
label : str
... |
def text_filepaths_for_task(self, tmp_dir, task_id):
assert task_id >= 0
assert task_id < self.num_train_shards + self.num_dev_shards
if task_id < self.num_train_shards:
return [
f for i, f in enumerate(self.train_text_filepaths(tmp_dir))
if i % self.num_train_shards == task_i... | List of input filepaths for a particular training or dev shard.
Args:
tmp_dir: a string
task_id: an integer less than self.num_shards
Returns:
a list of tuples (filepath, start_pos, num_bytes) |
def l(*members, meta=None) -> List:
return List(
plist(iterable=members), meta=meta
) | Creates a new list from members. |
def _read(self, directory, filename, session, path, name, extension, spatial, spatialReferenceID, replaceParamFile):
self.fileExtension = extension
with open(path, ) as f:
self.rasterText = f.read()
lines = self.rasterText.split()
for lin... | Index Map Read from File Method |
def generate(self):
sampled_arr = np.zeros((self.__batch_size, self.__channel, self.__seq_len, self.__dim))
for batch in range(self.__batch_size):
for i in range(len(self.__program_list)):
program_key = self.__program_list[i]
key = np.random.r... | Generate noise samples.
Returns:
`np.ndarray` of samples. |
def delta_unaccelerated(self):
if self.type not in {EventType.GESTURE_SWIPE_UPDATE,
EventType.GESTURE_PINCH_UPDATE}:
raise AttributeError(_wrong_prop.format(self.type))
delta_x = self._libinput.libinput_event_gesture_get_dx_unaccelerated(
self._handle)
delta_y = self._libinput.libinput_event_gesture... | The relative delta of the unaccelerated motion vector of
the current event.
For gesture events that are not of type
:attr:`~libinput.constant.EventType.GESTURE_SWIPE_UPDATE` or
:attr:`~libinput.constant.EventType.GESTURE_PINCH_UPDATE`, this
property raises :exc:`AttributeError`.
Relative unaccelerated mot... |
def cross(self, other):
b = self.__class__._convert(other)
return sum([(self.y * b.z) - (self.z * b.y),
(self.z * b.x) - (self.x * b.z),
(self.x * b.y) - (self.y * b.x)]) | :other: Point or point equivalent
:return: float
Vector cross product of points U (self) and V (other), computed:
U x V = (u1*i + u2*j + u3*k) x (v1*i + v2*j + v3*k)
s1 = u2v3 - u3v2
s2 = u3v1 - u1v3
s3 = u1v2 - u2v1
U x V = s1 + s2 + s3
Returns a floa... |
def console_get_height_rect(
con: tcod.console.Console, x: int, y: int, w: int, h: int, fmt: str
) -> int:
return int(
lib.TCOD_console_get_height_rect_fmt(
_console(con), x, y, w, h, _fmt(fmt)
)
) | Return the height of this text once word-wrapped into this rectangle.
Returns:
int: The number of lines of text once word-wrapped.
.. deprecated:: 8.5
Use :any:`Console.get_height_rect` instead. |
def pretty_to_link(inst, link):
TO
values =
prefix =
metaclass = xtuml.get_metaclass(inst)
for name, ty in metaclass.attributes:
if name in link.key_map:
value = getattr(inst, name)
value = xtuml.serialize_value(value, ty)
name = link.key_map[name]
... | Create a human-readable representation of a link on the 'TO'-side |
def do_GET(self):
f = self.send_head()
if f:
self.copyfile(f, self.wfile)
f.close() | Serve a GET request. |
def _set_pfc(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("pfc_cos",pfc.pfc, yang_name="pfc", rest_name="pfc", parent=self, is_container=, user_ordered=False, path_helper=self._path_helper, yang_keys=, extensions={u: {u: u, u: None, u:... | Setter method for pfc, mapped from YANG variable /interface/port_channel/qos/flowcontrol/pfc (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_pfc is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_pfc... |
def get_agg_data(cls, obj, category=None):
paths = []
if isinstance(obj, Graph):
obj = obj.edgepaths
kdims = list(obj.kdims)
vdims = list(obj.vdims)
dims = obj.dimensions()[:2]
if isinstance(obj, Path):
glyph =
for p in obj.sp... | Reduces any Overlay or NdOverlay of Elements into a single
xarray Dataset that can be aggregated. |
def getNamespace(self, prefix):
namespace = None
if prefix == :
namespace = DOM.findDefaultNS(prefix, self.__node)
else:
try:
namespace = DOM.findNamespaceURI(prefix, self.__node)
except DOMException, ex:
if prefix != :... | prefix -- deference namespace prefix in node's context.
Ascends parent nodes until found. |
def enforce(self, rule, target, creds, exc=None, *args, **kwargs):
self.load_rules()
if isinstance(rule, checks.BaseCheck):
result = rule(target, creds, self, rule)
elif not self.rules:
result = False
if self.raise_error and not result... | Checks authorization of a rule against the target and credentials. |
def absence_info(self):
from ..eighth.models import EighthSignup
return EighthSignup.objects.filter(user=self, was_absent=True, scheduled_activity__attendance_taken=True) | Return information about the user's absences. |
def users(self, username=None, pk=None, **kwargs):
request_params = {
: username,
: pk,
}
if kwargs:
request_params.update(**kwargs)
r = self._request(, self._build_url(), params=request_params)
if r.status_code != requests.codes.ok:... | Users of KE-chain.
Provide a list of :class:`User`s of KE-chain. You can filter on username or id or any other advanced filter.
:param username: (optional) username to filter
:type username: basestring or None
:param pk: (optional) id of the user to filter
:type pk: basestring ... |
def date_to_string(date):
if isinstance(date, datetime.datetime):
date_str = date.strftime()
tzstr = date.strftime()
if tzstr:
return date_str | Transform a date or datetime object into a string and return it.
Examples:
>>> date_to_string(datetime.datetime(2012, 1, 3, 12, 23, 34, tzinfo=UTC))
'2012-01-03T12:23:34+00:00'
>>> date_to_string(datetime.datetime(2012, 1, 3, 12, 23, 34))
'2012-01-03T12:23:34'
>>> date_to_string(datetime.date(2... |
def ClientCertFromCSR(cls, csr):
builder = x509.CertificateBuilder()
common_name = csr.GetCN()
serial = int(common_name.split(".")[1], 16)
builder = builder.serial_number(serial)
builder = builder.subject_name(
x509.Name(
[x509.NameAttribute(oid.NameOID.COMMON_NAME... | Creates a new cert for the given common name.
Args:
csr: A CertificateSigningRequest.
Returns:
The signed cert. |
def attr_gen(self, attr):
HiisiHDF._clear_cache()
HiisiHDF.CACHE[] = attr
HiisiHDF._find_attr_paths(, self[])
self.visititems(HiisiHDF._find_attr_paths)
path_attr_gen = (PathValue(attr_path, self[attr_path].attrs.get(attr)) for attr_path in HiisiHDF.CACHE[])
ret... | Returns attribute generator that yields namedtuples containing
path value pairs
Parameters
----------
attr : str
Name of the search attribute
Returns
-------
attr_generator : generator
Returns a generator that yields named tuples ... |
def ss_reg(self):
return np.sum(np.square(self.predicted - self.ybar), axis=0) | Sum of squares of the regression. |
def uinit(self, ushape):
if self.opt[] is None:
return np.zeros(ushape, dtype=self.dtype)
else:
U0 = np.sign(self.block_sep0(self.Y)) / self.rho
U1 = self.block_sep1(self.Y) - self.S
return self.block_cat(U0... | Return initialiser for working variable U. |
def alphabetical_formula(self):
alph_formula = super().alphabetical_formula
chg_str = ""
if self.charge > 0:
chg_str = " +" + formula_double_format(self.charge, False)
elif self.charge < 0:
chg_str = " " + formula_double_format(self.charge, False)
... | Returns a reduced formula string with appended charge |
def to_nifti(obj, like=None, header=None, affine=None, extensions=Ellipsis, version=1):
s voxel and native
orientation matrices. All other specific options below override anything deduced from the
like argument.
* header (default: None) may be a Nifti1 or Niti2 image header to be used as the n... | to_nifti(obj) yields a Nifti2Image object that is as equivalent as possible to the given object
obj. If obj is a Nifti2Image already, then it is returned unmolested; other deduction rules
are described below.
The following options are accepted:
* like (default: None) may be provided to give a gui... |
def _get_bandwidth_price_id(items,
hourly=True,
no_public=False,
location=None):
for item in items:
capacity = float(item.get(, 0))
if any([utils.lookup(item,
,
... | Choose a valid price id for bandwidth. |
def to_gremlin(self):
self.validate()
template = (
u
u
u)
field_representations = (
u.format(name=key, expr=self.fields[key].to_gremlin())
for key in sorted(self.fields.keys())
)
return template.format(u.joi... | Return a unicode object with the Gremlin representation of this block. |
def findall(self, string):
output = []
for match in self.pattern.findall(string):
if hasattr(match, ):
match = [match]
self._list_add(output, self.run(match))
return output | Parse string, returning all outputs as parsed by functions |
def connect_to_syslog(address=None, facility=None, level=None):
if not address:
address = find_syslog_address()
if facility is None:
facility = logging.handlers.SysLogHandler.LOG_USER
if level is None:
level = DEFAULT_LOG_LEVEL
for socktype in socket.SOCK_RAW, socket.SOCK_ST... | Create a :class:`~logging.handlers.SysLogHandler`.
:param address: The device file or network address of the system logging
daemon (a string or tuple, defaults to the result of
:func:`find_syslog_address()`).
:param facility: Refer to :class:`~logging.handlers.SysLogHand... |
def _build_block_element_list(self):
return sorted(
[e for e in self.block_elements.values() if not e.virtual],
key=lambda e: e.priority,
reverse=True
) | Return a list of block elements, ordered from highest priority to lowest. |
def example_exc_handler(tries_remaining, exception, delay):
print >> sys.stderr, "Caught , %d tries remaining, sleeping for %s seconds" % (
exception, tries_remaining, delay) | Example exception handler; prints a warning to stderr.
tries_remaining: The number of tries remaining.
exception: The exception instance which was raised. |
def _validate_isvalid_uncertainty(self, isvalid_uncertainty, field, value):
self._validate_isvalid_quantity(True, field, value)
| Checks for valid given value and appropriate units with uncertainty.
Args:
isvalid_uncertainty (`bool`): flag from schema indicating uncertainty to be checked
field (`str`): property associated with the quantity in question.
value (`list`): list with the string of the value ... |
def _check_self_cls_assign(self, node):
assign_names = {
target.name
for target in node.targets
if isinstance(target, astroid.AssignName)
}
scope = node.scope()
nonlocals_with_same_name = any(
child
for child in scope.b... | Check that self/cls don't get assigned |
def run(self):
for cls in self.get_test_classes():
self.logger.info(.format(cls=cls))
test = cls(runner=self)
if test._run():
self.logger.passed(.format(cls=cls))
else:
sel... | Runs all enabled tests. |
def update(self, ava):
for key, val in ava.items():
self[key] = val | Implements the dict.update() method |
def findTextBackward(self, block, column, needle):
if column is not None:
index = block.text()[:column].rfind(needle)
else:
index = block.text().rfind(needle)
if index != -1:
return block, index
for block in self.iterateBlocksBackFrom(block.... | Search for a needle and return (block, column)
Raise ValueError, if not found |
def setVisible(self, value):
if self.timer is not None:
if value:
self.timer.start(self._interval)
else:
self.timer.stop()
super(BaseTimerStatus, self).setVisible(value) | Override Qt method to stops timers if widget is not visible. |
def shorrocks_index(A):
r
A = np.asarray(A)
m, n = A.shape
if m != n:
raise ValueError()
diag_sum = np.diag(A).sum()
return (m - diag_sum) / (m - 1) | r"""
Implements Shorrocks mobility index
Parameters
-----------
A : array_like(float)
Square matrix with transition probabilities (mobility matrix) of
dimension m
Returns
--------
Shorrocks index: float
The Shorrocks mobility index calculated as
.. math::
... |
def get_mac_address_table_input_request_type_get_next_request_mac_address_type(self, **kwargs):
config = ET.Element("config")
get_mac_address_table = ET.Element("get_mac_address_table")
config = get_mac_address_table
input = ET.SubElement(get_mac_address_table, "input")
... | Auto Generated Code |
def callback_parent(attr, old, new):
import os
new = new.strip()
parent_input.value = new
if os.path.exists(new):
joinisdir = lambda parent, d: os.path.isdir(os.path.join(parent, d))
options = sorted([d for d in os.listdir(new) if joinisdir(new, d)])
... | Update data directories drop down with new parent directory |
def sv_variant(store, institute_id, case_name, variant_id=None, variant_obj=None, add_case=True,
get_overlapping=True):
institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
if not variant_obj:
variant_obj = store.variant(variant_id)
if add_case:
... | Pre-process an SV variant entry for detail page.
Adds information to display variant
Args:
store(scout.adapter.MongoAdapter)
institute_id(str)
case_name(str)
variant_id(str)
variant_obj(dcit)
add_case(bool): If information about case files should be added
R... |
def charge(self, cart, request):
token_id = cart.extra[][]
if LooseVersion(SHOP_VERSION) < LooseVersion():
charge = stripe.Charge.create(
amount=cart.total.as_integer(),
currency=cart.total.currency,
source=token_id,
de... | Use the Stripe token from the request and charge immediately.
This view is invoked by the Javascript function `scope.charge()` delivered
by `get_payment_request`. |
def _ShouldPrintError(category, confidence, linenum):
if category.startswith(one_filter[1:]):
is_filtered = False
else:
assert False
if is_filtered:
return False
return True | If confidence >= verbose, category passes filter and is not suppressed. |
def get_context(line, pos):
commands = split_commandline(line) + []
i = 0
start = 0
end = len(commands[i])
while pos > end:
i += 1
start = end + 1
end += 1 + len(commands[i])
return start, end | computes start and end position of substring of line that is the
command string under given position |
def check_geographic_region(self, ds):
ret_val = []
region_list = [
,
,
,
,
,
,
,
,
,
,
,
,
,
,
,
,
... | 6.1.1 When data is representative of geographic regions which can be identified by names but which have complex
boundaries that cannot practically be specified using longitude and latitude boundary coordinates, a labeled
axis should be used to identify the regions.
Recommend that the names be c... |
def save_sequence_rule(self, sequence_rule_form, *args, **kwargs):
    """Save a sequence rule, dispatching to update or create.

    Pass-through to the provider session: forms flagged as updates go to
    ``update_sequence_rule``; all others go to ``create_sequence_rule``.
    Extra positional and keyword arguments are forwarded unchanged.
    """
    handler = (self.update_sequence_rule
               if sequence_rule_form.is_for_update()
               else self.create_sequence_rule)
    return handler(sequence_rule_form, *args, **kwargs)
def element_wise(self, other, op):
if not isscalar(other) and not self.shape == other.shape:
raise ValueError("shapes %s and %s must be equal" % (self.shape, other.shape))
if not isscalar(other) and isinstance(other, Data) and not self.mode == other.mode:
raise NotImple... | Apply an elementwise operation to data.
Both self and other data must have the same mode.
If self is in local mode, other can also be a numpy array.
Self and other must have the same shape, or other must be a scalar.
Parameters
----------
other : Data or numpy array
... |
def update_terminal_regions(self, tree, X, y, residual, y_pred,
                            sample_weight, sample_mask,
                            learning_rate=1.0, k=0):
    """Advance the running predictions for class/output column ``k``.

    Least-squares loss needs no terminal-region (leaf value) updates, so
    this only adds the tree's scaled predictions into ``y_pred`` in place.
    ``y``, ``residual``, ``sample_weight`` and ``sample_mask`` are unused
    here but kept for interface compatibility with other losses.
    """
    leaf_predictions = tree.predict(X).ravel()
    y_pred[:, k] = y_pred[:, k] + learning_rate * leaf_predictions
But it has to update the predictions. |
def _update_srcmap_file(self, sources, overwrite=True):
if not os.path.isfile(self.files[]):
return
hdulist = fits.open(self.files[])
hdunames = [hdu.name.upper() for hdu in hdulist]
srcmaps = {}
for src in sources:
if src.name.upper() in hdu... | Check the contents of the source map file and generate
source maps for any components that are not present. |
def index_list_for_sort_order(x: List[Any], key: Callable[[Any], Any] = None,
reverse: bool = False) -> List[int]:
def key_with_user_func(idx_val: Tuple[int, Any]):
return key(idx_val[1])
if key:
sort_key = key_with_user_func
else:
sort_key... | Returns a list of indexes of ``x``, IF ``x`` WERE TO BE SORTED.
Args:
x: data
key: function to be applied to the data to generate a sort key; this
function is passed as the ``key=`` parameter to :func:`sorted`;
the default is ``itemgetter(1)``
reverse: reverse the so... |
def _load_scoped_variable_models(self):
    """Create and register a ScopedVariableModel for every scoped
    variable held by ``self.state``.

    Resets ``self.scoped_variables`` to a fresh list before populating it
    via ``self._add_model``.
    """
    models = []
    self.scoped_variables = models
    for core_variable in self.state.scoped_variables.values():
        self._add_model(models, core_variable, ScopedVariableModel)
def get_renderer(app, id):
renderer = app.extensions.get(, {})[id]
if isinstance(renderer, tuple):
mod_name, cls_name = renderer
mod = import_module(mod_name)
cls = mod
for name in cls_name.split():
cls = getattr(cls, name)
return cls
return rende... | Retrieve a renderer.
:param app: :class:`~flask.Flask` application to look ``id`` up on
:param id: Internal renderer id-string to look up |
def get_extra_radiation(doy, solar_constant=1366.1):
    """Determine extraterrestrial radiation for a day of year using the
    Spencer (1971) Fourier-series method.

    Args:
        doy: Day of year (1-366); day 1 gives a zero day angle.
        solar_constant: Solar constant, default 1366.1 (W/m^2).

    Returns:
        Extraterrestrial radiation in the same units as ``solar_constant``.
    """
    day_angle = (2. * math.pi / 365.) * (doy - 1)
    cos, sin = math.cos, math.sin
    # Spencer series for (R0/R)^2 — the squared inverse of the relative
    # Earth-Sun distance for this day of the year.
    inverse_distance_sq = (1.00011
                           + 0.034221 * cos(day_angle)
                           + 0.00128 * sin(day_angle)
                           + 0.000719 * cos(2 * day_angle)
                           + 7.7e-05 * sin(2 * day_angle))
    return solar_constant * inverse_distance_sq
return Ea | Determine extraterrestrial radiation from day of year (using the spencer method).
Note:
[1] M. Reno, C. Hansen, and J. Stein, "Global Horizontal Irradiance
Clear Sky Models: Implementation and Analysis", Sandia National
Laboratories, SAND2012-2389, 2012.
[2] <http://solardat.uorego... |
def get_batch(sequence, size, start=0, endpoint=None, complete=False):
batch = make_batch(sequence, size, start)
return {
"pagesize": batch.get_pagesize(),
"next": batch.make_next_url(),
"previous": batch.make_prev_url(),
"page": batch.get_pagenumber(),
"pages": ba... | create a batched result record out of a sequence (catalog brains) |
def _assert_recur_is_tail(node: Node) -> None:
if node.op == NodeOp.DO:
assert isinstance(node, Do)
for child in node.statements:
_assert_no_recur(child)
_assert_recur_is_tail(node.ret)
elif node.op in {NodeOp.FN, NodeOp.FN_METHOD, NodeOp.METHOD}:
assert isinst... | Assert that `recur` forms only appear in the tail position of this
or child AST nodes.
`recur` forms may only appear in `do` nodes (both literal and synthetic
`do` nodes) and in either the :then or :else expression of an `if` node. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.