code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|
def updateUserTone(conversationPayload, toneAnalyzerPayload, maintainHistory):
emotionTone = None
writingTone = None
socialTone = None
if not in conversationPayload:
conversationPayload[] = {}
if not in conversationPayload[]:
conversationPayload[] = initUser()
... | updateUserTone processes the Tone Analyzer payload to pull out the emotion,
writing and social tones, and identify the meaningful tones (i.e.,
those tones that meet the specified thresholds).
The conversationPayload json object is updated to include these tones.
@param conversationPayload json object re... |
def contained_bins(start, stop=None):
if stop is None:
stop = start + 1
min_bin = assign_bin(start, stop)
return [bin for bin in overlapping_bins(start, stop) if bin >= min_bin] | Given an interval `start:stop`, return bins for intervals completely
*contained by* `start:stop`. The order is according to the bin level
(starting with the smallest bins), and within a level according to the bin
number (ascending).
:arg int start, stop: Interval positions (zero-based, open-ended). If
... |
def _merge_headers(self, call_specific_headers):
merged_headers = requests.structures.CaseInsensitiveDict({
"User-Agent": self.user_agent
})
if call_specific_headers:
merged_headers.update(call_specific_headers)
... | Merge headers from different sources together. Headers passed to the
post/get methods have highest priority, then headers associated with
the connection object itself have next priority.
:param call_specific_headers: A header dict from the get/post call, or
None (the default for th... |
def get_source(self, doc):
start_iter = doc.get_start_iter()
end_iter = doc.get_end_iter()
source = doc.get_text(start_iter, end_iter, False)
return source | Grab contents of 'doc' and return it
:param doc: The active document
:return: |
def delete_checkpoint(self, checkpoint_dir):
if os.path.isfile(checkpoint_dir):
shutil.rmtree(os.path.dirname(checkpoint_dir))
else:
shutil.rmtree(checkpoint_dir) | Removes subdirectory within checkpoint_folder
Parameters
----------
checkpoint_dir : path to checkpoint |
def check_type(self, value):
if self.__dict__[] is None:
return
elif value is None:
return
elif isinstance(value, self.__dict__[]):
return
msg = "Value of type %s, when %s was expected." % (
type(value), self.__dict__[])
ra... | Hook for type-checking, invoked during assignment.
raises TypeError if neither value nor self.dtype are None and they
do not match.
will not raise an exception if either value or self.dtype is None |
def confirmation_pdf(self, confirmation_id):
return self._create_get_request(resource=CONFIRMATIONS, billomat_id=confirmation_id, command=PDF) | Opens a pdf of a confirmation
:param confirmation_id: the confirmation id
:return: dict |
def trans_his(self, symbol=, start=0, offset=10, date=):
market = get_stock_market(symbol)
with self.client.connect(*self.bestip):
data = self.client.get_history_transaction_data(
int(market), symbol, int(start), int(offset), date)
return self.client.to_... | 查询历史分笔成交
:param market: 市场代码
:param symbol: 股票代码
:param start: 起始位置
:param offset: 数量
:param date: 日期
:return: pd.dataFrame or None |
def failures():
if not HAVE_BIN_LIBS:
click.echo("missing required binary libs (lz4, msgpack)")
return
q = Queue(, connection=worker.connection)
for i in q.get_job_ids():
j = q.job_class.fetch(i, connection=q.connection)
click.echo("%s on %s" % (j.func_name, j.origin))
... | Show any unexpected failures |
def available_languages(wordlist=):
if wordlist == :
available = available_languages()
available.update(available_languages())
return available
elif wordlist == :
logger.warning(
"The wordlists have been renamed to ."
)
wordlist =
available... | Given a wordlist name, return a dictionary of language codes to filenames,
representing all the languages in which that wordlist is available. |
def register(self, name, callback, filter):
if name in self.names:
raise ValueError("A callback has already been registered with \
the name " % name)
self.handlers.append({
: name,
: callback,
: filter
})
... | register: string, function: string, data -> None,
function: data -> boolean -> None
Register will save the given name, callback, and filter function
for use when a packet arrives. When one arrives, the filter
function will be called to determine whether to call its associated
ca... |
def _get_scenarios(network_id, include_data, user_id, scenario_ids=None):
scen_qry = db.DBSession.query(Scenario).filter(
Scenario.network_id == network_id).options(
noload()).filter(
Scenario.status == )
if scenario_ids:
logging.... | Get all the scenarios in a network |
def small_parts(script, ratio=0.2, non_closed_only=False):
select.small_parts(script, ratio, non_closed_only)
selected(script)
return None | Select & delete the small disconnected parts (components) of a mesh.
Args:
script: the FilterScript object or script filename to write
the filter to.
ratio (float): This ratio (between 0 and 1) defines the meaning of
'small' as the threshold ratio between the number of faces... |
def dx_orbit_sys(t, X):
(m1x, m1y,
m2x, m2y,
m3x, m3y,
m4x, m4y,
m1vx, m1vy,
m2vx, m2vy,
m3vx, m3vy,
m4vx, m4vy) = X
m_moon1 = 7.342*(10**22)
m_moon2 = 7.342*(10**22)
m_moon3 = 7.342*(10**22)
m_moon4 = 7.342*(10**22)
G = 6.67408*(10**-11)
dm1... | X = [
m1x, m1y,
m2x, m2y,
m3x, m3y,
m4x, m4y,
m1vx, m1vy,
m2vx, m2vy,
m3vx, m3vy,
m4vx, m4vy
] |
def set_label(self, label, lang):
try:
self.metadata.add(SKOS.prefLabel, Literal(label, lang=lang))
self.graph.addN([
(self.asNode(), RDFS.label, Literal(label, lang=lang), self.graph),
])
except Exception as E:
pass | Add the label of the collection in given lang
:param label: Label Value
:param lang: Language code |
def ckan_extension_template(name, target):
setupdir = .format(target, name)
extdir = setupdir + .format(name)
templatedir = extdir +
staticdir = extdir +
makedirs(templatedir + )
makedirs(staticdir)
here = dirname(__file__)
copyfile(here + , staticdir + )
copyfile(here + ,
... | Create ckanext-(name) in target directory. |
def shutdown(self):
_v and LOG.debug(, self)
def _shutdown():
self._alive = False
if self._alive and not self._exitted:
self.defer(_shutdown) | Request broker gracefully disconnect streams and stop. Safe to call
from any thread. |
def remove_client(self, client):
try:
self._clients.remove(id(client))
except ValueError:
pass
if len(self._clients) < 1:
self.close() | Remove the client from the users of the socket.
If there are no more clients for the socket, it
will close automatically. |
def __require_kytos_config(self):
if self.__enabled is None:
uri = self._kytos_api +
try:
options = json.loads(urllib.request.urlopen(uri).read())
except urllib.error.URLError:
print()
sys.exit()
self.__ena... | Set path locations from kytosd API.
It should not be called directly, but from properties that require a
running kytosd instance. |
def set_selected_submission(self, course, task, submissionid):
submission = self.submission_manager.get_submission(submissionid)
if not submission:
return False
if submission["taskid"] != task.get_id() or submission["courseid"] != course.get_id():
... | Set submission whose id is `submissionid` to selected grading submission for the given course/task.
Returns a boolean indicating whether the operation was successful or not. |
def get_dsn(d):
try:
return d["dataSetName"]
except Exception as e:
logger_misc.warn("get_dsn: Exception: No datasetname found, unable to continue: {}".format(e))
exit(1) | Get the dataset name from a record
:param dict d: Metadata
:return str: Dataset name |
def signals_blocker(instance, attribute, *args, **kwargs):
value = None
try:
hasattr(instance, "blockSignals") and instance.blockSignals(True)
value = attribute(*args, **kwargs)
finally:
hasattr(instance, "blockSignals") and instance.blockSignals(False)
return value | Blocks given instance signals before calling the given attribute with \
given arguments and then unblocks the signals.
:param instance: Instance object.
:type instance: QObject
:param attribute: Attribute to call.
:type attribute: QObject
:param \*args: Arguments.
:type \*args: \*
:para... |
def get_labels(obj):
if Clustering.is_pyclustering_instance(obj.model):
return obj._labels_from_pyclusters
else:
return obj.model.labels_ | Retrieve the labels of a clustering.rst object
:param obj: the clustering.rst object
:return: the resulting labels |
def list_orgs(self):
orgs = list(self.orgs.keys())
orgs.sort()
return orgs | list the orgs configured in the keychain |
def merge(obj_a, obj_b, strategy=, renderer=, merge_lists=False):
*{foo: Foo}{bar: Bar}
return salt.utils.dictupdate.merge(obj_a, obj_b, strategy, renderer,
merge_lists) | Merge a data structure into another by choosing a merge strategy
Strategies:
* aggregate
* list
* overwrite
* recurse
* smart
CLI Example:
.. code-block:: shell
salt '*' slsutil.merge '{foo: Foo}' '{bar: Bar}' |
def load_page_buffer(self, buffer_number, address, bytes):
assert buffer_number < len(self.page_buffers), "Invalid buffer number"
bytes = self.override_security_bits(address, bytes)
self.target.write_memory_block8(self.page_buffers[buffer_number], bytes) | !
@brief Load data to a numbered page buffer.
This method is used in conjunction with start_program_page_with_buffer() to implement
double buffered programming. |
def _get_classifier(self, prefix):
with self.name_scope():
classifier = nn.Dense(2, prefix=prefix)
return classifier | Construct a decoder for the next sentence prediction task |
def rename_state_fluent(name: str) -> str:
i = name.index()
functor = name[:i]
arity = name[i+1:]
return "{}'/{}".format(functor, arity) | Returns current state fluent canonical name.
Args:
name (str): The next state fluent name.
Returns:
str: The current state fluent name. |
def delete_keys(self, keys, quiet=False, mfa_token=None, headers=None):
ikeys = iter(keys)
result = MultiDeleteResult(self)
provider = self.connection.provider
query_args =
def delete_keys2(hdrs):
hdrs = hdrs or {}
data = u
data += u"... | Deletes a set of keys using S3's Multi-object delete API. If a
VersionID is specified for that key then that version is removed.
Returns a MultiDeleteResult Object, which contains Deleted
and Error elements for each key you ask to delete.
:type keys: list
:param keys: A ... |
def login(self, pin, user_type=CKU_USER):
pin1 = ckbytelist(pin)
rv = self.lib.C_Login(self.session, user_type, pin1)
if rv != CKR_OK:
raise PyKCS11Error(rv) | C_Login
:param pin: the user's PIN or None for CKF_PROTECTED_AUTHENTICATION_PATH
:type pin: string
:param user_type: the user type. The default value is
CKU_USER. You may also use CKU_SO
:type user_type: integer |
def predict_proba(self, X):
Xt, _, _ = self._transform(X)
return self._final_estimator.predict_proba(Xt) | Apply transforms, and predict_proba of the final estimator
Parameters
----------
X : iterable
Data to predict on. Must fulfill input requirements of first step
of the pipeline.
Returns
-------
y_proba : array-like, shape = [n_samples, n_classes]
... |
def readAlignments(self, reads):
if self._fp is None:
self._open(self._filename)
reads = iter(reads)
try:
for lineNumber, line in enumerate(self._fp, start=2):
try:
record = loads(line[:-1])
except ValueError ... | Read lines of JSON from self._filename, convert them to read alignments
and yield them.
@param reads: An iterable of L{Read} instances, corresponding to the
reads that were given to BLAST.
@raise ValueError: If any of the lines in the file cannot be converted
to JSON.
... |
def dump_xml(props, fp, comment=None, encoding=, sort_keys=False):
fp = codecs.lookup(encoding).streamwriter(fp, errors=)
print(
.format(quoteattr(encoding)), file=fp)
for s in _stream_xml(props, comment, sort_keys):
print(s, file=fp) | Write a series ``props`` of key-value pairs to a binary filehandle ``fp``
in the format of an XML properties file. The file will include both an XML
declaration and a doctype declaration.
:param props: A mapping or iterable of ``(key, value)`` pairs to write to
``fp``. All keys and values in ``pr... |
def add_highlight(self, hl_group, line, col_start=0,
col_end=-1, src_id=-1, async_=None,
**kwargs):
async_ = check_async(async_, kwargs, src_id != 0)
return self.request(, src_id, hl_group,
line, col_start, col_end, async_=... | Add a highlight to the buffer. |
def evaluate(self, dataset, metric=, missing_value_action=):
_raise_error_evaluation_metric_is_valid(
metric, [, , ])
return super(RandomForestRegression, self).evaluate(dataset,
missing_value_action=missing_value_action,
... | Evaluate the model on the given dataset.
Parameters
----------
dataset : SFrame
Dataset in the same format used for training. The columns names and
types of the dataset must be the same as that used in training.
metric : str, optional
Name of the ev... |
def execute(self):
self.print_info()
self._config.provisioner.converge()
self._config.state.change_state(, True) | Execute the actions necessary to perform a `molecule converge` and
returns None.
:return: None |
def _make_routing_list(api_provider):
routes = []
for api in api_provider.get_all():
route = Route(methods=[api.method], function_name=api.function_name, path=api.path,
binary_types=api.binary_media_types)
routes.append(route)
return r... | Returns a list of routes to configure the Local API Service based on the APIs configured in the template.
Parameters
----------
api_provider : samcli.commands.local.lib.sam_api_provider.SamApiProvider
Returns
-------
list(samcli.local.apigw.service.Route)
Li... |
def mmPrettyPrintDataOverlap(self):
matrix = self.mmGetDataOverlap()
resetsTrace = self.mmGetTraceResets()
text = ""
for i, row in enumerate(matrix):
if resetsTrace.data[i]:
text += "\n"
for j, item in enumerate(row):
if resetsTrace.data[j]:
text += " "
... | Returns pretty-printed string representation of overlap metric data.
(See `mmGetDataOverlap`.)
@return (string) Pretty-printed data |
def meraculous_runner(self):
os.chdir(self.allAssembliesDir)
print(self.callString)
p = subprocess.run(self.callString, shell=True, stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True)
output = s... | Check to make sure that the allAssembliesDir has been created, if not,
make it. This will only execute for the first time an assembly has been
run in this directory.
Run the directory from allAssembliesDir. The self.callString instance
attribute tells Meraculous to name the assembly dir... |
def chooseReliableActiveFiringRate(cellsPerAxis, bumpSigma,
minimumActiveDiameter=None):
firingFieldDiameter = 2 * (1./cellsPerAxis)*(2./math.sqrt(3))
if minimumActiveDiameter:
firingFieldDiameter = max(firingFieldDiameter, minimumActiveDiameter)
return Thre... | When a cell is activated by sensory input, this implies that the phase is
within a particular small patch of the rhombus. This patch is roughly
equivalent to a circle of diameter (1/cellsPerAxis)(2/sqrt(3)), centered on
the cell. This 2/sqrt(3) accounts for the fact that when circles are packed
into hex... |
def sensoryCompute(self, activeMinicolumns, learn):
inputParams = {
"activeColumns": activeMinicolumns,
"basalInput": self.getLocationRepresentation(),
"basalGrowthCandidates": self.getLearnableLocationRepresentation(),
"learn": learn
}
self.L4.compute(**inputParams)
locati... | @param activeMinicolumns (numpy array)
List of indices of minicolumns to activate.
@param learn (bool)
If True, the two layers should learn this association.
@return (tuple of dicts)
Data for logging/tracing. |
def get_stp_mst_detail_output_cist_port_interface_name(self, **kwargs):
config = ET.Element("config")
get_stp_mst_detail = ET.Element("get_stp_mst_detail")
config = get_stp_mst_detail
output = ET.SubElement(get_stp_mst_detail, "output")
cist = ET.SubElement(output, "cist... | Auto Generated Code |
def check_validity_of_long_form_args(model_obj, wide_weights, rows_to_obs):
ensure_model_obj_has_mapping_constructor(model_obj)
ensure_wide_weights_is_1D_or_2D_ndarray(wide_weights)
ensure_rows_to_obs_validity(rows_to_obs)
return None | Ensures the args to `create_long_form_weights` have expected properties. |
def quote_header_value(value, extra_chars=, allow_token=True):
value = to_string(value)
if allow_token:
token_chars = HEADER_TOKEN_CHARS | set(extra_chars)
if set(value).issubset(token_chars):
return value
return % value.replace(, ).replace(, ) | Quote a header value if necessary.
:param value: the value to quote.
:param extra_chars: a list of extra characters to skip quoting.
:param allow_token: if this is enabled token values are returned
unchanged. |
def workload_state_compare(current_workload_state, workload_state):
hierarchy = {: -1,
: 0,
: 1,
: 2,
: 3,
}
if hierarchy.get(workload_state) is None:
workload_state =
if hierarchy.get(current_workload_state)... | Return highest priority of two states |
def decode_response(client_message, to_object=None):
parameters = dict(base=None, increment=None, batch_size=None)
parameters[] = client_message.read_long()
parameters[] = client_message.read_long()
parameters[] = client_message.read_int()
return parameters | Decode response from client message |
def unlisten_to_node(self, id_):
id_pubsub = _pubsub_key(id_)
if id_pubsub in self._listening_to:
del self._listening_to[id_pubsub]
self.toredis.unsubscribe(id_pubsub)
parent = json_decode(r_client.get(id_)).get(, None)
if parent is not None:
... | Stop listening to a job
Parameters
----------
id_ : str
An ID to remove
Returns
--------
str or None
The ID removed or None if the ID was not removed |
def should_display_warnings_for(to_type):
if not hasattr(to_type, ):
return True
elif to_type.__module__ in {} or to_type.__module__.startswith() \
or to_type.__name__ in {}:
return False
elif issubclass(to_type, int) or issubclass(to_type, str) \
or issubclass(t... | Central method where we control whether warnings should be displayed |
def _margtimephase_loglr(self, mf_snr, opt_snr):
return special.logsumexp(numpy.log(special.i0(mf_snr)),
b=self._deltat) - 0.5*opt_snr | Returns the log likelihood ratio marginalized over time and phase. |
def from_maildir(self, codes: str) -> FrozenSet[Flag]:
flags = set()
for code in codes:
if code == :
break
to_sys = self._to_sys.get(code)
if to_sys is not None:
flags.add(to_sys)
else:
to_kwd = self... | Return the set of IMAP flags that correspond to the letter codes.
Args:
codes: The letter codes to map. |
def safe_makedirs(path):
try:
os.makedirs(path)
except OSError as err:
if err.errno == errno.EEXIST:
if not os.path.isdir(path):
raise
else:
raise | A safe function for creating a directory tree. |
def dolnp3_0(Data):
if len(Data) == 0:
print("This function requires input Data have at least 1 entry")
return {}
if len(Data) == 1:
ReturnData = {}
ReturnData["dec"] = Data[0][]
ReturnData["inc"] = Data[0][]
ReturnData["n_total"] =
if "DE-BFP" in Da... | DEPRECATED!! USE dolnp()
Desciption: takes a list of dicts with the controlled vocabulary of 3_0 and calls dolnp on them after reformating for compatibility.
Parameters
__________
Data : nested list of dictionarys with keys
dir_dec
dir_inc
dir_tilt_correction
method_code... |
def unsurt(surt):
try:
index = surt.index()
parts = surt[0:index].split()
parts.reverse()
host = .join(parts)
host += surt[index + 1:]
return host
except ValueError:
return surt | # Simple surt
>>> unsurt('com,example)/')
'example.com/'
# Broken surt
>>> unsurt('com,example)')
'com,example)'
# Long surt
>>> unsurt('suffix,domain,sub,subsub,another,subdomain)/path/file/\
index.html?a=b?c=)/')
'subdomain.another.subsub.sub.domain.suffix/path/file/index.html?a=b?c=... |
def save(self, *args):
with open(self.file_root + , "wb") as f:
pickle.dump(args, f, protocol=pickle.HIGHEST_PROTOCOL) | Save cache to file using pickle.
Parameters
----------
*args:
All but the last argument are inputs to the cached function. The
last is the actual value of the function. |
def update_handler(feeds):
if Feed._filters_update_handler_lock: return
return Feed._filters_update_handler(Feed, feeds, force=True) | Update all cross-referencing filters results for feeds and others, related to them.
Intended to be called from non-Feed update hooks (like new Post saving). |
def load_edbfile(file=None):
import ephem,string,math
if file is None:
import tkFileDialog
try:
file=tkFileDialog.askopenfilename()
except:
return
if file is None or file == :
return
f=open(file)
lines=f.readlines()
f.close()
for line in... | Load the targets from a file |
def _api_post(self, url, **kwargs):
kwargs[] = self.url + url
kwargs[] = self.auth
headers = deepcopy(self.headers)
headers.update(kwargs.get(, {}))
kwargs[] = headers
self._post(**kwargs) | A convenience wrapper for _post. Adds headers, auth and base url by
default |
def update_batch_count(instance, **kwargs):
batch = instance.batch
count = batch.samples.filter(published=True).count()
if count != batch.count:
batch.count = count
if AUTO_PUBLISH_BATCH:
batch.published = bool(count)
batch.save() | Sample post-save handler to update the sample's batch count.
Batches are unpublished by default (to prevent publishing empty batches).
If the `AUTO_PUBLISH_BATCH` setting is true, the batch will be published
automatically when at least one published sample is in the batch. |
def read_settings(self):
extent = setting(, None, str)
if extent:
extent = QgsGeometry.fromWkt(extent)
if not extent.isGeosValid():
extent = None
crs = setting(, None, str)
if crs:
crs = QgsCoordinateReferenceSystem(crs)
... | Set the dock state from QSettings.
Do this on init and after changing options in the options dialog. |
def assign(self, bugids, user):
payload = {: (bugids,), : user}
d = self.call(, payload)
d.addCallback(self._parse_bug_assigned_callback)
return d | Assign a bug to a user.
param bugid: ``int``, bug ID number.
param user: ``str``, the login name of the user to whom the bug is
assigned
returns: deferred that when fired returns True if the change succeeded,
False if the change was unnecessary (because the ... |
def start(name=None,
user=None,
group=None,
chroot=None,
caps=None,
no_caps=False,
pidfile=None,
enable_core=False,
fd_limit=None,
verbose=False,
debug=False,
trace=False,
yydebug=False,
per... | Ensures, that syslog-ng is started via the given parameters. This function
is intended to be used from the state module.
Users shouldn't use this function, if the service module is available on
their system. If :mod:`syslog_ng.set_config_file
<salt.modules.syslog_ng.set_binary_path>`, is called before,... |
def annotation(self, type, set=None):
l = self.count(type,set,True,default_ignore_annotations)
if len(l) >= 1:
return l[0]
else:
raise NoSuchAnnotation() | Will return a **single** annotation (even if there are multiple). Raises a ``NoSuchAnnotation`` exception if none was found |
def createCollection(self, className = , **colProperties) :
colClass = COL.getCollectionClass(className)
if len(colProperties) > 0 :
colProperties = dict(colProperties)
else :
try :
colProperties = dict(colClass._properties)
except A... | Creates a collection and returns it.
ClassName the name of a class inheriting from Collection or Egdes, it can also be set to 'Collection' or 'Edges' in order to create untyped collections of documents or edges.
Use colProperties to put things such as 'waitForSync = True' (see ArangoDB's doc
for... |
def visualRectRC(self, row, column):
rect = self._rects[row][column]
if rect.isValid():
return QtCore.QRect(rect.x() - self.horizontalScrollBar().value(),
rect.y() - self.verticalScrollBar().value(),
rect.width(), rect.height())
... | The rectangle for the bounds of the item at *row*, *column*
:param row: row of the item
:type row: int
:param column: column of the item
:type column: int
:returns: :qtdoc:`QRect` -- rectangle of the borders of the item |
def triangle(self, verts=True, lines=True):
tf = vtk.vtkTriangleFilter()
tf.SetPassLines(lines)
tf.SetPassVerts(verts)
tf.SetInputData(self.poly)
tf.Update()
return self.updateMesh(tf.GetOutput()) | Converts actor polygons and strips to triangles. |
def makeOuputDir(outputDir, force):
if outputDir:
if exists(outputDir):
if not force:
print(
, file=sys.stderr)
sys.exit(1)
else:
mkdir(outputDir)
else:
outputDir = mkdtemp()
print( % outputDir)
... | Create or check for an output directory.
@param outputDir: A C{str} output directory name, or C{None}.
@param force: If C{True}, allow overwriting of pre-existing files.
@return: The C{str} output directory name. |
def imshow(
self,
data=None,
save=False,
ax=None,
interpolation="none",
extra_title=None,
show_resonances="some",
set_extent=True,
equalized=False,
rmin=None,
rmax=None,
savepath=".",
**kwargs,
):
... | Powerful default display.
show_resonances can be True, a list, 'all', or 'some' |
def refresh(name):
ret = {: name,
: {},
: None,
: }
if __opts__[]:
ret[] =
return ret
__salt__[]()
ret[] = True
ret[] =
return ret | Initiate a Traffic Server configuration file reread. Use this command to
update the running configuration after any configuration file modification.
The timestamp of the last reconfiguration event (in seconds since epoch) is
published in the proxy.node.config.reconfigure_time metric.
.. code-block:: y... |
def min_pulse_sp(self):
self._min_pulse_sp, value = self.get_attr_int(self._min_pulse_sp, )
return value | Used to set the pulse size in milliseconds for the signal that tells the
servo to drive to the miniumum (counter-clockwise) position_sp. Default value
is 600. Valid values are 300 to 700. You must write to the position_sp
attribute for changes to this attribute to take effect. |
def _get_bufsize_linux(iface):
ret = {: False}
cmd = .format(iface)
out = __salt__[](cmd)
pat = re.compile(r)
suffix =
for line in out.splitlines():
res = pat.match(line)
if res:
ret[res.group(1).lower().replace(, ) + suffix] = int(res.group(2))
ret... | Return network interface buffer information using ethtool |
def format_text_as_docstr(text):
r
import utool as ut
import re
min_indent = ut.get_minimum_indentation(text)
indent_ = * min_indent
formated_text = re.sub( + indent_, + indent_ + , text,
flags=re.MULTILINE)
formated_text = re.sub(, + indent_ + , formated_text,... | r"""
CommandLine:
python ~/local/vim/rc/pyvim_funcs.py --test-format_text_as_docstr
Example:
>>> # DISABLE_DOCTEST
>>> from pyvim_funcs import * # NOQA
>>> text = testdata_text()
>>> formated_text = format_text_as_docstr(text)
>>> result = ('formated_text = \... |
def calc_abort(request, calc_id):
job = logs.dbcmd(, calc_id)
if job is None:
message = {: % calc_id}
return HttpResponse(content=json.dumps(message), content_type=JSON)
if job.status not in (, ):
message = {: % job.id}
return HttpResponse(content=json.dumps(message),... | Abort the given calculation, it is it running |
def select_action(self, nb_actions, probs):
action = np.random.choice(range(nb_actions), p=probs)
return action | Return the selected action
# Arguments
probs (np.ndarray) : Probabilty for each action
# Returns
action |
def run(self):
os.environ[] =
self.python.check_valid_python()
args = self.parse_options(self.args)
if args:
arg = args[0]
else:
arg = os.curdir
if arg:
arg = expanduser(arg)
if isfile(arg):
outfile = sel... | Render and display Python package documentation. |
def send_article_message(self, user_id, articles=None, media_id=None):
if articles is None and media_id is None:
raise TypeError()
if articles:
articles_data = []
for article in articles:
article = Article(**article)
... | 发送图文消息
详情请参考 http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html
:param user_id: 用户 ID, 就是你收到的 WechatMessage 的 source
:param articles: list 对象, 每个元素为一个 dict 对象, key 包含 `title`, `description`, `picurl`, `url`
:param media_id: 待发送的图文 Media ID
:return: 返回的 JSON 数据包 |
def use_plenary_sequence_rule_view(self):
self._object_views[] = PLENARY
for session in self._get_provider_sessions():
try:
session.use_plenary_sequence_rule_view()
except AttributeError:
pass | Pass through to provider SequenceRuleLookupSession.use_plenary_sequence_rule_view |
def sync(self, json_obj=None):
LOGGER.debug("Transport.sync")
if json_obj is None:
params = None
if self.id is not None:
params = SessionService.complete_transactional_req({: self.id})
if params is not None:
if MappingService.... | synchronize this transport with the Ariane server transport
:return: |
def delete_guest(userid):
guest_list_info = client.send_request()
print("\nFailed to delete guest %s!" % userid)
else:
print("\nSucceeded to delete guest %s!" % userid) | Destroy a virtual machine.
Input parameters:
:userid: USERID of the guest, last 8 if length > 8 |
def shutdown(self, restart=False):
msg = self.session.msg(, {:restart})
self._queue_send(msg)
return msg[][] | Request an immediate kernel shutdown.
Upon receipt of the (empty) reply, client code can safely assume that
the kernel has shut down and it's safe to forcefully terminate it if
it's still alive.
The kernel will send the reply via a function registered with Python's
atexit modul... |
def fetch_protein_list(self, taxon_id):
protein_list = list()
col = [, ]
params = urllib.parse.urlencode(
{: self._build_biomart_gene_query(taxon_id, col)})
conn = http.client.HTTPConnection(ENS_URL)
conn.request("GET", + params)
response =... | Fetch a list of proteins for a species in biomart
:param taxid:
:return: list |
def unfold_lines(string):
assert isinstance(string, str), .format(type(string))
lines = string.splitlines()
line_buffer = io.StringIO()
for line_number in range(len(lines)):
line = lines[line_number]
if line and line[0:1] in (, ):
line_buffer.write()
elif line_n... | Join lines that are wrapped.
Any line that starts with a space or tab is joined to the previous
line. |
def collect_loaded_packages() -> List[Tuple[str, str]]:
dists = get_installed_distributions()
get_dist_files = DistFilesFinder()
file_table = {}
for dist in dists:
for file in get_dist_files(dist):
file_table[file] = dist
used_dists = set()
for module in list(s... | Return the currently loaded package names and their versions. |
def load_code_info(phases_or_groups):
if isinstance(phases_or_groups, PhaseGroup):
return phases_or_groups.load_code_info()
ret = []
for phase in phases_or_groups:
if isinstance(phase, PhaseGroup):
ret.append(phase.load_code_info())
else:
ret.append(
mutablerecords.CopyRecord(... | Recursively load code info for a PhaseGroup or list of phases or groups. |
def compute_trip_stats(
feed: "Feed",
route_ids: Optional[List[str]] = None,
*,
compute_dist_from_shapes: bool = False,
) -> DataFrame:
f = feed.trips.copy()
if route_ids is not None:
f = f[f["route_id"].isin(route_ids)].copy()
if "direction_id" not in f.co... | Return a DataFrame with the following columns:
- ``'trip_id'``
- ``'route_id'``
- ``'route_short_name'``
- ``'route_type'``
- ``'direction_id'``: NaN if missing from feed
- ``'shape_id'``: NaN if missing from feed
- ``'num_stops'``: number of stops on trip
- ``'start_time'``: first depa... |
def date_time_between_dates(
self,
datetime_start=None,
datetime_end=None,
tzinfo=None):
if datetime_start is None:
datetime_start = datetime.now(tzinfo)
if datetime_end is None:
datetime_end = datetime.now(tzinfo)
... | Takes two DateTime objects and returns a random datetime between the two
given datetimes.
Accepts DateTime objects.
:param datetime_start: DateTime
:param datetime_end: DateTime
:param tzinfo: timezone, instance of datetime.tzinfo subclass
:example DateTime('1999-02-02 1... |
def setVisibleColumns(self, visible):
colnames = self.columns()
for c, column in enumerate(colnames):
self.setColumnHidden(c, column not in visible) | Sets the list of visible columns for this widget. This method will
take any column in this tree's list NOT found within the inputed column
list and hide them.
:param columns | [<str>, ..] |
def _integration(data, sample_rate):
wind_size = int(0.080 * sample_rate)
int_ecg = numpy.zeros_like(data)
cum_sum = data.cumsum()
int_ecg[wind_size:] = (cum_sum[wind_size:] - cum_sum[:-wind_size]) / wind_size
int_ecg[:wind_size] = cum_sum[:wind_size] / numpy.arange(1, wind_size + 1)
retur... | Moving window integration. N is the number of samples in the width of the integration
window
----------
Parameters
----------
data : ndarray
Samples of the signal where a moving window integration will be applied.
sample_rate : int
Sampling rate at which the acquisition took pla... |
def list_storage_class(self, **kwargs):
    """list or watch objects of kind StorageClass

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async_req=True:

    >>> thread = api.list_storage_class(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute the request asynchronously
    :return: the StorageClass list object, or the request thread when
             ``async_req=True``.
    """
    # NOTE(review): the extracted source had the string keys stripped;
    # these literals are reconstructed from the standard kubernetes
    # generated-client pattern — confirm against the real module.
    # Only the deserialized object (not the full (data, status, headers)
    # triple) is wanted by callers of this convenience wrapper.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.list_storage_class_with_http_info(**kwargs)
    else:
        (data) = self.list_storage_class_with_http_info(**kwargs)
        return data
return data | list or watch objects of kind StorageClass
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_storage_class(async_req=True)
>>> result = thread.get()
:param async_req bool
:param... |
def register(cls, plugin):
    """Registers a particular plugin to the global system at the given name.

    A falsy ``plugin`` is silently ignored. The registry is created
    lazily on first registration.

    :param plugin: <XWizardPlugin>
    """
    if plugin:
        registry = cls._plugins if cls._plugins is not None else {}
        registry[plugin.uniqueName()] = plugin
        cls._plugins = registry
:param plugin | <XWizardPlugin> |
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
assert wait_for_completion is True
lpar_oid = uri_parms[0]
lpar_uri = + lpar_oid
try:
lpar = hmc.lookup_by_uri(lpar_uri)
except KeyError:
raise Inval... | Operation: Load Logical Partition (requires classic mode). |
def load_clients_file(filename, configuration_class=ClientConfiguration):
    """Loads client configurations from a YAML file.

    :param filename: YAML file name.
    :type filename: unicode | str
    :param configuration_class: Class of the configuration object to create.
    :type configuration_class: class
    :return: A dictionary of client configuration objects.
    :rtype: dict
    """
    # NOTE(review): the extraction stripped the open() mode literal;
    # 'r' (text read) is the conventional mode for YAML input.
    # The file handle is closed by the context manager once load_clients
    # has consumed the stream.
    with open(filename, 'r') as f:
        return load_clients(f, configuration_class=configuration_class)
:param filename: YAML file name.
:type filename: unicode | str
:param configuration_class: Class of the configuration object to create.
:type configuration_class: class
:return: A dictionary of client configuration objects.
:rtype: dict[unicode | st... |
def search(self, **kwargs):
    """Searches for files/folders.

    Args:
        \\**kwargs (dict): A dictionary containing necessary parameters
            (check https://developers.box.com/docs/#search for the
            list of parameters).

    Returns:
        dict. Response from Box.
    """
    # dict.iteritems() was removed in Python 3; copying the keyword
    # arguments with dict() builds the same query-string mapping.
    query_string = dict(kwargs)
    return self.__request("GET","search",querystring=query_string)
Args:
\*\*kwargs (dict): A dictionary containing necessary parameters
(check https://developers.box.com/docs/#search for
list of parameters)
Returns:
dict. Response from Box.
Raises:
... |
def TerminalSize():
try:
with open(os.ctermid(), ) as tty_instance:
length_width = struct.unpack(
, fcntl.ioctl(tty_instance.fileno(), termios.TIOCGWINSZ, ))
except (IOError, OSError):
try:
length_width = (int(os.environ[]),
int(os.environ[]))
except (Value... | Returns terminal length and width as a tuple. |
def plot(self, stachans='all', size=(10, 7), show=True):
    """Plot the output basis vectors for the detector at the given dimension.

    Corresponds to the first n horizontal vectors of the V matrix.

    NOTE(review): the extracted source had the default literal for
    ``stachans`` stripped; ``'all'`` is reconstructed from the
    EQcorrscan subspace-detector API — confirm against the library.

    :type stachans: list or str
    :param stachans: list of (station, channel) tuples to plot, or
        'all' for every pair known to the detector.
    :type size: tuple
    :param size: Figure size.
    :type show: bool
    :param show: Whether to display the figure.
    """
    return subspace_detector_plot(detector=self, stachans=stachans,
                                  size=size, show=show)
Corresponds to the first n horizontal vectors of the V matrix.
:type stachans: list
:param stachans: list of tuples of station, channel pairs to plot.
:type stachans: list
:param stachans: List of tuples of ... |
def _do_shell(self, line):
if not line:
return
sp = Popen(line,
shell=True,
stdin=PIPE,
stdout=PIPE,
stderr=PIPE,
close_fds=not WINDOWS)
(fo, fe) = (sp.stdout, sp.stderr)
i... | Send a command to the Unix shell.\n==> Usage: shell ls ~ |
def clean_form_template(self):
form_template = self.cleaned_data.get(, )
if form_template:
try:
get_template(form_template)
except TemplateDoesNotExist:
msg = _()
raise forms.ValidationError(msg)
return form_templat... | Check if template exists |
def perform_create(self, serializer):
    """Create a resource.

    The save and the contributor-permission assignment run inside a
    single database transaction, so a failure in either rolls back both.
    """
    with transaction.atomic():
        assign_contributor_permissions(serializer.save())
def iscsi_settings(self):
    """Provide a reference to the iSCSI settings instance.

    NOTE(review): the original documentation states this is computed
    once on first access and reset on refresh — presumably via a
    caching property decorator defined on the class; confirm there.
    """
    settings_path = utils.get_subresource_path_by(
        self, ["@Redfish.Settings", "SettingsObject"])
    return ISCSISettings(self._conn, settings_path,
                         redfish_version=self.redfish_version)
It is calculated once, the first time it is queried. On refresh,
this property gets reset. |
def getEmailTemplate(request):
if request.method != :
return HttpResponse(_())
if not hasattr(request,):
return HttpResponse(_())
template_id = request.POST.get()
if not template_id:
return HttpResponse(_("Error, no template ID provided."))
try:
... | This function handles the Ajax call made when a user wants a specific email template |
def get_creator(self, lang=None):
    """Get the DC Creator literal value.

    :param lang: Language to retrieve
    :return: Creator string representation
    :rtype: Literal
    """
    # Delegate to the metadata store, filtering on the DC creator key.
    return self.metadata.get_single(lang=lang, key=DC.creator)
:param lang: Language to retrieve
:return: Creator string representation
:rtype: Literal |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.