code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|
def removeRedundantVerbChains( foundChains, removeOverlapping = True, removeSingleAraAndEi = False ):
eiära
toDelete = []
for i in range(len(foundChains)):
matchObj1 = foundChains[i]
if removeOverlapping:
for j in range(i+1, len(foundChains)):
matchObj2 = f... | Eemaldab yleliigsed verbiahelad: ahelad, mis katavad osaliselt v6i t2ielikult
teisi ahelaid (removeOverlapping == True), yhes6nalised 'ei' ja 'ära' ahelad (kui
removeSingleAraAndEi == True);
Yldiselt on nii, et ylekattuvaid ei tohiks palju olla, kuna fraaside laiendamisel
... |
def sum(self):
return self._constructor(self.values.sum(axis=self.baseaxes, keepdims=True)) | Compute the sum across records. |
def query(
self, queryEngine, query_str, vendorSpecific=None, do_post=False, **kwargs
):
response = self.queryResponse(
queryEngine, query_str, vendorSpecific, do_post, **kwargs
)
if self._content_type_is_json(response):
return self._read_json_respons... | See Also: queryResponse()
Args:
queryEngine:
query_str:
vendorSpecific:
do_post:
**kwargs:
Returns: |
def passageLoop(parent, new_tree, xpath1, xpath2=None, preceding_siblings=False, following_siblings=False):
current_1, queue_1 = __formatXpath__(xpath1)
if xpath2 is None:
result_1, loop = performXpath(parent, current_1)
if loop is True:
queue_1 = xpath1
central = Non... | Loop over passages to construct and increment new tree given a parent and XPaths
:param parent: Parent on which to perform xpath
:param new_tree: Parent on which to add nodes
:param xpath1: List of xpath elements
:type xpath1: [str]
:param xpath2: List of xpath elements
:type xpath2: [str]
... |
def _map(self, event):
description = event.get(, )
start_time = google_base.parse_rfc3339_utc_string(
event.get(, ))
for name, regex in _EVENT_REGEX_MAP.items():
match = regex.match(description)
if match:
return {: name, : start_time}, match
return {: description, : st... | Extract elements from an operation event and map to a named event. |
def open(self, dbname=None):
databases = self.list()
if self.is_closed():
self.db_path = os.path.join(self.path, dbname or (databases and databases[0] or self.new()))
if not self._opened:
self.list_tables()
self._opened = True | Open database from the path with the name or latest.
If there are no yet databases, create a new implicitly.
:return: |
def main():
parser = ArgumentParser()
parser.add_argument("direction",
choices=(
"up", "down", "left", "right",
"next", "prev"
),
help="Direction to put the focus on")
args = ... | Entry point |
def start(self):
if self.pre_start_check():
raise AlreadyRunning(self)
super(Executor, self).start()
self.wait_for(self.check_subprocess)
return self | Start executor with additional checks.
Checks if previous executor isn't running then start process
(executor) and wait until it's started.
:returns: itself
:rtype: Executor |
def process_delivery(message, notification):
mail = message[]
delivery = message[]
if in delivery:
delivered_datetime = clean_time(delivery[])
else:
delivered_datetime = None
deliveries = []
for eachrecipient in delivery[]:
deliveries += [Delivery.objects... | Function to process a delivery notification |
def check(self):
st = self._stat()
if self._st == st:
return
self._st = st
self._remove_existing()
if st is None:
LOG.debug(, self)
else:
self._on_file_changed() | Compare the :func:`os.stat` for the pam_env style environmnt file
`path` with the previous result `old_st`, which may be :data:`None` if
the previous stat attempt failed. Reload its contents if the file has
changed or appeared since last attempt.
:returns:
New :func:`os.stat... |
def delete(self):
response = self.session.request("delete:Message", [ self.message_id ])
self.data = response
return self | Delete the draft. |
def mongodb(line):
2017-08-17T07:56:33.489+0200 I REPL [signalProcessingThread] shutting down replication subsystemsdatacomponentREPLcontext[signalProcessingThread]messageshutting down replication subsystemsseverityItimestamp2017-08-17T07:56:33.489+0200timestamp2017-08-17T07:56:33.489+0200typelog2017-08-17T07:5... | >>> import pprint
>>> input_line1 = '2017-08-17T07:56:33.489+0200 I REPL [signalProcessingThread] shutting down replication subsystems'
>>> output_line1 = mongodb(input_line1)
>>> pprint.pprint(output_line1)
{'data': {'component': 'REPL',
'context': '[signalProcessingThread]',
... |
def fromXml(cls, xparent):
output = cls()
for xentry in xparent:
key = xentry.get()
if not key:
continue
typ = xentry.get(, )
if typ in DataSet._xmlTypes:
value = DataSet._xmlTypes[typ][1](xentry)
els... | Loads the settings for this dataset to the inputted parent xml.
:param xparent | <xml.etree.ElementTree.Element> |
def _get_action_type(self, action):
action_name = next(iter(action or {: None}))
if not in action_name:
action_name = .format(self.CALL_TYPE, action_name)
return action_name.split()[0] or None | Get action type.
:param action:
:return: |
def start(self):
if self.pid is not None:
LOG.error(
"The process is already running with pid {0}.".format(self.pid)
)
sys.exit(exit.ALREADY_RUNNING)
self.daemonize()
LOG.info("Beginning run loop for process.")
try:
... | Start the process with daemonization.
If the process is already started this call should exit with code
ALREADY_RUNNING. Otherwise it must call the 'daemonize' method and then
call 'run'. |
def _string_hash(s):
h = 5381
for c in s:
h = h * 33 + ord(c)
return h | String hash (djb2) with consistency between py2/py3 and persistency between runs (unlike `hash`). |
def is_middleware(func) -> bool:
if inspect.isfunction(func):
_check = func
_name = func.__name__
else:
_check = func.__call__
_name = func.__class__.__name__
if not inspect.iscoroutinefunction(_check):
raise UnknownMiddlewareException("Middleware {} should be ... | test whether it is a middleware
:return: Boolean |
def stage_tc_batch(self, owner, staging_data):
batch = self.tcex.batch(owner)
for group in staging_data.get() or []:
variable = group.pop(, None)
path = group.pop(, None)
data = self.path_data(group, path)
if group.get() ... | Stage data in ThreatConnect Platform using batch API.
Args:
owner (str): The ThreatConnect owner to submit batch job.
staging_data (dict): A dict of ThreatConnect batch data. |
def plot_cap_exposures_longshort(long_exposures, short_exposures, ax=None):
if ax is None:
ax = plt.gca()
color_list = plt.cm.gist_rainbow(np.linspace(0, 1, 5))
ax.stackplot(long_exposures[0].index, long_exposures,
labels=CAP_BUCKETS.keys(), colors=color_list, alpha=0.8,
... | Plots outputs of compute_cap_exposures as area charts
Parameters
----------
long_exposures, short_exposures : arrays
Arrays of long and short market cap exposures (output of
compute_cap_exposures). |
def p_namespace(p):
global NAMESPACE
NAMESPACE = normalize_namespace(p[2])
__DEBUG__( + (NAMESPACE.rstrip(DOT) or DOT), level=1) | asm : NAMESPACE ID |
def get_appstruct(self):
result = []
for k in self._get_keys():
result.append((k, getattr(self, k)))
return result | return list of tuples keys and values corresponding to this model's
data |
def _self_referential_fk(klass_model):
for f in klass_model._meta.concrete_fields:
if f.related_model:
if issubclass(klass_model, f.related_model):
return f.attname
return None | Return whether this model has a self ref FK, and the name for the field |
def parse_command_line() -> Namespace:
import tornado.options
parser.parse_known_args(namespace=config)
set_loglevel()
for k, v in vars(config).items():
if k.startswith():
tornado.options.options.__setattr__(k, v)
return config | Parse command line options and set them to ``config``.
This function skips unknown command line options. After parsing options,
set log level and set options in ``tornado.options``. |
def train(self, data=None, epochs=10, radius0=0, radiusN=1,
radiuscooling="linear",
scale0=0.1, scaleN=0.01, scalecooling="linear"):
_check_cooling_parameters(radiuscooling, scalecooling)
if self._data is None and data is None:
raise Exception("No data wa... | Train the map on the current data in the Somoclu object.
:param data: Optional parameter to provide training data. It is not
necessary if the data was added via the method
`update_data`.
:type data: 2D numpy.array of float32.
:param epochs: The number of... |
def disconnect(self):
if not self._connected:
return
log.info("Session: %s, Tree: %s - Disconnecting from Tree Connect"
% (self.session.username, self.share_name))
req = SMB2TreeDisconnect()
log.info("Session: %s, Tree: %s - Sending Tree Disconnect... | Disconnects the tree connection. |
def RegisterHasher(cls, hasher_class):
hasher_name = hasher_class.NAME.lower()
if hasher_name in cls._hasher_classes:
raise KeyError((
).format(
hasher_class.NAME))
cls._hasher_classes[hasher_name] = hasher_class | Registers a hasher class.
The hasher classes are identified based on their lower case name.
Args:
hasher_class (type): class object of the hasher.
Raises:
KeyError: if hasher class is already set for the corresponding name. |
def _shorten_render(renderer, max_len):
def short_renderer(expr):
res = renderer(expr)
if len(res) > max_len:
return
else:
return res
return short_renderer | Return a modified that returns the representation of expr, or '...' if
that representation is longer than `max_len` |
def set_chassis_location(location,
host=None,
admin_username=None,
admin_password=None):
*
return __execute_cmd(.format(location),
host=host, admin_username=admin_username,
admin_password... | Set the location of the chassis.
location
The name of the location to be set on the chassis.
host
The chassis host.
admin_username
The username used to access the chassis.
admin_password
The password used to access the chassis.
CLI Example:
.. code-block:: b... |
def bootstrap_results(self, init_state):
with tf.compat.v1.name_scope(
name=mcmc_util.make_name(self.name, , ),
values=[init_state]):
replica_results = [
self.replica_kernels[i].bootstrap_results(init_state)
for i in range(self.num_replica)
]
init_state_pa... | Returns an object with the same type as returned by `one_step`.
Args:
init_state: `Tensor` or Python `list` of `Tensor`s representing the
initial state(s) of the Markov chain(s).
Returns:
kernel_results: A (possibly nested) `tuple`, `namedtuple` or `list` of
`Tensor`s representing ... |
def _map_trajectory(self):
self.trajectory_map = {}
with open(self.filepath, ) as trajectory_file:
with closing(
mmap(
trajectory_file.fileno(), 0,
access=ACCESS_READ)) as mapped_file:
progress = 0
... | Return filepath as a class attribute |
def kill(args):
import shlex
from jcvi.apps.base import sh, getusername
from subprocess import check_output, CalledProcessError
import xml.etree.ElementTree as ET
valid_methods = ("pattern", "jobid")
p = OptionParser(kill.__doc__)
p.add_option("--method", choices=valid_methods,
... | %prog kill [options] JOBNAMEPAT/JOBIDs
Kill jobs based on JOBNAME pattern matching (case-sensitive)
or list of JOBIDs (comma separated)
Examples:
%prog kill "pyth*" # Use regex
%prog kill 160253,160245,160252 # Use list of job ids
%prog kill all # Everyth... |
def copy(
self,
extractor=None,
needs=None,
store=None,
data_writer=None,
persistence=None,
extractor_args=None):
f = Feature(
extractor or self.extractor,
needs=needs,
store=self.sto... | Use self as a template to build a new feature, replacing
values in kwargs |
def get_save_as_filename(defaultfilename: str,
defaultextension: str,
title: str = "Save As") -> str:
root = tkinter.Tk()
root.attributes(, True)
filename = filedialog.asksaveasfilename(
initialfile=defaultfilename,
defaultexten... | Provides a GUI "Save As" dialogue (via ``tkinter``) and returns the
filename. |
def extrusion(target, throat_perimeter=,
throat_length=):
r
P = target[throat_perimeter]
L = target[throat_length]
value = P*L
return value | r"""
Calculate surface area for an arbitrary shaped throat give the perimeter
and length.
Parameters
----------
target : OpenPNM Object
The object which this model is associated with. This controls the
length of the calculated array, and also provides access to other
necessa... |
def client_status(self, config_path):
c = self.client_for(config_path)
status = "stopped"
if not c or not c.ensime:
status =
elif c.ensime.is_ready():
status =
elif c.ensime.is_running():
status =
elif c.ensime.aborted():
... | Get status of client for a project, given path to its config. |
def crop(self, top=None, bottom=None, right=None, left=None):
extractVOI = vtk.vtkExtractVOI()
extractVOI.SetInputData(self.GetInput())
extractVOI.IncludeBoundaryOn()
d = self.GetInput().GetDimensions()
bx0, bx1, by0, by1 = 0, d[0]-1, 0, d[1]-1
if left is not No... | Crop image.
:param float top: fraction to crop from the top margin
:param float bottom: fraction to crop from the bottom margin
:param float left: fraction to crop from the left margin
:param float right: fraction to crop from the right margin |
def run(self):
while True:
self.update_log_filenames()
self.open_closed_files()
anything_published = self.check_log_files_and_publish_updates()
if not anything_published:
time.sleep(0.05) | Run the log monitor.
This will query Redis once every second to check if there are new log
files to monitor. It will also store those log files in Redis. |
def get_interface_detail_output_interface_line_protocol_exception_info(self, **kwargs):
config = ET.Element("config")
get_interface_detail = ET.Element("get_interface_detail")
config = get_interface_detail
output = ET.SubElement(get_interface_detail, "output")
interface ... | Auto Generated Code |
def apply_statusbar_settings(self):
show_status_bar = CONF.get(, )
self.statusBar().setVisible(show_status_bar)
if show_status_bar:
for widget, name in ((self.mem_status, ),
(self.cpu_status, )):
if widget is not None... | Update status bar widgets settings |
def setDirty(self, state=True):
self._dirty = state
if self._inputNode and self._outputNode:
vis = self._inputNode.isVisible() and self._outputNode.isVisible()
self.setVisible(vis) | Flags the connection as being dirty and needing a rebuild.
:param state | <bool> |
def insert_tabs(self, tab, no_tabs=1):
post_command_event(self.main_window, self.ContentChangedMsg)
self.code_array.insert(tab, no_tabs, axis=2)
shape = self.grid.code_array.shape
post_command_event(self.main_window, self.ResizeGridMsg, shape=shape) | Adds no_tabs tabs before table, appends if tab > maxtabs
and marks grid as changed |
def cls_slots(self, cls: CLASS_OR_CLASSNAME) -> List[SlotDefinition]:
if not isinstance(cls, ClassDefinition):
cls = self.schema.classes[cls]
return [self.schema.slots[s] for s in cls.slots] | Return the list of slots directly included in the class definition. Includes slots whose
domain is cls -- as declared in slot.domain or class.slots
Does not include slots declared in mixins, apply_to or is_a links
@param cls: class name or class definition name
@return: all direct cla... |
def precision(self, label=None):
if label is None:
return self.call("precision")
else:
return self.call("precision", float(label)) | Returns precision or precision for a given label (category) if specified. |
def set_empty_for_all(self, row_column_list):
for row, column in row_column_list:
self.set_empty(row, column) | Keep all specified subplots completely empty.
:param row_column_list: a list containing (row, column) tuples to
specify the subplots, or None to indicate *all* subplots.
:type row_column_list: list or None |
def handle_xmlrpc(self, request_text):
response = self._marshaled_dispatch(request_text)
sys.stdout.write()
sys.stdout.write( % len(response))
sys.stdout.write()
sys.stdout.write(response) | Handle a single XML-RPC request |
def lp10(self, subset_k, subset_p, weights={}):
if self._z is None:
self._add_minimization_vars()
positive = set(subset_k) - self._flipped
negative = set(subset_k) & self._flipped
v = self._v.set(positive)
cs = self._prob.add_linear_constraints(v >= self._... | Force reactions in K above epsilon while minimizing support of P.
This program forces reactions in subset K to attain flux > epsilon
while minimizing the sum of absolute flux values for reactions
in subset P (L1-regularization). |
def object_download(self, bucket, key, start_offset=0, byte_count=None):
args = {: }
headers = {}
if start_offset > 0 or byte_count is not None:
header = % start_offset
if byte_count is not None:
header += % byte_count
headers[] = header
url = Api._DOWNLOAD_ENDPOINT + (A... | Reads the contents of an object as text.
Args:
bucket: the name of the bucket containing the object.
key: the key of the object to be read.
start_offset: the start offset of bytes to read.
byte_count: the number of bytes to read. If None, it reads to the end.
Returns:
The text con... |
def data(self):
header = struct.pack(,
4,
self.created,
self.algo_id)
oid = util.prefix_len(, self.curve_info[])
blob = self.curve_info[](self.verifying_key)
return header + o... | Data for packet creation. |
def _zforce(self,R,z,phi=0.,t=0.):
sqrtbz= nu.sqrt(self._b2+z**2.)
asqrtbz= self._a+sqrtbz
if isinstance(R,float) and sqrtbz == asqrtbz:
return (-z/
(R**2.+(self._a+nu.sqrt(z**2.+self._b2))**2.)**(3./2.))
else:
return (-z*asqrtbz/sqrt... | NAME:
_zforce
PURPOSE:
evaluate the vertical force for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
the vertical force
HISTORY:
2010-0... |
def to_packets(pages, strict=False):
serial = pages[0].serial
sequence = pages[0].sequence
packets = []
if strict:
if pages[0].continued:
raise ValueError("first packet is continued")
if not pages[-1].complete:
raise Valu... | Construct a list of packet data from a list of Ogg pages.
If strict is true, the first page must start a new packet,
and the last page must end the last packet. |
def __get_host(node, vm_):
node
if __get_ssh_interface(vm_) == or vm_[] is None:
ip_address = node.private_ips[0]
log.info(, ip_address)
else:
ip_address = node.public_ips[0]
log.info(, ip_address)
if ip_address:
return ip_address
return node.name | Return public IP, private IP, or hostname for the libcloud 'node' object |
def update_billing_info(self, billing_info):
url = urljoin(self._url, )
response = billing_info.http_request(url, , billing_info,
{: })
if response.status == 200:
pass
elif response.status == 201:
billing_info._url = response.getheader()
... | Change this account's billing information to the given `BillingInfo`. |
def parse_date(date, default=None):
if date == "":
if default is not None:
return default
else:
raise Exception("Unknown format for " + date)
for format_type in ["%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M", "%Y-%m-%d %H", "%Y-%m-%d", "%d/%m/%Y %H:%M:%S", "%d/%m/%Y %H:%M",... | Parse a valid date |
def list_tags(self, pattern: str = None) -> typing.List[str]:
tags: typing.List[str] = [str(tag) for tag in self.repo.tags]
if not pattern:
LOGGER.debug(, tags)
return tags
LOGGER.debug(, pattern)
filtered_tags: typing.List[str] = [tag for tag in tags if... | Returns list of tags, optionally matching "pattern"
:param pattern: optional pattern to filter results
:type pattern: str
:return: existing tags
:rtype: list of str |
def delete_user(self, user):
self.service.delete_user(
user, self.url_prefix, self.auth, self.session, self.session_send_opts) | Delete the given user.
Args:
user (string): User name.
Raises:
requests.HTTPError on failure. |
def atleast(cls, lits, bound=1, top_id=None, encoding=EncType.seqcounter):
if encoding < 0 or encoding > 9:
raise(NoSuchEncodingError(encoding))
if not top_id:
top_id = max(map(lambda x: abs(x), lits))
ret = CNFPlus()
if encoding == ... | This method can be used for creating a CNF encoding of an AtLeastK
constraint, i.e. of :math:`\sum_{i=1}^{n}{x_i}\geq k`. The method
takes 1 mandatory argument ``lits`` and 3 default arguments can be
specified: ``bound``, ``top_id``, and ``encoding``.
:param lits: a list... |
def _get_supply_array_construct(self):
bus_no = integer.setResultsName("bus_no")
s_rating = real.setResultsName("s_rating")
p_direction = real.setResultsName("p_direction")
p_bid_max = real.setResultsName("p_bid_max")
p_bid_min = real.setResultsName("p_bid_min")
... | Returns a construct for an array of power supply data. |
def processRequest(self, request: Request, frm: str):
logger.debug("{} received client request: {} from {}".
format(self.name, request, frm))
self.nodeRequestSpikeMonitorData[] += 1
| Handle a REQUEST from the client.
If the request has already been executed, the node re-sends the reply to
the client. Otherwise, the node acknowledges the client request, adds it
to its list of client requests, and sends a PROPAGATE to the
remaining nodes.
:param request: the R... |
def clean(self, text, **kwargs):
if sys.version_info < (3, 0):
if not isinstance(text, unicode):
raise exceptions.UnicodeRequired
clean_chunks = []
filth = Filth()
for next_filth in self.iter_filth(text):
clean_chunks.append(... | This is the master method that cleans all of the filth out of the
dirty dirty ``text``. All keyword arguments to this function are passed
through to the ``Filth.replace_with`` method to fine-tune how the
``Filth`` is cleaned. |
def netdev():
*
def linux_netdev():
ret = {}
try:
with salt.utils.files.fopen(, ) as fp_:
stats = salt.utils.stringutils.to_unicode(fp_.read())
except IOError:
pass
else:
for line in stats.splitlines():
... | .. versionchanged:: 2016.3.2
Return the network device stats for this minion
.. versionchanged:: 2016.11.4
Added support for AIX
CLI Example:
.. code-block:: bash
salt '*' status.netdev |
def get_conf_from_module(mod):
conf = ModuleConfig(CONF_SPEC)
mod = _get_correct_module(mod)
conf.set_module(mod)
if hasattr(mod, ):
default = mod.default
conf = extract_conf_from(default, conf)
else:
conf = extract_conf_from(mod, conf)
return conf | return configuration from module with defaults no worry about None type |
def read_from(self, provider, **options):
for item in iter(self):
if is_mixin(item):
item.read_from(provider, **options) | All :class:`Pointer` fields in the `Sequence` read the necessary
number of bytes from the data :class:`Provider` for their referenced
:attr:`~Pointer.data` object. Null pointer are ignored.
:param Provider provider: data :class:`Provider`.
:keyword bool nested: if ``True`` all :class:`P... |
def get_default_url():
from bugzilla.base import _open_bugzillarc
cfg = _open_bugzillarc()
if cfg:
cfgurl = cfg.defaults().get("url", None)
if cfgurl is not None:
log.debug("bugzillarc: found cli url=%s", cfgurl)
return cfgurl
return DEFAULT_BZ | Grab a default URL from bugzillarc [DEFAULT] url=X |
def QA_fetch_get_hkfund_list(ip=None, port=None):
global extension_market_list
extension_market_list = QA_fetch_get_extensionmarket_list(
) if extension_market_list is None else extension_market_list
return extension_market_list.query() | [summary]
Keyword Arguments:
ip {[type]} -- [description] (default: {None})
port {[type]} -- [description] (default: {None})
# 港股 HKMARKET
27 5 香港指数 FH
31 2 香港主板 KH
48 2 香港创业板 KG
49 2 香港基... |
def p_expr_new(p):
p[0] = ast.New(p[2], p[3], lineno=p.lineno(1)) | expr : NEW class_name_reference ctor_arguments |
def get_data_blob(self, rawtx):
tx = deserialize.tx(rawtx)
data = control.get_data_blob(tx)
return serialize.data(data) | TODO add docstring |
def lookupGeoInfo(positions):
list_data=[]
oldlat=0
oldlon=0
d={}
for pos in positions:
diff_lat=abs(float(pos[])-oldlat)
diff_lon=abs(float(pos[])-oldlon)
if (diff_lat>POS_THRESHOLD_DEG) or\
(diff_lon>POS_THRESHOLD_DEG):
d=lookup_by_latlo... | Looks up lat/lon info with goole given a list
of positions as parsed by parsePositionFile.
Returns google results in form of dicionary |
def DeleteGRRTempFile(path):
precondition.AssertType(path, Text)
if not os.path.isabs(path):
raise ErrorBadPath("Path must be absolute")
prefix = config.CONFIG["Client.tempfile_prefix"]
directories = [
GetTempDirForRoot(root) for root in config.CONFIG["Client.tempdir_roots"]
]
if not _CheckIf... | Delete a GRR temp file.
To limit possible damage the path must be absolute and either the
file must be within any of the Client.tempdir_roots or the file name
must begin with Client.tempfile_prefix.
Args:
path: path string to file to be deleted.
Raises:
OSError: Permission denied, or file not found... |
def connect_to_nsqd(self, host, port):
assert isinstance(host, string_types)
assert isinstance(port, int)
conn = AsyncConn(host, port, **self.conn_kwargs)
conn.on(, self._on_connection_identify)
conn.on(, self._on_connection_identify_response)
conn.on(, self._on... | Adds a connection to ``nsqd`` at the specified address.
:param host: the address to connect to
:param port: the port to connect to |
def delete_change_set(awsclient, change_set_name, stack_name):
client = awsclient.get_client()
response = client.delete_change_set(
ChangeSetName=change_set_name,
StackName=stack_name) | Delete specified change set. Currently we only use this during
automated regression testing. But we have plans so lets locate this
functionality here
:param awsclient:
:param change_set_name:
:param stack_name: |
def IsDerivedFunction(clean_lines, linenum):
for i in xrange(linenum, max(-1, linenum - 10), -1):
match = Match(r, clean_lines.elided[i])
if match:
line, _, closing_paren = CloseExpression(
clean_lines, i, len(match.group(1)))
return (closing_paren >= 0 and
Sea... | Check if current line contains an inherited function.
Args:
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
Returns:
True if current line contains a function with "override"
virt-specifier. |
def down_ec2(instance_id, region, access_key_id, secret_access_key):
conn = connect_to_ec2(region, access_key_id, secret_access_key)
instance = conn.stop_instances(instance_ids=instance_id)[0]
while instance.state != "stopped":
log_yellow("Instance state: %s" % instance.state)
slee... | shutdown of an existing EC2 instance |
def cartoon(args):
p = OptionParser(cartoon.__doc__)
opts, args, iopts = p.set_image_options(args, figsize="10x7")
fig = plt.figure(1, (iopts.w, iopts.h))
root = fig.add_axes([0, 0, 1, 1])
A = CartoonRegion(41)
A.draw(root, .35, .85, strip=False, color=False)
x1, x2 = A.x1, A.x2
... | %prog synteny.py
Generate cartoon illustration of SynFind. |
def getRaw(self, instance, **kwargs):
value = ObjectField.get(self, instance, **kwargs)
if callable(value):
value = value()
return value | Returns raw field value (possible wrapped in BaseUnit) |
def _try_coerce_args(self, values, other):
values = values.view()
if isinstance(other, bool):
raise TypeError
elif is_null_datetimelike(other):
other = tslibs.iNaT
elif isinstance(other, (datetime, np.datetime64, date)):
other = self._box_fu... | Coerce values and other to dtype 'i8'. NaN and NaT convert to
the smallest i8, and will correctly round-trip to NaT if converted
back in _try_coerce_result. values is always ndarray-like, other
may not be
Parameters
----------
values : ndarray-like
other : ndarra... |
def log_results(self, output_path=None, run_id=None):
best_ind = self.halloffame[0]
model_params = self.parse_individual(
best_ind)
if output_path is None:
output_path = os.getcwd()
if run_id is None:
run_id = .format(
dateti... | Saves files for the minimization.
Notes
-----
Currently saves a logfile with best individual and a pdb of
the best model. |
def hessian_component(self, index1, index2):
result = np.zeros((3, 3), float)
if index1 == index2:
for index3 in range(self.numc):
if self.scaling[index1, index3] > 0:
d_1 = 1/self.distances[index1, index3]
for (se, ve), (sg, v... | Compute the hessian of the energy for one atom pair |
def copy(self):
a = Motif()
a.__dict__ = self.__dict__.copy()
return a | m.copy() -- Return a 'deep' copy of the motif |
async def fastStreamedQuery(self, url, *, headers=None, verify=True):
response = await self.session.get(url,
headers=self._buildHeaders(headers),
timeout=HTTP_SHORT_TIMEOUT,
ssl=verify)
respon... | Send a GET request with short timeout, do not retry, and return streamed response. |
def solver(AA, N_max, symNx = 2, throw_out_modes=False):
angs = unroll_angles(AA.T[3:].T,np.ones(3))
symNz = 2
NNx = range(-N_max, N_max+1, symNx)
NNy = range(-N_max, N_max+1, symNz)
NNz = range(-N_max, N_max+1, symNz)
n_vectors = np.array([[i,j,k] for (i,j,k) in... | Constructs the matrix A and the vector b from a timeseries of toy
action-angles AA to solve for the vector x = (J_0,J_1,J_2,S...) where
x contains all Fourier components of the generating function with |n|<N_max |
def val_factory(val, datatypes):
exceptions = []
for dt in datatypes:
try:
if isinstance(val, dt):
return val
return type_handler_object(val, dt)
except Exception as e:
exceptions.append(str(e))
raise ValueError(.
... | return an instance of `val` that is of type `datatype`.
keep track of exceptions so we can produce meaningful error messages. |
def _getMemoryBit(cpu, bitbase, bitoffset):
assert bitbase.type ==
assert bitbase.size >= bitoffset.size
addr = bitbase.address()
offt = Operators.SEXTEND(bitoffset.read(), bitoffset.size, bitbase.size)
offt_is_neg = offt >= (1 << (bitbase.size - 1))
offt_in_byt... | Calculate address and bit offset given a base address and a bit offset
relative to that address (in the form of asm operands) |
def detect_volume_shadow_copies(self):
self._make_mountpoint(var_name=, suffix="vss", in_paths=True)
try:
_util.check_call_(["vshadowmount", "-o", str(self.offset), self.get_raw_path(), self._paths[]])
except Exception as e:
logger.exception("Failed mounting th... | Method to call vshadowmount and mount NTFS volume shadow copies.
:return: iterable with the :class:`Volume` objects of the VSS
:raises CommandNotFoundError: if the underlying command does not exist
:raises SubSystemError: if the underlying command fails
:raises NoMountpointAvailableErro... |
def format(self, vertices):
index = .join(str(vertices[vn].index) for vn in self.vnames)
vcom = .join(self.vnames)
return \
.format(
index, self.cells, self.name, vcom, self.grading.format()) | Format instance to dump
vertices is dict of name to Vertex |
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
post = self.object
topic = post.topic
try:
if hasattr(topic, ) and topic.poll.options.exists():
poll = topic.poll
context[] = poll
... | Returns the context data to provide to the template. |
def get_mac_dot_app_dir(directory):
return os.path.dirname(os.path.dirname(os.path.dirname(directory))) | Returns parent directory of mac .app
Args:
directory (str): Current directory
Returns:
(str): Parent directory of mac .app |
def checkCorpNums(self, MemberCorpNum, CorpNumList):
if CorpNumList == None or len(CorpNumList) < 1:
raise PopbillException(-99999999,"조죄할 사업자번호 목록이 입력되지 않았습니다.")
postData = self._stringtify(CorpNumList)
return self._httppost(,postData,MemberCorpNum) | 휴폐업조회 대량 확인, 최대 1000건
args
MemberCorpNum : 팝빌회원 사업자번호
CorpNumList : 조회할 사업자번호 배열
return
휴폐업정보 Object as List
raise
PopbillException |
def SLH_to_qutip(slh, full_space=None, time_symbol=None,
convert_as=):
if full_space:
if not full_space >= slh.space:
raise AlgebraError("full_space="+str(full_space)+" needs to "
"at least include slh.space = "+str(slh.space))
else:
... | Generate and return QuTiP representation matrices for the Hamiltonian
and the collapse operators. Any inhomogeneities in the Lindblad operators
(resulting from coherent drives) will be moved into the Hamiltonian, cf.
:func:`~qnet.algebra.circuit_algebra.move_drive_to_H`.
Args:
slh (SLH): The SL... |
def register(self, name, option):
if name in self._options:
raise ValueError("Option {0} already exists.".format(name))
if not isinstance(option, opt.Option):
raise TypeError("Options must be of type Option.")
self._options[name] = option | Register a new option with the namespace.
Args:
name (str): The name to register the option under.
option (option.Option): The option object to register.
Raises:
TypeError: If the option is not an option.Option object.
ValueError: If the name is ... |
def get_content(self, offset, size):
return _bfd.section_get_content(self.bfd, self._ptr, offset, size) | Return the specified number of bytes from the current section. |
def load_chkpt_vars(model_path):
    """Load all variables from a checkpoint into a dict.

    Args:
        model_path (str): path to a checkpoint (anything that
            ``get_checkpoint_path`` can resolve to a checkpoint prefix).

    Returns:
        dict: a name -> value dict for every variable in the checkpoint.
    """
    model_path = get_checkpoint_path(model_path)
    reader = tfv1.train.NewCheckpointReader(model_path)
    # Idiom fix: build the dict with a comprehension instead of a manual
    # append loop; iterating the shape map directly makes .keys() redundant.
    return {
        name: reader.get_tensor(name)
        for name in reader.get_variable_to_shape_map()
    }
def get_resource_object_doc_lines() -> List[str]:
for resource_name, a_type in ALL_RESOURCES.copy().items():
for prop_a_type in a_type.properties.values():
if issubclass(prop_a_type, Object):
resource_name = prop_a_type.title
if resource_name i... | Generate documentation lines for all collected resource objects.
As API documentation is generated we keep a running list of objects used
in request parameters and responses. This section will generate
documentation for each object and provide an inline reference in the API
documentation.
:return... |
def store_file(self, folder, name):
    """Store the uploaded request body as a file at *folder*/*name*.

    Returns:
        The filesystem path the upload was written to.
    """
    path = os.path.join(folder, name)
    # NOTE(review): the header key literal was stripped in this dump —
    # presumably 'Content-Length'; confirm against the original handler.
    length = self.headers[]
    # NOTE(review): open() mode literal stripped — presumably 'wb',
    # since raw request bytes are written below.
    with open(path, ) as sample:
        # Read exactly the declared number of bytes from the request stream.
        sample.write(self.rfile.read(int(length)))
    return path | Stores the uploaded file in the given path.
def for_model(self, model):
    """QuerySet of all comments for a particular model.

    *model* may be either a model class (comments for every instance of
    that model) or a model instance (comments for that object only).
    """
    content_type = ContentType.objects.get_for_model(model)
    queryset = self.get_queryset().filter(content_type=content_type)
    if not isinstance(model, models.Model):
        # A model class was given: every comment of this content type.
        return queryset
    # A concrete instance was given: narrow down to its primary key.
    pk_text = force_text(model._get_pk_val())
    return queryset.filter(object_pk=pk_text)
def create(self, name=None, **kwargs):
    """Create a new project.

    :param name: The name of the project.
    :returns: An instance of the newly created project.
    """
    # NOTE(review): the payload key literal was stripped in this dump —
    # presumably {'name': name}; restore it before use.
    data = self._client.api.create_project({: name})
    # Wrap the raw API response in this collection's model class.
    return self.Meta.model(data, client=self._client, collection=self) | Create a new project.
:param name: The name of the project.
:returns: An instance of the newly create project.
:rtype: renku.models.projects.Project |
def get_counter(self, transport, bucket, key, r=None, pr=None,
                basic_quorum=None, notfound_ok=None):
    """Get the value of a counter via *transport*.

    Deprecated Riak 1.4-style counter read; delegates straight to
    ``transport.get_counter``.

    NOTE(review): ``basic_quorum`` and ``notfound_ok`` are accepted but
    NOT forwarded to the transport — verify whether that is intentional
    (deprecated API) or a dropped-parameter bug.
    """
    return transport.get_counter(bucket, key, r=r, pr=pr) | get_counter(bucket, key, r=None, pr=None, basic_quorum=None,\
notfound_ok=None)
Gets the value of a counter.
.. deprecated:: 2.1.0 (Riak 2.0) Riak 1.4-style counters are
deprecated in favor of the :class:`~riak.datatypes.Counter`
datatype.
.. note:... |
def visible_width(string):
if in string:
string = RE_COLOR_ANSI.sub(, string)
try:
string = string.decode()
except (AttributeError, UnicodeEncodeError):
pass
width = 0
for char in string:
if unicodedata.east_asian_width(char) in (, ):
width +=... | Get the visible width of a unicode string.
Some CJK unicode characters are more than one byte unlike ASCII and latin unicode characters.
From: https://github.com/Robpol86/terminaltables/pull/9
:param str string: String to measure.
:return: String's width.
:rtype: int |
def _fix_sitk_bug(path, metadata):
    """Fix the SimpleITK Z-axis voxel-size bug for 3D images.

    There is a bug in SimpleITK for the Z axis in 3D images, so the slice
    spacing is re-read directly from the DICOM header.

    :param path: path to dicom file to read
    :param metadata: metadata dict to correct; ``metadata["voxelsize_mm"][0]``
        is overwritten in place
    :return: corrected metadata
    """
    ds = dicom.read_file(path)
    try:
        # SpacingBetweenSlices may be absent from the header; deliberately
        # best-effort — keep the original value and never raise.
        metadata["voxelsize_mm"][0] = ds.SpacingBetweenSlices
    except Exception as e:
        # Bug fix: the original passed ``e`` as a %-format argument with no
        # placeholder ('warning("Read dicom failed: ", e)'), which makes the
        # logging call itself fail to format. Use lazy %s formatting instead.
        logger.warning("Read dicom failed: %s", e)
    return metadata
def check_md5(filename, stored_md5):
computed_md5 = _get_file_md5(filename)
if stored_md5 != computed_md5:
print ("MD5 checksum of filename", filename,
"failed. Expected MD5 was", stored_md5,
"but computed MD5 was", computed_md5, ,
"Please check if the d... | Computes the md5 of filename and check if it matches with the supplied
string md5
Input
-----
filename : string
Path to a file.
md5 : string
Known md5 of filename to check against. |
def db_get(name, **connection_args):
dbname*
dbc = _connect(**connection_args)
if dbc is None:
return []
cur = dbc.cursor()
qry = (
)
args = {"dbname": name}
_execute(cur, qry, args)
if cur.rowcount:
rows = cur.fetchall()
return {: rows[0][0],
... | Return a list of databases of a MySQL server using the output
from the ``SELECT DEFAULT_CHARACTER_SET_NAME, DEFAULT_COLLATION_NAME FROM
INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME='dbname';`` query.
CLI Example:
.. code-block:: bash
salt '*' mysql.db_get test |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.