code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|
def get_stories(self, userids: Optional[List[int]] = None) -> Iterator[Story]:
if not userids:
data = self.context.graphql_query("d15efd8c0c5b23f0ef71f18bf363c704",
{"only_stories": True})["data"]["user"]
if data is None:
... | Get available stories from followees or all stories of users whose ID are given.
Does not mark stories as seen.
To use this, one needs to be logged in
:param userids: List of user IDs to be processed in terms of downloading their stories, or None. |
def raise_exception_if_baseline_file_is_unstaged(filename):
try:
files_changed_but_not_staged = subprocess.check_output(
[
,
,
,
],
).split()
except subprocess.CalledProcessError:
).format(
... | We want to make sure that if there are changes to the baseline
file, they will be included in the commit. This way, we can keep
our baselines up-to-date.
:raises: ValueError |
async def display_columns_and_rows(
self,
database,
table,
description,
rows,
link_column=False,
truncate_cells=0,
):
"Returns columns, rows for specified table - including fancy foreign key treatment"
table_metadata = self.ds.table_metadata(da... | Returns columns, rows for specified table - including fancy foreign key treatment |
def from_uint8(arr_uint8, shape, min_value=0.0, max_value=1.0):
    """Create a heatmaps object from a uint8 array (values 0..255).

    The array is scaled into [0.0, 1.0] and delegated to
    ``HeatmapsOnImage.from_0to1``.

    Parameters
    ----------
    arr_uint8 : (H,W) ndarray or (H,W,C) ndarray
        Heatmap(s) array, ``H`` height, ``W`` width, ``C`` channels.
    shape : tuple
        Shape of the underlying image.
    min_value, max_value : float
        Value range represented by 0.0 and 1.0 respectively.
    """
    scaled = arr_uint8.astype(np.float32) / 255.0
    return HeatmapsOnImage.from_0to1(
        scaled, shape, min_value=min_value, max_value=max_value)
Parameters
----------
arr_uint8 : (H,W) ndarray or (H,W,C) ndarray
Heatmap(s) array, where ``H`` is height, ``W`` is width and ``C`` is the number of heatmap channels.
Expected dtype ... |
def with_mfa(self, mfa_token):
    """Set the MFA token for the next request.

    ``mfa_token`` values are only good for one request; chain this into the
    protected action, e.g. ``account.with_mfa(application.totp.now()).pay(...)``.

    Note: Only useful for Application authentication.

    Args:
        mfa_token: the token string, or a zero-argument callable that
            produces one (invoked lazily here).

    Returns:
        self, to allow chaining.
    """
    # The stripped hasattr() argument was almost certainly "__call__";
    # callable() expresses the same check idiomatically.
    if callable(mfa_token):
        self.context.mfa_token = mfa_token()
    else:
        self.context.mfa_token = mfa_token
    return self
`mfa_token`s are only good for one request. Use this method to chain into
the protected action you want to perform.
Note: Only useful for Application authentication.
Usage:
account.with_mfa(application.totp.now()).pay(...)
Args:... |
def get_best_splitting_attr(self):
    """Return the name of the attribute with the highest gain.

    Ties are broken by tuple comparison (larger attribute name wins);
    returns None when there are no attributes at all.
    """
    candidates = [(-1e999999, None)]  # sentinel: -inf gain, no attribute
    candidates.extend((self.get_gain(attr), attr) for attr in self.attributes)
    return max(candidates)[1]
def decrease_writes_in_units(
current_provisioning, units, min_provisioned_writes, log_tag):
updated_provisioning = int(current_provisioning) - int(units)
min_provisioned_writes = __get_min_writes(
current_provisioning,
min_provisioned_writes,
log_tag)
if updated_provis... | Decrease the current_provisioning with units units
:type current_provisioning: int
:param current_provisioning: The current provisioning
:type units: int
:param units: How many units should we decrease with
:returns: int -- New provisioning value
:type min_provisioned_writes: int
:param min... |
def safe_call(cls, method, *args):
    """Call a remote api method but don't raise if an error occurred.

    Identical to ``call`` except that ``safe=True`` is forwarded so a
    failing remote call is swallowed by the transport layer.
    """
    forwarded = (method,) + args
    return cls.call(*forwarded, safe=True)
def ap_state(value, failure_string=None):
    """Convert a state's name, postal abbreviation or FIPS to A.P. style.

    Example usage:
        >> ap_state("California")
        'Calif.'

    On any lookup failure returns ``failure_string`` if given, otherwise
    echoes back ``value`` unchanged (best-effort behavior).
    """
    try:
        return statestyle.get(value).ap
    # Was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; `Exception` keeps the best-effort fallback
    # without that hazard.
    except Exception:
        if failure_string:
            return failure_string
        else:
            return value
Example usage:
>> ap_state("California")
'Calif.' |
def _get_path(entity_id):
    """Get the entity_id as a string if it is a Reference.

    @param entity_id The ID, either a reference (with a .path() method)
        or a string, of the entity to get.
    @return entity_id as a string, with any leading "cs:" scheme stripped.
    """
    try:
        path = entity_id.path()
    except AttributeError:
        # Not a Reference; assume it is already a string id.
        path = entity_id
    # NOTE(review): the prefix literal was lost in extraction; "cs:" is the
    # charm store scheme and matches the 3-character strip below — confirm.
    if path.startswith("cs:"):
        path = path[3:]
    return path
@param entity_id The ID either a reference or a string of the entity
to get.
@return entity_id as a string |
def downgrades(src):
def _(f):
destination = src - 1
@do(operator.setitem(_downgrade_methods, destination))
@wraps(f)
def wrapper(op, conn, version_info_table):
conn.execute(version_info_table.delete())
f(op)
write_version_info(conn, versio... | Decorator for marking that a method is a downgrade to a version to the
previous version.
Parameters
----------
src : int
The version this downgrades from.
Returns
-------
decorator : callable[(callable) -> callable]
The decorator to apply. |
def observe(self, seconds=None):
if self._observer.isRunning:
return False
if seconds is not None:
timeout = time.time() + seconds
else:
timeout = None
while (not self._observer.isStopped) and (seconds is None or ... | Begins the observer loop (synchronously).
Loops for ``seconds`` or until this region's stopObserver() method is called.
If ``seconds`` is None, the observer loop cycles until stopped. If this
method is called while the observer loop is already running, it returns False.
Returns True if... |
def infer_named_tuple(node, context=None):
tuple_base_name = nodes.Name(name="tuple", parent=node.root())
class_node, name, attributes = infer_func_form(
node, tuple_base_name, context=context
)
call_site = arguments.CallSite.from_call(node)
func = next(extract_node("import collections;... | Specific inference function for namedtuple Call node |
def all_devices(cl_device_type=None, platform=None):
if isinstance(cl_device_type, str):
cl_device_type = device_type_from_string(cl_device_type)
runtime_list = []
if platform is None:
platforms = cl.get_platforms()
else:
platforms = [platfo... | Get multiple device environments, optionally only of the indicated type.
This will only fetch devices that support double point precision.
Args:
cl_device_type (cl.device_type.* or string): The type of the device we want,
can be a opencl device type or a string matching 'GP... |
def resolve_polytomy(
self,
dist=1.0,
support=100,
recursive=True):
nself = self.copy()
nself.treenode.resolve_polytomy(
default_dist=dist,
default_support=support,
recursive=recursive)
nself._coords.update()
re... | Returns a copy of the tree with all polytomies randomly resolved.
Does not transform tree in-place. |
def enr_at_fpr(fg_vals, bg_vals, fpr=0.01):
pos = np.array(fg_vals)
neg = np.array(bg_vals)
s = scoreatpercentile(neg, 100 - fpr * 100)
neg_matches = float(len(neg[neg >= s]))
if neg_matches == 0:
return float("inf")
return len(pos[pos >= s]) / neg_matches * len(neg) / float(len(pos... | Computes the enrichment at a specific FPR (default 1%).
Parameters
----------
fg_vals : array_like
The list of values for the positive set.
bg_vals : array_like
The list of values for the negative set.
fpr : float, optional
The FPR (between 0.0 and 1.0).
Retur... |
def main(params=None):
if params == None:
parser = getParser()
args = parser.parse_args(params)
else:
args = params
results = []
print(general.title(banner.text))
sayingHello = .format(general.LICENSE_URL)
print(general.info(sayingHello))
if args.lice... | Main function to launch usufy.
The function is created in this way so as to let other applications make
use of the full configuration capabilities of the application. The
parameters received are used as parsed by this modules `getParser()`.
Args:
-----
params: A list with the parameters as... |
def _find_ancestor(self, task_spec):
    """Return the ancestor that has the given task spec assigned.

    If no such ancestor was found, the root task is returned.

    :type task_spec: TaskSpec
    :param task_spec: The wanted task spec.
    :rtype: Task
    :returns: The ancestor.
    """
    node = self
    while node.parent is not None:
        if node.parent.task_spec == task_spec:
            return node.parent
        node = node.parent
    return node
If no such ancestor was found, the root task is returned.
:type task_spec: TaskSpec
:param task_spec: The wanted task spec.
:rtype: Task
:returns: The ancestor. |
def det_curve(y_true, scores, distances=False):
if distances:
scores = -scores
fpr, tpr, thresholds = sklearn.metrics.roc_curve(
y_true, scores, pos_label=True)
fnr = 1 - tpr
if distances:
thresholds = -thresholds
eer_index = np.where(fpr > fnr)[0][0]
... | DET curve
Parameters
----------
y_true : (n_samples, ) array-like
Boolean reference.
scores : (n_samples, ) array-like
Predicted score.
distances : boolean, optional
When True, indicate that `scores` are actually `distances`
Returns
-------
fpr : numpy array
... |
def slice(self, start, stop=None, axis=0):
if stop is None:
stop = start
axis = self.get_axis_number(axis)
start_bin = max(0, self.get_axis_bin_index(start, axis))
stop_bin = min(len(self.bin_centers(axis)) - 1,
self.get_axis_bin... | Restrict histogram to bins whose data values (not bin numbers) along axis are between start and stop
(both inclusive). Returns d dimensional histogram. |
def get_between_times(self, t1, t2, target=None):
try:
t1 = t1.isoformat()
t2 = t2.isoformat()
except AttributeError:
pass
myquery = self._get_time_query(t1, t2)
if target is not None:
myquery["target"] = ... | Query for OPUS data between times t1 and t2.
Parameters
----------
t1, t2 : datetime.datetime, strings
Start and end time for the query. If type is datetime, will be
converted to isoformat string. If type is string already, it needs
to be in an accepted inter... |
def filter_queryset(self, request, queryset, view):
start_value = self.get_start(request)
if start_value:
queryset = self.apply_published_filter(queryset, "after", start_value)
end_value = self.get_end(request)
if end_value:
quer... | Apply the relevant behaviors to the view queryset. |
def read(self, nrml_file, validate=False,
simple_fault_spacing=1.0, complex_mesh_spacing=5.0,
mfd_spacing=0.1):
self.source_file = nrml_file
if validate:
converter = SourceConverter(1.0, simple_fault_spacing,
complex_... | Build the source model from nrml format |
def local_temp_dir():
    """Create a local temporary directory, removed when no longer needed.

    Failure to remove it is ignored.

    :return: Path to the temporary directory.
    :rtype: unicode
    """
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        # Without try/finally, an exception raised in the calling `with`
        # body would skip cleanup and leak the directory.
        shutil.rmtree(path, ignore_errors=True)
so will be ignored.
:return: Path to the temporary directory.
:rtype: unicode |
def _incr_exceptions(self, conn):
    """Record one more exception against the current connection.

    :param psycopg2.extensions.connection conn: the psycopg2 connection
    """
    tracked = self._pool_manager.get_connection(self.pid, conn)
    tracked.exceptions += 1
:param psycopg2.extensions.connection conn: the psycopg2 connection |
def QRatio(s1, s2, force_ascii=True, full_process=True):
if full_process:
p1 = utils.full_process(s1, force_ascii=force_ascii)
p2 = utils.full_process(s2, force_ascii=force_ascii)
else:
p1 = s1
p2 = s2
if not utils.validate_string(p1):
return 0
if not utils... | Quick ratio comparison between two strings.
Runs full_process from utils on both strings
Short circuits if either of the strings is empty after processing.
:param s1:
:param s2:
:param force_ascii: Allow only ASCII characters (Default: True)
:full_process: Process inputs, used here to avoid do... |
def _parse_handler_result(self, result):
    """Parse the item(s) returned by a handler implementation.

    Handlers may return a bare payload, or a list/tuple whose first item
    is the payload; the full sequence (as a list) is preserved so it can
    be passed on to the HTTP layer's Response constructor.

    Returns a ``(payload, list_result)`` pair.
    """
    if not isinstance(result, (list, tuple)):
        return result, [""]
    return result[0], list(result)
Handlers may return a single item (payload), or a tuple that gets
passed to the Response class __init__ method of your HTTP layer.
_parse_handler_result separates the payload from the rest the tuple,
as well as providing the t... |
def merge_keywords(x, y):
    """Shallow-merge two dicts into a new dict; *y* wins on key clashes.

    Neither input is modified.
    """
    merged = x.copy()
    merged.update(y)
    return merged
def get_function_from_config(item):
    """Import and return the function named by configuration entry *item*.

    The configured value is a dotted path; everything before the final
    dot is the module, the rest is the attribute to fetch.
    """
    dotted_path = get_configuration().get(item)
    module_path, func_name = dotted_path.rsplit(".", 1)
    return getattr(importlib.import_module(module_path), func_name)
def split_bezier(bpoints, t):
def split_bezier_recursion(bpoints_left_, bpoints_right_, bpoints_, t_):
if len(bpoints_) == 1:
bpoints_left_.append(bpoints_[0])
bpoints_right_.append(bpoints_[0])
else:
new_points = [None]*(len(bpoints_) - 1)
bpoint... | Uses deCasteljau's recursion to split the Bezier curve at t into two
Bezier curves of the same order. |
def get_parser(segmenter, **options):
    """Get a parser.

    Args:
        segmenter (str): Segmenter to use ('nlapi', 'mecab', or
            'tinysegmenter').
        options (:obj:`dict`, optional): Optional settings.

    Returns:
        Parser (:obj:`budou.parser.Parser`)

    Raises:
        ValueError: If unsupported segmenter is specified.
    """
    # NOTE(review): the segmenter literals were lost in extraction; these
    # are budou's documented segmenter names — confirm against the parsers.
    if segmenter == 'nlapi':
        return NLAPIParser(**options)
    elif segmenter == 'mecab':
        return MecabParser()
    elif segmenter == 'tinysegmenter':
        return TinysegmenterParser()
    else:
        raise ValueError('Segmenter {} is not supported.'.format(segmenter))
Args:
segmenter (str): Segmenter to use.
options (:obj:`dict`, optional): Optional settings.
Returns:
Parser (:obj:`budou.parser.Parser`)
Raises:
ValueError: If unsupported segmenter is specified. |
def _read_opt_type(self, kind):
    """Read option type field.

    Positional arguments:
        * kind -- int, option kind value

    Returns:
        * dict -- extracted IPv6_Opts option type: raw value, the action
          for unrecognised options (high two bits), and the
          "may change en route" flag (third bit).
    """
    bits = bin(kind)[2:].zfill(8)
    return dict(
        value=kind,
        action=_IPv6_Opts_ACT.get(bits[:2]),
        # bool(int(...)) replaces the redundant `True if ... else False`.
        change=bool(int(bits[2], base=2)),
    )
Positional arguments:
* kind -- int, option kind value
Returns:
* dict -- extracted IPv6_Opts option
Structure of option type field [RFC 791]:
Octets Bits Name Descriptions
0 ... |
def find_types_removed_from_unions(
old_schema: GraphQLSchema, new_schema: GraphQLSchema
) -> List[BreakingChange]:
old_type_map = old_schema.type_map
new_type_map = new_schema.type_map
types_removed_from_union = []
for old_type_name, old_type in old_type_map.items():
new_type = new_ty... | Find types removed from unions.
Given two schemas, returns a list containing descriptions of any breaking changes
in the new_schema related to removing types from a union type. |
def strip_possessives(self, word):
    """Get rid of apostrophes indicating possession.

    NOTE(review): both suffix literals were lost in extraction; the
    original slice widths were 3 and 1 characters. The reconstruction
    below uses the conventional possessive forms ("'s" and trailing "'")
    with matching slice widths — confirm against the original corpus.
    """
    if word.endswith("'s"):
        return word[:-2]
    elif word.endswith("'"):
        return word[:-1]
    else:
        return word
def list_snapshots(self):
    """Return all snapshots that belong to this volume."""
    mine = []
    for snapshot in self.manager.list_snapshots():
        if snapshot.volume_id == self.id:
            mine.append(snapshot)
    return mine
def matches(self, pattern, flags=0):
    """Ensure :attr:`subject` matches regular expression *pattern*.

    Raises via the configured error factory on mismatch; otherwise
    returns a ChainInspector for fluent chaining.
    """
    found = re.match(pattern, self._subject, flags)
    if found is None:
        raise self._error_factory(_format("Expected {} to match {}", self._subject, pattern))
    return ChainInspector(self._subject)
def describe_config_variable(self, config_id):
config = self._config_variables.get(config_id)
if config is None:
return [Error.INVALID_ARRAY_KEY, 0, 0, 0, 0]
packed_size = config.total_size
packed_size |= int(config.variable) << 15
return [0, 0, 0, config_... | Describe the config variable by its id. |
def loadPng(varNumVol, tplPngSize, strPathPng):
print()
lstPngPaths = [None] * varNumVol
for idx01 in range(0, varNumVol):
lstPngPaths[idx01] = (strPathPng + str(idx01) + )
aryPngData = np.zeros((tplPngSize[0],
tplPngSize[1],
... | Load PNG files.
Parameters
----------
varNumVol : float
Number of volumes, i.e. number of time points in all runs.
tplPngSize : tuple
Shape of the stimulus image (i.e. png).
strPathPng: str
Path to the folder cointaining the png files.
Returns
-------
aryPngData ... |
def print_output(self, per_identity_data: ) -> None:
if not self._window_bts:
data = per_identity_data.flatMap(
lambda x: [json.dumps(data, cls=BlurrJSONEncoder) for data in x[1][0].items()])
else:
data = per_identity_data.map(
... | Basic helper function to write data to stdout. If window BTS was provided then the window
BTS output is written, otherwise, the streaming BTS output is written to stdout.
WARNING - For large datasets this will be extremely slow.
:param per_identity_data: Output of the `execute()` call. |
def _type_string(label, case=None):
    """Shortcut for string-like fields.

    Returns the (label, search-function, render-function, default) tuple
    used to describe such a field.
    """
    def render(s):
        return abstractRender.default(s, case=case)
    return label, abstractSearch.in_string, render, ""
def serialize(self, data=None):
    """Transform the object into an acceptable format for transmission.

    Writes *data* to the response (tagging the response with this
    serializer's preferred media type) and returns it unchanged.

    @throws ValueError
        To indicate this serializer does not support the encoding of the
        specified object.
    """
    if data is not None and self.response is not None:
        # NOTE(review): the response key was lost in extraction;
        # 'Content-Type' is the conventional header set from media_types.
        self.response['Content-Type'] = self.media_types[0]
        self.response.write(data)
    return data
@throws ValueError
To indicate this serializer does not support the encoding of the
specified object. |
def is_vert_aligned(c):
    """Return true if all the components of c are vertically aligned.

    Vertical alignment means that the bounding boxes of each Mention of c
    share a similar x-axis value in the visual rendering of the document.

    :param c: The candidate to evaluate
    :rtype: boolean
    """
    checks = []
    for i in range(len(c)):
        span = _to_span(c[i])
        checks.append(
            span.sentence.is_visual()
            and bbox_vert_aligned(
                bbox_from_span(span), bbox_from_span(_to_span(c[0]))
            )
        )
    return all(checks)
Vertical alignment means that the bounding boxes of each Mention of c
shares a similar x-axis value in the visual rendering of the document.
:param c: The candidate to evaluate
:rtype: boolean |
def from_function(cls, function):
module_name = function.__module__
function_name = function.__name__
class_name = ""
function_source_hasher = hashlib.sha1()
try:
source = inspect.getsource(function)
if sys.version_info[... | Create a FunctionDescriptor from a function instance.
This function is used to create the function descriptor from
a python function. If a function is a class function, it should
not be used by this function.
Args:
cls: Current class which is required argument for classmeth... |
def write_to_file(filename, content):
if not config["destdir"]:
print("{destdir} config variable not present. Did you forget to run init()?")
sys.exit(8)
abs_filename = os.path.abspath(config["destdir"] + "/" + filename)
abs_filepath = os.path.dirname(abs_filename)
if not os.path.ex... | Writes content to the given file. The file's directory will be created if needed.
:param filename: name of the output file, relative to the "destination folder" provided by the user
:param content: iterable (line-by-line) that should be written to the file. Either a list or a generator. Each
... |
def namer(cls, imageUrl, pageUrl):
    """Use strip index number for image name.

    NOTE(review): the regex and rsplit literals were lost in extraction;
    reconstructed as the trailing digits of the page URL and the text
    after the final '.' of the image URL — confirm against the comic's
    actual URL scheme.
    """
    index = int(compile(r"(\d+)$").search(pageUrl).group(1))
    ext = imageUrl.rsplit(".", 1)[1]
    return "SnowFlakes-%d.%s" % (index, ext)
def visit(self, event):
to_visit = False
if event.arr_time_ut <= self.min_transfer_time+self.get_min_visit_time():
to_visit = True
else:
for ve in self.visit_events:
if (event.trip_I == ve.trip_I) and event.arr_time_ut < ve.arr_time_ut:
... | Visit the stop if it has not been visited already by an event with
earlier arr_time_ut (or with other trip that does not require a transfer)
Parameters
----------
event : Event
an instance of the Event (namedtuple)
Returns
-------
visited : bool
... |
def autocommit(data_access):
    """Context-manager generator that makes statements autocommit.

    Any pending work is committed up front, autocommit is switched on
    for the duration of the block, and the previous setting is restored
    afterwards — even on error.

    :param data_access: a DataAccess instance
    """
    if not data_access.autocommit:
        data_access.commit()
    previous = data_access.autocommit
    data_access.autocommit = True
    try:
        yield data_access
    finally:
        data_access.autocommit = previous
:param data_access: a DataAccess instance |
def _node_le(self, node_self, node_other):
for x in [, , ]:
if node_self.__getattribute__(x) != node_other.__getattribute__(x):
return False
for a in node_self.attrib:
if a not in node_other.attrib or \
node_self.attrib[a] != node_other.at... | _node_le
Low-level api: Return True if all descendants of one node exist in the
other node. Otherwise False. This is a recursive method.
Parameters
----------
node_self : `Element`
A node to be compared.
node_other : `Element`
Another node to b... |
def cli(env, identifier, enabled, port, weight, healthcheck_type, ip_address):
mgr = SoftLayer.LoadBalancerManager(env.client)
loadbal_id, group_id = loadbal.parse_id(identifier)
ip_address_id = None
if ip_address:
ip_service = env.client[]
ip_record = ip_service.getByIpAddr... | Adds a new load balancer service. |
def update_checkplotdict_nbrlcs(
checkplotdict,
timecol, magcol, errcol,
lcformat=,
lcformatdir=None,
verbose=True,
):
bests
checkplot. This is used to extract the correct times-series from the
neighborsve stored
your lcformat description JSONs, other... | For all neighbors in a checkplotdict, make LCs and phased LCs.
Parameters
----------
checkplotdict : dict
This is the checkplot to process. The light curves for the neighbors to
the object here will be extracted from the stored file paths, and this
function will make plots of these... |
def parse_field(fld, selectable, aggregated=True, default_aggregation=):
aggregation_lookup = {
: func.sum,
: func.min,
: func.max,
: func.avg,
: func.count,
: lambda fld: func.count(distinct(fld)),
: lambda fld: func.date_trunc(, fld),
... | Parse a field object from yaml into a sqlalchemy expression |
def stop(opts, bot, event):
name = opts[]
slack_username = opts[]
now = datetime.datetime.now()
delta = now - bot.timers.pop(name)
response = bot.stop_fmt.format(delta)
if slack_username:
mention =
users = bot.slack.users.list().body[]
for user in user... | Usage: stop [--name=<name>] [--notify=<slack_username>]
Stop a timer.
_name_ works the same as for `start`.
If given _slack_username_, reply with an at-mention to the given user. |
def _gpdfit(x):
prior_bs = 3
prior_k = 10
len_x = len(x)
m_est = 30 + int(len_x ** 0.5)
b_ary = 1 - np.sqrt(m_est / (np.arange(1, m_est + 1, dtype=float) - 0.5))
b_ary /= prior_bs * x[int(len_x / 4 + 0.5) - 1]
b_ary += 1 / x[-1]
k_ary = np.log1p(-b_ary[:, None] * x).mean(axis=1) ... | Estimate the parameters for the Generalized Pareto Distribution (GPD).
Empirical Bayes estimate for the parameters of the generalized Pareto
distribution given the data.
Parameters
----------
x : array
sorted 1D data array
Returns
-------
k : float
estimated shape para... |
def l2traceroute_input_rbridge_id(self, **kwargs):
config = ET.Element("config")
l2traceroute = ET.Element("l2traceroute")
config = l2traceroute
input = ET.SubElement(l2traceroute, "input")
rbridge_id = ET.SubElement(input, "rbridge-id")
rbridge_id.text = kwargs.... | Auto Generated Code |
def to_grpc_address(target: str) -> str:
    """Convert a standard gRPC target to one that is supported by grpcio.

    :param target: the server address.
    :returns: the converted address.
    :raises ValueError: for unsupported dns:// targets.
    """
    parsed = urlparse(target)
    if parsed.scheme == "dns":
        raise ValueError("dns:// not supported")
    if parsed.scheme == "unix":
        return "unix:" + parsed.path
    return parsed.netloc
:param target: the server address.
:returns: the converted address. |
def download(ctx):
user, project_name = get_project_or_local(ctx.obj.get())
try:
PolyaxonClient().project.download_repo(user, project_name)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error(.format(project_name))
Printer.print... | Download code of the current project. |
def passed(self):
    """Return all the passing testcases.

    :return: list of testcases that neither failed nor were skipped
    """
    passing = []
    for case in self.all():
        if case.failed() or case.skipped():
            continue
        passing.append(case)
    return passing
:return: |
def user_identity_show(self, user_id, id, **kwargs):
    """Show a single user identity.

    https://developer.zendesk.com/rest_api/docs/core/user_identities#show-identity
    """
    # The original had an unterminated string where this docstring is.
    api_path = "/api/v2/users/{user_id}/identities/{id}.json"
    api_path = api_path.format(user_id=user_id, id=id)
    return self.call(api_path, **kwargs)
def bell(self, percent=0, onerror=None):
    """Ring the bell at the volume *percent*, relative to the base volume.

    See XBell(3X11).
    """
    request.Bell(display=self.display, percent=percent, onerror=onerror)
volume. See XBell(3X11). |
def _validate_and_parse(self, batch_object):
if not waffle.waffle().is_enabled(waffle.ENABLE_COMPLETION_TRACKING):
raise ValidationError(
_("BlockCompletion.objects.submit_batch_completion should not be called when the feature is disabled.")
)
for key in... | Performs validation on the batch object to make sure it is in the proper format.
Parameters:
* batch_object: The data provided to a POST. The expected format is the following:
{
"username": "username",
"course_key": "course-key",
"blocks":... |
def rgb_to_websafe(r, g=None, b=None, alt=False):
    """Convert the color from RGB to 'web safe' RGB.

    Parameters:
      :r: The Red component value [0...1], or a (r, g, b) sequence.
      :g: The Green component value [0...1]
      :b: The Blue component value [0...1]
      :alt: If True, use the alternative color instead of the nearest
        one. Can be used for dithering.
    """
    # isinstance() also accepts list/tuple subclasses, unlike the
    # original `type(r) in [list, tuple]` check.
    if isinstance(r, (list, tuple)):
        r, g, b = r
    return tuple(_websafe_component(v, alt) for v in (r, g, b))
Parameters:
:r:
The Red component value [0...1]
:g:
The Green component value [0...1]
:b:
The Blue component value [0...1]
:alt:
If True, use the alternative color instead of the nearest one.
Can be used for dithering.
Retu... |
def add_device_callback(self, devices, callback):
if not devices:
return False
if not isinstance(devices, (tuple, list)):
devices = [devices]
for device in devices:
device_id = device
if isinstance(device, Abod... | Register a device callback. |
def calf(self, spec):
if not isinstance(spec, Spec):
raise TypeError()
if not spec.get(BUILD_DIR):
tempdir = realpath(mkdtemp())
spec.advise(CLEANUP, shutil.rmtree, tempdir)
build_dir = join(tempdir, )
mkdir(build_dir)
sp... | Typical safe usage is this, which sets everything that could be
problematic up.
Requires the filename which everything will be produced to. |
def hops(node1, node2):
    """Return the hop count between node1 and node2.

    0 for the same node (or for nodes with no shared link); 1 when the
    nodes sit on the same link, i.e. share at least one interface.
    """
    if node1 == node2:
        return 0
    shared = set(node1.interfaces) & set(node2.interfaces)
    return 1 if shared else 0
def qnh_estimate(self):
alt_gps = self.master.field(, , 0) * 0.001
pressure2 = self.master.field(, , 0)
ground_temp = self.get_mav_param(, 21)
temp = ground_temp + 273.15
pressure1 = pressure2 / math.exp(math.log(1.0 - (alt_gps / (153.8462 * temp))) / 0.190259)
r... | estimate QNH pressure from GPS altitude and scaled pressure |
def device_statistics(fritz, args):
    """Command that prints the device statistics for the given AIN."""
    print(fritz.get_device_statistics(args.ain))
def get_role_by_code(role_code, **kwargs):
    """Get a role by its code.

    :raises ResourceNotFoundError: when no role carries *role_code*.
    """
    try:
        return db.DBSession.query(Role).filter(Role.code == role_code).one()
    except NoResultFound:
        raise ResourceNotFoundError("Role not found (role_code={})".format(role_code))
def is_text_visible(driver, text, selector, by=By.CSS_SELECTOR):
    """Return whether the specified text is visible in the given selector.

    @Params
    driver - the webdriver object (required)
    text - the text string to search for
    selector - the locator that is used (required)
    by - the method to search for the locator (Default: By.CSS_SELECTOR)
    @Returns
    Boolean
    """
    try:
        element = driver.find_element(by=by, value=selector)
        visible = element.is_displayed()
        return visible and text in element.text
    except Exception:
        # Any lookup/staleness error means "not visible".
        return False
@Params
driver - the webdriver object (required)
text - the text string to search for
selector - the locator that is used (required)
by - the method to search for the locator (Default: By.CSS_SELECTOR)
@Returns
Boolean ... |
def get_active_for(self, user, user_agent=_MARK, ip_address=_MARK):
conditions = [LoginSession.user == user]
if user_agent is not _MARK:
if user_agent is None:
user_agent = request.environ.get("HTTP_USER_AGENT", "")
conditions.append(LoginSession.user_ag... | Return last known session for given user.
:param user: user session
:type user: `abilian.core.models.subjects.User`
:param user_agent: *exact* user agent string to lookup, or `None` to have
user_agent extracted from request object. If not provided at all, no
filtering on user_a... |
def clean_headers(headers):
clean = {}
try:
for k, v in six.iteritems(headers):
if not isinstance(k, six.binary_type):
k = str(k)
if not isinstance(v, six.binary_type):
v = str(v)
clean[_helpers._to_bytes(k)] = _helpers._to_bytes(v... | Forces header keys and values to be strings, i.e not unicode.
The httplib module just concats the header keys and values in a way that
may make the message header a unicode string, which, if it then tries to
contatenate to a binary request body may result in a unicode decode error.
Args:
heade... |
def get_step(self, grad):
    """Compute the next gradient-descent step without applying it.

    The squared-gradient accumulator (``_momentum``) is lazily initialised
    and updated in place; the returned step is meant to be added to the
    parameters by the caller, so they can be updated in place.
    """
    if self._momentum is None:
        self._momentum = self.initial_accumulator_value * np.ones_like(grad)
    self._momentum += grad ** 2
    scale = np.sqrt(self._momentum)
    return self.learning_rate * grad / scale
Returns the step rather than performing the update so that
parameters can be updated in place rather than overwritten.
Examples
--------
>>> gradient = # ...
>>> optimizer = AdaGradOptimizer(0.01)
... |
def parse_args(self, args, scope):
arguments = list(zip(args,
[] * len(args))) if args and args[0] else None
zl = itertools.zip_longest if sys.version_info[
0] == 3 else itertools.izip_longest
if self.args:
parsed = [
... | Parse arguments to mixin. Add them to scope
as variables. Sets upp special variable @arguments
as well.
args:
args (list): arguments
scope (Scope): current scope
raises:
SyntaxError |
def put_settings(self, body=None, params=None):
    """Update cluster wide specific settings.

    `<http://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html>`_

    :arg body: The settings to be updated. Can be either `transient` or
        `persistent` (survives cluster restart).

    NOTE(review): the HTTP verb and endpoint literals were lost in
    extraction; 'PUT' '/_cluster/settings' matches this API's documented
    endpoint — confirm against the client version in use.
    """
    return self.transport.perform_request('PUT', '/_cluster/settings',
                                          params=params, body=body)
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html>`_
:arg body: The settings to be updated. Can be either `transient` or
`persistent` (survives cluster restart).
:arg flat_settings: Return settings in... |
def get_distribute_verbatim_metadata(self):
    """Gets the metadata for the distribute verbatim rights flag.

    return: (osid.Metadata) - metadata for the distribution rights fields
    *compliance: mandatory -- This method must be implemented.*
    """
    # NOTE(review): both mapping keys were lost in extraction; the names
    # below follow the osid accessor template for this flag — confirm
    # against _mdata/_my_map layouts.
    metadata = dict(self._mdata['distribute_verbatim'])
    metadata.update({'existing_boolean_values': self._my_map['distributeVerbatim']})
    return Metadata(**metadata)
return: (osid.Metadata) - metadata for the distribution rights
fields
*compliance: mandatory -- This method must be implemented.* |
def moveoutletstostrm(np, flowdir, streamRaster, outlet, modifiedOutlet,
workingdir=None, mpiexedir=None,
exedir=None, log_file=None, runtime_file=None, hostfile=None):
fname = TauDEM.func_name()
return TauDEM.run(FileClass.get_executable_full... | Run move the given outlets to stream |
def find_primitive(cell, symprec=1e-5):
lattice, positions, numbers = spg.find_primitive(cell.totuple(), symprec)
if lattice is None:
return None
else:
return Atoms(numbers=numbers,
scaled_positions=positions,
cell=lattice,
... | A primitive cell is searched in the input cell. When a primitive
cell is found, an object of Atoms class of the primitive cell is
returned. When not, None is returned. |
def output(self, value):
    """SPL output port assignment expression.

    Delegates to the parent class's ``output`` with this operator's
    stream as the target port.

    Arguments:
        value(str): SPL expression used for an output assignment. This can
            be a string, a constant, or an :py:class:`Expression`.

    Returns:
        Expression: Output assignment expression that is valid in the
        context of this operator.
    """
    return super(Map, self).output(self.stream, value)
Arguments:
value(str): SPL expression used for an output assignment. This can be a string, a constant, or an :py:class:`Expression`.
Returns:
Expression: Output assignment expression that is valid as a the context of this operator. |
def get(self, name, acc=None, default=None):
    """Return the named config for the given account.

    If an account is given, first checks the account space for the name.
    If no account given, or the name is not found in the account space,
    look for the name in the global config space. If still not found,
    return the default.

    NOTE(review): the account-space key was lost in extraction;
    'accounts' is a guess — confirm against the data layout.
    """
    accounts = self.data['accounts']
    if acc in accounts and name in accounts[acc]:
        return accounts[acc][name]
    if name in self.data:
        return self.data[name]
    return default
If an account is given, first checks the account space for the name.
If no account given, or if the name not found in the account space,
look for the name in the global config space. If still not found,
return the default, if given, otherw... |
def shell_sqlalchemy(session: SqlalchemySession, backend: ShellBackend):
    """Launch the interactive shell with the SQLAlchemy DB session included.

    NOTE(review): the namespace key was lost in extraction; 'session' is
    the conventional name for exposing the DB session — confirm against
    the shell backend's documentation.
    """
    namespace = {'session': session}
    namespace.update(backend.get_namespace())
    embed(user_ns=namespace, header=backend.header)
def count_leases_by_owner(self, leases):
    """Return a dictionary mapping each current owner to its lease count."""
    tally = Counter(lease.owner for lease in leases)
    return dict(tally)
def _get_network(project_id, network_name, service):
    """Fetch the network resource (selfLink) for *network_name* via the service."""
    request = service.networks().get(project=project_id, network=network_name)
    return request.execute()
def get_attrs(cls):
ignore = dir(type(, (object,), {})) + []
attrs = [
item for item in inspect.getmembers(cls) if item[0] not in ignore
and not isinstance(
item[1], (
types.FunctionType,
types.MethodType,
... | Get all class attributes ordered by definition |
def update_dataset_marker(self):
start_time = self.parent.overview.start_time
markers = []
if self.parent.info.markers is not None:
markers = self.parent.info.markers
self.idx_marker.clearContents()
self.idx_marker.setRowCount(len(markers))
for i, ... | Update markers which are in the dataset. It always updates the list
of events. Depending on the settings, it might add the markers to
overview and traces. |
def from_string(contents):
lines = [l.strip() for l in contents.split("\n")]
link0_patt = re.compile(r"^(%.+)\s*=\s*(.+)")
link0_dict = {}
for i, l in enumerate(lines):
if link0_patt.match(l):
m = link0_patt.match(l)
link0_dict[m.grou... | Creates GaussianInput from a string.
Args:
contents: String representing an Gaussian input file.
Returns:
GaussianInput object |
def sequence(context, data):
number = data.get(, context.params.get(, 1))
stop = context.params.get()
step = context.params.get(, 1)
delay = context.params.get()
prefix = context.params.get()
while True:
tag = None if prefix is None else % (prefix, number)
if tag is None o... | Generate a sequence of numbers.
It is the memorious equivalent of the xrange function, accepting the
``start``, ``stop`` and ``step`` parameters.
This can run in two ways:
* As a single function generating all numbers in the given range.
* Recursively, generating numbers one by one with an optiona... |
def _updateWordSet(self):
self._wordSet = set(self._keywords) | set(self._customCompletions)
start = time.time()
for line in self._qpart.lines:
for match in _wordRegExp.findall(line):
self._wordSet.add(match)
if time.time() - start > self._WORD_... | Make a set of words, which shall be completed, from text |
def rename_acquisition(self, plate_name, name, new_name):
logger.info(
,
name, self.experiment_name, plate_name
)
content = {: new_name}
acquisition_id = self._get_acquisition_id(plate_name, name)
url = self._build_api_url(
.format(
... | Renames an acquisition.
Parameters
----------
plate_name: str
name of the parent plate
name: str
name of the acquisition that should be renamed
new_name: str
name that should be given to the acquisition
See also
--------
... |
def backlink(node):
    """Given a CFG with outgoing links, create incoming links.

    Walks every node reachable from *node* via the ``next`` lists and, for
    each edge ``current -> successor``, records the reverse edge by adding
    ``current`` to ``successor.prev``.
    """
    visited = set()
    stack = [node]
    while stack:
        current = stack.pop()
        visited.add(current)
        for successor in current.next:
            # Record the incoming edge; set.add is idempotent, so revisiting
            # an edge is harmless.
            successor.prev.add(current)
            if successor not in visited:
                stack.append(successor)
def makeOrmValuesSubqueryCondition(ormSession, column, values: List[Union[int, str]]):
if isPostGreSQLDialect(ormSession.bind):
return column.in_(values)
if not isMssqlDialect(ormSession.bind):
raise NotImplementedError()
sql = _createMssqlSqlText(values)
sub_qry = ormSession.que... | Make Orm Values Subquery
:param ormSession: The orm session instance
:param column: The column from the Declarative table, eg TableItem.colName
:param values: A list of string or int values |
def hide(self):
    """Hides all annotation artists associated with the DataCursor. Returns
    self to allow "chaining". (e.g. ``datacursor.hide().disable()``)
    """
    self._hidden = True
    # Make every annotation artist invisible, then redraw each canvas so the
    # change is reflected on screen.
    for annotation_artist in self.annotations.values():
        annotation_artist.set_visible(False)
    for figure in self.figures:
        figure.canvas.draw()
    return self
def _straight_line_vertices(adjacency_mat, node_coords, directed=False):
if not issparse(adjacency_mat):
adjacency_mat = np.asarray(adjacency_mat, float)
if (adjacency_mat.ndim != 2 or adjacency_mat.shape[0] !=
adjacency_mat.shape[1]):
raise ValueError("Adjacency matrix should... | Generate the vertices for straight lines between nodes.
If it is a directed graph, it also generates the vertices which can be
passed to an :class:`ArrowVisual`.
Parameters
----------
adjacency_mat : array
The adjacency matrix of the graph
node_coords : array
The current coordi... |
def list(self, table, **kparams):
    """Get a collection of records by table name.

    Returns a dict (the json map) for python 3.4.
    """
    # Fetch the raw API payload, then convert it into record form.
    payload = self.table_api_get(table, **kparams)
    return self.to_records(payload, table)
def have_thumbnail(self, fitsimage, image):
chname = self.fv.get_channel_name(fitsimage)
idx = image.get(, None)
path = image.get(, None)
if path is not None:
path = os.path.abspath(path)
name = iohelper.name_image_from_path(path, idx=idx)
... | Returns True if we already have a thumbnail version of this image
cached, False otherwise. |
def setupNodding(self):
g = get_root(self).globals
if not self.nod():
if not self.isDrift():
self.clear.enable()
self.nodPattern = {}
self.check()
return
self.nod.set(False)
... | Setup Nodding for GTC |
def copy(self, key):
    """Copy the set to another key and return the new Set.

    WARNING: If the key exists, it overwrites it.
    """
    # Start from an emptied destination set, then in-place union our
    # members into it.  Renamed local to avoid shadowing this method.
    duplicate = Set(key=key, db=self.db)
    duplicate.clear()
    duplicate |= self
    return duplicate
def parameterize(
self,
country: Optional[str] = "South Sudan",
state: Optional[str] = None,
year: Optional[int] = None,
month: Optional[int] = None,
unit: Optional[str] = None,
fallback_aggaxes: List[str] = ["year", "month"],
aggfunc: Callable = np.mean,
... | Parameterize the analysis graph.
Args:
country
year
month
fallback_aggaxes:
An iterable of strings denoting the axes upon which to perform
fallback aggregation if the desired constraints cannot be met.
aggfunc: The func... |
def kill_process(procname, scriptname):
import signal
import subprocess
p = subprocess.Popen([, ], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.decode().splitlines():
if procname in line and scriptname in line:
pid = int(line.split()[1])
... | kill WSGI processes that may be running in development |
def check_snmp(self):
from glances.snmp import GlancesSNMPClient
clientsnmp = GlancesSNMPClient(host=self.args.client,
port=self.args.snmp_port,
version=self.args.snmp_version,
                                          ... | Check if SNMP is available on the server.
def next_page(self, max_=None):
    """Return a query set which requests the page after this response.

    :param max_: Maximum number of items to return.
    :type max_: :class:`int` or :data:`None`
    :rtype: :class:`ResultSetMetadata`
    :return: A new request set up to request the next page.

    Must be called on a result set which has a ``last`` item; the new
    request starts after that item's value.
    """
    # Build a fresh instance of whatever subclass we are, anchored just
    # past the last item of this page.
    follow_up = type(self)()
    follow_up.after = After(self.last.value)
    follow_up.max_ = max_
    return follow_up
def read(self):
if self._is_initialized:
return
self._is_initialized = True
if not isinstance(self._file_or_files, (tuple, list)):
files_to_read = [self._file_or_files]
else:
files_to_read = list(self._file_or_files)
seen = ... | Reads the data stored in the files we have been initialized with. It will
ignore files that cannot be read, possibly leaving an empty configuration
:return: Nothing
:raise IOError: if a file cannot be handled |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.