code
stringlengths 51
2.38k
| docstring
stringlengths 4
15.2k
|
---|---|
def date_to_datetime(self, time_input, tz=None):
    """Convert ISO 8601 and other date strings to datetime.datetime.

    Args:
        time_input (str): The time input string.
        tz (str): Optional time zone name for the returned datetime.

    Returns:
        datetime.datetime: Parsed datetime, or None when parsing fails.
    """
    dt = None
    try:
        dt = parser.parse(time_input)
        # Only convert when an explicit zone was requested and it
        # differs from the parsed zone name.
        if tz is not None and tz != dt.tzname():
            if dt.tzinfo is None:
                # Naive datetime: attach a default zone first.
                dt = self._replace_timezone(dt)
            dt = dt.astimezone(timezone(tz))
    except (IndexError, TypeError, ValueError):
        # Best-effort parsing: unparseable input yields None.
        # (Previously three separate except/pass clauses.)
        pass
    return dt
|
Convert ISO 8601 and other date strings to datetime.datetime type.
Args:
time_input (string): The time input string (see formats above).
tz (string): The time zone for the returned data.
Returns:
(datetime.datetime): Python datetime.datetime object.
|
def _on_wheel_event(self, event):
    """Zoom the editor in or out on Ctrl + mouse wheel.

    :param event: wheel event
    :type event: QWheelEvent
    """
    try:
        # Qt5 API: angleDelta() returns a QPoint.
        delta = event.angleDelta().y()
    except AttributeError:
        # Qt4 fallback: delta() returns an int.
        delta = event.delta()
    if int(event.modifiers()) & QtCore.Qt.ControlModifier > 0:
        # NOTE(review): comparing against self.prev_delta (rather than 0)
        # is unusual for wheel handling — confirm this is intentional.
        if delta < self.prev_delta:
            self.editor.zoom_out()
            event.accept()
        else:
            self.editor.zoom_in()
            event.accept()
|
Increments or decrements editor fonts settings on mouse wheel event
if ctrl modifier is on.
:param event: wheel event
:type event: QWheelEvent
|
def display_exc(self):
    """Print the current exception, prefixed with the source path if known."""
    errmsg = self.get_error()
    if errmsg is None:
        return
    if self.path is not None:
        indented = ["in " + self.path + ":"]
        for raw_line in errmsg.splitlines():
            # Indent non-empty lines under the path header.
            indented.append(" " * taberrfmt + raw_line if raw_line else raw_line)
        errmsg = "\n".join(indented)
    printerr(errmsg)
|
Properly prints an exception in the exception context.
|
def load_event_list(filename, **kwargs):
    """Load an event list from a delimited text file.

    Supported delimiters: ``,``, ``;``, ``tab``. See
    ``dcase_util.containers.MetaDataContainer.load()`` for the formats.

    Parameters
    ----------
    filename : str
        Path to the csv-file.

    Returns
    -------
    MetaDataContainer
        Loaded event list.
    """
    container = dcase_util.containers.MetaDataContainer()
    return container.load(filename=filename, **kwargs)
|
Load event list from csv formatted text-file
Supported formats (see more `dcase_util.containers.MetaDataContainer.load()` method):
- [event onset (float >= 0)][delimiter][event offset (float >= 0)]
- [event onset (float >= 0)][delimiter][event offset (float >= 0)][delimiter][label]
- [filename][delimiter][event onset (float >= 0)][delimiter][event offset (float >= 0)][delimiter][event label]
- [filename][delimiter][scene_label][delimiter][event onset (float >= 0)][delimiter][event offset (float >= 0)][delimiter][event label]
- [filename]
Supported delimiters: ``,``, ``;``, ``tab``
Example of event list file::
21.64715 23.00552 alert
36.91184 38.27021 alert
69.72575 71.09029 alert
63.53990 64.89827 alert
84.25553 84.83920 alert
20.92974 21.82661 clearthroat
28.39992 29.29679 clearthroat
80.47837 81.95937 clearthroat
44.48363 45.96463 clearthroat
78.13073 79.05953 clearthroat
15.17031 16.27235 cough
20.54931 21.65135 cough
27.79964 28.90168 cough
75.45959 76.32490 cough
70.81708 71.91912 cough
21.23203 22.55902 doorslam
7.546220 9.014880 doorslam
34.11303 35.04183 doorslam
45.86001 47.32867 doorslam
Parameters
----------
filename : str
Path to the csv-file
Returns
-------
list of dict
Event list
|
def iiscgi(application):
    """Run *application* under Microsoft IIS via a WSGI-CGI handler.

    A specialized version of the reference WSGI-CGI server adapted to
    IIS quirks. Not production quality; behaves badly under load.

    :param application: WSGI application callable.
    :raises ImportError: when IISCGIHandler is unavailable (< Python 3.2).
    """
    try:
        from wsgiref.handlers import IISCGIHandler
    except ImportError:
        print("Python 3.2 or newer is required.")
        # Without the handler we cannot continue; previously execution
        # fell through and crashed with a NameError below.
        raise
    if not __debug__:
        warnings.warn("Interactive debugging and other persistence-based processes will not work.")
    IISCGIHandler().run(application)
|
A specialized version of the reference WSGI-CGI server to adapt to Microsoft IIS quirks.
This is not a production quality interface and will behave badly under load.
|
def emit_containers(self, containers, verbose=True):
containers = sorted(containers, key=lambda c: c.get('name'))
task_definition = {
'family': self.family,
'containerDefinitions': containers,
'volumes': self.volumes or []
}
if verbose:
return json.dumps(task_definition, indent=4, sort_keys=True)
else:
return json.dumps(task_definition)
|
Emits the task definition and sorts containers by name
:param containers: List of the container definitions
:type containers: list of dict
:param verbose: Print out newlines and indented JSON
:type verbose: bool
:returns: The text output
:rtype: str
|
def create_config(name=None,
                  subvolume=None,
                  fstype=None,
                  template=None,
                  extra_opts=None):
    """Create a new Snapper configuration.

    name
        Name of the new Snapper configuration.
    subvolume
        Path to the related subvolume.
    fstype
        Filesystem type of the subvolume.
    template
        Configuration template to use. (Default: default)
    extra_opts
        Extra Snapper configuration opts dictionary; overrides values
        provided by the given template (if any).

    CLI example:

    .. code-block:: bash

        salt '*' snapper.create_config name=myconfig subvolume=/foo/bar/ fstype=btrfs
    """
    def raise_arg_error(argname):
        # Consistent error for any missing mandatory argument.
        raise CommandExecutionError(
            'You must provide a "{0}" for the new configuration'.format(argname)
        )
    if not name:
        raise_arg_error("name")
    if not subvolume:
        raise_arg_error("subvolume")
    if not fstype:
        raise_arg_error("fstype")
    if not template:
        template = ""
    try:
        snapper.CreateConfig(name, subvolume, fstype, template)
        if extra_opts:
            set_config(name, **extra_opts)
        return get_config(name)
    except dbus.DBusException as exc:
        # NOTE(review): _dbus_exception_to_reason receives locals() —
        # keep local variable names stable when editing this function.
        raise CommandExecutionError(
            'Error encountered while creating the new configuration: {0}'
            .format(_dbus_exception_to_reason(exc, locals()))
        )
|
Creates a new Snapper configuration
name
Name of the new Snapper configuration.
subvolume
Path to the related subvolume.
fstype
Filesystem type of the subvolume.
template
Configuration template to use. (Default: default)
extra_opts
Extra Snapper configuration opts dictionary. It will override the values provided
by the given template (if any).
CLI example:
.. code-block:: bash
salt '*' snapper.create_config name=myconfig subvolume=/foo/bar/ fstype=btrfs
salt '*' snapper.create_config name=myconfig subvolume=/foo/bar/ fstype=btrfs template="default"
salt '*' snapper.create_config name=myconfig subvolume=/foo/bar/ fstype=btrfs extra_opts='{"NUMBER_CLEANUP": False}'
|
def register_components(self):
    """Register every Component discovered under the Manager paths.

    Usage::

        >>> manager = Manager(("./manager/tests/tests_manager/resources/components/core",))
        >>> manager.register_components()
        True

    :return: Method success.
    :rtype: bool
    :raises ComponentRegistrationError: when any component fails to register.
    """
    failed = []
    for directory in self.paths:
        candidates = foundations.walkers.files_walker(
            directory, ("\.{0}$".format(self.__extension),), ("\._",))
        for candidate in candidates:
            if not self.register_component(candidate):
                failed.append(candidate)
    if failed:
        raise manager.exceptions.ComponentRegistrationError(
            "{0} | '{1}' Components failed to register!".format(self.__class__.__name__,
                                                                ", ".join(failed)))
    return True
|
Registers the Components.
Usage::
>>> manager = Manager(("./manager/tests/tests_manager/resources/components/core",))
>>> manager.register_components()
True
>>> manager.components.keys()
[u'core.tests_component_a', u'core.tests_component_b']
:return: Method success.
:rtype: bool
|
def get_all_external_accounts(resource_root, type_name, view=None):
    """Look up all external accounts of a particular type, by type name.

    @param resource_root: The root Resource object.
    @param type_name: Type name.
    @param view: Optional view name.
    @return: ApiList of ApiExternalAccount objects matching the type.
    """
    params = dict(view=view) if view else None
    return call(resource_root.get,
                EXTERNAL_ACCOUNT_FETCH_PATH % ("type", type_name,),
                ApiExternalAccount, True, params=params)
|
Lookup all external accounts of a particular type, by type name.
@param resource_root: The root Resource object.
@param type_name: Type name
@param view: View
@return: An ApiList of ApiExternalAccount objects matching the specified type
|
def addUnexpectedSuccess(self, test):
    """Record a test that passed although it was expected to fail.

    Parameters
    ----------
    test : unittest.TestCase
        The test that has completed.
    """
    status = TestCompletionStatus.unexpected_success
    self.unexpectedSuccesses.append(self._handle_result(test, status))
|
Register a test that passed unexpectedly.
Parameters
----------
test : unittest.TestCase
The test that has completed.
|
def get_what_follows_raw(s: str,
                         prefix: str,
                         onlyatstart: bool = True,
                         stripwhitespace: bool = True) -> Tuple[bool, str]:
    """Find the part of ``s`` that comes after ``prefix``.

    Args:
        s: string to analyse
        prefix: prefix to find
        onlyatstart: only accept the prefix when it begins ``s``
        stripwhitespace: strip whitespace from the extracted remainder

    Returns:
        tuple: ``(found, result)``; ``result`` is ``""`` when not found
    """
    idx = s.find(prefix)
    if idx == -1:
        return False, ""
    if onlyatstart and idx != 0:
        return False, ""
    remainder = s[idx + len(prefix):]
    if stripwhitespace:
        remainder = remainder.strip()
    return True, remainder
|
Find the part of ``s`` that is after ``prefix``.
Args:
s: string to analyse
prefix: prefix to find
onlyatstart: only accept the prefix if it is right at the start of
``s``
stripwhitespace: remove whitespace from the result
Returns:
tuple: ``(found, result)``
|
def get_all():
    """Return TensorBoardInfo values for running TensorBoard processes.

    The snapshot may be incomplete (info dir cleaned while processes
    run) or contain stale entries (processes exited uncleanly, e.g.
    with SIGKILL or SIGQUIT).

    Returns:
        A fresh list of `TensorBoardInfo` objects.
    """
    info_dir = _get_info_dir()
    results = []
    for name in os.listdir(info_dir):
        filepath = os.path.join(info_dir, name)
        try:
            with open(filepath) as infile:
                contents = infile.read()
        except IOError as e:
            # Entries we may not read (e.g. other users' files) are skipped.
            if e.errno != errno.EACCES:
                raise
            continue
        try:
            results.append(_info_from_string(contents))
        except ValueError:
            tb_logging.get_logger().warning(
                "invalid info file: %r",
                filepath,
                exc_info=True,
            )
    return results
|
Return TensorBoardInfo values for running TensorBoard processes.
This function may not provide a perfect snapshot of the set of running
processes. Its result set may be incomplete if the user has cleaned
their /tmp/ directory while TensorBoard processes are running. It may
contain extraneous entries if TensorBoard processes exited uncleanly
(e.g., with SIGKILL or SIGQUIT).
Returns:
A fresh list of `TensorBoardInfo` objects.
|
def drop_columns(self, colnames, **kwargs):
    """Drop columns from the table, returning a new instance.

    See the docs for ``numpy.lib.recfunctions.drop_fields`` for an
    explanation of the remaining options.
    """
    trimmed = rfn.drop_fields(
        self, colnames, usemask=False, asrecarray=True, **kwargs
    )
    return self.__class__(
        trimmed,
        h5loc=self.h5loc,
        split_h5=self.split_h5,
        name=self.name,
        h5singleton=self.h5singleton,
    )
|
Drop columns from the table.
See the docs for ``numpy.lib.recfunctions.drop_fields`` for an
explanation of the remaining options.
|
def reference(self):
    """Return the Reference object for this Key.

    This is a entity_pb.Reference instance -- a protocol buffer class
    used by the lower-level API to the datastore.

    NOTE: The caller should not mutate the return value.
    """
    # Lazily construct and memoize the Reference on first access.
    if self.__reference is None:
        self.__reference = _ConstructReference(self.__class__,
                                               pairs=self.__pairs,
                                               app=self.__app,
                                               namespace=self.__namespace)
    return self.__reference
|
Return the Reference object for this Key.
This is a entity_pb.Reference instance -- a protocol buffer class
used by the lower-level API to the datastore.
NOTE: The caller should not mutate the return value.
|
def add_relationship(self, term1, relationship, term2):
    """Create a relationship between terms in the database.

    :param term1: dict carrying 'id' and 'version' of the first term
    :param relationship: dict carrying 'id' and 'version' of the relationship
    :param term2: dict carrying 'id' and 'version' of the second term
    :return: response of the POST request
    """
    url = self.base_path + 'term/add-relationship'
    payload = {
        'term1_id': term1['id'],
        'relationship_tid': relationship['id'],
        'term2_id': term2['id'],
        'term1_version': term1['version'],
        'relationship_term_version': relationship['version'],
        'term2_version': term2['version'],
    }
    return self.post(url, payload)
|
Creates a relationship between 3 entities in database
|
def step(self, provided_inputs):
    """Run the simulation for one cycle.

    :param provided_inputs: dictionary mapping WireVectors (or their
        names) to their values for this step,
        e.g. ``{wire: 3, "wire_name": 17}``
    """
    # Validate that every provided value fits its wire's bitwidth.
    for wire, value in provided_inputs.items():
        wire = self.block.get_wirevector_by_name(wire) if isinstance(wire, str) else wire
        if value > wire.bitmask or value < 0:
            raise PyrtlError("Wire {} has value {} which cannot be represented"
                             " using its bitwidth".format(wire, value))
    # Build the full input map: provided inputs plus the current
    # register and memory state.
    ins = {self._to_name(wire): value for wire, value in provided_inputs.items()}
    ins.update(self.regs)
    ins.update(self.mems)
    self.regs, self.outs, mem_writes = self.sim_func(ins)
    # Commit memory writes produced during this cycle.
    for mem, addr, value in mem_writes:
        self.mems[mem][addr] = value
    # Expose outputs and inputs for inspection and tracing.
    self.context = self.outs.copy()
    self.context.update(ins)
    if self.tracer is not None:
        self.tracer.add_fast_step(self)
    check_rtl_assertions(self)
|
Run the simulation for a cycle
:param provided_inputs: a dictionary mapping WireVectors (or their names)
to their values for this step
eg: {wire: 3, "wire_name": 17}
|
def get_background_sids(self, src_filter):
    """Pre-filter background site IDs by integration distance.

    The filtering of background sites is applied as a pre-processing
    step here rather than during the sampling of the ruptures.

    :param src_filter: source filter providing ``sitecol`` and
        ``integration_distance``
    :returns: list of site indices within the integration distance
    """
    branch_key = self.idx_set["grid_key"]
    idist = src_filter.integration_distance(DEFAULT_TRT)
    with h5py.File(self.source_file, 'r') as hdf5:
        bg_locations = hdf5["Grid/Locations"].value
        distances = min_geodetic_distance(
            src_filter.sitecol.xyz,
            (bg_locations[:, 0], bg_locations[:, 1]))
        # Median rupture area for the maximum magnitude, converted to a
        # length using the configured aspect ratio.
        mmax_areas = self.msr.get_median_area(
            hdf5["/".join(["Grid", branch_key, "MMax"])].value, 0.0)
        mmax_lengths = numpy.sqrt(mmax_areas / self.aspect)
        # Keep sites within half a rupture length plus the
        # integration distance.
        ok = distances <= (0.5 * mmax_lengths + idist)
        return numpy.where(ok)[0].tolist()
|
We can apply the filtering of the background sites as a pre-processing
step - this is done here rather than in the sampling of the ruptures
themselves
|
def get(self, key, fallback=None):
    """Look up a global config value from alot's config.

    :param key: key to look up
    :type key: str
    :param fallback: value returned when the key is missing or unset
    :type fallback: str
    :returns: config value with type as specified in the spec-file
    """
    if key not in self._config:
        return fallback
    value = self._config[key]
    # Sections are containers, not values; treat them as unset.
    if isinstance(value, Section):
        value = None
    return fallback if value is None else value
|
look up global config values from alot's config
:param key: key to look up
:type key: str
:param fallback: fallback returned if key is not present
:type fallback: str
:returns: config value with type as specified in the spec-file
|
def delete(self, db_session=None):
    """Delete this object via the session.

    The object is permanently removed from storage on commit.

    :param db_session: optional session to use instead of the default
    :return: None
    """
    session = get_db_session(db_session, self)
    session.delete(self)
|
Deletes the object via session, this will permanently delete the
object from storage on commit
:param db_session:
:return:
|
def _range_along_dimension(range_dim, shape):
rank = len(shape)
if range_dim >= rank:
raise ValueError("Cannot calculate range along non-existent index.")
indices = tf.range(start=0, limit=shape[range_dim])
indices = tf.reshape(
indices,
shape=[1 if i != range_dim else shape[range_dim] for i in range(rank)])
return tf.tile(indices,
[shape[i] if i != range_dim else 1 for i in range(rank)])
|
Construct a Tensor whose values are the index along a dimension.
Construct a Tensor that counts the distance along a single dimension. This is
useful, for example, when constructing an identity matrix,
>>> x = _range_along_dimension(0, [2, 2]).eval()
>>> x
array([[0, 0],
[1, 1]], dtype=int32)
>>> y = _range_along_dimension(1, [2, 2]).eval()
>>> y
array([[0, 1],
[0, 1]], dtype=int32)
>>> tf.cast(tf.equal(x, y), dtype=tf.int32).eval()
array([[1, 0],
[0, 1]], dtype=int32)
Args:
range_dim: int. Dimension to count indices on.
shape: 1D Tensor of ints. Shape of Tensor to construct.
Returns:
A Tensor whose values are the same as the range along dimension range_dim.
Raises:
ValueError: If range_dim isn't a valid dimension.
|
def toggle(path_or_id, badge_kind):
    """Toggle a `badge_kind` for a given `path_or_id`.

    `path_or_id` is either an id/slug, or a file listing one id or
    slug per line.
    """
    if not exists(path_or_id):
        toggle_badge(path_or_id, badge_kind)
        return
    with open(path_or_id) as listing:
        for raw in listing.readlines():
            toggle_badge(raw.strip(), badge_kind)
|
Toggle a `badge_kind` for a given `path_or_id`
The `path_or_id` is either an id, a slug or a file containing a list
of ids or slugs.
|
def GetName(obj):
    """Return an object's name as a unicode string on Python 2 and 3.

    Python 2 returns class names as `bytes` (ASCII only) while Python 3
    returns `unicode`; this wrapper always yields unicode. Once Python 2
    support is dropped, callers can use plain `__name__` access.

    Args:
      obj: A type or function object to get the name for.

    Returns:
      Name of the specified object as a unicode string.
    """
    precondition.AssertType(obj, (type, types.FunctionType))
    name = obj.__name__
    return name.decode("ascii") if PY2 else name
|
A compatibility wrapper for getting object's name.
In Python 2 class names are returned as `bytes` (since class names can contain
only ASCII characters) whereas in Python 3 they are `unicode` (since class
names can contain arbitrary unicode characters).
This function makes this behaviour consistent and always returns class name as
a unicode string.
Once support for Python 2 is dropped all invocations of this call can be
replaced with ordinary `__name__` access.
Args:
obj: A type or function object to get the name for.
Returns:
Name of the specified class as unicode string.
|
def drop_namespaces(self):
    """Delete all namespace entries and namespaces, then commit."""
    for model in (NamespaceEntry, Namespace):
        self.session.query(model).delete()
    self.session.commit()
|
Drop all namespaces.
|
def load_file(self, file):
    """Read the given file and load its contents into the editor.

    :param file: File to load.
    :type file: unicode
    :return: Method success.
    :rtype: bool
    :raises FileExistsError: when the file does not exist.
    """
    if not foundations.common.path_exists(file):
        raise foundations.exceptions.FileExistsError(
            "{0} | '{1}' file doesn't exists!".format(self.__class__.__name__,
                                                      file))
    LOGGER.debug("> Loading '{0}' file.".format(file))
    self.setPlainText(foundations.io.File(file).read())
    self.set_file(file)
    self.__set_document_signals()
    self.file_loaded.emit()
    return True
|
Reads and loads given file into the editor.
:param file: File to load.
:type file: unicode
:return: Method success.
:rtype: bool
|
def deserialize(self, data, status_code):
    """Deserialize a JSON string into a dictionary.

    A 204 (No Content) response is returned unchanged.
    """
    if status_code == 204:
        return data
    payload = serializer.Serializer().deserialize(data)
    return payload['body']
|
Deserializes a JSON string into a dictionary.
|
def _get_bmu(self, activations):
if self.argfunc == 'argmax':
activations = -activations
sort = np.argsort(activations, 1)
return sort.argsort()
|
Get indices of bmus, sorted by their distance from input.
|
def imread(path, grayscale=False, size=None, interpolate="bilinear",
           channel_first=False, as_uint16=False, num_channels=-1):
    """Read an image with the cv2 module.

    Args:
        path (str or file object): File path or object to read.
        grayscale (bool): Read the image as grayscale.
        size (tuple of int): (width, height); None keeps the file's shape.
        interpolate (str): one of "nearest", "box", "bilinear",
            "hamming", "bicubic", "lanczos".
        channel_first (bool): If True the output shape is
            (channel, height, width) instead of the default
            (height, width, channel).
        as_uint16 (bool): If True, read the image as uint16.
        num_channels (int): channel count of the output array; -1
            preserves the raw image shape.

    Returns:
        numpy.ndarray

    Raises:
        ValueError: when the image dtype cannot be safely cast to uint16.
    """
    _imread_before(grayscale, num_channels)
    r_mode = cv2.IMREAD_GRAYSCALE if grayscale else cv2.IMREAD_UNCHANGED
    img = _imread_helper(path, r_mode)
    if as_uint16 and img.dtype != np.uint16:
        if img.dtype == np.uint8:
            # 8-bit -> 16-bit widening multiplies values by 256.
            # (Fixed missing space between the two message fragments.)
            logger.warning("You want to read image as uint16, but the original bit-depth is 8 bit. "
                           "All pixel values are simply increased by 256 times.")
            img = img.astype(np.uint16) * 256
        else:
            raise ValueError(
                "casting {} to uint16 is not safe.".format(img.dtype))
    img = _cvtColor_helper(img, num_channels)
    img = _imread_after(img, size, interpolate, channel_first, imresize)
    return img
|
Read image by cv2 module.
Args:
path (str or 'file object'): File path or object to read.
grayscale (bool):
size (tuple of int):
(width, height).
If None, output img shape depends on the files to read.
channel_first (bool):
This argument specifies the shape of img is whether (height, width, channel) or (channel, height, width).
Default value is False, which means the img shape is (height, width, channel).
interpolate (str):
must be one of ["nearest", "box", "bilinear", "hamming", "bicubic", "lanczos"].
as_uint16 (bool):
If True, this function reads image as uint16.
num_channels (int):
channel size of output array.
Default is -1 which preserves raw image shape.
Returns:
numpy.ndarray
|
def create_primary_zone_by_axfr(self, account_name, zone_name, master, tsig_key=None, key_value=None):
    """Create a new primary zone by zone transfer off a master.

    Arguments:
        account_name -- The name of the account that will contain this zone.
        zone_name -- The name of the zone. It must be unique.
        master -- Primary name server IP address.

    Keyword Arguments:
        tsig_key -- For TSIG-enabled zones: the transaction signature key
                    (requires key_value).
        key_value -- TSIG key secret.
    """
    name_server_info = {"ip": master}
    # TSIG details are only attached when both key and secret are given.
    if tsig_key is not None and key_value is not None:
        name_server_info["tsigKey"] = tsig_key
        name_server_info["tsigKeyValue"] = key_value
    zone_data = {
        "properties": {"name": zone_name, "accountName": account_name, "type": "PRIMARY"},
        "primaryCreateInfo": {
            "forceImport": True,
            "createType": "TRANSFER",
            "nameServer": name_server_info,
        },
    }
    return self.rest_api_connection.post("/v1/zones", json.dumps(zone_data))
|
Creates a new primary zone by zone transferring off a master.
Arguments:
account_name -- The name of the account that will contain this zone.
zone_name -- The name of the zone. It must be unique.
master -- Primary name server IP address.
Keyword Arguments:
tsig_key -- For TSIG-enabled zones: The transaction signature key.
NOTE: Requires key_value.
key_value -- TSIG key secret.
|
def coinc(self, s0, s1, slide, step):
    """Calculate the final coinc ranking statistic."""
    # Combine per-detector statistics and add the threshold terms.
    loglr = -s0 - s1
    for ifo in self.ifos:
        thresh = self.fits_by_tid[ifo]['thresh']
        loglr += thresh ** 2. / 2.
    return (2. * loglr) ** 0.5
|
Calculate the final coinc ranking statistic
|
def expect(instr, expected, context):
    """Check that an instruction is of the expected type and return it.

    Raises:
        DecompilationError: if the instruction has the wrong type.
    """
    if isinstance(instr, expected):
        return instr
    raise DecompilationError(
        "Expected a {expected} instruction {context}. Got {instr}.".format(
            instr=instr, expected=expected, context=context,
        )
    )
|
Check that an instruction is of the expected type.
|
def annotatedcore(self):
    """Calculate the core genome of organisms using custom databases.

    Only samples with assemblies are considered; per-sample core sets
    are built by parsing BLAST reports for Escherichia samples.
    """
    logging.info('Calculating annotated core')
    # Determine the total core gene set first.
    self.total_core()
    for sample in self.metadata:
        if sample.general.bestassemblyfile != 'NA':
            sample[self.analysistype].coreset = set()
            # The custom database only covers Escherichia.
            if sample.general.referencegenus == 'Escherichia':
                self.runmetadata.samples.append(sample)
                try:
                    report = sample[self.analysistype].report
                    self.blastparser(report=report,
                                     sample=sample,
                                     fieldnames=self.fieldnames)
                except KeyError:
                    # No BLAST report available for this sample.
                    sample[self.analysistype].coreset = list()
    self.reporter()
|
Calculates the core genome of organisms using custom databases
|
def attach(self, engine, start=Events.STARTED, pause=Events.COMPLETED, resume=None, step=None):
    """Register callbacks to control the timer.

    Args:
        engine (Engine): Engine this timer will be attached to.
        start (Events): Event which should start (reset) the timer.
        pause (Events): Event which should pause the timer.
        resume (Events, optional): Event which should resume the timer.
        step (Events, optional): Event which should call `step`.

    Returns:
        self (Timer)
    """
    bindings = [(start, self.reset), (pause, self.pause)]
    if resume is not None:
        bindings.append((resume, self.resume))
    if step is not None:
        bindings.append((step, self.step))
    for event, handler in bindings:
        engine.add_event_handler(event, handler)
    return self
|
Register callbacks to control the timer.
Args:
engine (Engine):
Engine that this timer will be attached to.
start (Events):
Event which should start (reset) the timer.
pause (Events):
Event which should pause the timer.
resume (Events, optional):
Event which should resume the timer.
step (Events, optional):
Event which should call the `step` method of the counter.
Returns:
self (Timer)
|
def reactivate(self):
    """Disconnect the finished sub-protocol and redraw the main menu UI.

    Called when a sub-protocol is finished.
    """
    # Tear down the sub-protocol.
    self._protocol.connectionLost(None)
    self._protocol = None
    # Reset the terminal and force a full repaint of the menu window.
    self.terminal.reset()
    self._window.filthy()
    self._window.repaint()
|
Called when a sub-protocol is finished. This disconnects the
sub-protocol and redraws the main menu UI.
|
def generate_sigv4_auth_request(header_value=None):
    """Prepare an AWS STS GetCallerIdentity request for SigV4 signing.

    :param header_value: optional value for the X-Vault-AWS-IAM-Server-ID
        header; Vault can require it to mitigate replay attacks.
    :type header_value: str
    :return: A PreparedRequest pointed at AWS STS with action
        "GetCallerIdentity", optionally carrying the server-ID header.
    :rtype: requests.PreparedRequest
    """
    headers = {
        'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8',
        'Host': 'sts.amazonaws.com',
    }
    if header_value:
        headers['X-Vault-AWS-IAM-Server-ID'] = header_value
    request = requests.Request(
        method='POST',
        url='https://sts.amazonaws.com/',
        headers=headers,
        data='Action=GetCallerIdentity&Version=2011-06-15',
    )
    return request.prepare()
|
Helper function to prepare a AWS API request to subsequently generate a "AWS Signature Version 4" header.
:param header_value: Vault allows you to require an additional header, X-Vault-AWS-IAM-Server-ID, to be present
to mitigate against different types of replay attacks. Depending on the configuration of the AWS auth
backend, providing an argument to this optional parameter may be required.
:type header_value: str
:return: A PreparedRequest instance, optionally containing the provided header value under a
'X-Vault-AWS-IAM-Server-ID' header name pointed to AWS's simple token service with action "GetCallerIdentity"
:rtype: requests.PreparedRequest
|
def get_descriptions(self, description_type):
    """Request the descriptions for the specified type.

    When complete, the stored callback is invoked with the list of
    descriptions.
    """
    (desc_type, max_units) = description_type
    # Pre-allocate one result slot per unit; filled as replies arrive.
    results = [None] * max_units
    # Track the in-flight request so replies can be routed to _got_desc.
    self.elk._descriptions_in_progress[desc_type] = (max_units,
                                                     results,
                                                     self._got_desc)
    # Kick off the request starting at unit 0.
    self.elk.send(sd_encode(desc_type=desc_type, unit=0))
|
Gets the descriptions for specified type.
When complete the callback is called with a list of descriptions
|
def enter_room(self, sid, namespace, room):
    """Add a client to a room within a namespace."""
    namespace_rooms = self.rooms.setdefault(namespace, {})
    namespace_rooms.setdefault(room, {})[sid] = True
|
Add a client to a room.
|
def hangup(self):
    """End the phone call.

    Does nothing if the call is already inactive.
    """
    if not self.active:
        return
    self._gsmModem.write('ATH')
    self.answered = False
    self.active = False
    # Remove this call from the modem's registry, if present.
    self._gsmModem.activeCalls.pop(self.id, None)
|
End the phone call.
Does nothing if the call is already inactive.
|
def check(self, feature):
    """Return True if any feature-flag handler approves *feature*.

    Handler order matters: the first handler returning True wins. A
    handler may raise StopCheckingFeatureFlags to short-circuit with
    False, or NoFeatureFlagFound to signal it has no opinion.
    """
    found = False
    for handler in self.handlers:
        try:
            if handler(feature):
                return True
        except StopCheckingFeatureFlags:
            # A handler explicitly vetoed the feature.
            return False
        except NoFeatureFlagFound:
            # This handler knows nothing about the feature; try the next.
            pass
        else:
            # At least one handler recognized the flag (even if False).
            found = True
    if not found:
        message = u"No feature flag defined for {feature}".format(feature=feature)
        if current_app.debug and current_app.config.get(RAISE_ERROR_ON_MISSING_FEATURES, False):
            raise KeyError(message)
        else:
            log.info(message)
            missing_feature.send(self, feature=feature)
    return False
|
Loop through all our feature flag checkers and return true if any of them are true.
The order of handlers matters - we will immediately return True if any handler returns true.
If you want to a handler to return False and stop the chain, raise the StopCheckingFeatureFlags exception.
|
def ordered(self, ord='desc'):
    """Order the query result on the relations' indexes.

    :param ord: sort direction, 'asc' or 'desc'.
    :raises ValueError: if *ord* is not 'asc' or 'desc'.
    :return: the ordered query.
    """
    if ord not in ('asc', 'desc', ):
        # A bare ``raise`` here had no active exception and produced a
        # confusing RuntimeError; raise an explicit error instead.
        raise ValueError("ord must be 'asc' or 'desc', got {0!r}".format(ord))
    ord_f = getattr(PIDRelation.index, ord)()
    return self.order_by(ord_f)
|
Order the query result on the relations' indexes.
|
def get_settings(config_file):
    """Search for and load a configuration file.

    Defaults are always applied; an existing *config_file* (or the
    global CONFIG_FILE as a fallback) is merged on top of them.
    """
    default_settings = {
        'general': {
            'endpoint': 'http://guacamole.antojitos.io/files/',
            'shortener': 'http://t.antojitos.io/api/v1/urls',
        }
    }
    settings = configparser.ConfigParser()
    try:
        settings.read_dict(default_settings)
    except AttributeError:
        # Python 2's ConfigParser has no read_dict(); set options manually.
        for section, options in default_settings.items():
            settings.add_section(section)
            for option, value in options.items():
                settings.set(section, option, value)
    if config_file is not None and os.path.exists(config_file):
        settings.read(config_file)
    elif os.path.exists(CONFIG_FILE):
        settings.read(CONFIG_FILE)
    return settings
|
Search and load a configuration file.
|
def get_colormap(name, *args, **kwargs):
    """Obtain a colormap by name, or pass a Colormap instance through.

    Some colormaps can have additional configuration parameters; refer
    to their corresponding documentation for more information.

    Parameters
    ----------
    name : str | Colormap
        Colormap name. Can also be a Colormap for pass-through.

    Examples
    --------
    >>> get_colormap('autumn')
    >>> get_colormap('single_hue', hue=10)
    """
    if isinstance(name, BaseColormap):
        cmap = name
    else:
        if not isinstance(name, string_types):
            raise TypeError('colormap must be a Colormap or string name')
        if name not in _colormaps:
            raise KeyError('colormap name %s not found' % name)
        cmap = _colormaps[name]
    # Registered entries may be classes; instantiate with the extra args.
    return cmap(*args, **kwargs) if inspect.isclass(cmap) else cmap
|
Obtain a colormap
Some colormaps can have additional configuration parameters. Refer to
their corresponding documentation for more information.
Parameters
----------
name : str | Colormap
Colormap name. Can also be a Colormap for pass-through.
Examples
--------
>>> get_colormap('autumn')
>>> get_colormap('single_hue', hue=10)
|
def rm(pattern):
    """Recursively remove files or directories matching *pattern*.

    Paths under .git/ are left untouched; already-missing directory
    entries are ignored during removal.
    """
    for path in glob.glob(pattern):
        if path.startswith('.git/'):
            continue
        if not os.path.isdir(path):
            safe_print("rm %s" % path)
            os.remove(path)
            continue

        def onerror(fun, path, excinfo):
            # Ignore entries that vanished; re-raise anything else.
            exc = excinfo[1]
            if exc.errno != errno.ENOENT:
                raise
        safe_print("rmdir -f %s" % path)
        shutil.rmtree(path, onerror=onerror)
|
Recursively remove a file or dir by pattern.
|
def create_and_configure_wrapper(context_or_world):
    """Create and configure the driver wrapper in behave or lettuce tests.

    :param context_or_world: behave context or lettuce world
    """
    wrapper = DriverWrappersPool.get_default_wrapper()
    context_or_world.driver_wrapper = wrapper
    context_or_world.utils = wrapper.utils
    try:
        behave_properties = context_or_world.config.userdata
    except AttributeError:
        # Lettuce worlds carry no behave userdata.
        behave_properties = None
    wrapper.configure(context_or_world.config_files, behave_properties=behave_properties)
    context_or_world.toolium_config = wrapper.config
    context_or_world.logger = logging.getLogger(__name__)
|
Create and configure driver wrapper in behave or lettuce tests
:param context_or_world: behave context or lettuce world
|
def iter(self, start=0, end=None):
    """Iterate through successive history items.

    Parameters
    ----------
    start : int
        Initial index for the loop (0 by default).
    end : int
        Index of the last item to loop through + 1 (defaults to just
        past the current index).
    """
    if end is None:
        end = self._index + 1
    elif end == 0:
        # PEP 479: StopIteration raised inside a generator becomes a
        # RuntimeError on Python 3.7+; return to end iteration instead.
        return
    if start >= end:
        return
    assert 0 <= end <= len(self._history)
    assert 0 <= start <= end - 1
    for i in range(start, end):
        yield self._history[i]
|
Iterate through successive history items.
Parameters
----------
end : int
Index of the last item to loop through + 1.
start : int
Initial index for the loop (0 by default).
|
def _write_to_datastore(self):
    """Write all attack and defense submissions to datastore in one batch."""
    # Pair each root entity key with its corresponding submissions dict.
    roots_and_submissions = zip([ATTACKS_ENTITY_KEY,
                                 TARGET_ATTACKS_ENTITY_KEY,
                                 DEFENSES_ENTITY_KEY],
                                [self._attacks,
                                 self._targeted_attacks,
                                 self._defenses])
    client = self._datastore_client
    with client.no_transact_batch() as batch:
        for root_key, submissions in roots_and_submissions:
            # Ensure the root entity itself exists.
            batch.put(client.entity(client.key(*root_key)))
            for k, v in iteritems(submissions):
                entity = client.entity(client.key(
                    *(root_key + [KIND_SUBMISSION, k])))
                entity['submission_path'] = v.path
                # Merge participant metadata parsed from the path.
                entity.update(participant_from_submission_path(v.path))
                batch.put(entity)
|
Writes all submissions to datastore.
|
def list_to_bytes_list(strList):
    """Convert a sequence of strings into a ctypes ``c_char_p`` array.

    Pointers in the result point to the encodings of the strings;
    elements that are already bytes are kept as they are. ``None`` and
    already-converted pointer arrays are returned unchanged.

    :param strList: list/set/tuple of strings to convert (or None)
    :returns: pointer array with pointers pointing to bytes
    :raises TypeError: if strList is not a list, set or tuple
    """
    # Handle None before computing len(): previously len(None) raised a
    # TypeError, making the None pass-through branch unreachable.
    if strList is None:
        return strList
    pList = c_char_p * len(strList)
    if isinstance(strList, pList):
        return strList
    if not isinstance(strList, (list, set, tuple)):
        raise TypeError("strList must be list, set or tuple, not " +
                        str(type(strList)))
    pList = pList()
    for i, elem in enumerate(strList):
        pList[i] = str_to_bytes(elem)
    return pList
|
This function turns an array of strings into a pointer array
with pointers pointing to the encodings of those strings
Possibly contained bytes are kept as they are.
:param strList: List of strings that shall be converted
:type strList: List of strings
:returns: Pointer array with pointers pointing to bytes
:raises: TypeError if strList is not list, set or tuple
|
def pop_scope(self):
    """Pop the current scope, folding shared keys back into the parent.

    After popping, the current scope keeps only the keys present in
    both the popped child and its parent, taking the child's values.
    """
    # Snapshot the child scope before removing it from the stack.
    child_scope = self.stack.current.current.copy()
    self.stack.current.pop()
    parent_scope = self.stack.current.current.copy()
    # Intersect keys, preferring the child's values.
    self.stack.current.current = {
        key: child_scope[key] for key in child_scope if key in parent_scope
    }
|
Delete the current scope in the current scope.
|
def in_reply_to(self) -> Optional[UnstructuredHeader]:
    """The ``In-Reply-To`` header, or None when absent or empty."""
    try:
        values = self[b'in-reply-to']
        return cast(UnstructuredHeader, values[0])
    except (KeyError, IndexError):
        return None
|
The ``In-Reply-To`` header.
|
def is_console(self, users_text):
    """Return whether the device is connected over the console.

    Returns None when the connection port cannot be determined.
    """
    if users_text is None:
        self.log("Console information not collected")
        return None
    # The current session is marked with '*' in the 'show users' output.
    for line in users_text.split('\n'):
        if '*' not in line:
            continue
        if re.search(self.vty_re, line):
            self.log("Detected connection to vty")
            return False
        if re.search(self.console_re, line):
            self.log("Detected connection to console")
            return True
    self.log("Connection port unknown")
    return None
|
Return if device is connected over console.
|
def _format_volume_string(self, volume_string):
self.actual_volume = int(volume_string.split(self.volume_string)[1].split(',')[0].split()[0])
return '[Vol: {}%] '.format(int(100 * self.actual_volume / self.max_volume))
|
format vlc's volume
|
def vicinity(self):
    """Return a feature name of a nearby location.

    Often this feature refers to a street or neighborhood within the
    given results. The value is lazily cached from the place details.
    """
    # Use identity comparison with None (PEP 8) instead of '!='.
    if self._vicinity == '' and self.details is not None and 'vicinity' in self.details:
        self._vicinity = self.details['vicinity']
    return self._vicinity
|
Returns a feature name of a nearby location.
Often this feature refers to a street or neighborhood within the given
results.
|
def get_messages(self, page=0):
    """Get the first 10 messages in the account, across all folders.

    Keyword Args:
        page (int): the 'page' of results to fetch (0-based).

    Returns:
        List[:class:`Message <pyOutlook.core.message.Message>`]
    """
    endpoint = 'https://outlook.office.com/api/v2.0/me/messages'
    if page > 0:
        # Each page skips page*10 messages.
        endpoint = endpoint + '/?%24skip=' + str(page) + '0'
    log.debug('Getting messages from endpoint: {} with Headers: {}'.format(endpoint, self._headers))
    response = requests.get(endpoint, headers=self._headers)
    check_response(response)
    return Message._json_to_messages(self, response.json())
|
Get first 10 messages in account, across all folders.
Keyword Args:
page (int): Integer representing the 'page' of results to fetch
Returns:
List[:class:`Message <pyOutlook.core.message.Message>`]
|
def mobile_template(template):
    """Mark a view as mobile-ready, choosing its template via request.MOBILE.

    For example::

        @mobile_template('a/{mobile/}b.html')
        def view(template=None):
            ...

    With ``request.MOBILE=True`` the template is `a/mobile/b.html`;
    otherwise it is `a/b.html`. Useful when the mobile view shares the
    same context but uses a different template.
    """
    def decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            ctx = stack.top
            if ctx is not None and hasattr(ctx, 'request'):
                is_mobile = getattr(ctx.request, 'MOBILE', None)
                replacement = r'\1' if is_mobile else ''
                kwargs['template'] = re.sub(r'{(.+?)}', replacement, template)
            return f(*args, **kwargs)
        return wrapper
    return decorator
|
Mark a function as mobile-ready and pass a mobile template if MOBILE.
For example::
@mobile_template('a/{mobile/}b.html')
def view(template=None):
...
if ``request.MOBILE=True`` the template will be `a/mobile/b.html`.
if ``request.MOBILE=False`` the template will be `a/b.html`.
This function is useful if the mobile view uses the same context but a
different template.
|
def DisplayEstimate(message, min_estimate, max_estimate):
  """Displays mean average CPC, position, clicks, and total cost for estimate.

  Args:
    message: str message to display for the given estimate.
    min_estimate: sudsobject containing a minimum estimate from the
        TrafficEstimatorService response.
    max_estimate: sudsobject containing a maximum estimate from the
        TrafficEstimatorService response.
  """
  # averageCpc / averagePosition are optional fields; fall back to None.
  mean_avg_cpc = (_CalculateMean(min_estimate['averageCpc']['microAmount'],
                                 max_estimate['averageCpc']['microAmount'])
                  if 'averageCpc' in min_estimate
                  and min_estimate['averageCpc'] else None)
  mean_avg_pos = (_CalculateMean(min_estimate['averagePosition'],
                                 max_estimate['averagePosition'])
                  if 'averagePosition' in min_estimate
                  and min_estimate['averagePosition'] else None)
  mean_clicks = _CalculateMean(min_estimate['clicksPerDay'],
                               max_estimate['clicksPerDay'])
  mean_total_cost = _CalculateMean(min_estimate['totalCost']['microAmount'],
                                   max_estimate['totalCost']['microAmount'])
  # Python 2 print statements (this module targets Python 2).
  print message
  print ' Estimated average CPC: %s' % _FormatMean(mean_avg_cpc)
  print ' Estimated ad position: %s' % _FormatMean(mean_avg_pos)
  print ' Estimated daily clicks: %s' % _FormatMean(mean_clicks)
  print ' Estimated daily cost: %s' % _FormatMean(mean_total_cost)
|
Displays mean average cpc, position, clicks, and total cost for estimate.
Args:
message: str message to display for the given estimate.
min_estimate: sudsobject containing a minimum estimate from the
TrafficEstimatorService response.
max_estimate: sudsobject containing a maximum estimate from the
TrafficEstimatorService response.
|
def show_tooltip(self, pos, tooltip, _sender_deco=None):
    """Show a tool tip at the specified position.

    :param pos: Tooltip position
    :param tooltip: Tooltip text (truncated to 1024 characters)
    :param _sender_deco: TextDecoration which is the sender of the show
        tooltip request; ignored unless it is one of our decorations
        (for internal use only).
    """
    if _sender_deco is not None and _sender_deco not in self.decorations:
        return
    # Truncate very long tooltips before handing them to Qt.
    QtWidgets.QToolTip.showText(pos, tooltip[0: 1024], self)
|
Show a tool tip at the specified position
:param pos: Tooltip position
:param tooltip: Tooltip text
:param _sender_deco: TextDecoration which is the sender of the show
tooltip request. (for internal use only).
|
def load_reader_options():
    """Retrieve Pandoc Reader options from the environment.

    Options come from PANDOC_READER_OPTIONS as JSON; key order is
    preserved via OrderedDict.
    """
    raw = os.environ['PANDOC_READER_OPTIONS']
    return json.loads(raw, object_pairs_hook=OrderedDict)
|
Retrieve Pandoc Reader options from the environment
|
def _make_minimal(dictionary):
new_dict = {}
for key, value in dictionary.items():
if value is not None:
if isinstance(value, dict):
new_value = _make_minimal(value)
if new_value:
new_dict[key] = new_value
else:
new_dict[key] = value
return new_dict
|
This function removes all the keys whose value is either None or an empty
dictionary.
|
def is_valid_vpnv4_prefix(prefix):
    """Return True if *prefix* is a string representing a VPNv4 prefix.

    A VPNv4 prefix is RD:IPv4, where RD is a route distinguisher and
    IPv4 is a valid dot-decimal IPv4 notation string.
    """
    if not isinstance(prefix, str):
        return False
    parts = prefix.split(':', 2)
    if len(parts) != 3:
        return False
    route_dist = ':'.join(parts[:2])
    return is_valid_route_dist(route_dist) and is_valid_ipv4_prefix(parts[2])
|
Returns True if given prefix is a string represent vpnv4 prefix.
Vpnv4 prefix is made up of RD:Ipv4, where RD is represents route
distinguisher and Ipv4 represents valid dot-decimal ipv4 notation string.
|
def start(self):
    """Start the thread pool; does nothing if the pool is already started.

    Spawns a number of worker threads clamped to the
    [``_min_threads``, ``_max_threads``] range, based on how many tasks
    are already queued.
    """
    if not self._done_event.is_set():
        # A cleared _done_event means the pool is already running.
        return
    self._done_event.clear()
    # Size the worker count from the current backlog, clamped to limits.
    nb_pending_tasks = self._queue.qsize()
    if nb_pending_tasks > self._max_threads:
        nb_threads = self._max_threads
        nb_pending_tasks = self._max_threads
    elif nb_pending_tasks < self._min_threads:
        nb_threads = self._min_threads
    else:
        nb_threads = nb_pending_tasks
    # Workers that immediately have a task are accounted as pending work.
    for _ in range(nb_pending_tasks):
        self.__nb_pending_task += 1
        self.__start_thread()
    # Start the remaining (initially idle) workers, if any.
    for _ in range(nb_threads - nb_pending_tasks):
        self.__start_thread()
|
Starts the thread pool. Does nothing if the pool is already started.
|
def write(self, file_or_path, append=False, timeout=10):
    """Write Smother results to a file.

    Parameters
    ----------
    file_or_path : str or file-like
        Path (or already-open file object) to write the report to. When a
        path is given and coverage parallel mode is active, a unique
        suffix is appended to the filename.
    append : bool
        If True, read an existing smother report from the target and
        merge it into this one before writing.
    timeout : int
        Seconds to wait to acquire the file lock before raising an error.

    Note
    ----
    Append mode is atomic when ``file_or_path`` is a path, and can be
    safely run in a multithreaded or multiprocess test environment.
    """
    if isinstance(file_or_path, six.string_types):
        if self.coverage:
            file_or_path = get_smother_filename(
                file_or_path, self.coverage.config.parallel)
        # Lock the file so concurrent writers append atomically.
        outfile = Lock(
            file_or_path, mode='a+',
            timeout=timeout,
            fail_when_locked=False
        )
    else:
        # Caller owns the file object; wrap so we don't close it on exit.
        outfile = noclose(file_or_path)
    with outfile as fh:
        if append:
            fh.seek(0)
            try:
                other = Smother.load(fh)
            except ValueError:
                # Existing file empty or corrupt: just overwrite it.
                pass
            else:
                self |= other
        fh.seek(0)
        fh.truncate()
        json.dump(self.data, fh)
|
Write Smother results to a file.
Parameters
----------
file_or_path : str
Path to write report to
append : bool
If True, read an existing smother report from `outpath`
and combine it with this file before writing.
timeout : int
Time in seconds to wait to acquire a file lock, before
raising an error.
Note
----
Append mode is atomic when file_or_path is a path,
and can be safely run in a multithreaded or
multiprocess test environment.
When using `parallel_mode`, file_or_path is given a unique
suffix based on the machine name and process id.
|
def error_log(self, msg='', level=20, traceback=False):
    """Write an error message to stderr.

    Args:
        msg (str): error message to write.
        level (int): logging level (currently unused by this writer).
        traceback (bool): when True, append the current exception
            traceback to the output.
    """
    stream = sys.stderr
    stream.write('{0}\n'.format(msg))
    stream.flush()
    if traceback:
        stream.write(traceback_.format_exc())
        stream.flush()
|
Write error message to log.
Args:
msg (str): error message
level (int): logging level
traceback (bool): add traceback to output or not
|
def handleOneClientMsg(self, wrappedMsg):
    """Validate and process a single client message.

    :param wrappedMsg: a (message, sender) pair received from a client
    """
    try:
        vmsg = self.validateClientMsg(wrappedMsg)
        if vmsg:
            self.unpackClientMsg(*vmsg)
    except BlowUp:
        # BlowUp is deliberately fatal; never swallow it here.
        raise
    except Exception as ex:
        msg, frm = wrappedMsg
        friendly = friendlyEx(ex)
        if isinstance(ex, SuspiciousClient):
            # Suspicious senders are additionally reported before the
            # generic invalid-message handling below.
            self.reportSuspiciousClient(frm, friendly)
        self.handleInvalidClientMsg(ex, wrappedMsg)
|
Validate and process a client message
:param wrappedMsg: a message from a client
|
def GetUserinfo(credentials, http=None):
    """Get the userinfo associated with the given credentials.

    This is dependent on the token having either the userinfo.email or
    userinfo.profile scope. On a 400 response the credentials are
    refreshed once and the request retried.

    Args:
        credentials: (oauth2client.client.Credentials) incoming credentials
        http: (httplib2.Http, optional) http instance to use

    Returns:
        dict parsed from the userinfo JSON response ({} when empty).
    """
    http = http or httplib2.Http()
    url = _GetUserinfoUrl(credentials)
    response, content = http.request(url)
    if response.status == http_client.BAD_REQUEST:
        # Token may be stale: refresh once and retry with a fresh URL.
        credentials.refresh(http)
        url = _GetUserinfoUrl(credentials)
        response, content = http.request(url)
    return json.loads(content or '{}')
|
Get the userinfo associated with the given credentials.
This is dependent on the token having either the userinfo.email or
userinfo.profile scope for the given token.
Args:
credentials: (oauth2client.client.Credentials) incoming credentials
http: (httplib2.Http, optional) http instance to use
Returns:
The email address for this token, or None if the required scopes
aren't available.
|
def compute_colors_for_labels(self, labels):
    """Map integer class labels to fixed per-class colors.

    Each label is multiplied by ``self.palette`` and taken modulo 255 so
    that a given class always yields the same color.

    :param labels: integer tensor of class ids.
    :return: uint8 numpy array of color components.
    """
    scaled = labels[:, None] * self.palette
    return (scaled % 255).numpy().astype("uint8")
|
Simple function that adds fixed colors depending on the class
|
def _process_batch_write_response(request, response, table_crypto_config):
    """Handle unprocessed items in the response from a transparently encrypted write.

    Any ``UnprocessedItems`` returned by the batch write contain the
    *encrypted* payloads; each unprocessed PutRequest is swapped back for
    the matching plaintext item from the original request, so the caller
    can safely retry them.

    :param dict request: the DynamoDB plaintext request dictionary
    :param dict response: the DynamoDB response from the batch operation
    :param Dict[Text, CryptoConfig] table_crypto_config: table level CryptoConfig
        used in encrypting the request items
    :return: DynamoDB response, with any unprocessed items reverted back to
        the original plaintext values
    :rtype: dict
    """
    try:
        unprocessed_items = response["UnprocessedItems"]
    except KeyError:
        # Nothing unprocessed: return the response untouched.
        return response
    for table_name, unprocessed in unprocessed_items.items():
        original_items = request[table_name]
        crypto_config = table_crypto_config[table_name]
        # Match on primary key when one is configured; otherwise fall back
        # to comparing the full attribute set.
        if crypto_config.encryption_context.partition_key_name:
            items_match = partial(_item_keys_match, crypto_config)
        else:
            items_match = partial(_item_attributes_match, crypto_config)
        for pos, operation in enumerate(unprocessed):
            for request_type, item in operation.items():
                # Only PutRequest payloads are encrypted; other request
                # types are left as-is.
                if request_type != "PutRequest":
                    continue
                for plaintext_item in original_items:
                    if plaintext_item.get(request_type) and items_match(
                        plaintext_item[request_type]["Item"], item["Item"]
                    ):
                        unprocessed[pos] = plaintext_item.copy()
                        break
    return response
|
Handle unprocessed items in the response from a transparently encrypted write.
:param dict request: The DynamoDB plaintext request dictionary
:param dict response: The DynamoDB response from the batch operation
:param Dict[Text, CryptoConfig] table_crypto_config: table level CryptoConfig used in encrypting the request items
:return: DynamoDB response, with any unprocessed items reverted back to the original plaintext values
:rtype: dict
|
def publish_server_heartbeat_succeeded(self, connection_id, duration,
                                       reply):
    """Publish a ServerHeartbeatSucceededEvent to all server heartbeat
    listeners.

    :Parameters:
      - `connection_id`: The address (host/port pair) of the connection.
      - `duration`: The execution time of the event in the highest
        possible resolution for the platform.
      - `reply`: The command reply.
    """
    heartbeat_event = ServerHeartbeatSucceededEvent(
        duration, reply, connection_id)
    for listener in self.__server_heartbeat_listeners:
        try:
            listener.succeeded(heartbeat_event)
        except Exception:
            # A misbehaving listener must not break the others.
            _handle_exception()
|
Publish a ServerHeartbeatSucceededEvent to all server heartbeat
listeners.
:Parameters:
- `connection_id`: The address (host/port pair) of the connection.
- `duration`: The execution time of the event in the highest possible
resolution for the platform.
- `reply`: The command reply.
|
def put(self, body, priority=DEFAULT_PRIORITY, delay=0, ttr=DEFAULT_TTR):
    """Put a job into the current tube and return its job id.

    :param body: job payload; must be a ``str``.
    :param priority: job priority (smaller values are served first).
    :param delay: seconds before the job becomes ready.
    :param ttr: time-to-run in seconds a worker has to finish the job.
    :return: integer job id assigned by the server.
    """
    assert isinstance(body, str), 'Job body must be a str instance'
    # NOTE(review): len(body) is a character count; for non-ASCII payloads
    # the on-wire byte length may differ -- confirm encoding expectations.
    jid = self._interact_value('put %d %d %d %d\r\n%s\r\n' % (
        priority, delay, ttr, len(body), body),
        ['INSERTED'],
        ['JOB_TOO_BIG', 'BURIED', 'DRAINING'])
    return int(jid)
|
Put a job into the current tube. Returns job id.
|
def _char_density(self, c, font=None):
    """Count the number of black pixels in a rendered character.

    :param c: single character to render.
    :param font: PIL font to render with; defaults to
        ``ImageFont.load_default()``.
    :return: number of black (0) pixels in the rendered glyph.
    """
    # The original evaluated ImageFont.load_default() in the def line,
    # forcing the font load at import time (and on every module import,
    # even when a font is always passed in). Defer it to call time.
    if font is None:
        font = ImageFont.load_default()
    image = Image.new('1', font.getsize(c), color=255)
    draw = ImageDraw.Draw(image)
    draw.text((0, 0), c, fill="white", font=font)
    return collections.Counter(image.getdata())[0]
|
Count the number of black pixels in a rendered character.
|
def _tile_ticks(self, frac, tickvec):
origins = np.tile(self.axis._vec, (len(frac), 1))
origins = self.axis.pos[0].T + (origins.T*frac).T
endpoints = tickvec + origins
return origins, endpoints
|
Tiles tick marks along the axis.
|
def _peek_buffer(self, i=0):
while len(self._buffer) <= i:
self._buffer.append(next(self._source))
return self._buffer[i]
|
Get the next line without consuming it.
|
def filing_history(self, num, transaction=None, **kwargs):
    """Search for a company's filing history by company number.

    Args:
        num (str): Company number to search on.
        transaction (Optional[str]): Filing record number; when given,
            only that single record is requested.
        kwargs (dict): additional keywords passed into
            requests.session.get params keyword.

    Returns:
        The HTTP response, after error checking.
    """
    uri = "{}company/{}/filing-history".format(self._BASE_URI, num)
    if transaction is not None:
        uri = "{}/{}".format(uri, transaction)
    response = self.session.get(uri, params=kwargs)
    self.handle_http_error(response)
    return response
|
Search for a company's filing history by company number.
Args:
num (str): Company number to search on.
transaction (Optional[str]): Filing record number.
kwargs (dict): additional keywords passed into
requests.session.get params keyword.
|
def validate_reaction(self):
    """Ensure ``self.reaction`` is one of the allowed values.

    Raises:
        ValueError: if the reaction is not in ``_reaction_valid_values``.
    """
    valid = self._reaction_valid_values
    if self.reaction in valid:
        return
    raise ValueError("reaction should be one of: {valid}".format(
        valid=", ".join(valid)
    ))
|
Ensure reaction is of a certain type.
Mainly for future expansion.
|
def collection(name=None):
    """Render the collection page.

    It renders it either with a collection-specific template
    (``collection_<id>.html`` / ``collection_<slug>.html``) or with the
    configured default collection template.

    :param name: collection name to look up; when None, the root
        collection (id 1) is used.
    """
    if name is None:
        collection = Collection.query.get_or_404(1)
    else:
        collection = Collection.query.filter(
            Collection.name == name).first_or_404()
    # NOTE(review): when name is None, slugify(name, '_') below is still
    # evaluated with None -- confirm slugify tolerates that input.
    return render_template([
        'invenio_collections/collection_{0}.html'.format(collection.id),
        'invenio_collections/collection_{0}.html'.format(slugify(name, '_')),
        current_app.config['COLLECTIONS_DEFAULT_TEMPLATE']
    ], collection=collection)
|
Render the collection page.
It renders it either with a collection specific template (aka
collection_{collection_name}.html) or with the default collection
template (collection.html).
|
def merge(cls, *others):
    """Merge the ``others`` schemas into this class.

    Each (key, value) pair yielded by the other schemas is re-bound onto
    ``cls`` as a BoundValue, so values are read from this class's provider.
    """
    for schema in others:
        for key, bound in schema:
            setattr(cls, key, BoundValue(cls, key, bound.value))
|
Merge the `others` schema into this instance.
The values will all be read from the provider of the original object.
|
def extract_params(params):
    """Flatten a parameter structure into a list of leaf values.

    Recurses into nested dictionaries and lists; any other value is
    treated as a leaf and returned as-is.

    :param params: scalar, list, or (possibly nested) dict of parameters.
    :return: list of all leaf values, in traversal order.
    """
    if isinstance(params, dict):
        return [leaf for value in params.values()
                for leaf in extract_params(value)]
    if isinstance(params, list):
        return [leaf for item in params
                for leaf in extract_params(item)]
    return [params]
|
Extracts the values of a set of parameters, recursing into nested dictionaries.
|
def _check_list_minions(self, expr, greedy, ignore_missing=False):
    """Return the minions found by looking via a list.

    ``expr`` may be a comma-separated string or a list of minion ids.
    Returns a dict with the ids that are known to the PKI and, unless
    ``ignore_missing`` is set, those that are not.
    """
    if isinstance(expr, six.string_types):
        expr = [part for part in expr.split(',') if part]
    known = self._pki_minions()
    found = [minion for minion in expr if minion in known]
    missing = [] if ignore_missing else [minion for minion in expr
                                         if minion not in known]
    return {'minions': found, 'missing': missing}
|
Return the minions found by looking via a list
|
def create_password_reset(cls, email, valid_for=3600) -> str:
    """Create a password reset request in the user_password_resets table.

    Any previous reset requests for the matching user are deleted first;
    a JWT token and a human-enterable code are then stored.

    :param email: email address of the account to reset.
    :param valid_for: token lifetime in seconds (default one hour).
    :return: the reset code, or None when no user matches ``email``.
    """
    user = cls.where_email(email)
    if user is None:
        return None
    # Only one outstanding reset per user: drop any older requests.
    PasswordResetModel.delete_where_user_id(user.id)
    token = JWT().create_token({
        'code': Security.random_string(5),
        'user_id': user.id},
        token_valid_for=valid_for)
    code = Security.generate_uuid(1) + "-" + Security.random_string(5)
    # NOTE(review): the code appears to be stored as-is below; if it is
    # meant to be hashed before storage, confirm where that hashing happens.
    password_reset_model = PasswordResetModel()
    password_reset_model.token = token
    password_reset_model.code = code
    password_reset_model.user_id = user.id
    password_reset_model.save()
    return code
|
Create a password reset request in the user_password_resets
database table. Hashed code gets stored in the database.
Returns unhashed reset code
|
def VerifyStructure(self, parser_mediator, lines):
    """Verifies that this is a bash history file.

    Args:
      parser_mediator (ParserMediator): mediates interactions between
          parsers and other components, such as storage and dfvfs.
      lines (str): one or more lines from the text file.

    Returns:
      bool: True if this is the correct parser, False otherwise.
    """
    matches = self._VERIFICATION_GRAMMAR.scanString(lines, maxMatches=1)
    return next(iter(matches), None) is not None
|
Verifies that this is a bash history file.
Args:
parser_mediator (ParserMediator): mediates interactions between
parsers and other components, such as storage and dfvfs.
lines (str): one or more lines from the text file.
Returns:
bool: True if this is the correct parser, False otherwise.
|
def link_sources(self):
    """Returns potential Link or Stream sources.

    Determines which zorders this plot is responsible for, then collects
    the stream/link source objects registered for those zorders (or the
    last HoloMap element for static plots).
    """
    if isinstance(self, GenericOverlayPlot):
        zorders = []
    elif self.batched:
        # A batched plot renders several elements; claim all their zorders.
        zorders = list(range(self.zorder, self.zorder+len(self.hmap.last)))
    else:
        zorders = [self.zorder]
    if isinstance(self, GenericOverlayPlot) and not self.batched:
        # Non-batched overlays delegate source lookup to their subplots.
        sources = []
    elif not self.static or isinstance(self.hmap, DynamicMap):
        sources = [o for i, inputs in self.stream_sources.items()
                   for o in inputs if i in zorders]
    else:
        sources = [self.hmap.last]
    return sources
|
Returns potential Link or Stream sources.
|
def visit_Call(self, node: AST, dfltChaining: bool = True) -> str:
    """Return `node`'s representation as a function call."""
    pos_args = node.args
    # Older AST nodes may lack a `keywords` attribute.
    kw_args = getattr(node, 'keywords', [])
    # Render the arguments in compact mode so nested nodes stay inline.
    self.compact = True
    rendered = chain((self.visit(arg) for arg in pos_args),
                     (self.visit(kwd) for kwd in kw_args))
    param_src = ', '.join(rendered)
    result = f"{self.visit(node.func)}({param_src})"
    self.compact = False
    return result
|
Return `node`s representation as function call.
|
def abundances(self, ids=None):
    """Query the results table for abundance data.

    :param ids: optional iterable of tax ids; when given, only rows whose
        ``tax_id`` is in ``ids`` are returned.
    :return: the (possibly filtered) results table.
    """
    if ids is None:
        return self.table()
    full = self.table()
    return full[full["tax_id"].isin(ids)]
|
Query the results table to get abundance data for all or some tax ids
|
def add_external_reference_to_entity(self, entity_id, external_ref):
    """Adds an external reference to the given entity identifier in the entity layer.

    Silently does nothing when no entity layer is present.

    @type entity_id: string
    @param entity_id: the entity identifier
    @param external_ref: an external reference object
    @type external_ref: L{CexternalReference}
    """
    layer = self.entity_layer
    if layer is None:
        return
    layer.add_external_reference_to_entity(entity_id, external_ref)
|
Adds an external reference to the given entity identifier in the entity layer
@type entity_id: string
@param entity_id: the entity identifier
@param external_ref: an external reference object
@type external_ref: L{CexternalReference}
|
def binOp(op, indx, amap, bmap, fill_vec):
    """Combine values from two mappings, keyed by ``indx``, using ``op``.

    For each key in ``indx``, looks up the value in both ``amap`` and
    ``bmap`` and combines them with ``op``. When either value is missing,
    or ``op`` fails or returns None, ``fill_vec`` is used instead.

    :return: 2-D numpy array with one stacked row per key in ``indx``.
    """
    def combine(key):
        left = amap.get(key)
        right = bmap.get(key)
        if left is None or right is None:
            return fill_vec
        try:
            combined = op(left, right)
        except Exception:
            combined = None
        return fill_vec if combined is None else combined

    rows = [combine(key) for key in indx]
    return np.vstack(rows)
|
Combines the values from two map objects, at the keys given by indx,
using the op operator. In situations where a value is missing (or op
fails), the fill_vec placeholder is used instead.
|
def update_allowed(self):
    """Determine whether updating this cell is allowed.

    Delegates to the ``allowed`` method of the column's UpdateAction,
    passing the table's request, this cell's datum, and the cell itself.
    """
    request = self.column.table.request
    return self.update_action.allowed(request, self.datum, self)
|
Determines whether update of given cell is allowed.
Calls allowed action of defined UpdateAction of the Column.
|
def get_delete_branch_command(self, branch_name, message, author):
    """Get the command to close a branch in the local Mercurial repository.

    :param branch_name: name of the branch to close.
    :param message: commit message for the closing commit.
    :param author: optional author object with a ``combined`` attribute.
    :return: single-element list with the full shell command.
    """
    parts = ['hg update --rev=%s && hg commit' % quote(branch_name)]
    if author:
        parts.append('--user=%s' % quote(author.combined))
    parts.extend(['--message=%s' % quote(message), '--close-branch'])
    return [' '.join(parts)]
|
Get the command to delete or close a branch in the local repository.
|
def insert(self, index, value):
    """Insert an item at a given position.

    :param index: position at which to insert.
    :param value: item to insert; validated via ``_ensure_value_is_valid``
        first, so invalid values raise before any mutation happens.
    """
    return super(Collection, self).insert(
        index, self._ensure_value_is_valid(value))
|
Insert an item at a given position.
|
def cli(obj, ids, query, filters, tags):
    """Remove tags from alerts.

    Operates on explicit alert ids when given; otherwise resolves the
    target set from a raw query string or from the provided filters.
    """
    client = obj['client']
    if ids:
        total = len(ids)
    else:
        # Build the query: raw 'q' string takes precedence over filters.
        if query:
            query = [('q', query)]
        else:
            query = build_query(filters)
        total, _, _ = client.get_count(query)
        ids = [a.id for a in client.get_alerts(query)]
    # Untag one alert at a time, showing progress over the whole set.
    with click.progressbar(ids, label='Untagging {} alerts'.format(total)) as bar:
        for id in bar:
            client.untag_alert(id, tags)
|
Remove tags from alerts.
|
def clean(self):
    """Validate that ``self.event`` is a registered hook event.

    Raises:
        ValidationError: if ``self.event`` is not a key of ``HOOK_EVENTS``.
    """
    # Membership test directly on the dict; the original's `.keys()` call
    # was redundant (dict `in` already tests keys).
    if self.event not in HOOK_EVENTS:
        raise ValidationError(
            "Invalid hook event {evt}.".format(evt=self.event)
        )
|
Validation for events.
|
def download_and_bootstrap(src, name, prereq=None):
    """Download and run an installer script unless a prerequisite passes.

    Args:
        src: URL of the installer script to download.
        name: local filename to save the script as.
        prereq: optional Python snippet; when it runs successfully
            (exit status 0), the download/install is skipped.
    """
    if prereq:
        prereq_cmd = '{0} -c "{1}"'.format(PY_EXE, prereq)
        if os.system(prereq_cmd) == 0:
            # Prerequisite is already satisfied; nothing to install.
            return
    # Close the HTTP response and the output file even on error; the
    # original leaked both handles.
    ulp = urllib2.urlopen(src)
    try:
        payload = ulp.read()
    finally:
        ulp.close()
    with open(name, "wb") as fp:
        fp.write(payload)
    cmdline = "{0} {1}".format(PY_EXE, name)
    rv = os.system(cmdline)
    # Keep the original AssertionError contract on install failure.
    assert rv == 0
|
Download and install something if 'prerequisite' fails
|
def run(self, start_command_srv):
    """Set up the daemon process, start child forks, and sleep until
    events are signalled.

    :param start_command_srv: set to ``True`` if the command server
        should be started (before privileges are dropped).
    """
    if start_command_srv:
        # Start before dropping privileges, in case it must bind
        # privileged resources.
        self._command_server.start()
    self._drop_privs()
    self._task_runner.start()
    self._reg_sighandlers()
    # Idle loop: signal handlers flip self.running to stop the daemon.
    while self.running:
        time.sleep(self._sleep_period)
    self.shutdown()
|
Setup daemon process, start child forks, and sleep until
events are signalled.
`start_command_srv`
Set to ``True`` if command server should be started.
|
def _update_physical_disk_details(raid_config, server):
raid_config['physical_disks'] = []
physical_drives = server.get_physical_drives()
for physical_drive in physical_drives:
physical_drive_dict = physical_drive.get_physical_drive_dict()
raid_config['physical_disks'].append(physical_drive_dict)
|
Adds the physical disk details to the RAID configuration passed.
|
def _load(self, scale=1.0):
    """Load the SLSTR relative spectral responses.

    Reads the wavelength and response variables from the netCDF file at
    ``self.requested_band_filename`` and stores them in ``self.rsr``.

    :param scale: multiplier applied to the wavelength values.
    """
    LOG.debug("File: %s", str(self.requested_band_filename))
    ncf = Dataset(self.requested_band_filename, 'r')
    try:
        wvl = ncf.variables['wavelength'][:] * scale
        resp = ncf.variables['response'][:]
    finally:
        # The original never closed the dataset, leaking the file handle.
        ncf.close()
    self.rsr = {'wavelength': wvl, 'response': resp}
|
Load the SLSTR relative spectral responses
|
def signRequest(self,
                req: Request,
                identifier: Identifier=None) -> Request:
    """Sign a request in place; modifies reqId and signature, and may
    modify the identifier.

    :param req: request to sign (mutated)
    :param identifier: signer identifier; defaults to the request's own
    :return: the same request, signed
    """
    # Resolve the effective signer: explicit identifier wins over the
    # request's current one.
    idr = self.requiredIdr(idr=identifier or req._identifier)
    req._identifier = idr
    req.reqId = req.gen_req_id()
    req.signature = self.signMsg(msg=req.signingPayloadState(identifier=idr),
                                 identifier=idr,
                                 otherIdentifier=req.identifier)
    return req
|
Signs request. Modifies reqId and signature. May modify identifier.
:param req: request
:param requestIdStore: request id generator
:param identifier: signer identifier
:return: signed request
|
def matrixToMathTransform(matrix):
    """Take a 6-tuple and return a ShallowTransform object.

    ShallowTransform instances are passed through unchanged; anything
    else is decomposed into offset, scale and rotation components.
    """
    if isinstance(matrix, ShallowTransform):
        return matrix
    offset, scale, rotation = MathTransform(matrix).decompose()
    return ShallowTransform(offset, scale, rotation)
|
Take a 6-tuple and return a ShallowTransform object.
|
def run_and_print_log(workflow, highlight=None):
    """Run workflow on multi-threaded worker cached with Sqlite3, then
    display the captured 'noodles' log.

    :param workflow: workflow to evaluate.
    :param highlight: lines (substrings) to highlight in the log output.
    :return: the workflow result.
    """
    from noodles.run.threading.sqlite3 import run_parallel
    from noodles import serial
    import io
    import logging
    # Capture the 'noodles' logger into an in-memory buffer for display.
    log = io.StringIO()
    log_handler = logging.StreamHandler(log)
    formatter = logging.Formatter('%(asctime)s - %(message)s')
    log_handler.setFormatter(formatter)
    logger = logging.getLogger('noodles')
    logger.setLevel(logging.INFO)
    # Replace (not append to) existing handlers so output only goes to
    # the buffer; echo_log=False below keeps the runner quiet too.
    logger.handlers = [log_handler]
    result = run_parallel(
        workflow, n_threads=4, registry=serial.base, db_file='tutorial.db',
        always_cache=True, echo_log=False)
    display_text(log.getvalue(), highlight or [], split_at=40)
    return result
|
Run workflow on multi-threaded worker cached with Sqlite3.
:param workflow: workflow to evaluate.
:param highlight: highlight these lines.
|
def process_management_config_section(config, management_config):
    """Process the management section from a configuration data dict.

    Appends any commands listed in ``management_config['commands']`` to
    ``config.management['commands']``.

    :param config: object holding the target configuration data.
    :param management_config: management section from a config data dict.
    """
    if 'commands' in management_config:
        config.management['commands'].extend(management_config['commands'])
|
Processes the management section from a configuration data dict.
:param config: The config reference of the object that will hold the
configuration data from the config_data.
:param management_config: Management section from a config data dict.
|
def tangent(obj, params, **kwargs):
    """Evaluate the tangent vector(s) of a curve or surface at the input
    parameter values.

    :param obj: input shape
    :type obj: abstract.Curve or abstract.Surface
    :param params: single parameter or a list/tuple of parameters
    :type params: float, list or tuple
    :param normalize: keyword-only flag; normalize vectors (default True)
    :return: a list containing "point" and "vector" pairs
    :rtype: tuple
    """
    normalize = kwargs.get('normalize', True)
    if isinstance(obj, abstract.Curve):
        if isinstance(params, (list, tuple)):
            return ops.tangent_curve_single_list(obj, params, normalize)
        else:
            return ops.tangent_curve_single(obj, params, normalize)
    if isinstance(obj, abstract.Surface):
        # A float first element means a single (u, v) position; otherwise
        # params is treated as a list of positions.
        if isinstance(params[0], float):
            return ops.tangent_surface_single(obj, params, normalize)
        else:
            return ops.tangent_surface_single_list(obj, params, normalize)
    # NOTE(review): objects that are neither Curve nor Surface fall
    # through and return None implicitly -- confirm this is intended.
|
Evaluates the tangent vector of the curves or surfaces at the input parameter values.
This function is designed to evaluate tangent vectors of the B-Spline and NURBS shapes at single or
multiple parameter positions.
:param obj: input shape
:type obj: abstract.Curve or abstract.Surface
:param params: parameters
:type params: float, list or tuple
:return: a list containing "point" and "vector" pairs
:rtype: tuple
|
def get(self, key):
    """Fetch the value stored at ``key`` from redis.

    :param key: key to look up.
    :return: value returned by the redis connection (None when missing).
    """
    # Removed a leftover debug print() that wrote every fetched value to
    # stdout on each lookup.
    return self.connection.get(key)
|
Get the value stored at a single key from redis.
|
def to_timezone(dt, tzinfo=None):
    """Convert a datetime to the given (or default) timezone.

    Naive datetimes get the timezone attached; aware ones are converted.
    Falsy ``dt`` or a missing target timezone returns ``dt`` unchanged.
    """
    if not dt:
        return dt
    target = pick_timezone(tzinfo, __timezone__)
    if not target:
        return dt
    if getattr(dt, 'tzinfo', None):
        return dt.astimezone(target)
    return dt.replace(tzinfo=target)
|
Convert a datetime to timezone
|
def use(module=None, decode=None, encode=None):
    """Set the JSON library that should be used, either by specifying a
    known module name, or by providing a decode and encode function.

    The modules "simplejson", "cjson", and "json" are supported for the
    ``module`` parameter. Alternatively pass both ``decode`` and
    ``encode`` callables for a custom implementation.

    @param module: the name of the JSON library module to use, or the
        module object itself
    @type module: str or module
    @param decode: a function for decoding JSON strings
    @type decode: callable
    @param encode: a function for encoding objects as JSON strings
    @type encode: callable
    """
    global _decode, _encode, _initialized, _using
    if module is not None:
        if not isinstance(module, basestring):
            # A module object was passed; use its name instead.
            module = module.__name__
        if module not in ('cjson', 'json', 'simplejson'):
            raise ValueError('Unsupported JSON module %s' % module)
        _using = module
        # Defer binding the actual encode/decode functions to first use.
        _initialized = False
    else:
        assert decode is not None and encode is not None
        _using = 'custom'
        _decode = decode
        _encode = encode
        _initialized = True
|
Set the JSON library that should be used, either by specifying a known
module name, or by providing a decode and encode function.
The modules "simplejson", "cjson", and "json" are currently supported for
the ``module`` parameter.
If provided, the ``decode`` parameter must be a callable that accepts a
JSON string and returns a corresponding Python data structure. The
``encode`` callable must accept a Python data structure and return the
corresponding JSON string. Exceptions raised by decoding and encoding
should be propagated up unaltered.
@param module: the name of the JSON library module to use, or the module
object itself
@type module: str or module
@param decode: a function for decoding JSON strings
@type decode: callable
@param encode: a function for encoding objects as JSON strings
@type encode: callable
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.