| column | type | min length | max length |
|---|---|---|---|
| nwo | string | 5 | 91 |
| sha | string | 40 | 40 |
| path | string | 5 | 174 |
| language | string (1 class) | | |
| identifier | string | 1 | 120 |
| parameters | string | 0 | 3.15k |
| argument_list | string (1 class) | | |
| return_statement | string | 0 | 24.1k |
| docstring | string | 0 | 27.3k |
| docstring_summary | string | 0 | 13.8k |
| docstring_tokens | sequence | | |
| function | string | 22 | 139k |
| function_tokens | sequence | | |
| url | string | 87 | 283 |
jliljebl/flowblade
995313a509b80e99eb1ad550d945bdda5995093b
flowblade-trunk/Flowblade/medialog.py
python
media_log_filtering_changed
()
[]
def media_log_filtering_changed():
    widgets.media_log_view.fill_data_model()
[ "def", "media_log_filtering_changed", "(", ")", ":", "widgets", ".", "media_log_view", ".", "fill_data_model", "(", ")" ]
https://github.com/jliljebl/flowblade/blob/995313a509b80e99eb1ad550d945bdda5995093b/flowblade-trunk/Flowblade/medialog.py#L124-L125
bikalims/bika.lims
35e4bbdb5a3912cae0b5eb13e51097c8b0486349
bika/lims/validators.py
python
ResultOptionsValidator.__call__
(self, value, *args, **kwargs)
return True
[]
def __call__(self, value, *args, **kwargs):
    instance = kwargs['instance']
    fieldname = kwargs['field'].getName()
    request = kwargs.get('REQUEST', {})
    form = request.form
    form_value = form.get(fieldname)
    translate = getToolByName(instance, 'translation_service').translate
    # bsc = getToolByName(instance, 'bika_setup_catalog')
    # ResultValue must always be a number
    for field in form_value:
        try:
            float(field['ResultValue'])
        except:
            return to_utf8(translate(_("Validation failed: "
                                       "Result Values must be numbers")))
        if 'ResultText' not in field:
            return to_utf8(translate(
                _("Validation failed: Result Text cannot be blank")))
    return True
[ "def", "__call__", "(", "self", ",", "value", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "instance", "=", "kwargs", "[", "'instance'", "]", "fieldname", "=", "kwargs", "[", "'field'", "]", ".", "getName", "(", ")", "request", "=", "kwargs", ".", "get", "(", "'REQUEST'", ",", "{", "}", ")", "form", "=", "request", ".", "form", "form_value", "=", "form", ".", "get", "(", "fieldname", ")", "translate", "=", "getToolByName", "(", "instance", ",", "'translation_service'", ")", ".", "translate", "# bsc = getToolByName(instance, 'bika_setup_catalog')", "# ResultValue must always be a number", "for", "field", "in", "form_value", ":", "try", ":", "float", "(", "field", "[", "'ResultValue'", "]", ")", "except", ":", "return", "to_utf8", "(", "translate", "(", "_", "(", "\"Validation failed: \"", "\"Result Values must be numbers\"", ")", ")", ")", "if", "'ResultText'", "not", "in", "field", ":", "return", "to_utf8", "(", "translate", "(", "_", "(", "\"Validation failed: Result Text cannot be blank\"", ")", ")", ")", "return", "True" ]
https://github.com/bikalims/bika.lims/blob/35e4bbdb5a3912cae0b5eb13e51097c8b0486349/bika/lims/validators.py#L513-L534
lmb-freiburg/netdef_models
7d3311579cf712b31d05ec29f3dc63df067aa07b
FlowNetH/Pred-Merged-FT-KITTI/controller.py
python
Controller.__init__
(self, net_actions=NetActions)
[]
def __init__(self, net_actions=NetActions):
    super().__init__(net_actions=net_actions)
[ "def", "__init__", "(", "self", ",", "net_actions", "=", "NetActions", ")", ":", "super", "(", ")", ".", "__init__", "(", "net_actions", "=", "net_actions", ")" ]
https://github.com/lmb-freiburg/netdef_models/blob/7d3311579cf712b31d05ec29f3dc63df067aa07b/FlowNetH/Pred-Merged-FT-KITTI/controller.py#L9-L10
metamorphose/metamorphose2
d2bdd6a86340b9668e93b35a6a568894c9909d68
src/mutagen/aiff.py
python
delete
(filename)
Completely removes the ID3 chunk from the AIFF file
Completely removes the ID3 chunk from the AIFF file
[ "Completely", "removes", "the", "ID3", "chunk", "from", "the", "AIFF", "file" ]
def delete(filename):
    """Completely removes the ID3 chunk from the AIFF file"""

    with open(filename, "rb+") as file_:
        try:
            del IFFFile(file_)[u'ID3']
        except KeyError:
            pass
[ "def", "delete", "(", "filename", ")", ":", "with", "open", "(", "filename", ",", "\"rb+\"", ")", "as", "file_", ":", "try", ":", "del", "IFFFile", "(", "file_", ")", "[", "u'ID3'", "]", "except", "KeyError", ":", "pass" ]
https://github.com/metamorphose/metamorphose2/blob/d2bdd6a86340b9668e93b35a6a568894c9909d68/src/mutagen/aiff.py#L312-L319
sunpy/sunpy
528579df0a4c938c133bd08971ba75c131b189a7
sunpy/net/base_client.py
python
BaseClient.check_attr_types_in_query
(query, required_attrs={}, optional_attrs={})
return required_attrs.issubset(query_attrs) and query_attrs.issubset(all_attrs)
Check a query againsted required and optional attributes. Returns `True` if *query* contains all the attrs in *required_attrs*, and if *query* contains only attrs in both *required_attrs* and *optional_attrs*.
Check a query againsted required and optional attributes.
[ "Check", "a", "query", "againsted", "required", "and", "optional", "attributes", "." ]
def check_attr_types_in_query(query, required_attrs={}, optional_attrs={}):
    """
    Check a query againsted required and optional attributes.

    Returns `True` if *query* contains all the attrs in *required_attrs*,
    and if *query* contains only attrs in both *required_attrs* and
    *optional_attrs*.
    """
    query_attrs = {type(x) for x in query}
    all_attrs = required_attrs.union(optional_attrs)
    return required_attrs.issubset(query_attrs) and query_attrs.issubset(all_attrs)
[ "def", "check_attr_types_in_query", "(", "query", ",", "required_attrs", "=", "{", "}", ",", "optional_attrs", "=", "{", "}", ")", ":", "query_attrs", "=", "{", "type", "(", "x", ")", "for", "x", "in", "query", "}", "all_attrs", "=", "required_attrs", ".", "union", "(", "optional_attrs", ")", "return", "required_attrs", ".", "issubset", "(", "query_attrs", ")", "and", "query_attrs", ".", "issubset", "(", "all_attrs", ")" ]
https://github.com/sunpy/sunpy/blob/528579df0a4c938c133bd08971ba75c131b189a7/sunpy/net/base_client.py#L485-L495
pyparallel/pyparallel
11e8c6072d48c8f13641925d17b147bf36ee0ba3
Lib/site-packages/ipython-4.0.0-py3.3.egg/IPython/core/formatters.py
python
JSONFormatter._check_return
(self, r, obj)
return super(JSONFormatter, self)._check_return(r, obj)
Check that a return value is appropriate Return the value if so, None otherwise, warning if invalid.
Check that a return value is appropriate Return the value if so, None otherwise, warning if invalid.
[ "Check", "that", "a", "return", "value", "is", "appropriate", "Return", "the", "value", "if", "so", "None", "otherwise", "warning", "if", "invalid", "." ]
def _check_return(self, r, obj):
    """Check that a return value is appropriate

    Return the value if so, None otherwise, warning if invalid.
    """
    if r is None:
        return
    md = None
    if isinstance(r, tuple):
        # unpack data, metadata tuple for type checking on first element
        r, md = r
    # handle deprecated JSON-as-string form from IPython < 3
    if isinstance(r, string_types):
        warnings.warn("JSON expects JSONable list/dict containers, not JSON strings",
                      FormatterWarning)
        r = json.loads(r)
    if md is not None:
        # put the tuple back together
        r = (r, md)
    return super(JSONFormatter, self)._check_return(r, obj)
[ "def", "_check_return", "(", "self", ",", "r", ",", "obj", ")", ":", "if", "r", "is", "None", ":", "return", "md", "=", "None", "if", "isinstance", "(", "r", ",", "tuple", ")", ":", "# unpack data, metadata tuple for type checking on first element", "r", ",", "md", "=", "r", "# handle deprecated JSON-as-string form from IPython < 3", "if", "isinstance", "(", "r", ",", "string_types", ")", ":", "warnings", ".", "warn", "(", "\"JSON expects JSONable list/dict containers, not JSON strings\"", ",", "FormatterWarning", ")", "r", "=", "json", ".", "loads", "(", "r", ")", "if", "md", "is", "not", "None", ":", "# put the tuple back together", "r", "=", "(", "r", ",", "md", ")", "return", "super", "(", "JSONFormatter", ",", "self", ")", ".", "_check_return", "(", "r", ",", "obj", ")" ]
https://github.com/pyparallel/pyparallel/blob/11e8c6072d48c8f13641925d17b147bf36ee0ba3/Lib/site-packages/ipython-4.0.0-py3.3.egg/IPython/core/formatters.py#L819-L840
realpython/book2-exercises
cde325eac8e6d8cff2316601c2e5b36bb46af7d0
web2py-rest/gluon/contrib/aes.py
python
AES.setkey
(self, key)
Sets the key and performs key expansion.
Sets the key and performs key expansion.
[ "Sets", "the", "key", "and", "performs", "key", "expansion", "." ]
def setkey(self, key):
    """Sets the key and performs key expansion."""

    self.key = key
    self.key_size = len(key)
    if self.key_size == 16:
        self.rounds = 10
    elif self.key_size == 24:
        self.rounds = 12
    elif self.key_size == 32:
        self.rounds = 14
    else:
        raise ValueError("Key length must be 16, 24 or 32 bytes")
    self.expand_key()
[ "def", "setkey", "(", "self", ",", "key", ")", ":", "self", ".", "key", "=", "key", "self", ".", "key_size", "=", "len", "(", "key", ")", "if", "self", ".", "key_size", "==", "16", ":", "self", ".", "rounds", "=", "10", "elif", "self", ".", "key_size", "==", "24", ":", "self", ".", "rounds", "=", "12", "elif", "self", ".", "key_size", "==", "32", ":", "self", ".", "rounds", "=", "14", "else", ":", "raise", "ValueError", "(", "\"Key length must be 16, 24 or 32 bytes\"", ")", "self", ".", "expand_key", "(", ")" ]
https://github.com/realpython/book2-exercises/blob/cde325eac8e6d8cff2316601c2e5b36bb46af7d0/web2py-rest/gluon/contrib/aes.py#L81-L96
MDAnalysis/mdanalysis
3488df3cdb0c29ed41c4fb94efe334b541e31b21
package/MDAnalysis/analysis/waterdynamics.py
python
MeanSquareDisplacement._sameMolecTandDT
(self, selection, t0d, tf)
return sort
Compare the molecules in the t0d selection and the t0d+dt selection and select only the particles that are repeated in both frame. This is to consider only the molecules that remains in the selection after the dt time has elapsed. The result is a list with the indexs of the atoms.
Compare the molecules in the t0d selection and the t0d+dt selection and select only the particles that are repeated in both frame. This is to consider only the molecules that remains in the selection after the dt time has elapsed. The result is a list with the indexs of the atoms.
[ "Compare", "the", "molecules", "in", "the", "t0d", "selection", "and", "the", "t0d", "+", "dt", "selection", "and", "select", "only", "the", "particles", "that", "are", "repeated", "in", "both", "frame", ".", "This", "is", "to", "consider", "only", "the", "molecules", "that", "remains", "in", "the", "selection", "after", "the", "dt", "time", "has", "elapsed", ".", "The", "result", "is", "a", "list", "with", "the", "indexs", "of", "the", "atoms", "." ]
def _sameMolecTandDT(self, selection, t0d, tf):
    """
    Compare the molecules in the t0d selection and the t0d+dt selection and
    select only the particles that are repeated in both frame. This is to
    consider only the molecules that remains in the selection after the dt
    time has elapsed. The result is a list with the indexs of the atoms.
    """
    a = set(selection[t0d])
    b = set(selection[tf])
    sort = sorted(list(a.intersection(b)))
    return sort
[ "def", "_sameMolecTandDT", "(", "self", ",", "selection", ",", "t0d", ",", "tf", ")", ":", "a", "=", "set", "(", "selection", "[", "t0d", "]", ")", "b", "=", "set", "(", "selection", "[", "tf", "]", ")", "sort", "=", "sorted", "(", "list", "(", "a", ".", "intersection", "(", "b", ")", ")", ")", "return", "sort" ]
https://github.com/MDAnalysis/mdanalysis/blob/3488df3cdb0c29ed41c4fb94efe334b541e31b21/package/MDAnalysis/analysis/waterdynamics.py#L867-L877
apeterswu/RL4NMT
3c66a2d8142abc5ce73db63e05d3cc9bf4663b65
tensor2tensor/data_generators/generator_utils.py
python
generate_files_distributed
(generator, output_name, output_dir, num_shards=1, max_cases=None, task_id=0)
return output_file
generate_files but with a single writer writing to shard task_id.
generate_files but with a single writer writing to shard task_id.
[ "generate_files", "but", "with", "a", "single", "writer", "writing", "to", "shard", "task_id", "." ]
def generate_files_distributed(generator,
                               output_name,
                               output_dir,
                               num_shards=1,
                               max_cases=None,
                               task_id=0):
  """generate_files but with a single writer writing to shard task_id."""
  assert task_id < num_shards
  output_filename = sharded_name(output_name, task_id, num_shards)
  output_file = os.path.join(output_dir, output_filename)
  tf.logging.info("Writing to file %s", output_file)
  writer = tf.python_io.TFRecordWriter(output_file)

  counter = 0
  for case in generator:
    if counter % 100000 == 0:
      tf.logging.info("Generating case %d for %s." % (counter, output_name))
    counter += 1
    if max_cases and counter > max_cases:
      break
    sequence_example = to_example(case)
    writer.write(sequence_example.SerializeToString())

  writer.close()
  return output_file
[ "def", "generate_files_distributed", "(", "generator", ",", "output_name", ",", "output_dir", ",", "num_shards", "=", "1", ",", "max_cases", "=", "None", ",", "task_id", "=", "0", ")", ":", "assert", "task_id", "<", "num_shards", "output_filename", "=", "sharded_name", "(", "output_name", ",", "task_id", ",", "num_shards", ")", "output_file", "=", "os", ".", "path", ".", "join", "(", "output_dir", ",", "output_filename", ")", "tf", ".", "logging", ".", "info", "(", "\"Writing to file %s\"", ",", "output_file", ")", "writer", "=", "tf", ".", "python_io", ".", "TFRecordWriter", "(", "output_file", ")", "counter", "=", "0", "for", "case", "in", "generator", ":", "if", "counter", "%", "100000", "==", "0", ":", "tf", ".", "logging", ".", "info", "(", "\"Generating case %d for %s.\"", "%", "(", "counter", ",", "output_name", ")", ")", "counter", "+=", "1", "if", "max_cases", "and", "counter", ">", "max_cases", ":", "break", "sequence_example", "=", "to_example", "(", "case", ")", "writer", ".", "write", "(", "sequence_example", ".", "SerializeToString", "(", ")", ")", "writer", ".", "close", "(", ")", "return", "output_file" ]
https://github.com/apeterswu/RL4NMT/blob/3c66a2d8142abc5ce73db63e05d3cc9bf4663b65/tensor2tensor/data_generators/generator_utils.py#L66-L90
hkust-vgd/scanobjectnn
fe60aeade9ceb8882bc3f1bc40612e65469d7e77
pointnet/utils/tf_util.py
python
conv2d
(inputs, num_output_channels, kernel_size, scope, stride=[1, 1], padding='SAME', use_xavier=True, stddev=1e-3, weight_decay=0.0, activation_fn=tf.nn.relu, bn=False, bn_decay=None, is_training=None)
2D convolution with non-linear operation. Args: inputs: 4-D tensor variable BxHxWxC num_output_channels: int kernel_size: a list of 2 ints scope: string stride: a list of 2 ints padding: 'SAME' or 'VALID' use_xavier: bool, use xavier_initializer if true stddev: float, stddev for truncated_normal init weight_decay: float activation_fn: function bn: bool, whether to use batch norm bn_decay: float or float tensor variable in [0,1] is_training: bool Tensor variable Returns: Variable tensor
2D convolution with non-linear operation.
[ "2D", "convolution", "with", "non", "-", "linear", "operation", "." ]
def conv2d(inputs,
           num_output_channels,
           kernel_size,
           scope,
           stride=[1, 1],
           padding='SAME',
           use_xavier=True,
           stddev=1e-3,
           weight_decay=0.0,
           activation_fn=tf.nn.relu,
           bn=False,
           bn_decay=None,
           is_training=None):
  """ 2D convolution with non-linear operation.

  Args:
    inputs: 4-D tensor variable BxHxWxC
    num_output_channels: int
    kernel_size: a list of 2 ints
    scope: string
    stride: a list of 2 ints
    padding: 'SAME' or 'VALID'
    use_xavier: bool, use xavier_initializer if true
    stddev: float, stddev for truncated_normal init
    weight_decay: float
    activation_fn: function
    bn: bool, whether to use batch norm
    bn_decay: float or float tensor variable in [0,1]
    is_training: bool Tensor variable

  Returns:
    Variable tensor
  """
  with tf.variable_scope(scope) as sc:
    kernel_h, kernel_w = kernel_size
    num_in_channels = inputs.get_shape()[-1].value
    kernel_shape = [kernel_h, kernel_w,
                    num_in_channels, num_output_channels]
    kernel = _variable_with_weight_decay('weights',
                                         shape=kernel_shape,
                                         use_xavier=use_xavier,
                                         stddev=stddev,
                                         wd=weight_decay)
    stride_h, stride_w = stride
    outputs = tf.nn.conv2d(inputs, kernel,
                           [1, stride_h, stride_w, 1],
                           padding=padding)
    biases = _variable_on_cpu('biases', [num_output_channels],
                              tf.constant_initializer(0.0))
    outputs = tf.nn.bias_add(outputs, biases)

    if bn:
      outputs = batch_norm_for_conv2d(outputs, is_training,
                                      bn_decay=bn_decay, scope='bn')

    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return outputs
[ "def", "conv2d", "(", "inputs", ",", "num_output_channels", ",", "kernel_size", ",", "scope", ",", "stride", "=", "[", "1", ",", "1", "]", ",", "padding", "=", "'SAME'", ",", "use_xavier", "=", "True", ",", "stddev", "=", "1e-3", ",", "weight_decay", "=", "0.0", ",", "activation_fn", "=", "tf", ".", "nn", ".", "relu", ",", "bn", "=", "False", ",", "bn_decay", "=", "None", ",", "is_training", "=", "None", ")", ":", "with", "tf", ".", "variable_scope", "(", "scope", ")", "as", "sc", ":", "kernel_h", ",", "kernel_w", "=", "kernel_size", "num_in_channels", "=", "inputs", ".", "get_shape", "(", ")", "[", "-", "1", "]", ".", "value", "kernel_shape", "=", "[", "kernel_h", ",", "kernel_w", ",", "num_in_channels", ",", "num_output_channels", "]", "kernel", "=", "_variable_with_weight_decay", "(", "'weights'", ",", "shape", "=", "kernel_shape", ",", "use_xavier", "=", "use_xavier", ",", "stddev", "=", "stddev", ",", "wd", "=", "weight_decay", ")", "stride_h", ",", "stride_w", "=", "stride", "outputs", "=", "tf", ".", "nn", ".", "conv2d", "(", "inputs", ",", "kernel", ",", "[", "1", ",", "stride_h", ",", "stride_w", ",", "1", "]", ",", "padding", "=", "padding", ")", "biases", "=", "_variable_on_cpu", "(", "'biases'", ",", "[", "num_output_channels", "]", ",", "tf", ".", "constant_initializer", "(", "0.0", ")", ")", "outputs", "=", "tf", ".", "nn", ".", "bias_add", "(", "outputs", ",", "biases", ")", "if", "bn", ":", "outputs", "=", "batch_norm_for_conv2d", "(", "outputs", ",", "is_training", ",", "bn_decay", "=", "bn_decay", ",", "scope", "=", "'bn'", ")", "if", "activation_fn", "is", "not", "None", ":", "outputs", "=", "activation_fn", "(", "outputs", ")", "return", "outputs" ]
https://github.com/hkust-vgd/scanobjectnn/blob/fe60aeade9ceb8882bc3f1bc40612e65469d7e77/pointnet/utils/tf_util.py#L112-L169
healpy/healpy
c34d032edaef6e1b755929aa76cf0cc933fcc677
setup.py
python
custom_build_ext.run
(self)
[]
def run(self):
    # If we were asked to build any C/C++ libraries, add the directory
    # where we built them to the include path. (It's already on the library
    # path.)
    if self.distribution.has_c_libraries():
        self.run_command("build_clib")
        build_clib = self.get_finalized_command("build_clib")
        for key, value in build_clib.build_args.items():
            for ext in self.extensions:
                if not hasattr(ext, key) or getattr(ext, key) is None:
                    setattr(ext, key, value)
                else:
                    getattr(ext, key).extend(value)
    build_ext.run(self)
[ "def", "run", "(", "self", ")", ":", "# If we were asked to build any C/C++ libraries, add the directory", "# where we built them to the include path. (It's already on the library", "# path.)", "if", "self", ".", "distribution", ".", "has_c_libraries", "(", ")", ":", "self", ".", "run_command", "(", "\"build_clib\"", ")", "build_clib", "=", "self", ".", "get_finalized_command", "(", "\"build_clib\"", ")", "for", "key", ",", "value", "in", "build_clib", ".", "build_args", ".", "items", "(", ")", ":", "for", "ext", "in", "self", ".", "extensions", ":", "if", "not", "hasattr", "(", "ext", ",", "key", ")", "or", "getattr", "(", "ext", ",", "key", ")", "is", "None", ":", "setattr", "(", "ext", ",", "key", ",", "value", ")", "else", ":", "getattr", "(", "ext", ",", "key", ")", ".", "extend", "(", "value", ")", "build_ext", ".", "run", "(", "self", ")" ]
https://github.com/healpy/healpy/blob/c34d032edaef6e1b755929aa76cf0cc933fcc677/setup.py#L301-L314
DLTK/DLTK
f94d3bb509eb0741164149acbef0788769a869e4
examples/applications/IXI_HH_sex_classification_resnet/train.py
python
model_fn
(features, labels, mode, params)
return tf.estimator.EstimatorSpec(mode=mode, predictions=net_output_ops, loss=loss, train_op=train_op, eval_metric_ops=eval_metric_ops)
Model function to construct a tf.estimator.EstimatorSpec. It creates a network given input features (e.g. from a dltk.io.abstract_reader) and training targets (labels). Further, loss, optimiser, evaluation ops and custom tensorboard summary ops can be added. For additional information, please refer to https://www.tensorflow.org/api_docs/python/tf/estimator/Estimator#model_fn. Args: features (tf.Tensor): Tensor of input features to train from. Required rank and dimensions are determined by the subsequent ops (i.e. the network). labels (tf.Tensor): Tensor of training targets or labels. Required rank and dimensions are determined by the network output. mode (str): One of the tf.estimator.ModeKeys: TRAIN, EVAL or PREDICT params (dict, optional): A dictionary to parameterise the model_fn (e.g. learning_rate) Returns: tf.estimator.EstimatorSpec: A custom EstimatorSpec for this experiment
Model function to construct a tf.estimator.EstimatorSpec. It creates a network given input features (e.g. from a dltk.io.abstract_reader) and training targets (labels). Further, loss, optimiser, evaluation ops and custom tensorboard summary ops can be added. For additional information, please refer to https://www.tensorflow.org/api_docs/python/tf/estimator/Estimator#model_fn.
[ "Model", "function", "to", "construct", "a", "tf", ".", "estimator", ".", "EstimatorSpec", ".", "It", "creates", "a", "network", "given", "input", "features", "(", "e", ".", "g", ".", "from", "a", "dltk", ".", "io", ".", "abstract_reader", ")", "and", "training", "targets", "(", "labels", ")", ".", "Further", "loss", "optimiser", "evaluation", "ops", "and", "custom", "tensorboard", "summary", "ops", "can", "be", "added", ".", "For", "additional", "information", "please", "refer", "to", "https", ":", "//", "www", ".", "tensorflow", ".", "org", "/", "api_docs", "/", "python", "/", "tf", "/", "estimator", "/", "Estimator#model_fn", "." ]
def model_fn(features, labels, mode, params):
    """Model function to construct a tf.estimator.EstimatorSpec. It creates a
    network given input features (e.g. from a dltk.io.abstract_reader) and
    training targets (labels). Further, loss, optimiser, evaluation ops and
    custom tensorboard summary ops can be added. For additional information,
    please refer to
    https://www.tensorflow.org/api_docs/python/tf/estimator/Estimator#model_fn.

    Args:
        features (tf.Tensor): Tensor of input features to train from. Required
            rank and dimensions are determined by the subsequent ops
            (i.e. the network).
        labels (tf.Tensor): Tensor of training targets or labels. Required rank
            and dimensions are determined by the network output.
        mode (str): One of the tf.estimator.ModeKeys: TRAIN, EVAL or PREDICT
        params (dict, optional): A dictionary to parameterise the model_fn
            (e.g. learning_rate)

    Returns:
        tf.estimator.EstimatorSpec: A custom EstimatorSpec for this experiment
    """

    # 1. create a model and its outputs
    net_output_ops = resnet_3d(
        features['x'],
        num_res_units=2,
        num_classes=NUM_CLASSES,
        filters=(16, 32, 64, 128, 256),
        strides=((1, 1, 1), (2, 2, 2), (2, 2, 2), (2, 2, 2), (2, 2, 2)),
        mode=mode,
        kernel_regularizer=tf.contrib.layers.l2_regularizer(1e-3))

    # 1.1 Generate predictions only (for `ModeKeys.PREDICT`)
    if mode == tf.estimator.ModeKeys.PREDICT:
        return tf.estimator.EstimatorSpec(
            mode=mode,
            predictions=net_output_ops,
            export_outputs={'out': tf.estimator.export.PredictOutput(net_output_ops)})

    # 2. set up a loss function
    one_hot_labels = tf.reshape(tf.one_hot(labels['y'], depth=NUM_CLASSES),
                                [-1, NUM_CLASSES])
    loss = tf.losses.softmax_cross_entropy(
        onehot_labels=one_hot_labels,
        logits=net_output_ops['logits'])

    # 3. define a training op and ops for updating moving averages (i.e. for
    # batch normalisation)
    global_step = tf.train.get_global_step()
    optimiser = tf.train.AdamOptimizer(
        learning_rate=params["learning_rate"],
        epsilon=1e-5)

    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    with tf.control_dependencies(update_ops):
        train_op = optimiser.minimize(loss, global_step=global_step)

    # 4.1 (optional) create custom image summaries for tensorboard
    my_image_summaries = {}
    my_image_summaries['feat_t1'] = features['x'][0, 32, :, :, 0]

    expected_output_size = [1, 96, 96, 1]  # [B, W, H, C]
    [tf.summary.image(name, tf.reshape(image, expected_output_size))
     for name, image in my_image_summaries.items()]

    # 4.2 (optional) track the rmse (scaled back by 100, see reader.py)
    acc = tf.metrics.accuracy
    prec = tf.metrics.precision
    eval_metric_ops = {"accuracy": acc(labels['y'], net_output_ops['y_']),
                       "precision": prec(labels['y'], net_output_ops['y_'])}

    # 5. Return EstimatorSpec object
    return tf.estimator.EstimatorSpec(mode=mode,
                                      predictions=net_output_ops,
                                      loss=loss,
                                      train_op=train_op,
                                      eval_metric_ops=eval_metric_ops)
[ "def", "model_fn", "(", "features", ",", "labels", ",", "mode", ",", "params", ")", ":", "# 1. create a model and its outputs", "net_output_ops", "=", "resnet_3d", "(", "features", "[", "'x'", "]", ",", "num_res_units", "=", "2", ",", "num_classes", "=", "NUM_CLASSES", ",", "filters", "=", "(", "16", ",", "32", ",", "64", ",", "128", ",", "256", ")", ",", "strides", "=", "(", "(", "1", ",", "1", ",", "1", ")", ",", "(", "2", ",", "2", ",", "2", ")", ",", "(", "2", ",", "2", ",", "2", ")", ",", "(", "2", ",", "2", ",", "2", ")", ",", "(", "2", ",", "2", ",", "2", ")", ")", ",", "mode", "=", "mode", ",", "kernel_regularizer", "=", "tf", ".", "contrib", ".", "layers", ".", "l2_regularizer", "(", "1e-3", ")", ")", "# 1.1 Generate predictions only (for `ModeKeys.PREDICT`)", "if", "mode", "==", "tf", ".", "estimator", ".", "ModeKeys", ".", "PREDICT", ":", "return", "tf", ".", "estimator", ".", "EstimatorSpec", "(", "mode", "=", "mode", ",", "predictions", "=", "net_output_ops", ",", "export_outputs", "=", "{", "'out'", ":", "tf", ".", "estimator", ".", "export", ".", "PredictOutput", "(", "net_output_ops", ")", "}", ")", "# 2. set up a loss function", "one_hot_labels", "=", "tf", ".", "reshape", "(", "tf", ".", "one_hot", "(", "labels", "[", "'y'", "]", ",", "depth", "=", "NUM_CLASSES", ")", ",", "[", "-", "1", ",", "NUM_CLASSES", "]", ")", "loss", "=", "tf", ".", "losses", ".", "softmax_cross_entropy", "(", "onehot_labels", "=", "one_hot_labels", ",", "logits", "=", "net_output_ops", "[", "'logits'", "]", ")", "# 3. define a training op and ops for updating moving averages (i.e. for", "# batch normalisation)", "global_step", "=", "tf", ".", "train", ".", "get_global_step", "(", ")", "optimiser", "=", "tf", ".", "train", ".", "AdamOptimizer", "(", "learning_rate", "=", "params", "[", "\"learning_rate\"", "]", ",", "epsilon", "=", "1e-5", ")", "update_ops", "=", "tf", ".", "get_collection", "(", "tf", ".", "GraphKeys", ".", "UPDATE_OPS", ")", "with", "tf", ".", "control_dependencies", "(", "update_ops", ")", ":", "train_op", "=", "optimiser", ".", "minimize", "(", "loss", ",", "global_step", "=", "global_step", ")", "# 4.1 (optional) create custom image summaries for tensorboard", "my_image_summaries", "=", "{", "}", "my_image_summaries", "[", "'feat_t1'", "]", "=", "features", "[", "'x'", "]", "[", "0", ",", "32", ",", ":", ",", ":", ",", "0", "]", "expected_output_size", "=", "[", "1", ",", "96", ",", "96", ",", "1", "]", "# [B, W, H, C]", "[", "tf", ".", "summary", ".", "image", "(", "name", ",", "tf", ".", "reshape", "(", "image", ",", "expected_output_size", ")", ")", "for", "name", ",", "image", "in", "my_image_summaries", ".", "items", "(", ")", "]", "# 4.2 (optional) track the rmse (scaled back by 100, see reader.py)", "acc", "=", "tf", ".", "metrics", ".", "accuracy", "prec", "=", "tf", ".", "metrics", ".", "precision", "eval_metric_ops", "=", "{", "\"accuracy\"", ":", "acc", "(", "labels", "[", "'y'", "]", ",", "net_output_ops", "[", "'y_'", "]", ")", ",", "\"precision\"", ":", "prec", "(", "labels", "[", "'y'", "]", ",", "net_output_ops", "[", "'y_'", "]", ")", "}", "# 5. Return EstimatorSpec object", "return", "tf", ".", "estimator", ".", "EstimatorSpec", "(", "mode", "=", "mode", ",", "predictions", "=", "net_output_ops", ",", "loss", "=", "loss", ",", "train_op", "=", "train_op", ",", "eval_metric_ops", "=", "eval_metric_ops", ")" ]
https://github.com/DLTK/DLTK/blob/f94d3bb509eb0741164149acbef0788769a869e4/examples/applications/IXI_HH_sex_classification_resnet/train.py#L31-L106
MozillaSecurity/grizzly
1c41478e32f323189a2c322ec041c3e0902a158a
grizzly/common/status.py
python
ReductionStatus._construct_milestone
(self, name, start, attempts, iterations, successes)
return _MilestoneTimer()
[]
def _construct_milestone(self, name, start, attempts, iterations, successes):
    # pylint: disable=no-self-argument
    class _MilestoneTimer:
        def __init__(sub):
            sub.name = name
            sub._start_time = start
            sub._start_attempts = attempts
            sub._start_iterations = iterations
            sub._start_successes = successes

        @property
        def size(sub):
            return self._testcase_size()  # pylint: disable=protected-access

        @property
        def attempts(sub):
            return self.attempts - sub._start_attempts

        @property
        def iterations(sub):
            return self.iterations - sub._start_iterations

        @property
        def successes(sub):
            return self.successes - sub._start_successes

        @property
        def duration(sub):
            if self._db_file is None:  # pylint: disable=protected-access
                return self.timestamp - sub._start_time
            return time() - sub._start_time

        def serialize(sub):
            return (
                sub.name,
                sub._start_time,
                sub._start_attempts,
                sub._start_iterations,
                sub._start_successes,
            )

    return _MilestoneTimer()
[ "def", "_construct_milestone", "(", "self", ",", "name", ",", "start", ",", "attempts", ",", "iterations", ",", "successes", ")", ":", "# pylint: disable=no-self-argument", "class", "_MilestoneTimer", ":", "def", "__init__", "(", "sub", ")", ":", "sub", ".", "name", "=", "name", "sub", ".", "_start_time", "=", "start", "sub", ".", "_start_attempts", "=", "attempts", "sub", ".", "_start_iterations", "=", "iterations", "sub", ".", "_start_successes", "=", "successes", "@", "property", "def", "size", "(", "sub", ")", ":", "return", "self", ".", "_testcase_size", "(", ")", "# pylint: disable=protected-access", "@", "property", "def", "attempts", "(", "sub", ")", ":", "return", "self", ".", "attempts", "-", "sub", ".", "_start_attempts", "@", "property", "def", "iterations", "(", "sub", ")", ":", "return", "self", ".", "iterations", "-", "sub", ".", "_start_iterations", "@", "property", "def", "successes", "(", "sub", ")", ":", "return", "self", ".", "successes", "-", "sub", ".", "_start_successes", "@", "property", "def", "duration", "(", "sub", ")", ":", "if", "self", ".", "_db_file", "is", "None", ":", "# pylint: disable=protected-access", "return", "self", ".", "timestamp", "-", "sub", ".", "_start_time", "return", "time", "(", ")", "-", "sub", ".", "_start_time", "def", "serialize", "(", "sub", ")", ":", "return", "(", "sub", ".", "name", ",", "sub", ".", "_start_time", ",", "sub", ".", "_start_attempts", ",", "sub", ".", "_start_iterations", ",", "sub", ".", "_start_successes", ",", ")", "return", "_MilestoneTimer", "(", ")" ]
https://github.com/MozillaSecurity/grizzly/blob/1c41478e32f323189a2c322ec041c3e0902a158a/grizzly/common/status.py#L1051-L1092
pantsbuild/pex
473c6ac732ed4bc338b4b20a9ec930d1d722c9b4
pex/vendor/_vendored/packaging/pyparsing.py
python
MatchFirst.parseImpl
(self, instring, loc, doActions=True)
[]
def parseImpl(self, instring, loc, doActions=True):
    maxExcLoc = -1
    maxException = None
    for e in self.exprs:
        try:
            ret = e._parse(instring, loc, doActions)
            return ret
        except ParseException as err:
            if err.loc > maxExcLoc:
                maxException = err
                maxExcLoc = err.loc
        except IndexError:
            if len(instring) > maxExcLoc:
                maxException = ParseException(instring, len(instring), e.errmsg, self)
                maxExcLoc = len(instring)

    # only got here if no expression matched, raise exception for match that made it the furthest
    else:
        if maxException is not None:
            maxException.msg = self.errmsg
            raise maxException
        else:
            raise ParseException(instring, loc, "no defined alternatives to match", self)
[ "def", "parseImpl", "(", "self", ",", "instring", ",", "loc", ",", "doActions", "=", "True", ")", ":", "maxExcLoc", "=", "-", "1", "maxException", "=", "None", "for", "e", "in", "self", ".", "exprs", ":", "try", ":", "ret", "=", "e", ".", "_parse", "(", "instring", ",", "loc", ",", "doActions", ")", "return", "ret", "except", "ParseException", "as", "err", ":", "if", "err", ".", "loc", ">", "maxExcLoc", ":", "maxException", "=", "err", "maxExcLoc", "=", "err", ".", "loc", "except", "IndexError", ":", "if", "len", "(", "instring", ")", ">", "maxExcLoc", ":", "maxException", "=", "ParseException", "(", "instring", ",", "len", "(", "instring", ")", ",", "e", ".", "errmsg", ",", "self", ")", "maxExcLoc", "=", "len", "(", "instring", ")", "# only got here if no expression matched, raise exception for match that made it the furthest", "else", ":", "if", "maxException", "is", "not", "None", ":", "maxException", ".", "msg", "=", "self", ".", "errmsg", "raise", "maxException", "else", ":", "raise", "ParseException", "(", "instring", ",", "loc", ",", "\"no defined alternatives to match\"", ",", "self", ")" ]
https://github.com/pantsbuild/pex/blob/473c6ac732ed4bc338b4b20a9ec930d1d722c9b4/pex/vendor/_vendored/packaging/pyparsing.py#L4249-L4271
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/lib/django-1.4/django/contrib/gis/maps/google/gmap.py
python
GoogleMap.js
(self)
return self.render()
Returns only the generated Google Maps JavaScript (no <script> tags).
Returns only the generated Google Maps JavaScript (no <script> tags).
[ "Returns", "only", "the", "generated", "Google", "Maps", "JavaScript", "(", "no", "<script", ">", "tags", ")", "." ]
def js(self):
    "Returns only the generated Google Maps JavaScript (no <script> tags)."
    return self.render()
[ "def", "js", "(", "self", ")", ":", "return", "self", ".", "render", "(", ")" ]
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/django-1.4/django/contrib/gis/maps/google/gmap.py#L127-L129
openatx/uiautomator2
a6ebc2446a8babb4ce14dc62cfdb5590ea95c709
uiautomator2/__init__.py
python
_Device.set_orientation
(self, value)
setter of orientation property.
setter of orientation property.
[ "setter", "of", "orientation", "property", "." ]
def set_orientation(self, value):
    '''setter of orientation property.'''
    for values in self.__orientation:
        if value in values:
            # can not set upside-down until api level 18.
            self.jsonrpc.setOrientation(values[1])
            break
    else:
        raise ValueError("Invalid orientation.")
[ "def", "set_orientation", "(", "self", ",", "value", ")", ":", "for", "values", "in", "self", ".", "__orientation", ":", "if", "value", "in", "values", ":", "# can not set upside-down until api level 18.", "self", ".", "jsonrpc", ".", "setOrientation", "(", "values", "[", "1", "]", ")", "break", "else", ":", "raise", "ValueError", "(", "\"Invalid orientation.\"", ")" ]
https://github.com/openatx/uiautomator2/blob/a6ebc2446a8babb4ce14dc62cfdb5590ea95c709/uiautomator2/__init__.py#L1183-L1191
wxWidgets/Phoenix
b2199e299a6ca6d866aa6f3d0888499136ead9d6
wx/lib/agw/ribbon/buttonbar.py
python
RibbonButtonBar.OnMouseMove
(self, event)
Handles the ``wx.EVT_MOTION`` event for :class:`RibbonButtonBar`. :param `event`: a :class:`MouseEvent` event to be processed.
Handles the ``wx.EVT_MOTION`` event for :class:`RibbonButtonBar`.
[ "Handles", "the", "wx", ".", "EVT_MOTION", "event", "for", ":", "class", ":", "RibbonButtonBar", "." ]
def OnMouseMove(self, event):
    """
    Handles the ``wx.EVT_MOTION`` event for :class:`RibbonButtonBar`.

    :param `event`: a :class:`MouseEvent` event to be processed.
    """

    cursor = event.GetPosition()
    new_hovered = None
    new_hovered_state = 0

    layout = self._layouts[self._current_layout]

    for instance in layout.buttons:
        size = instance.base.sizes[instance.size]
        btn_rect = wx.Rect()
        btn_rect.SetTopLeft(self._layout_offset + instance.position)
        btn_rect.SetSize(size.size)

        if btn_rect.Contains(cursor) and self.IsButtonEnabled(instance.base.id):
            new_hovered = instance
            new_hovered_state = instance.base.state
            new_hovered_state &= ~RIBBON_BUTTONBAR_BUTTON_HOVER_MASK
            offset = wx.Point(*cursor)
            offset -= btn_rect.GetTopLeft()

            if size.normal_region.Contains(offset):
                new_hovered_state |= RIBBON_BUTTONBAR_BUTTON_NORMAL_HOVERED

            if size.dropdown_region.Contains(offset):
                new_hovered_state |= RIBBON_BUTTONBAR_BUTTON_DROPDOWN_HOVERED

            break

    if new_hovered is None and self.GetToolTip():
        self.SetToolTip("")

    if new_hovered != self._hovered_button or (self._hovered_button is not None and \
                                               new_hovered_state != self._hovered_button.base.state):
        if self._hovered_button is not None:
            self._hovered_button.base.state &= ~RIBBON_BUTTONBAR_BUTTON_HOVER_MASK

        self._hovered_button = new_hovered

        if self._hovered_button is not None:
            self._hovered_button.base.state = new_hovered_state
            self.SetToolTip(self._hovered_button.base.help_string)

        self.Refresh(False)

    if self._active_button and not self._lock_active_state:
        new_active_state = self._active_button.base.state
        new_active_state &= ~RIBBON_BUTTONBAR_BUTTON_ACTIVE_MASK
        size = self._active_button.base.sizes[self._active_button.size]
        btn_rect = wx.Rect()
        btn_rect.SetTopLeft(self._layout_offset + self._active_button.position)
        btn_rect.SetSize(size.size)

        if btn_rect.Contains(cursor):
            offset = wx.Point(*cursor)
            offset -= btn_rect.GetTopLeft()

            if size.normal_region.Contains(offset):
                new_active_state |= RIBBON_BUTTONBAR_BUTTON_NORMAL_ACTIVE

            if size.dropdown_region.Contains(offset):
                new_active_state |= RIBBON_BUTTONBAR_BUTTON_DROPDOWN_ACTIVE

        if new_active_state != self._active_button.base.state:
            self._active_button.base.state = new_active_state
            self.Refresh(False)
[ "def", "OnMouseMove", "(", "self", ",", "event", ")", ":", "cursor", "=", "event", ".", "GetPosition", "(", ")", "new_hovered", "=", "None", "new_hovered_state", "=", "0", "layout", "=", "self", ".", "_layouts", "[", "self", ".", "_current_layout", "]", "for", "instance", "in", "layout", ".", "buttons", ":", "size", "=", "instance", ".", "base", ".", "sizes", "[", "instance", ".", "size", "]", "btn_rect", "=", "wx", ".", "Rect", "(", ")", "btn_rect", ".", "SetTopLeft", "(", "self", ".", "_layout_offset", "+", "instance", ".", "position", ")", "btn_rect", ".", "SetSize", "(", "size", ".", "size", ")", "if", "btn_rect", ".", "Contains", "(", "cursor", ")", "and", "self", ".", "IsButtonEnabled", "(", "instance", ".", "base", ".", "id", ")", ":", "new_hovered", "=", "instance", "new_hovered_state", "=", "instance", ".", "base", ".", "state", "new_hovered_state", "&=", "~", "RIBBON_BUTTONBAR_BUTTON_HOVER_MASK", "offset", "=", "wx", ".", "Point", "(", "*", "cursor", ")", "offset", "-=", "btn_rect", ".", "GetTopLeft", "(", ")", "if", "size", ".", "normal_region", ".", "Contains", "(", "offset", ")", ":", "new_hovered_state", "|=", "RIBBON_BUTTONBAR_BUTTON_NORMAL_HOVERED", "if", "size", ".", "dropdown_region", ".", "Contains", "(", "offset", ")", ":", "new_hovered_state", "|=", "RIBBON_BUTTONBAR_BUTTON_DROPDOWN_HOVERED", "break", "if", "new_hovered", "is", "None", "and", "self", ".", "GetToolTip", "(", ")", ":", "self", ".", "SetToolTip", "(", "\"\"", ")", "if", "new_hovered", "!=", "self", ".", "_hovered_button", "or", "(", "self", ".", "_hovered_button", "is", "not", "None", "and", "new_hovered_state", "!=", "self", ".", "_hovered_button", ".", "base", ".", "state", ")", ":", "if", "self", ".", "_hovered_button", "is", "not", "None", ":", "self", ".", "_hovered_button", ".", "base", ".", "state", "&=", "~", "RIBBON_BUTTONBAR_BUTTON_HOVER_MASK", "self", ".", "_hovered_button", "=", "new_hovered", "if", "self", ".", "_hovered_button", "is", "not", "None", ":", "self", ".", "_hovered_button", ".", "base", ".", "state", "=", "new_hovered_state", "self", ".", "SetToolTip", "(", "self", ".", "_hovered_button", ".", "base", ".", "help_string", ")", "self", ".", "Refresh", "(", "False", ")", "if", "self", ".", "_active_button", "and", "not", "self", ".", "_lock_active_state", ":", "new_active_state", "=", "self", ".", "_active_button", ".", "base", ".", "state", "new_active_state", "&=", "~", "RIBBON_BUTTONBAR_BUTTON_ACTIVE_MASK", "size", "=", "self", ".", "_active_button", ".", "base", ".", "sizes", "[", "self", ".", "_active_button", ".", "size", "]", "btn_rect", "=", "wx", ".", "Rect", "(", ")", "btn_rect", ".", "SetTopLeft", "(", "self", ".", "_layout_offset", "+", "self", ".", "_active_button", ".", "position", ")", "btn_rect", ".", "SetSize", "(", "size", ".", "size", ")", "if", "btn_rect", ".", "Contains", "(", "cursor", ")", ":", "offset", "=", "wx", ".", "Point", "(", "*", "cursor", ")", "offset", "-=", "btn_rect", ".", "GetTopLeft", "(", ")", "if", "size", ".", "normal_region", ".", "Contains", "(", "offset", ")", ":", "new_active_state", "|=", "RIBBON_BUTTONBAR_BUTTON_NORMAL_ACTIVE", "if", "size", ".", "dropdown_region", ".", "Contains", "(", "offset", ")", ":", "new_active_state", "|=", "RIBBON_BUTTONBAR_BUTTON_DROPDOWN_ACTIVE", "if", "new_active_state", "!=", "self", ".", "_active_button", ".", "base", ".", "state", ":", "self", ".", "_active_button", ".", "base", ".", "state", "=", "new_active_state", "self", ".", "Refresh", "(", "False", ")" ]
https://github.com/wxWidgets/Phoenix/blob/b2199e299a6ca6d866aa6f3d0888499136ead9d6/wx/lib/agw/ribbon/buttonbar.py#L1128-L1199
PaddlePaddle/Research
2da0bd6c72d60e9df403aff23a7802779561c4a1
NLP/MRQA2019-BASELINE/src/model/transformer_encoder.py
python
encoder
(enc_input, attn_bias, n_layer, n_head, d_key, d_value, d_model, d_inner_hid, prepostprocess_dropout, attention_dropout, relu_dropout, hidden_act, preprocess_cmd="n", postprocess_cmd="da", param_initializer=None, name='')
return enc_output
The encoder is composed of a stack of identical layers returned by calling encoder_layer.
The encoder is composed of a stack of identical layers returned by calling encoder_layer.
[ "The", "encoder", "is", "composed", "of", "a", "stack", "of", "identical", "layers", "returned", "by", "calling", "encoder_layer", "." ]
def encoder(enc_input,
            attn_bias,
            n_layer,
            n_head,
            d_key,
            d_value,
            d_model,
            d_inner_hid,
            prepostprocess_dropout,
            attention_dropout,
            relu_dropout,
            hidden_act,
            preprocess_cmd="n",
            postprocess_cmd="da",
            param_initializer=None,
            name=''):
    """
    The encoder is composed of a stack of identical layers returned by calling
    encoder_layer.
    """
    for i in range(n_layer):
        enc_output = encoder_layer(
            enc_input,
            attn_bias,
            n_head,
            d_key,
            d_value,
            d_model,
            d_inner_hid,
            prepostprocess_dropout,
            attention_dropout,
            relu_dropout,
            hidden_act,
            preprocess_cmd,
            postprocess_cmd,
            param_initializer=param_initializer,
            name=name + '_layer_' + str(i))
        enc_input = enc_output
    enc_output = pre_process_layer(
        enc_output, preprocess_cmd, prepostprocess_dropout, name="post_encoder")
    return enc_output
[ "def", "encoder", "(", "enc_input", ",", "attn_bias", ",", "n_layer", ",", "n_head", ",", "d_key", ",", "d_value", ",", "d_model", ",", "d_inner_hid", ",", "prepostprocess_dropout", ",", "attention_dropout", ",", "relu_dropout", ",", "hidden_act", ",", "preprocess_cmd", "=", "\"n\"", ",", "postprocess_cmd", "=", "\"da\"", ",", "param_initializer", "=", "None", ",", "name", "=", "''", ")", ":", "for", "i", "in", "range", "(", "n_layer", ")", ":", "enc_output", "=", "encoder_layer", "(", "enc_input", ",", "attn_bias", ",", "n_head", ",", "d_key", ",", "d_value", ",", "d_model", ",", "d_inner_hid", ",", "prepostprocess_dropout", ",", "attention_dropout", ",", "relu_dropout", ",", "hidden_act", ",", "preprocess_cmd", ",", "postprocess_cmd", ",", "param_initializer", "=", "param_initializer", ",", "name", "=", "name", "+", "'_layer_'", "+", "str", "(", "i", ")", ")", "enc_input", "=", "enc_output", "enc_output", "=", "pre_process_layer", "(", "enc_output", ",", "preprocess_cmd", ",", "prepostprocess_dropout", ",", "name", "=", "\"post_encoder\"", ")", "return", "enc_output" ]
https://github.com/PaddlePaddle/Research/blob/2da0bd6c72d60e9df403aff23a7802779561c4a1/NLP/MRQA2019-BASELINE/src/model/transformer_encoder.py#L301-L342
AstroPrint/AstroBox
e7e3b8a7d33ea85fcb6b2696869c0d719ceb8b75
src/ext/makerbot_driver/EEPROM/EepromAnalyzer.py
python
eeprom_analyzer.parse_out_name_and_location
(self, line)
return name, location
Given a line in the form of: const static uint16_t <name> = <location>; parses out the name and location. If we get a line not of this form, we will fail. @param str line: the line we want information from @return tuple: Information in the form of (name, location)
Given a line in the form of: const static uint16_t <name> = <location>; parses out the name and location. If we get a line not of this form, we will fail.
[ "Given", "a", "line", "in", "the", "form", "of", ":", "const", "static", "uint16_t", "<name", ">", "=", "<location", ">", ";", "parses", "out", "the", "name", "and", "location", ".", "If", "we", "get", "a", "line", "not", "of", "this", "form", "we", "will", "fail", "." ]
def parse_out_name_and_location(self, line):
    """
    Given a line in the form of:
        const static uint16_t <name> = <location>;
    parses out the name and location. If we get a line not of this form,
    we will fail.

    @param str line: the line we want information from
    @return tuple: Information in the form of (name, location)
    """
    before_semi_regex = "(.*?);"
    match = re.search(before_semi_regex, line)
    substring = match.group(1)
    for w in ['const', 'static', 'uint16_t']:
        substring = substring.replace(w, '')
    substring = substring.replace('\t', '')
    substring = substring.replace(" ", "")
    (name, location) = substring.split("=")
    return name, location
[ "def", "parse_out_name_and_location", "(", "self", ",", "line", ")", ":", "before_semi_regex", "=", "\"(.*?);\"", "match", "=", "re", ".", "search", "(", "before_semi_regex", ",", "line", ")", "substring", "=", "match", ".", "group", "(", "1", ")", "for", "w", "in", "[", "'const'", ",", "'static'", ",", "'uint16_t'", "]", ":", "substring", "=", "substring", ".", "replace", "(", "w", ",", "''", ")", "substring", "=", "substring", ".", "replace", "(", "'\\t'", ",", "''", ")", "substring", "=", "substring", ".", "replace", "(", "\" \"", ",", "\"\"", ")", "(", "name", ",", "location", ")", "=", "substring", ".", "split", "(", "\"=\"", ")", "return", "name", ",", "location" ]
https://github.com/AstroPrint/AstroBox/blob/e7e3b8a7d33ea85fcb6b2696869c0d719ceb8b75/src/ext/makerbot_driver/EEPROM/EepromAnalyzer.py#L122-L140
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/lib/django-1.4/django/views/defaults.py
python
permission_denied
(request, template_name='403.html')
return http.HttpResponseForbidden(template.render(RequestContext(request)))
Permission denied (403) handler. Templates: `403.html` Context: None If the template does not exist, an Http403 response containing the text "403 Forbidden" (as per RFC 2616) will be returned.
Permission denied (403) handler.
[ "Permission", "denied", "(", "403", ")", "handler", "." ]
def permission_denied(request, template_name='403.html'):
    """
    Permission denied (403) handler.

    Templates: `403.html`
    Context: None

    If the template does not exist, an Http403 response containing the text
    "403 Forbidden" (as per RFC 2616) will be returned.
    """
    try:
        template = loader.get_template(template_name)
    except TemplateDoesNotExist:
        return http.HttpResponseForbidden('<h1>403 Forbidden</h1>')
    return http.HttpResponseForbidden(template.render(RequestContext(request)))
[ "def", "permission_denied", "(", "request", ",", "template_name", "=", "'403.html'", ")", ":", "try", ":", "template", "=", "loader", ".", "get_template", "(", "template_name", ")", "except", "TemplateDoesNotExist", ":", "return", "http", ".", "HttpResponseForbidden", "(", "'<h1>403 Forbidden</h1>'", ")", "return", "http", ".", "HttpResponseForbidden", "(", "template", ".", "render", "(", "RequestContext", "(", "request", ")", ")", ")" ]
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/django-1.4/django/views/defaults.py#L40-L54
evennia/evennia
fa79110ba6b219932f22297838e8ac72ebc0be0e
evennia/contrib/tutorial_examples/cmdset_red_button.py
python
CmdPush.func
(self)
Note that we choose to implement this with checking for if the lid is open/closed. This is because this command is likely to be tried regardless of the state of the lid. An alternative would be to make two versions of this command and tuck them into the cmdset linked to the Open and Closed lid-state respectively.
Note that we choose to implement this with checking for if the lid is open/closed. This is because this command is likely to be tried regardless of the state of the lid.
[ "Note", "that", "we", "choose", "to", "implement", "this", "with", "checking", "for", "if", "the", "lid", "is", "open", "/", "closed", ".", "This", "is", "because", "this", "command", "is", "likely", "to", "be", "tried", "regardless", "of", "the", "state", "of", "the", "lid", "." ]
def func(self):
    """
    Note that we choose to implement this with checking for
    if the lid is open/closed. This is because this command
    is likely to be tried regardless of the state of the lid.

    An alternative would be to make two versions of this command
    and tuck them into the cmdset linked to the Open and Closed
    lid-state respectively.
    """

    if self.obj.db.lid_open:
        string = "You reach out to press the big red button ..."
        string += "\n\nA BOOM! A bright light blinds you!"
        string += "\nThe world goes dark ..."
        self.caller.msg(string)
        self.caller.location.msg_contents(
            "%s presses the button. BOOM! %s is blinded by a flash!"
            % (self.caller.name, self.caller.name),
            exclude=self.caller,
        )
        # the button's method will handle all setup of scripts etc.
        self.obj.press_button(self.caller)
    else:
        string = "You cannot push the button - there is a glass lid covering it."
        self.caller.msg(string)
[ "def", "func", "(", "self", ")", ":", "if", "self", ".", "obj", ".", "db", ".", "lid_open", ":", "string", "=", "\"You reach out to press the big red button ...\"", "string", "+=", "\"\\n\\nA BOOM! A bright light blinds you!\"", "string", "+=", "\"\\nThe world goes dark ...\"", "self", ".", "caller", ".", "msg", "(", "string", ")", "self", ".", "caller", ".", "location", ".", "msg_contents", "(", "\"%s presses the button. BOOM! %s is blinded by a flash!\"", "%", "(", "self", ".", "caller", ".", "name", ",", "self", ".", "caller", ".", "name", ")", ",", "exclude", "=", "self", ".", "caller", ",", ")", "# the button's method will handle all setup of scripts etc.", "self", ".", "obj", ".", "press_button", "(", "self", ".", "caller", ")", "else", ":", "string", "=", "\"You cannot push the button - there is a glass lid covering it.\"", "self", ".", "caller", ".", "msg", "(", "string", ")" ]
https://github.com/evennia/evennia/blob/fa79110ba6b219932f22297838e8ac72ebc0be0e/evennia/contrib/tutorial_examples/cmdset_red_button.py#L62-L88
geekan/scrapy-examples
edb1cb116bd6def65a6ef01f953b58eb43e54305
sinanews/sinanews/spiders/spider.py
python
sinanewsSpider.parse_0
(self, response)
[]
def parse_0(self, response):
    info('Parse 0 '+response.url)
    x = self.parse_with_rules(response, self.list_css_rules, dict)
    pp.pprint(x)
[ "def", "parse_0", "(", "self", ",", "response", ")", ":", "info", "(", "'Parse 0 '", "+", "response", ".", "url", ")", "x", "=", "self", ".", "parse_with_rules", "(", "response", ",", "self", ".", "list_css_rules", ",", "dict", ")", "pp", ".", "pprint", "(", "x", ")" ]
https://github.com/geekan/scrapy-examples/blob/edb1cb116bd6def65a6ef01f953b58eb43e54305/sinanews/sinanews/spiders/spider.py#L63-L66
4shadoww/hakkuframework
409a11fc3819d251f86faa3473439f8c19066a21
lib/scapy/layers/tls/session.py
python
tlsSession.mirror
(self)
return self
This function takes a tlsSession object and swaps the IP addresses, ports, connection ends and connection states. The triggered_commit are also swapped (though it is probably overkill, it is cleaner this way). It is useful for static analysis of a series of messages from both the client and the server. In such a situation, it should be used every time the message being read comes from a different side than the one read right before, as the reading state becomes the writing state, and vice versa. For instance you could do: client_hello = open('client_hello.raw').read() <read other messages> m1 = TLS(client_hello) m2 = TLS(server_hello, tls_session=m1.tls_session.mirror()) m3 = TLS(server_cert, tls_session=m2.tls_session) m4 = TLS(client_keyexchange, tls_session=m3.tls_session.mirror())
This function takes a tlsSession object and swaps the IP addresses, ports, connection ends and connection states. The triggered_commit are also swapped (though it is probably overkill, it is cleaner this way).
[ "This", "function", "takes", "a", "tlsSession", "object", "and", "swaps", "the", "IP", "addresses", "ports", "connection", "ends", "and", "connection", "states", ".", "The", "triggered_commit", "are", "also", "swapped", "(", "though", "it", "is", "probably", "overkill", "it", "is", "cleaner", "this", "way", ")", "." ]
def mirror(self):
    """
    This function takes a tlsSession object and swaps the IP addresses,
    ports, connection ends and connection states. The triggered_commit are
    also swapped (though it is probably overkill, it is cleaner this way).

    It is useful for static analysis of a series of messages from both the
    client and the server. In such a situation, it should be used every
    time the message being read comes from a different side than the one
    read right before, as the reading state becomes the writing state, and
    vice versa. For instance you could do:

    client_hello = open('client_hello.raw').read()
    <read other messages>

    m1 = TLS(client_hello)
    m2 = TLS(server_hello, tls_session=m1.tls_session.mirror())
    m3 = TLS(server_cert, tls_session=m2.tls_session)
    m4 = TLS(client_keyexchange, tls_session=m3.tls_session.mirror())
    """
    self.ipdst, self.ipsrc = self.ipsrc, self.ipdst
    self.dport, self.sport = self.sport, self.dport

    self.rcs, self.wcs = self.wcs, self.rcs
    if self.rcs:
        self.rcs.row = "read"
    if self.wcs:
        self.wcs.row = "write"

    self.prcs, self.pwcs = self.pwcs, self.prcs
    if self.prcs:
        self.prcs.row = "read"
    if self.pwcs:
        self.pwcs.row = "write"

    self.triggered_prcs_commit, self.triggered_pwcs_commit = \
        self.triggered_pwcs_commit, self.triggered_prcs_commit

    if self.connection_end == "client":
        self.connection_end = "server"
    elif self.connection_end == "server":
        self.connection_end = "client"

    return self
[ "def", "mirror", "(", "self", ")", ":", "self", ".", "ipdst", ",", "self", ".", "ipsrc", "=", "self", ".", "ipsrc", ",", "self", ".", "ipdst", "self", ".", "dport", ",", "self", ".", "sport", "=", "self", ".", "sport", ",", "self", ".", "dport", "self", ".", "rcs", ",", "self", ".", "wcs", "=", "self", ".", "wcs", ",", "self", ".", "rcs", "if", "self", ".", "rcs", ":", "self", ".", "rcs", ".", "row", "=", "\"read\"", "if", "self", ".", "wcs", ":", "self", ".", "wcs", ".", "row", "=", "\"write\"", "self", ".", "prcs", ",", "self", ".", "pwcs", "=", "self", ".", "pwcs", ",", "self", ".", "prcs", "if", "self", ".", "prcs", ":", "self", ".", "prcs", ".", "row", "=", "\"read\"", "if", "self", ".", "pwcs", ":", "self", ".", "pwcs", ".", "row", "=", "\"write\"", "self", ".", "triggered_prcs_commit", ",", "self", ".", "triggered_pwcs_commit", "=", "self", ".", "triggered_pwcs_commit", ",", "self", ".", "triggered_prcs_commit", "if", "self", ".", "connection_end", "==", "\"client\"", ":", "self", ".", "connection_end", "=", "\"server\"", "elif", "self", ".", "connection_end", "==", "\"server\"", ":", "self", ".", "connection_end", "=", "\"client\"", "return", "self" ]
https://github.com/4shadoww/hakkuframework/blob/409a11fc3819d251f86faa3473439f8c19066a21/lib/scapy/layers/tls/session.py#L481-L525
andresriancho/w3af
cd22e5252243a87aaa6d0ddea47cf58dacfe00a9
w3af/plugins/evasion/rnd_param.py
python
rnd_param.get_long_desc
(self)
return """ This evasion plugin adds a random parameter. Example: Input: '/bar/foo.asp' Output: '/bar/foo.asp?alsfkj=f09' """
:return: A DETAILED description of the plugin functions and features.
:return: A DETAILED description of the plugin functions and features.
[ ":", "return", ":", "A", "DETAILED", "description", "of", "the", "plugin", "functions", "and", "features", "." ]
def get_long_desc(self):
    """
    :return: A DETAILED description of the plugin functions and features.
    """
    return """
    This evasion plugin adds a random parameter.

    Example:
        Input:      '/bar/foo.asp'
        Output:     '/bar/foo.asp?alsfkj=f09'
    """
[ "def", "get_long_desc", "(", "self", ")", ":", "return", "\"\"\"\n This evasion plugin adds a random parameter.\n\n Example:\n Input: '/bar/foo.asp'\n Output: '/bar/foo.asp?alsfkj=f09'\n \"\"\"" ]
https://github.com/andresriancho/w3af/blob/cd22e5252243a87aaa6d0ddea47cf58dacfe00a9/w3af/plugins/evasion/rnd_param.py#L88-L98
qibinlou/SinaWeibo-Emotion-Classification
f336fc104abd68b0ec4180fe2ed80fafe49cb790
nltk/inference/nonmonotonic.py
python
ClosedWorldProver._make_predicate_dict
(self, assumptions)
return predicates
Create a dictionary of predicates from the assumptions. :param assumptions: a list of ``Expression``s :return: dict mapping ``AbstractVariableExpression`` to ``PredHolder``
Create a dictionary of predicates from the assumptions.
[ "Create", "a", "dictionary", "of", "predicates", "from", "the", "assumptions", "." ]
def _make_predicate_dict(self, assumptions): """ Create a dictionary of predicates from the assumptions. :param assumptions: a list of ``Expression``s :return: dict mapping ``AbstractVariableExpression`` to ``PredHolder`` """ predicates = defaultdict(PredHolder) for a in assumptions: self._map_predicates(a, predicates) return predicates
[ "def", "_make_predicate_dict", "(", "self", ",", "assumptions", ")", ":", "predicates", "=", "defaultdict", "(", "PredHolder", ")", "for", "a", "in", "assumptions", ":", "self", ".", "_map_predicates", "(", "a", ",", "predicates", ")", "return", "predicates" ]
https://github.com/qibinlou/SinaWeibo-Emotion-Classification/blob/f336fc104abd68b0ec4180fe2ed80fafe49cb790/nltk/inference/nonmonotonic.py#L232-L242
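The defaultdict(PredHolder) line above is what lets _map_predicates append to predicates[p] without any membership checks. A minimal self-contained sketch of that idiom, using a stand-in PredHolder (the real class is internal to nltk's nonmonotonic module):

```python
from collections import defaultdict

class PredHolder:                # stand-in for nltk's internal PredHolder
    def __init__(self):
        self.entries = []

predicates = defaultdict(PredHolder)
predicates["walk"].entries.append("walk(Socrates)")   # key created on first access
print(len(predicates["walk"].entries))                # 1; no membership check needed
```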
theotherp/nzbhydra
4b03d7f769384b97dfc60dade4806c0fc987514e
libs/pycparser/c_parser.py
python
CParser.p_external_declaration_4
(self, p)
external_declaration : SEMI
external_declaration : SEMI
[ "external_declaration", ":", "SEMI" ]
def p_external_declaration_4(self, p): """ external_declaration : SEMI """ p[0] = None
[ "def", "p_external_declaration_4", "(", "self", ",", "p", ")", ":", "p", "[", "0", "]", "=", "None" ]
https://github.com/theotherp/nzbhydra/blob/4b03d7f769384b97dfc60dade4806c0fc987514e/libs/pycparser/c_parser.py#L543-L546
ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework
cb692f527e4e819b6c228187c5702d990a180043
external/Scripting Engine/Xenotix Python Scripting Engine/bin/x86/Debug/Lib/smtplib.py
python
SMTP.docmd
(self, cmd, args="")
return self.getreply()
Send a command, and return its response code.
Send a command, and return its response code.
[ "Send", "a", "command", "and", "return", "its", "response", "code", "." ]
def docmd(self, cmd, args=""): """Send a command, and return its response code.""" self.putcmd(cmd, args) return self.getreply()
[ "def", "docmd", "(", "self", ",", "cmd", ",", "args", "=", "\"\"", ")", ":", "self", ".", "putcmd", "(", "cmd", ",", "args", ")", "return", "self", ".", "getreply", "(", ")" ]
https://github.com/ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework/blob/cb692f527e4e819b6c228187c5702d990a180043/external/Scripting Engine/Xenotix Python Scripting Engine/bin/x86/Debug/Lib/smtplib.py#L373-L376
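docmd is the raw escape hatch beneath the higher-level helpers such as helo or mail. A short sketch, assuming an SMTP server is actually listening on localhost:25 (the host and port here are placeholders):

```python
import smtplib

smtp = smtplib.SMTP("localhost", 25)   # placeholder host/port
code, message = smtp.docmd("NOOP")     # putcmd(), then getreply()
print(code, message)                   # e.g. 250 and an 'OK' reply on success
smtp.quit()
```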
Jenyay/outwiker
50530cf7b3f71480bb075b2829bc0669773b835b
plugins/source/source/pygments/__init__.py
python
lex
(code, lexer)
Lex ``code`` with ``lexer`` and return an iterable of tokens.
Lex ``code`` with ``lexer`` and return an iterable of tokens.
[ "Lex", "code", "with", "lexer", "and", "return", "an", "iterable", "of", "tokens", "." ]
def lex(code, lexer): """ Lex ``code`` with ``lexer`` and return an iterable of tokens. """ try: return lexer.get_tokens(code) except TypeError as err: if (isinstance(err.args[0], str) and ('unbound method get_tokens' in err.args[0] or 'missing 1 required positional argument' in err.args[0])): raise TypeError('lex() argument must be a lexer instance, ' 'not a class') raise
[ "def", "lex", "(", "code", ",", "lexer", ")", ":", "try", ":", "return", "lexer", ".", "get_tokens", "(", "code", ")", "except", "TypeError", "as", "err", ":", "if", "(", "isinstance", "(", "err", ".", "args", "[", "0", "]", ",", "str", ")", "and", "(", "'unbound method get_tokens'", "in", "err", ".", "args", "[", "0", "]", "or", "'missing 1 required positional argument'", "in", "err", ".", "args", "[", "0", "]", ")", ")", ":", "raise", "TypeError", "(", "'lex() argument must be a lexer instance, '", "'not a class'", ")", "raise" ]
https://github.com/Jenyay/outwiker/blob/50530cf7b3f71480bb075b2829bc0669773b835b/plugins/source/source/pygments/__init__.py#L36-L48
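Note that lex() wants a lexer instance, not the lexer class; the TypeError branch above exists to turn that otherwise cryptic failure into a clear message. A minimal usage sketch with the real Pygments API:

```python
from pygments import lex
from pygments.lexers import PythonLexer

for ttype, value in lex("x = 1", PythonLexer()):   # an instance, not the class
    print(ttype, repr(value))
# lex("x = 1", PythonLexer) would instead hit the TypeError branch shown above.
```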
Xilinx/finn
d1cc9cf94f1c33354cc169c5a6517314d0e94e3b
src/finn/custom_op/fpgadataflow/streamingmaxpool_batch.py
python
StreamingMaxPool_Batch.get_number_output_values
(self)
return np.prod(folded_oshape[:-1])
[]
def get_number_output_values(self): folded_oshape = self.get_folded_output_shape() return np.prod(folded_oshape[:-1])
[ "def", "get_number_output_values", "(", "self", ")", ":", "folded_oshape", "=", "self", ".", "get_folded_output_shape", "(", ")", "return", "np", ".", "prod", "(", "folded_oshape", "[", ":", "-", "1", "]", ")" ]
https://github.com/Xilinx/finn/blob/d1cc9cf94f1c33354cc169c5a6517314d0e94e3b/src/finn/custom_op/fpgadataflow/streamingmaxpool_batch.py#L114-L116
allenai/allennlp
a3d71254fcc0f3615910e9c3d48874515edf53e0
scripts/check_links.py
python
url_ok
(match_tuple: MatchTuple)
Check if a URL is reachable.
Check if a URL is reachable.
[ "Check", "if", "a", "URL", "is", "reachable", "." ]
def url_ok(match_tuple: MatchTuple) -> Tuple[bool, str]: """Check if a URL is reachable.""" try: result = http_session.head(match_tuple.link, timeout=5, allow_redirects=True) return ( result.ok or result.status_code in OK_STATUS_CODES, f"status code = {result.status_code}", ) except (requests.ConnectionError, requests.Timeout): return False, "connection error"
[ "def", "url_ok", "(", "match_tuple", ":", "MatchTuple", ")", "->", "Tuple", "[", "bool", ",", "str", "]", ":", "try", ":", "result", "=", "http_session", ".", "head", "(", "match_tuple", ".", "link", ",", "timeout", "=", "5", ",", "allow_redirects", "=", "True", ")", "return", "(", "result", ".", "ok", "or", "result", ".", "status_code", "in", "OK_STATUS_CODES", ",", "f\"status code = {result.status_code}\"", ",", ")", "except", "(", "requests", ".", "ConnectionError", ",", "requests", ".", "Timeout", ")", ":", "return", "False", ",", "\"connection error\"" ]
https://github.com/allenai/allennlp/blob/a3d71254fcc0f3615910e9c3d48874515edf53e0/scripts/check_links.py#L42-L51
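Stripped of the script's MatchTuple and shared http_session globals, the core of url_ok is a HEAD request with a timeout. A self-contained equivalent (example.com is a placeholder URL):

```python
import requests

session = requests.Session()
try:
    result = session.head("https://example.com", timeout=5, allow_redirects=True)
    print(result.ok, f"status code = {result.status_code}")   # HEAD skips the body
except (requests.ConnectionError, requests.Timeout):
    print(False, "connection error")
```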
scipy/scipy
e0a749f01e79046642ccfdc419edbf9e7ca141ad
scipy/optimize/_shgo_lib/triangulation.py
python
Complex.add_centroid
(self)
return
Split the central edge between the origin and supremum of a cell and add the new vertex to the complex
Split the central edge between the origin and supremum of a cell and add the new vertex to the complex
[ "Split", "the", "central", "edge", "between", "the", "origin", "and", "supremum", "of", "a", "cell", "and", "add", "the", "new", "vertex", "to", "the", "complex" ]
def add_centroid(self): """Split the central edge between the origin and supremum of a cell and add the new vertex to the complex""" self.centroid = list( (np.array(self.origin) + np.array(self.supremum)) / 2.0) self.C0.add_vertex(self.V[tuple(self.centroid)]) self.C0.centroid = self.centroid # Disconnect origin and supremum self.V[tuple(self.origin)].disconnect(self.V[tuple(self.supremum)]) # Connect centroid to all other vertices for v in self.C0(): self.V[tuple(self.centroid)].connect(self.V[tuple(v.x)]) self.centroid_added = True return
[ "def", "add_centroid", "(", "self", ")", ":", "self", ".", "centroid", "=", "list", "(", "(", "np", ".", "array", "(", "self", ".", "origin", ")", "+", "np", ".", "array", "(", "self", ".", "supremum", ")", ")", "/", "2.0", ")", "self", ".", "C0", ".", "add_vertex", "(", "self", ".", "V", "[", "tuple", "(", "self", ".", "centroid", ")", "]", ")", "self", ".", "C0", ".", "centroid", "=", "self", ".", "centroid", "# Disconnect origin and supremum", "self", ".", "V", "[", "tuple", "(", "self", ".", "origin", ")", "]", ".", "disconnect", "(", "self", ".", "V", "[", "tuple", "(", "self", ".", "supremum", ")", "]", ")", "# Connect centroid to all other vertices", "for", "v", "in", "self", ".", "C0", "(", ")", ":", "self", ".", "V", "[", "tuple", "(", "self", ".", "centroid", ")", "]", ".", "connect", "(", "self", ".", "V", "[", "tuple", "(", "v", ".", "x", ")", "]", ")", "self", ".", "centroid_added", "=", "True", "return" ]
https://github.com/scipy/scipy/blob/e0a749f01e79046642ccfdc419edbf9e7ca141ad/scipy/optimize/_shgo_lib/triangulation.py#L143-L159
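The centroid computed above is simply the midpoint of the origin/supremum pair; a self-contained numeric check of that one line:

```python
import numpy as np

origin, supremum = [0.0, 0.0, 0.0], [1.0, 1.0, 1.0]
centroid = list((np.array(origin) + np.array(supremum)) / 2.0)
print(centroid)   # [0.5, 0.5, 0.5], the midpoint of the cell's central edge
```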
titusjan/argos
5a9c31a8a9a2ca825bbf821aa1e685740e3682d7
argos/qt/treemodels.py
python
BaseTreeModel.hasChildren
(self, parentIndex=QtCore.QModelIndex())
return parentItem.hasChildren()
Returns true if parent has any children; otherwise returns false. Use rowCount() on the parent to find out the number of children.
Returns true if parent has any children; otherwise returns false. Use rowCount() on the parent to find out the number of children.
[ "Returns", "true", "if", "parent", "has", "any", "children", ";", "otherwise", "returns", "false", ".", "Use", "rowCount", "()", "on", "the", "parent", "to", "find", "out", "the", "number", "of", "children", "." ]
def hasChildren(self, parentIndex=QtCore.QModelIndex()): """ Returns true if parent has any children; otherwise returns false. Use rowCount() on the parent to find out the number of children. """ parentItem = self.getItem(parentIndex, altItem=self.invisibleRootTreeItem) return parentItem.hasChildren()
[ "def", "hasChildren", "(", "self", ",", "parentIndex", "=", "QtCore", ".", "QModelIndex", "(", ")", ")", ":", "parentItem", "=", "self", ".", "getItem", "(", "parentIndex", ",", "altItem", "=", "self", ".", "invisibleRootTreeItem", ")", "return", "parentItem", ".", "hasChildren", "(", ")" ]
https://github.com/titusjan/argos/blob/5a9c31a8a9a2ca825bbf821aa1e685740e3682d7/argos/qt/treemodels.py#L245-L250
IronLanguages/main
a949455434b1fda8c783289e897e78a9a0caabb5
External.LCA_RESTRICTED/Languages/CPython/27/Lib/json/__init__.py
python
load
(fp, encoding=None, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, **kw)
return loads(fp.read(), encoding=encoding, cls=cls, object_hook=object_hook, parse_float=parse_float, parse_int=parse_int, parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, **kw)
Deserialize ``fp`` (a ``.read()``-supporting file-like object containing a JSON document) to a Python object. If the contents of ``fp`` is encoded with an ASCII based encoding other than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must be specified. Encodings that are not ASCII based (such as UCS-2) are not allowed, and should be wrapped with ``codecs.getreader(fp)(encoding)``, or simply decoded to a ``unicode`` object and passed to ``loads()`` ``object_hook`` is an optional function that will be called with the result of any object literal decode (a ``dict``). The return value of ``object_hook`` will be used instead of the ``dict``. This feature can be used to implement custom decoders (e.g. JSON-RPC class hinting). ``object_pairs_hook`` is an optional function that will be called with the result of any object literal decoded with an ordered list of pairs. The return value of ``object_pairs_hook`` will be used instead of the ``dict``. This feature can be used to implement custom decoders that rely on the order that the key and value pairs are decoded (for example, collections.OrderedDict will remember the order of insertion). If ``object_hook`` is also defined, the ``object_pairs_hook`` takes priority. To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` kwarg; otherwise ``JSONDecoder`` is used.
Deserialize ``fp`` (a ``.read()``-supporting file-like object containing a JSON document) to a Python object.
[ "Deserialize", "fp", "(", "a", ".", "read", "()", "-", "supporting", "file", "-", "like", "object", "containing", "a", "JSON", "document", ")", "to", "a", "Python", "object", "." ]
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, **kw): """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing a JSON document) to a Python object. If the contents of ``fp`` is encoded with an ASCII based encoding other than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must be specified. Encodings that are not ASCII based (such as UCS-2) are not allowed, and should be wrapped with ``codecs.getreader(fp)(encoding)``, or simply decoded to a ``unicode`` object and passed to ``loads()`` ``object_hook`` is an optional function that will be called with the result of any object literal decode (a ``dict``). The return value of ``object_hook`` will be used instead of the ``dict``. This feature can be used to implement custom decoders (e.g. JSON-RPC class hinting). ``object_pairs_hook`` is an optional function that will be called with the result of any object literal decoded with an ordered list of pairs. The return value of ``object_pairs_hook`` will be used instead of the ``dict``. This feature can be used to implement custom decoders that rely on the order that the key and value pairs are decoded (for example, collections.OrderedDict will remember the order of insertion). If ``object_hook`` is also defined, the ``object_pairs_hook`` takes priority. To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` kwarg; otherwise ``JSONDecoder`` is used. """ return loads(fp.read(), encoding=encoding, cls=cls, object_hook=object_hook, parse_float=parse_float, parse_int=parse_int, parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, **kw)
[ "def", "load", "(", "fp", ",", "encoding", "=", "None", ",", "cls", "=", "None", ",", "object_hook", "=", "None", ",", "parse_float", "=", "None", ",", "parse_int", "=", "None", ",", "parse_constant", "=", "None", ",", "object_pairs_hook", "=", "None", ",", "*", "*", "kw", ")", ":", "return", "loads", "(", "fp", ".", "read", "(", ")", ",", "encoding", "=", "encoding", ",", "cls", "=", "cls", ",", "object_hook", "=", "object_hook", ",", "parse_float", "=", "parse_float", ",", "parse_int", "=", "parse_int", ",", "parse_constant", "=", "parse_constant", ",", "object_pairs_hook", "=", "object_pairs_hook", ",", "*", "*", "kw", ")" ]
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/CPython/27/Lib/json/__init__.py#L245-L278
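A quick sketch of the object_pairs_hook behavior described above, run under Python 3 (the record itself is the Python 2.7 stdlib copy, hence the encoding parameter):

```python
import json
from collections import OrderedDict
from io import StringIO

fp = StringIO('{"b": 1, "a": 2}')
data = json.load(fp, object_pairs_hook=OrderedDict)
print(data)   # OrderedDict([('b', 1), ('a', 2)]); pair order is preserved
```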
jython/frozen-mirror
b8d7aa4cee50c0c0fe2f4b235dd62922dd0f3f99
Lib/dbexts.py
python
dbexts.close
(self)
close the connection to the database
close the connection to the database
[ "close", "the", "connection", "to", "the", "database" ]
def close(self): """ close the connection to the database """ self.db.close()
[ "def", "close", "(", "self", ")", ":", "self", ".", "db", ".", "close", "(", ")" ]
https://github.com/jython/frozen-mirror/blob/b8d7aa4cee50c0c0fe2f4b235dd62922dd0f3f99/Lib/dbexts.py#L252-L254
aouyar/PyMunin
94624d4f56340cb2ed7e96ca3c5d9533a0721306
pysysinfo/diskio.py
python
DiskIOinfo.getSwapList
(self)
return self._swapList
Returns list of disk devices used for paging. @return: List of disk devices.
Returns list of disk devices used for paging.
[ "Returns", "list", "of", "disk", "devices", "used", "for", "paging", "." ]
def getSwapList(self): """Returns list of disk devices used for paging. @return: List of disk devices. """ if self._swapList is None: self._initSwapInfo() return self._swapList
[ "def", "getSwapList", "(", "self", ")", ":", "if", "self", ".", "_swapList", "is", "None", ":", "self", ".", "_initSwapInfo", "(", ")", "return", "self", ".", "_swapList" ]
https://github.com/aouyar/PyMunin/blob/94624d4f56340cb2ed7e96ca3c5d9533a0721306/pysysinfo/diskio.py#L357-L365
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/celery/bin/upgrade.py
python
upgrade._to_new_key
(self, line, keyfilter=pass1, source=defaults._TO_NEW_KEY)
return 0, line
[]
def _to_new_key(self, line, keyfilter=pass1, source=defaults._TO_NEW_KEY): # sort by length to avoid, for example, broker_transport overriding # broker_transport_options. for old_key in reversed(sorted(source, key=lambda x: len(x))): new_line = line.replace(old_key, keyfilter(source[old_key])) if line != new_line and 'CELERY_CELERY' not in new_line: return 1, new_line # only one match per line. return 0, line
[ "def", "_to_new_key", "(", "self", ",", "line", ",", "keyfilter", "=", "pass1", ",", "source", "=", "defaults", ".", "_TO_NEW_KEY", ")", ":", "# sort by length to avoid, for example, broker_transport overriding", "# broker_transport_options.", "for", "old_key", "in", "reversed", "(", "sorted", "(", "source", ",", "key", "=", "lambda", "x", ":", "len", "(", "x", ")", ")", ")", ":", "new_line", "=", "line", ".", "replace", "(", "old_key", ",", "keyfilter", "(", "source", "[", "old_key", "]", ")", ")", "if", "line", "!=", "new_line", "and", "'CELERY_CELERY'", "not", "in", "new_line", ":", "return", "1", ",", "new_line", "# only one match per line.", "return", "0", ",", "line" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/celery/bin/upgrade.py#L79-L86
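The length-descending sort is the whole trick: a shorter key that is a prefix of a longer one must not match first. A self-contained sketch of that ordering; the two mapping entries are illustrative stand-ins for celery.app.defaults._TO_NEW_KEY, using the very pair the source comment mentions:

```python
source = {"BROKER_TRANSPORT": "broker_transport",
          "BROKER_TRANSPORT_OPTIONS": "broker_transport_options"}
line = "BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600}"
for old_key in reversed(sorted(source, key=lambda x: len(x))):   # longest first
    new_line = line.replace(old_key, source[old_key])
    if new_line != line:
        print(new_line)   # the longer key matched; naive order would have
        break             # rewritten only the BROKER_TRANSPORT prefix
```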
yandex/yandex-tank
b41bcc04396c4ed46fc8b28a261197320854fd33
yandextank/common/util.py
python
get_callstack
()
return '.'.join(cleaned[1:])
Get call stack, clean wrapper functions from it and present in dotted notation form
Get call stack, clean wrapper functions from it and present in dotted notation form
[ "Get", "call", "stack", "clean", "wrapper", "functions", "from", "it", "and", "present", "in", "dotted", "notation", "form" ]
def get_callstack(): """ Get call stack, clean wrapper functions from it and present in dotted notation form """ stack = inspect.stack(context=0) cleaned = [frame[3] for frame in stack if frame[3] != 'wrapper'] return '.'.join(cleaned[1:])
[ "def", "get_callstack", "(", ")", ":", "stack", "=", "inspect", ".", "stack", "(", "context", "=", "0", ")", "cleaned", "=", "[", "frame", "[", "3", "]", "for", "frame", "in", "stack", "if", "frame", "[", "3", "]", "!=", "'wrapper'", "]", "return", "'.'", ".", "join", "(", "cleaned", "[", "1", ":", "]", ")" ]
https://github.com/yandex/yandex-tank/blob/b41bcc04396c4ed46fc8b28a261197320854fd33/yandextank/common/util.py#L745-L752
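frame[3] in an inspect.stack() record is the function name, and cleaned[1:] drops get_callstack itself. Reproduced stand-alone so the output form is visible:

```python
import inspect

def get_callstack():   # copy of the record above
    stack = inspect.stack(context=0)
    cleaned = [frame[3] for frame in stack if frame[3] != "wrapper"]
    return ".".join(cleaned[1:])

def configure():
    return get_callstack()

print(configure())     # e.g. "configure.<module>": callers, innermost first
```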
pyparallel/pyparallel
11e8c6072d48c8f13641925d17b147bf36ee0ba3
Lib/fractions.py
python
Fraction.__gt__
(a, b)
return a._richcmp(b, operator.gt)
a > b
a > b
[ "a", ">", "b" ]
def __gt__(a, b): """a > b""" return a._richcmp(b, operator.gt)
[ "def", "__gt__", "(", "a", ",", "b", ")", ":", "return", "a", ".", "_richcmp", "(", "b", ",", "operator", ".", "gt", ")" ]
https://github.com/pyparallel/pyparallel/blob/11e8c6072d48c8f13641925d17b147bf36ee0ba3/Lib/fractions.py#L597-L599
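Fraction defines each rich comparison as a one-line delegation to _richcmp; the observable behavior, including mixed-type comparisons:

```python
from fractions import Fraction

print(Fraction(3, 4) > Fraction(2, 3))   # True, via _richcmp(b, operator.gt)
print(Fraction(1, 2) > 0.4)              # True; mixed numeric types compare too
```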
CGATOxford/cgat
326aad4694bdfae8ddc194171bb5d73911243947
obsolete/pipeline_species_conservation.py
python
nmiConservationFisherTest
( infile, outfile )
Plot heatmap of pairwise scores in R
Plot heatmap of pairwise scores in R
[ "Plot", "heatmap", "of", "pairwise", "scores", "in", "R" ]
def nmiConservationFisherTest( infile, outfile ): '''Plot heatmap of pairwise scores in R''' scriptsdir = PARAMS["scriptsdir"] R('''source("%(scriptsdir)s/R/proj007/proj007.R")''' % locals() ) #print '''nmi_conservation(infile="%(infile)s", outfile="%(outfile)s") ''' % locals() R('''nmi_conservation(infile="%(infile)s", outfile="%(outfile)s") ''' % locals() )
[ "def", "nmiConservationFisherTest", "(", "infile", ",", "outfile", ")", ":", "scriptsdir", "=", "PARAMS", "[", "\"scriptsdir\"", "]", "R", "(", "'''source(\"%(scriptsdir)s/R/proj007/proj007.R\")'''", "%", "locals", "(", ")", ")", "#print '''nmi_conservation(infile=\"%(infile)s\", outfile=\"%(outfile)s\") ''' % locals()", "R", "(", "'''nmi_conservation(infile=\"%(infile)s\", outfile=\"%(outfile)s\") '''", "%", "locals", "(", ")", ")" ]
https://github.com/CGATOxford/cgat/blob/326aad4694bdfae8ddc194171bb5d73911243947/obsolete/pipeline_species_conservation.py#L433-L438
fictorial/pygameui
af6a35f347d6fafa66c4255bbbe38736d842ff65
pygameui/textfield.py
python
TextField.key_down
(self, key, code)
[]
def key_down(self, key, code): if key == pygame.K_BACKSPACE: self.text = self.text[0:-1] elif key == pygame.K_RETURN: can_submit = True if self.placeholder and self.text == self.placeholder: can_submit = False if can_submit: self.on_return(self, self.text) else: try: self.text = '%s%s' % (self.text, str(code)) except: pass self.on_text_change(self, self.text) if self.max_len: self.text = self.text[0:self.max_len] self._update_text() self.label.shrink_wrap() self.label.layout() if self.label.frame.right > self.frame.w - self.padding[0] * 2: self.label.frame.right = self.frame.w - self.padding[0] * 2 else: self.label.frame.left = self.padding[0]
[ "def", "key_down", "(", "self", ",", "key", ",", "code", ")", ":", "if", "key", "==", "pygame", ".", "K_BACKSPACE", ":", "self", ".", "text", "=", "self", ".", "text", "[", "0", ":", "-", "1", "]", "elif", "key", "==", "pygame", ".", "K_RETURN", ":", "can_submit", "=", "True", "if", "self", ".", "placeholder", "and", "self", ".", "text", "==", "self", ".", "placeholder", ":", "can_submit", "=", "False", "if", "can_submit", ":", "self", ".", "on_return", "(", "self", ",", "self", ".", "text", ")", "else", ":", "try", ":", "self", ".", "text", "=", "'%s%s'", "%", "(", "self", ".", "text", ",", "str", "(", "code", ")", ")", "except", ":", "pass", "self", ".", "on_text_change", "(", "self", ",", "self", ".", "text", ")", "if", "self", ".", "max_len", ":", "self", ".", "text", "=", "self", ".", "text", "[", "0", ":", "self", ".", "max_len", "]", "self", ".", "_update_text", "(", ")", "self", ".", "label", ".", "shrink_wrap", "(", ")", "self", ".", "label", ".", "layout", "(", ")", "if", "self", ".", "label", ".", "frame", ".", "right", ">", "self", ".", "frame", ".", "w", "-", "self", ".", "padding", "[", "0", "]", "*", "2", ":", "self", ".", "label", ".", "frame", ".", "right", "=", "self", ".", "frame", ".", "w", "-", "self", ".", "padding", "[", "0", "]", "*", "2", "else", ":", "self", ".", "label", ".", "frame", ".", "left", "=", "self", ".", "padding", "[", "0", "]" ]
https://github.com/fictorial/pygameui/blob/af6a35f347d6fafa66c4255bbbe38736d842ff65/pygameui/textfield.py#L47-L73
openhatch/oh-mainline
ce29352a034e1223141dcc2f317030bbc3359a51
vendor/packages/twisted/twisted/internet/defer.py
python
Deferred.__init__
(self, canceller=None)
Initialize a L{Deferred}. @param canceller: a callable used to stop the pending operation scheduled by this L{Deferred} when L{Deferred.cancel} is invoked. The canceller will be passed the deferred whose cancelation is requested (i.e., self). If a canceller is not given, or does not invoke its argument's C{callback} or C{errback} method, L{Deferred.cancel} will invoke L{Deferred.errback} with a L{CancelledError}. Note that if a canceller is not given, C{callback} or C{errback} may still be invoked exactly once, even though defer.py will have already invoked C{errback}, as described above. This allows clients of code which returns a L{Deferred} to cancel it without requiring the L{Deferred} instantiator to provide any specific implementation support for cancellation. New in 10.1. @type canceller: a 1-argument callable which takes a L{Deferred}. The return result is ignored.
Initialize a L{Deferred}.
[ "Initialize", "a", "L", "{", "Deferred", "}", "." ]
def __init__(self, canceller=None): """ Initialize a L{Deferred}. @param canceller: a callable used to stop the pending operation scheduled by this L{Deferred} when L{Deferred.cancel} is invoked. The canceller will be passed the deferred whose cancelation is requested (i.e., self). If a canceller is not given, or does not invoke its argument's C{callback} or C{errback} method, L{Deferred.cancel} will invoke L{Deferred.errback} with a L{CancelledError}. Note that if a canceller is not given, C{callback} or C{errback} may still be invoked exactly once, even though defer.py will have already invoked C{errback}, as described above. This allows clients of code which returns a L{Deferred} to cancel it without requiring the L{Deferred} instantiator to provide any specific implementation support for cancellation. New in 10.1. @type canceller: a 1-argument callable which takes a L{Deferred}. The return result is ignored. """ self.callbacks = [] self._canceller = canceller if self.debug: self._debugInfo = DebugInfo() self._debugInfo.creator = traceback.format_stack()[:-1]
[ "def", "__init__", "(", "self", ",", "canceller", "=", "None", ")", ":", "self", ".", "callbacks", "=", "[", "]", "self", ".", "_canceller", "=", "canceller", "if", "self", ".", "debug", ":", "self", ".", "_debugInfo", "=", "DebugInfo", "(", ")", "self", ".", "_debugInfo", ".", "creator", "=", "traceback", ".", "format_stack", "(", ")", "[", ":", "-", "1", "]" ]
https://github.com/openhatch/oh-mainline/blob/ce29352a034e1223141dcc2f317030bbc3359a51/vendor/packages/twisted/twisted/internet/defer.py#L240-L268
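A minimal sketch of the canceller contract described in the docstring, runnable without a reactor. Because this canceller never fires the Deferred, cancel() falls back to errbacking with CancelledError:

```python
from twisted.internet import defer

def canceller(d):                        # receives the Deferred being cancelled
    print("stopping the pending operation")

def on_cancelled(failure):
    failure.trap(defer.CancelledError)   # re-raises anything unexpected
    print("errback saw CancelledError")

d = defer.Deferred(canceller)
d.addErrback(on_cancelled)
d.cancel()   # canceller runs first, then the CancelledError errback fires
```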
openai/random-network-distillation
f75c0f1efa473d5109d487062fd8ed49ddce6634
tf_util.py
python
switch
(condition, then_expression, else_expression)
return x
Switches between two operations depending on a scalar value (int or bool). Note that both `then_expression` and `else_expression` should be symbolic tensors of the *same shape*. # Arguments condition: scalar tensor. then_expression: TensorFlow operation. else_expression: TensorFlow operation.
Switches between two operations depending on a scalar value (int or bool). Note that both `then_expression` and `else_expression` should be symbolic tensors of the *same shape*.
[ "Switches", "between", "two", "operations", "depending", "on", "a", "scalar", "value", "(", "int", "or", "bool", ")", ".", "Note", "that", "both", "then_expression", "and", "else_expression", "should", "be", "symbolic", "tensors", "of", "the", "*", "same", "shape", "*", "." ]
def switch(condition, then_expression, else_expression): """Switches between two operations depending on a scalar value (int or bool). Note that both `then_expression` and `else_expression` should be symbolic tensors of the *same shape*. # Arguments condition: scalar tensor. then_expression: TensorFlow operation. else_expression: TensorFlow operation. """ x_shape = copy.copy(then_expression.get_shape()) x = tf.cond(tf.cast(condition, 'bool'), lambda: then_expression, lambda: else_expression) x.set_shape(x_shape) return x
[ "def", "switch", "(", "condition", ",", "then_expression", ",", "else_expression", ")", ":", "x_shape", "=", "copy", ".", "copy", "(", "then_expression", ".", "get_shape", "(", ")", ")", "x", "=", "tf", ".", "cond", "(", "tf", ".", "cast", "(", "condition", ",", "'bool'", ")", ",", "lambda", ":", "then_expression", ",", "lambda", ":", "else_expression", ")", "x", ".", "set_shape", "(", "x_shape", ")", "return", "x" ]
https://github.com/openai/random-network-distillation/blob/f75c0f1efa473d5109d487062fd8ed49ddce6634/tf_util.py#L9-L24
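switch() is a thin wrapper over tf.cond that also pins the output shape. The file targets graph-mode TensorFlow 1.x, so this sketch goes through tf.compat.v1; under native TF1 the compat shim and the eager toggle would be unnecessary:

```python
import copy
import tensorflow.compat.v1 as tf

tf.disable_eager_execution()

def switch(condition, then_expression, else_expression):   # copy of the record above
    x_shape = copy.copy(then_expression.get_shape())
    x = tf.cond(tf.cast(condition, 'bool'),
                lambda: then_expression,
                lambda: else_expression)
    x.set_shape(x_shape)
    return x

cond = tf.placeholder(tf.bool, [])
out = switch(cond, tf.constant([1.0]), tf.constant([2.0]))
with tf.Session() as sess:
    print(sess.run(out, {cond: True}))    # [1.]
    print(sess.run(out, {cond: False}))   # [2.]
```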
OpenEIT/OpenEIT
0448694e8092361ae5ccb45fba81dee543a6244b
OpenEIT/backend/bluetooth/old/build/dlib/Adafruit_BluefruitLE/bluez_dbus/gatt.py
python
BluezGattCharacteristic.start_notify
(self, on_change)
Enable notification of changes for this characteristic on the specified on_change callback. on_change should be a function that takes one parameter which is the value (as a string of bytes) of the changed characteristic value.
Enable notification of changes for this characteristic on the specified on_change callback. on_change should be a function that takes one parameter which is the value (as a string of bytes) of the changed characteristic value.
[ "Enable", "notification", "of", "changes", "for", "this", "characteristic", "on", "the", "specified", "on_change", "callback", ".", "on_change", "should", "be", "a", "function", "that", "takes", "one", "parameter", "which", "is", "the", "value", "(", "as", "a", "string", "of", "bytes", ")", "of", "the", "changed", "characteristic", "value", "." ]
def start_notify(self, on_change): """Enable notification of changes for this characteristic on the specified on_change callback. on_change should be a function that takes one parameter which is the value (as a string of bytes) of the changed characteristic value. """ # Setup a closure to be the first step in handling the on change callback. # This closure will verify the characteristic is changed and pull out the # new value to pass to the user's on change callback. def characteristic_changed(iface, changed_props, invalidated_props): # Check that this change is for a GATT characteristic and it has a # new value. if iface != _CHARACTERISTIC_INTERFACE: return if 'Value' not in changed_props: return # Send the new value to the on_change callback. on_change(''.join(map(chr, changed_props['Value']))) # Hook up the property changed signal to call the closure above. self._props.connect_to_signal('PropertiesChanged', characteristic_changed) # Enable notifications for changes on the characteristic. self._characteristic.StartNotify()
[ "def", "start_notify", "(", "self", ",", "on_change", ")", ":", "# Setup a closure to be the first step in handling the on change callback.", "# This closure will verify the characteristic is changed and pull out the", "# new value to pass to the user's on change callback.", "def", "characteristic_changed", "(", "iface", ",", "changed_props", ",", "invalidated_props", ")", ":", "# Check that this change is for a GATT characteristic and it has a", "# new value.", "if", "iface", "!=", "_CHARACTERISTIC_INTERFACE", ":", "return", "if", "'Value'", "not", "in", "changed_props", ":", "return", "# Send the new value to the on_change callback.", "on_change", "(", "''", ".", "join", "(", "map", "(", "chr", ",", "changed_props", "[", "'Value'", "]", ")", ")", ")", "# Hook up the property changed signal to call the closure above.", "self", ".", "_props", ".", "connect_to_signal", "(", "'PropertiesChanged'", ",", "characteristic_changed", ")", "# Enable notifications for changes on the characteristic.", "self", ".", "_characteristic", ".", "StartNotify", "(", ")" ]
https://github.com/OpenEIT/OpenEIT/blob/0448694e8092361ae5ccb45fba81dee543a6244b/OpenEIT/backend/bluetooth/old/build/dlib/Adafruit_BluefruitLE/bluez_dbus/gatt.py#L84-L105
TalwalkarLab/leaf
09ec454a5675e32e1f0546b456b77857fdece018
data/reddit/source/preprocess.py
python
order_data
(user_data)
return {'x': [z[0] for z in zipped], 'y': [z[1] for z in zipped]}
[]
def order_data(user_data): zipped = list(zip(user_data['x'], user_data['y'])) zipped = sorted(zipped, key=lambda x: x[1]['created_utc']) return {'x': [z[0] for z in zipped], 'y': [z[1] for z in zipped]}
[ "def", "order_data", "(", "user_data", ")", ":", "zipped", "=", "list", "(", "zip", "(", "user_data", "[", "'x'", "]", ",", "user_data", "[", "'y'", "]", ")", ")", "zipped", "=", "sorted", "(", "zipped", ",", "key", "=", "lambda", "x", ":", "x", "[", "1", "]", "[", "'created_utc'", "]", ")", "return", "{", "'x'", ":", "[", "z", "[", "0", "]", "for", "z", "in", "zipped", "]", ",", "'y'", ":", "[", "z", "[", "1", "]", "for", "z", "in", "zipped", "]", "}" ]
https://github.com/TalwalkarLab/leaf/blob/09ec454a5675e32e1f0546b456b77857fdece018/data/reddit/source/preprocess.py#L71-L75
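Zipping x with y before sorting keeps each post aligned with its metadata while ordering by created_utc. A condensed copy of the function with a toy-data check:

```python
def order_data(user_data):   # condensed copy of the record above
    zipped = sorted(zip(user_data["x"], user_data["y"]),
                    key=lambda x: x[1]["created_utc"])
    return {"x": [z[0] for z in zipped], "y": [z[1] for z in zipped]}

user_data = {"x": ["second comment", "first comment"],
             "y": [{"created_utc": 200}, {"created_utc": 100}]}
print(order_data(user_data))
# {'x': ['first comment', 'second comment'],
#  'y': [{'created_utc': 100}, {'created_utc': 200}]}
```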
ZZUTK/SRNTT
c9a2cf95534e2d3c2c2210718c9903c9f389d67d
SRNTT/tensorlayer/ops.py
python
set_gpu_fraction
(sess=None, gpu_fraction=0.3)
return sess
Set the GPU memory fraction for the application. Parameters ---------- sess : a session instance of TensorFlow TensorFlow session gpu_fraction : a float Fraction of GPU memory, (0 ~ 1] References ---------- - `TensorFlow using GPU <https://www.tensorflow.org/versions/r0.9/how_tos/using_gpu/index.html>`_
Set the GPU memory fraction for the application.
[ "Set", "the", "GPU", "memory", "fraction", "for", "the", "application", "." ]
def set_gpu_fraction(sess=None, gpu_fraction=0.3): """Set the GPU memory fraction for the application. Parameters ---------- sess : a session instance of TensorFlow TensorFlow session gpu_fraction : a float Fraction of GPU memory, (0 ~ 1] References ---------- - `TensorFlow using GPU <https://www.tensorflow.org/versions/r0.9/how_tos/using_gpu/index.html>`_ """ print(" tensorlayer: GPU MEM Fraction %f" % gpu_fraction) gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=gpu_fraction) sess = tf.Session(config = tf.ConfigProto(gpu_options = gpu_options)) return sess
[ "def", "set_gpu_fraction", "(", "sess", "=", "None", ",", "gpu_fraction", "=", "0.3", ")", ":", "print", "(", "\" tensorlayer: GPU MEM Fraction %f\"", "%", "gpu_fraction", ")", "gpu_options", "=", "tf", ".", "GPUOptions", "(", "per_process_gpu_memory_fraction", "=", "gpu_fraction", ")", "sess", "=", "tf", ".", "Session", "(", "config", "=", "tf", ".", "ConfigProto", "(", "gpu_options", "=", "gpu_options", ")", ")", "return", "sess" ]
https://github.com/ZZUTK/SRNTT/blob/c9a2cf95534e2d3c2c2210718c9903c9f389d67d/SRNTT/tensorlayer/ops.py#L81-L98
magenta/magenta
be6558f1a06984faff6d6949234f5fe9ad0ffdb5
magenta/interfaces/midi/midi_hub.py
python
MidiHub._handle_message
(self, msg)
Handles a single incoming MIDI message. -If the message is being used as a signal, notifies threads waiting on the appropriate condition variable. -Adds the message to any capture queues. -Passes the message through to the output port, if appropriate. Args: msg: The mido.Message MIDI message to handle.
Handles a single incoming MIDI message.
[ "Handles", "a", "single", "incoming", "MIDI", "message", "." ]
def _handle_message(self, msg): """Handles a single incoming MIDI message. -If the message is being used as a signal, notifies threads waiting on the appropriate condition variable. -Adds the message to any capture queues. -Passes the message through to the output port, if appropriate. Args: msg: The mido.Message MIDI message to handle. """ # Notify any threads waiting for this message. msg_str = str(msg) for regex in list(self._signals): if regex.match(msg_str) is not None: self._signals[regex].notify_all() del self._signals[regex] # Call any callbacks waiting for this message. for regex in list(self._callbacks): if regex.match(msg_str) is not None: for fn in self._callbacks[regex]: threading.Thread(target=fn, args=(msg,)).start() del self._callbacks[regex] # Remove any captors that are no longer alive. self._captors[:] = [t for t in self._captors if t.is_alive()] # Add a different copy of the message to the receive queue of each live # capture thread. for t in self._captors: t.receive(msg.copy()) # Update control values if this is a control change message. if msg.type == 'control_change': if self._control_values.get(msg.control, None) != msg.value: tf.logging.debug('Control change %d: %d', msg.control, msg.value) self._control_values[msg.control] = msg.value # Pass the message through to the output port, if appropriate. if not self._passthrough: pass elif self._texture_type == TextureType.POLYPHONIC: if msg.type == 'note_on' and msg.velocity > 0: self._open_notes.add(msg.note) elif (msg.type == 'note_off' or (msg.type == 'note_on' and msg.velocity == 0)): self._open_notes.discard(msg.note) self._outport.send(msg) elif self._texture_type == TextureType.MONOPHONIC: assert len(self._open_notes) <= 1 if msg.type not in ['note_on', 'note_off']: self._outport.send(msg) elif ((msg.type == 'note_off' or msg.type == 'note_on' and msg.velocity == 0) and msg.note in self._open_notes): self._outport.send(msg) self._open_notes.remove(msg.note) elif msg.type == 'note_on' and msg.velocity > 0: if self._open_notes: self._outport.send( mido.Message('note_off', note=self._open_notes.pop())) self._outport.send(msg) self._open_notes.add(msg.note)
[ "def", "_handle_message", "(", "self", ",", "msg", ")", ":", "# Notify any threads waiting for this message.", "msg_str", "=", "str", "(", "msg", ")", "for", "regex", "in", "list", "(", "self", ".", "_signals", ")", ":", "if", "regex", ".", "match", "(", "msg_str", ")", "is", "not", "None", ":", "self", ".", "_signals", "[", "regex", "]", ".", "notify_all", "(", ")", "del", "self", ".", "_signals", "[", "regex", "]", "# Call any callbacks waiting for this message.", "for", "regex", "in", "list", "(", "self", ".", "_callbacks", ")", ":", "if", "regex", ".", "match", "(", "msg_str", ")", "is", "not", "None", ":", "for", "fn", "in", "self", ".", "_callbacks", "[", "regex", "]", ":", "threading", ".", "Thread", "(", "target", "=", "fn", ",", "args", "=", "(", "msg", ",", ")", ")", ".", "start", "(", ")", "del", "self", ".", "_callbacks", "[", "regex", "]", "# Remove any captors that are no longer alive.", "self", ".", "_captors", "[", ":", "]", "=", "[", "t", "for", "t", "in", "self", ".", "_captors", "if", "t", ".", "is_alive", "(", ")", "]", "# Add a different copy of the message to the receive queue of each live", "# capture thread.", "for", "t", "in", "self", ".", "_captors", ":", "t", ".", "receive", "(", "msg", ".", "copy", "(", ")", ")", "# Update control values if this is a control change message.", "if", "msg", ".", "type", "==", "'control_change'", ":", "if", "self", ".", "_control_values", ".", "get", "(", "msg", ".", "control", ",", "None", ")", "!=", "msg", ".", "value", ":", "tf", ".", "logging", ".", "debug", "(", "'Control change %d: %d'", ",", "msg", ".", "control", ",", "msg", ".", "value", ")", "self", ".", "_control_values", "[", "msg", ".", "control", "]", "=", "msg", ".", "value", "# Pass the message through to the output port, if appropriate.", "if", "not", "self", ".", "_passthrough", ":", "pass", "elif", "self", ".", "_texture_type", "==", "TextureType", ".", "POLYPHONIC", ":", "if", "msg", ".", "type", "==", "'note_on'", "and", "msg", ".", "velocity", ">", "0", ":", "self", ".", "_open_notes", ".", "add", "(", "msg", ".", "note", ")", "elif", "(", "msg", ".", "type", "==", "'note_off'", "or", "(", "msg", ".", "type", "==", "'note_on'", "and", "msg", ".", "velocity", "==", "0", ")", ")", ":", "self", ".", "_open_notes", ".", "discard", "(", "msg", ".", "note", ")", "self", ".", "_outport", ".", "send", "(", "msg", ")", "elif", "self", ".", "_texture_type", "==", "TextureType", ".", "MONOPHONIC", ":", "assert", "len", "(", "self", ".", "_open_notes", ")", "<=", "1", "if", "msg", ".", "type", "not", "in", "[", "'note_on'", ",", "'note_off'", "]", ":", "self", ".", "_outport", ".", "send", "(", "msg", ")", "elif", "(", "(", "msg", ".", "type", "==", "'note_off'", "or", "msg", ".", "type", "==", "'note_on'", "and", "msg", ".", "velocity", "==", "0", ")", "and", "msg", ".", "note", "in", "self", ".", "_open_notes", ")", ":", "self", ".", "_outport", ".", "send", "(", "msg", ")", "self", ".", "_open_notes", ".", "remove", "(", "msg", ".", "note", ")", "elif", "msg", ".", "type", "==", "'note_on'", "and", "msg", ".", "velocity", ">", "0", ":", "if", "self", ".", "_open_notes", ":", "self", ".", "_outport", ".", "send", "(", "mido", ".", "Message", "(", "'note_off'", ",", "note", "=", "self", ".", "_open_notes", ".", "pop", "(", ")", ")", ")", "self", ".", "_outport", ".", "send", "(", "msg", ")", "self", ".", "_open_notes", ".", "add", "(", "msg", ".", "note", ")" ]
https://github.com/magenta/magenta/blob/be6558f1a06984faff6d6949234f5fe9ad0ffdb5/magenta/interfaces/midi/midi_hub.py#L973-L1036
open-mmlab/mmsegmentation
af9ccd3d47fda8c7b50eee3675072692e3e54da5
mmseg/models/losses/lovasz_loss.py
python
lovasz_grad
(gt_sorted)
return jaccard
Computes gradient of the Lovasz extension w.r.t sorted errors. See Alg. 1 in paper.
Computes gradient of the Lovasz extension w.r.t sorted errors.
[ "Computes", "gradient", "of", "the", "Lovasz", "extension", "w", ".", "r", ".", "t", "sorted", "errors", "." ]
def lovasz_grad(gt_sorted): """Computes gradient of the Lovasz extension w.r.t sorted errors. See Alg. 1 in paper. """ p = len(gt_sorted) gts = gt_sorted.sum() intersection = gts - gt_sorted.float().cumsum(0) union = gts + (1 - gt_sorted).float().cumsum(0) jaccard = 1. - intersection / union if p > 1: # cover 1-pixel case jaccard[1:p] = jaccard[1:p] - jaccard[0:-1] return jaccard
[ "def", "lovasz_grad", "(", "gt_sorted", ")", ":", "p", "=", "len", "(", "gt_sorted", ")", "gts", "=", "gt_sorted", ".", "sum", "(", ")", "intersection", "=", "gts", "-", "gt_sorted", ".", "float", "(", ")", ".", "cumsum", "(", "0", ")", "union", "=", "gts", "+", "(", "1", "-", "gt_sorted", ")", ".", "float", "(", ")", ".", "cumsum", "(", "0", ")", "jaccard", "=", "1.", "-", "intersection", "/", "union", "if", "p", ">", "1", ":", "# cover 1-pixel case", "jaccard", "[", "1", ":", "p", "]", "=", "jaccard", "[", "1", ":", "p", "]", "-", "jaccard", "[", "0", ":", "-", "1", "]", "return", "jaccard" ]
https://github.com/open-mmlab/mmsegmentation/blob/af9ccd3d47fda8c7b50eee3675072692e3e54da5/mmseg/models/losses/lovasz_loss.py#L15-L27
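The returned vector holds the first differences of the Jaccard index as ground-truth labels are consumed in error order; these are the weights the Lovász hinge applies to the sorted errors. A quick numeric check:

```python
import torch

def lovasz_grad(gt_sorted):   # copy of the record above
    p = len(gt_sorted)
    gts = gt_sorted.sum()
    intersection = gts - gt_sorted.float().cumsum(0)
    union = gts + (1 - gt_sorted).float().cumsum(0)
    jaccard = 1. - intersection / union
    if p > 1:   # cover 1-pixel case
        jaccard[1:p] = jaccard[1:p] - jaccard[0:-1]
    return jaccard

print(lovasz_grad(torch.tensor([1, 1, 0, 1, 0])))
# tensor([0.3333, 0.3333, 0.0833, 0.2500, 0.0000]): one weight per sorted error
```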
lisa-lab/pylearn2
af81e5c362f0df4df85c3e54e23b2adeec026055
pylearn2/training_algorithms/sgd.py
python
LinearDecayOverEpoch.on_monitor
(self, model, dataset, algorithm)
Updates the learning rate based on the linear decay schedule. Parameters ---------- model : a Model instance dataset : Dataset algorithm : WRITEME
Updates the learning rate based on the linear decay schedule.
[ "Updates", "the", "learning", "rate", "based", "on", "the", "linear", "decay", "schedule", "." ]
def on_monitor(self, model, dataset, algorithm): """ Updates the learning rate based on the linear decay schedule. Parameters ---------- model : a Model instance dataset : Dataset algorithm : WRITEME """ self._count += 1 self._apply_learning_rate(algorithm)
[ "def", "on_monitor", "(", "self", ",", "model", ",", "dataset", ",", "algorithm", ")", ":", "self", ".", "_count", "+=", "1", "self", ".", "_apply_learning_rate", "(", "algorithm", ")" ]
https://github.com/lisa-lab/pylearn2/blob/af81e5c362f0df4df85c3e54e23b2adeec026055/pylearn2/training_algorithms/sgd.py#L1040-L1051
openstack/keystone
771c943ad2116193e7bb118c74993c829d93bd71
keystone/server/flask/common.py
python
ResourceBase.query_filter_is_true
(filter_name)
return val
Determine if bool query param is 'True'. We treat this the same way as we do for policy enforcement: {bool_param}=0 is treated as False Any other value is considered to be equivalent to True, including the absence of a value (but existence as a parameter). False Examples for param named `p`: * http://host/url * http://host/url?p=0 All other forms of the param 'p' would be result in a True value including: `http://host/url?param`.
Determine if bool query param is 'True'.
[ "Determine", "if", "bool", "query", "param", "is", "True", "." ]
def query_filter_is_true(filter_name): """Determine if bool query param is 'True'. We treat this the same way as we do for policy enforcement: {bool_param}=0 is treated as False Any other value is considered to be equivalent to True, including the absence of a value (but existence as a parameter). False Examples for param named `p`: * http://host/url * http://host/url?p=0 All other forms of the param 'p' would be result in a True value including: `http://host/url?param`. """ val = False if filter_name in flask.request.args: filter_value = flask.request.args.get(filter_name) if (isinstance(filter_value, str) and filter_value == '0'): val = False else: val = True return val
[ "def", "query_filter_is_true", "(", "filter_name", ")", ":", "val", "=", "False", "if", "filter_name", "in", "flask", ".", "request", ".", "args", ":", "filter_value", "=", "flask", ".", "request", ".", "args", ".", "get", "(", "filter_name", ")", "if", "(", "isinstance", "(", "filter_value", ",", "str", ")", "and", "filter_value", "==", "'0'", ")", ":", "val", "=", "False", "else", ":", "val", "=", "True", "return", "val" ]
https://github.com/openstack/keystone/blob/771c943ad2116193e7bb118c74993c829d93bd71/keystone/server/flask/common.py#L785-L813
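The three cases are easy to check with Flask's test request context; only an explicit 0 reads as False, while a bare parameter with no value still counts as True:

```python
import flask

app = flask.Flask(__name__)
with app.test_request_context("/v3/projects?enabled=0"):
    print(flask.request.args.get("enabled"))   # '0': the one False spelling
with app.test_request_context("/v3/projects?enabled"):
    print(flask.request.args.get("enabled"))   # '': present, so treated as True
with app.test_request_context("/v3/projects"):
    print("enabled" in flask.request.args)     # False: absent means False
```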
GluuFederation/community-edition-setup
d0c9427ed9e3ea3d95691677b73c1402ed9ca4db
setup_app/pylib/ldif4/ldif.py
python
LDIFWriter.unparse
(self, dn, record)
Write an entry or change record to the output file. :type dn: string :param dn: distinguished name :type record: Union[Dict[string, List[string]], List[Tuple]] :param record: Either a dictionary holding an entry or a list of additions (2-tuple) or modifications (3-tuple).
Write an entry or change record to the output file.
[ "Write", "an", "entry", "or", "change", "record", "to", "the", "output", "file", "." ]
def unparse(self, dn, record): """Write an entry or change record to the output file. :type dn: string :param dn: distinguished name :type record: Union[Dict[string, List[string]], List[Tuple]] :param record: Either a dictionary holding an entry or a list of additions (2-tuple) or modifications (3-tuple). """ self._unparse_attr("dn", dn) if isinstance(record, dict): self._unparse_entry_record(record) elif isinstance(record, list): self._unparse_change_record(record) else: raise ValueError("Argument record must be dictionary or list") self._output_file.write(self._line_sep) self.records_written += 1
[ "def", "unparse", "(", "self", ",", "dn", ",", "record", ")", ":", "self", ".", "_unparse_attr", "(", "\"dn\"", ",", "dn", ")", "if", "isinstance", "(", "record", ",", "dict", ")", ":", "self", ".", "_unparse_entry_record", "(", "record", ")", "elif", "isinstance", "(", "record", ",", "list", ")", ":", "self", ".", "_unparse_change_record", "(", "record", ")", "else", ":", "raise", "ValueError", "(", "\"Argument record must be dictionary or list\"", ")", "self", ".", "_output_file", ".", "write", "(", "self", ".", "_line_sep", ")", "self", ".", "records_written", "+=", "1" ]
https://github.com/GluuFederation/community-edition-setup/blob/d0c9427ed9e3ea3d95691677b73c1402ed9ca4db/setup_app/pylib/ldif4/ldif.py#L177-L195
PowerScript/KatanaFramework
0f6ad90a88de865d58ec26941cb4460501e75496
lib/future/src/future/backports/http/cookiejar.py
python
CookieJar.__len__
(self)
return i
Return number of contained cookies.
Return number of contained cookies.
[ "Return", "number", "of", "contained", "cookies", "." ]
def __len__(self): """Return number of contained cookies.""" i = 0 for cookie in self: i = i + 1 return i
[ "def", "__len__", "(", "self", ")", ":", "i", "=", "0", "for", "cookie", "in", "self", ":", "i", "=", "i", "+", "1", "return", "i" ]
https://github.com/PowerScript/KatanaFramework/blob/0f6ad90a88de865d58ec26941cb4460501e75496/lib/future/src/future/backports/http/cookiejar.py#L1734-L1738
abhishekkr/gmail-helper
f3f4e586cd19a920a70d689d301e0519ed91fdb0
_google_/gmail/__init__.py
python
delete_mail
(messages_obj, msg_id, user_id='me')
Delete a message from GMail by id. Args: messages_obj: Authenticated GMail user object. msg_id: ID of Message to delete. user_id: User's email address. The special value "me" can be used to indicate the authenticated user.
Delete a message from GMail by id.
[ "Delete", "a", "message", "from", "GMail", "by", "id", "." ]
def delete_mail(messages_obj, msg_id, user_id='me'): """Delete a message from GMail by id. Args: messages_obj: Authenticated GMail user object. msg_id: ID of Message to delete. user_id: User's email address. The special value "me" can be used to indicate the authenticated user. """ try: messages_obj.delete(userId=user_id, id=msg_id).execute() _log.logger.info('Message with id: %s deleted successfully.' % msg_id) except Exception as e: _log.logger.error('An error occurred: %s' % str(e))
[ "def", "delete_mail", "(", "messages_obj", ",", "msg_id", ",", "user_id", "=", "'me'", ")", ":", "try", ":", "messages_obj", ".", "delete", "(", "userId", "=", "user_id", ",", "id", "=", "msg_id", ")", ".", "execute", "(", ")", "_log", ".", "logger", ".", "info", "(", "'Message with id: %s deleted successfully.'", "%", "msg_id", ")", "except", "Exception", "as", "e", ":", "_log", ".", "logger", ".", "error", "(", "'An error occurred: %s'", "%", "str", "(", "e", ")", ")" ]
https://github.com/abhishekkr/gmail-helper/blob/f3f4e586cd19a920a70d689d301e0519ed91fdb0/_google_/gmail/__init__.py#L45-L58
SpiderClub/haipproxy
ab30ccf4b1d78e9304c27830006cc5800fe41bb3
haipproxy/scheduler/scheduler.py
python
squid_conf_update
(usage, interval)
Timertask for updating proxies for squid config file
Timertask for updating proxies for squid config file
[ "Timertask", "for", "updating", "proxies", "for", "squid", "config", "file" ]
def squid_conf_update(usage, interval): """Timertask for updating proxies for squid config file""" # client_logger.info('the updating task is starting...') client = SquidClient(usage) client.update_conf() schedule.every(interval).minutes.do(client.update_conf) while True: schedule.run_pending() time.sleep(1)
[ "def", "squid_conf_update", "(", "usage", ",", "interval", ")", ":", "# client_logger.info('the updating task is starting...')", "client", "=", "SquidClient", "(", "usage", ")", "client", ".", "update_conf", "(", ")", "schedule", ".", "every", "(", "interval", ")", ".", "minutes", ".", "do", "(", "client", ".", "update_conf", ")", "while", "True", ":", "schedule", ".", "run_pending", "(", ")", "time", ".", "sleep", "(", "1", ")" ]
https://github.com/SpiderClub/haipproxy/blob/ab30ccf4b1d78e9304c27830006cc5800fe41bb3/haipproxy/scheduler/scheduler.py#L247-L255
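The run-once-then-poll shape above comes from the third-party schedule package. The same skeleton with the Squid client stubbed out (the five-minute interval is an arbitrary example):

```python
import time

import schedule   # pip install schedule

def update_conf():
    print("rewriting squid.conf with fresh proxies")

update_conf()                               # run once up front, as above
schedule.every(5).minutes.do(update_conf)   # then repeat on the interval
while True:                                 # blocks forever, like the timertask
    schedule.run_pending()
    time.sleep(1)
```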
LabPy/lantz
3e878e3f765a4295b0089d04e241d4beb7b8a65b
lantz/drivers/labjack/_internal/u12.py
python
U12.reEnum
(self, idNum=None)
return {"idnum":idNum.value}
Name: U12.reEnum(idNum=None) Args: See section 4.29 of the User's Guide Desc: Causes the LabJack to electrically detach from and re-attach to the USB so it will re-enumerate >>> dev = U12() >>> dev.reEnum() >>> {'idnum': 1}
Name: U12.reEnum(idNum=None) Args: See section 4.29 of the User's Guide Desc: Causes the LabJack to electrically detach from and re-attach to the USB so it will re-enumerate >>> dev = U12() >>> dev.reEnum() >>> {'idnum': 1}
[ "Name", ":", "U12", ".", "reEnum", "(", "idNum", "=", "None", ")", "Args", ":", "See", "section", "4", ".", "29", "of", "the", "User", "s", "Guide", "Desc", ":", "Causes", "the", "LabJack", "to", "electrically", "detach", "from", "and", "re", "-", "attach", "to", "the", "USB", "so", "it", "will", "re", "-", "enumerate", ">>>", "dev", "=", "U12", "()", ">>>", "dev", ".", "reEnum", "()", ">>>", "{", "idnum", ":", "1", "}" ]
def reEnum(self, idNum=None): """ Name: U12.reEnum(idNum=None) Args: See section 4.29 of the User's Guide Desc: Causes the LabJack to electrically detach from and re-attach to the USB so it will re-enumerate >>> dev = U12() >>> dev.reEnum() >>> {'idnum': 1} """ #Check id number if idNum is None: idNum = self.id idNum = ctypes.c_long(idNum) ecode = staticLib.ReEnum(ctypes.byref(idNum)) if ecode != 0: raise U12Exception(ecode) return {"idnum":idNum.value}
[ "def", "reEnum", "(", "self", ",", "idNum", "=", "None", ")", ":", "#Check id number", "if", "idNum", "is", "None", ":", "idNum", "=", "self", ".", "id", "idNum", "=", "ctypes", ".", "c_long", "(", "idNum", ")", "ecode", "=", "staticLib", ".", "ReEnum", "(", "ctypes", ".", "byref", "(", "idNum", ")", ")", "if", "ecode", "!=", "0", ":", "raise", "U12Exception", "(", "ecode", ")", "return", "{", "\"idnum\"", ":", "idNum", ".", "value", "}" ]
https://github.com/LabPy/lantz/blob/3e878e3f765a4295b0089d04e241d4beb7b8a65b/lantz/drivers/labjack/_internal/u12.py#L2708-L2727
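The ctypes.byref call is an out-parameter: the C driver writes the resolved ID back into the c_long. A sketch of that pattern with the hardware-dependent call left commented, since it needs the LabJack DLL loaded as staticLib:

```python
import ctypes

idnum = ctypes.c_long(-1)   # in the u12 driver, -1 means "first LabJack found"
# ecode = staticLib.ReEnum(ctypes.byref(idnum))   # C writes the ID back in place
# if ecode != 0:
#     raise RuntimeError(ecode)
print(idnum.value)          # after a real call this holds the resolved ID
```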
lad1337/XDM
0c1b7009fe00f06f102a6f67c793478f515e7efe
site-packages/logilab/common/table.py
python
TableWriter.set_renderer
(self, renderer)
sets the way to render cell
sets the way to render cell
[ "sets", "the", "way", "to", "render", "cell" ]
def set_renderer(self, renderer): """sets the way to render cell """ self.renderer = renderer
[ "def", "set_renderer", "(", "self", ",", "renderer", ")", ":", "self", ".", "renderer", "=", "renderer" ]
https://github.com/lad1337/XDM/blob/0c1b7009fe00f06f102a6f67c793478f515e7efe/site-packages/logilab/common/table.py#L846-L849
ipython/traitlets
34f596dd03b98434900a7d31c912fc168342bb80
traitlets/config/loader.py
python
ArgParseConfigLoader.load_config
(self, argv=None, aliases=None, flags=_deprecated, classes=None)
return self.config
Parse command line arguments and return as a Config object. Parameters ---------- argv : optional, list If given, a list with the structure of sys.argv[1:] to parse arguments from. If not given, the instance's self.argv attribute (given at construction time) is used. flags Deprecated in traitlets 5.0, instantiate the config loader with the flags.
Parse command line arguments and return as a Config object.
[ "Parse", "command", "line", "arguments", "and", "return", "as", "a", "Config", "object", "." ]
def load_config(self, argv=None, aliases=None, flags=_deprecated, classes=None): """Parse command line arguments and return as a Config object. Parameters ---------- argv : optional, list If given, a list with the structure of sys.argv[1:] to parse arguments from. If not given, the instance's self.argv attribute (given at construction time) is used. flags Deprecated in traitlets 5.0, instantiate the config loader with the flags. """ if flags is not _deprecated: warnings.warn( "The `flag` argument to load_config is deprecated since Traitlets " f"5.0 and will be ignored, pass flags the `{type(self)}` constructor.", DeprecationWarning, stacklevel=2, ) self.clear() if argv is None: argv = self.argv if aliases is not None: self.aliases = aliases if classes is not None: self.classes = classes self._create_parser() self._parse_args(argv) self._convert_to_config() return self.config
[ "def", "load_config", "(", "self", ",", "argv", "=", "None", ",", "aliases", "=", "None", ",", "flags", "=", "_deprecated", ",", "classes", "=", "None", ")", ":", "if", "flags", "is", "not", "_deprecated", ":", "warnings", ".", "warn", "(", "\"The `flag` argument to load_config is deprecated since Traitlets \"", "f\"5.0 and will be ignored, pass flags the `{type(self)}` constructor.\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ",", ")", "self", ".", "clear", "(", ")", "if", "argv", "is", "None", ":", "argv", "=", "self", ".", "argv", "if", "aliases", "is", "not", "None", ":", "self", ".", "aliases", "=", "aliases", "if", "classes", "is", "not", "None", ":", "self", ".", "classes", "=", "classes", "self", ".", "_create_parser", "(", ")", "self", ".", "_parse_args", "(", "argv", ")", "self", ".", "_convert_to_config", "(", ")", "return", "self", ".", "config" ]
https://github.com/ipython/traitlets/blob/34f596dd03b98434900a7d31c912fc168342bb80/traitlets/config/loader.py#L821-L853
buke/GreenOdoo
3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df
source/openerp/tools/convert.py
python
convert_csv_import
(cr, module, fname, csvcontent, idref=None, mode='init', noupdate=False)
Import csv file : quote: " delimiter: , encoding: utf-8
Import csv file : quote: " delimiter: , encoding: utf-8
[ "Import", "csv", "file", ":", "quote", ":", "delimiter", ":", "encoding", ":", "utf", "-", "8" ]
def convert_csv_import(cr, module, fname, csvcontent, idref=None, mode='init', noupdate=False): '''Import csv file : quote: " delimiter: , encoding: utf-8''' if not idref: idref={} model = ('.'.join(fname.split('.')[:-1]).split('-'))[0] #remove folder path from model head, model = os.path.split(model) input = cStringIO.StringIO(csvcontent) #FIXME reader = csv.reader(input, quotechar='"', delimiter=',') fields = reader.next() fname_partial = "" if config.get('import_partial'): fname_partial = module + '/'+ fname if not os.path.isfile(config.get('import_partial')): pickle.dump({}, file(config.get('import_partial'),'w+')) else: data = pickle.load(file(config.get('import_partial'))) if fname_partial in data: if not data[fname_partial]: return else: for i in range(data[fname_partial]): reader.next() if not (mode == 'init' or 'id' in fields): _logger.error("Import specification does not contain 'id' and we are in init mode, Cannot continue.") return uid = 1 datas = [] for line in reader: if not (line and any(line)): continue try: datas.append(map(misc.ustr, line)) except: _logger.error("Cannot import the line: %s", line) registry = openerp.registry(cr.dbname) result, rows, warning_msg, dummy = registry[model].import_data(cr, uid, fields, datas,mode, module, noupdate, filename=fname_partial) if result < 0: # Report failed import and abort module install raise Exception(_('Module loading %s failed: file %s could not be processed:\n %s') % (module, fname, warning_msg)) if config.get('import_partial'): data = pickle.load(file(config.get('import_partial'))) data[fname_partial] = 0 pickle.dump(data, file(config.get('import_partial'),'wb')) cr.commit()
[ "def", "convert_csv_import", "(", "cr", ",", "module", ",", "fname", ",", "csvcontent", ",", "idref", "=", "None", ",", "mode", "=", "'init'", ",", "noupdate", "=", "False", ")", ":", "if", "not", "idref", ":", "idref", "=", "{", "}", "model", "=", "(", "'.'", ".", "join", "(", "fname", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ")", ".", "split", "(", "'-'", ")", ")", "[", "0", "]", "#remove folder path from model", "head", ",", "model", "=", "os", ".", "path", ".", "split", "(", "model", ")", "input", "=", "cStringIO", ".", "StringIO", "(", "csvcontent", ")", "#FIXME", "reader", "=", "csv", ".", "reader", "(", "input", ",", "quotechar", "=", "'\"'", ",", "delimiter", "=", "','", ")", "fields", "=", "reader", ".", "next", "(", ")", "fname_partial", "=", "\"\"", "if", "config", ".", "get", "(", "'import_partial'", ")", ":", "fname_partial", "=", "module", "+", "'/'", "+", "fname", "if", "not", "os", ".", "path", ".", "isfile", "(", "config", ".", "get", "(", "'import_partial'", ")", ")", ":", "pickle", ".", "dump", "(", "{", "}", ",", "file", "(", "config", ".", "get", "(", "'import_partial'", ")", ",", "'w+'", ")", ")", "else", ":", "data", "=", "pickle", ".", "load", "(", "file", "(", "config", ".", "get", "(", "'import_partial'", ")", ")", ")", "if", "fname_partial", "in", "data", ":", "if", "not", "data", "[", "fname_partial", "]", ":", "return", "else", ":", "for", "i", "in", "range", "(", "data", "[", "fname_partial", "]", ")", ":", "reader", ".", "next", "(", ")", "if", "not", "(", "mode", "==", "'init'", "or", "'id'", "in", "fields", ")", ":", "_logger", ".", "error", "(", "\"Import specification does not contain 'id' and we are in init mode, Cannot continue.\"", ")", "return", "uid", "=", "1", "datas", "=", "[", "]", "for", "line", "in", "reader", ":", "if", "not", "(", "line", "and", "any", "(", "line", ")", ")", ":", "continue", "try", ":", "datas", ".", "append", "(", "map", "(", "misc", ".", "ustr", ",", "line", ")", ")", "except", ":", "_logger", ".", "error", "(", "\"Cannot import the line: %s\"", ",", "line", ")", "registry", "=", "openerp", ".", "registry", "(", "cr", ".", "dbname", ")", "result", ",", "rows", ",", "warning_msg", ",", "dummy", "=", "registry", "[", "model", "]", ".", "import_data", "(", "cr", ",", "uid", ",", "fields", ",", "datas", ",", "mode", ",", "module", ",", "noupdate", ",", "filename", "=", "fname_partial", ")", "if", "result", "<", "0", ":", "# Report failed import and abort module install", "raise", "Exception", "(", "_", "(", "'Module loading %s failed: file %s could not be processed:\\n %s'", ")", "%", "(", "module", ",", "fname", ",", "warning_msg", ")", ")", "if", "config", ".", "get", "(", "'import_partial'", ")", ":", "data", "=", "pickle", ".", "load", "(", "file", "(", "config", ".", "get", "(", "'import_partial'", ")", ")", ")", "data", "[", "fname_partial", "]", "=", "0", "pickle", ".", "dump", "(", "data", ",", "file", "(", "config", ".", "get", "(", "'import_partial'", ")", ",", "'wb'", ")", ")", "cr", ".", "commit", "(", ")" ]
https://github.com/buke/GreenOdoo/blob/3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df/source/openerp/tools/convert.py#L912-L964
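A minimal, runnable sketch of how convert_csv_import above derives the target model from the CSV file name; the 'res.partner-demo.csv' path is a made-up example:

import os

fname = 'data/res.partner-demo.csv'  # hypothetical module data file
model = ('.'.join(fname.split('.')[:-1]).split('-'))[0]  # strip the extension and any '-suffix'
head, model = os.path.split(model)   # drop the folder component
print(model)  # -> res.partner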
acarabott/ChromeREPL
009ab7884b14fb8536bf0066436cb532b1b5b43c
libs/six.py
python
_add_doc
(func, doc)
Add documentation to a function.
Add documentation to a function.
[ "Add", "documentation", "to", "a", "function", "." ]
def _add_doc(func, doc): """Add documentation to a function.""" func.__doc__ = doc
[ "def", "_add_doc", "(", "func", ",", "doc", ")", ":", "func", ".", "__doc__", "=", "doc" ]
https://github.com/acarabott/ChromeREPL/blob/009ab7884b14fb8536bf0066436cb532b1b5b43c/libs/six.py#L75-L77
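A minimal sketch of the helper above, assuming _add_doc is in scope (it is private to six, so importing it directly is not part of the public API):

def greet(name):
    return 'hello ' + name

_add_doc(greet, "Return a short greeting for name.")
print(greet.__doc__)  # -> Return a short greeting for name.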
mnooner256/pyqrcode
674a77b5eaf850d063f518bd90c243ee34ad6b5d
pyqrcode/__init__.py
python
QRCode.show
(self, wait=1.2, scale=10, module_color=(0, 0, 0, 255), background=(255, 255, 255, 255), quiet_zone=4)
Displays this QR code. This method is mainly intended for debugging purposes. This method saves the output of the :py:meth:`png` method (with a default scaling factor of 10) to a temporary file and opens it with the standard PNG viewer application or within the standard webbrowser. The temporary file is deleted afterwards. If this method does not show any result, try to increase the `wait` parameter. This parameter specifies the time in seconds to wait till the temporary file is deleted. Note, that this method does not return until the provided amount of seconds (default: 1.2) has passed. The other parameters are simply passed on to the `png` method.
Displays this QR code.
[ "Displays", "this", "QR", "code", "." ]
def show(self, wait=1.2, scale=10, module_color=(0, 0, 0, 255), background=(255, 255, 255, 255), quiet_zone=4): """Displays this QR code. This method is mainly intended for debugging purposes. This method saves the output of the :py:meth:`png` method (with a default scaling factor of 10) to a temporary file and opens it with the standard PNG viewer application or within the standard webbrowser. The temporary file is deleted afterwards. If this method does not show any result, try to increase the `wait` parameter. This parameter specifies the time in seconds to wait till the temporary file is deleted. Note, that this method does not return until the provided amount of seconds (default: 1.2) has passed. The other parameters are simply passed on to the `png` method. """ import os import time import tempfile import webbrowser try: # Python 2 from urlparse import urljoin from urllib import pathname2url except ImportError: # Python 3 from urllib.parse import urljoin from urllib.request import pathname2url f = tempfile.NamedTemporaryFile('wb', suffix='.png', delete=False) self.png(f, scale=scale, module_color=module_color, background=background, quiet_zone=quiet_zone) f.close() webbrowser.open_new_tab(urljoin('file:', pathname2url(f.name))) time.sleep(wait) os.unlink(f.name)
[ "def", "show", "(", "self", ",", "wait", "=", "1.2", ",", "scale", "=", "10", ",", "module_color", "=", "(", "0", ",", "0", ",", "0", ",", "255", ")", ",", "background", "=", "(", "255", ",", "255", ",", "255", ",", "255", ")", ",", "quiet_zone", "=", "4", ")", ":", "import", "os", "import", "time", "import", "tempfile", "import", "webbrowser", "try", ":", "# Python 2", "from", "urlparse", "import", "urljoin", "from", "urllib", "import", "pathname2url", "except", "ImportError", ":", "# Python 3", "from", "urllib", ".", "parse", "import", "urljoin", "from", "urllib", ".", "request", "import", "pathname2url", "f", "=", "tempfile", ".", "NamedTemporaryFile", "(", "'wb'", ",", "suffix", "=", "'.png'", ",", "delete", "=", "False", ")", "self", ".", "png", "(", "f", ",", "scale", "=", "scale", ",", "module_color", "=", "module_color", ",", "background", "=", "background", ",", "quiet_zone", "=", "quiet_zone", ")", "f", ".", "close", "(", ")", "webbrowser", ".", "open_new_tab", "(", "urljoin", "(", "'file:'", ",", "pathname2url", "(", "f", ".", "name", ")", ")", ")", "time", ".", "sleep", "(", "wait", ")", "os", ".", "unlink", "(", "f", ".", "name", ")" ]
https://github.com/mnooner256/pyqrcode/blob/674a77b5eaf850d063f518bd90c243ee34ad6b5d/pyqrcode/__init__.py#L353-L389
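A usage sketch for show, assuming the pyqrcode package is installed; the payload string and parameter values are arbitrary:

import pyqrcode

code = pyqrcode.create('Hello, world')
code.show(wait=2.0, scale=8)  # opens a temporary PNG in the default viewer, then deletes it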
openstack/neutron
fb229fb527ac8b95526412f7762d90826ac41428
neutron/services/network_ip_availability/plugin.py
python
NetworkIPAvailabilityPlugin.get_network_ip_availabilities
(self, context, filters=None, fields=None)
return [db_utils.resource_fields(net_ip_availability, fields) for net_ip_availability in net_ip_availabilities]
Returns ip availability data for a collection of networks.
Returns ip availability data for a collection of networks.
[ "Returns", "ip", "availability", "data", "for", "a", "collection", "of", "networks", "." ]
def get_network_ip_availabilities(self, context, filters=None, fields=None): """Returns ip availability data for a collection of networks.""" net_ip_availabilities = super( NetworkIPAvailabilityPlugin, self ).get_network_ip_availabilities(context, filters) return [db_utils.resource_fields(net_ip_availability, fields) for net_ip_availability in net_ip_availabilities]
[ "def", "get_network_ip_availabilities", "(", "self", ",", "context", ",", "filters", "=", "None", ",", "fields", "=", "None", ")", ":", "net_ip_availabilities", "=", "super", "(", "NetworkIPAvailabilityPlugin", ",", "self", ")", ".", "get_network_ip_availabilities", "(", "context", ",", "filters", ")", "return", "[", "db_utils", ".", "resource_fields", "(", "net_ip_availability", ",", "fields", ")", "for", "net_ip_availability", "in", "net_ip_availabilities", "]" ]
https://github.com/openstack/neutron/blob/fb229fb527ac8b95526412f7762d90826ac41428/neutron/services/network_ip_availability/plugin.py#L46-L53
DataDog/integrations-core
934674b29d94b70ccc008f76ea172d0cdae05e1e
gitlab/datadog_checks/gitlab/config_models/defaults.py
python
instance_aws_host
(field, value)
return get_default_field_value(field, value)
[]
def instance_aws_host(field, value): return get_default_field_value(field, value)
[ "def", "instance_aws_host", "(", "field", ",", "value", ")", ":", "return", "get_default_field_value", "(", "field", ",", "value", ")" ]
https://github.com/DataDog/integrations-core/blob/934674b29d94b70ccc008f76ea172d0cdae05e1e/gitlab/datadog_checks/gitlab/config_models/defaults.py#L49-L50
IronLanguages/ironpython2
51fdedeeda15727717fb8268a805f71b06c0b9f1
Src/StdLib/Lib/fractions.py
python
Fraction.__neg__
(a)
return Fraction(-a._numerator, a._denominator)
-a
-a
[ "-", "a" ]
def __neg__(a): """-a""" return Fraction(-a._numerator, a._denominator)
[ "def", "__neg__", "(", "a", ")", ":", "return", "Fraction", "(", "-", "a", ".", "_numerator", ",", "a", ".", "_denominator", ")" ]
https://github.com/IronLanguages/ironpython2/blob/51fdedeeda15727717fb8268a805f71b06c0b9f1/Src/StdLib/Lib/fractions.py#L493-L495
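Negation goes through __neg__ and returns a new, normalized Fraction; a runnable example:

from fractions import Fraction

print(-Fraction(3, 4))    # -3/4
print(-Fraction(-5, 10))  # 1/2 -- Fraction(-5, 10) normalizes to -1/2 first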
securesystemslab/zippy
ff0e84ac99442c2c55fe1d285332cfd4e185e089
zippy/benchmarks/src/benchmarks/whoosh/src/whoosh/fields.py
python
Schema.scorable_names
(self)
return [name for name, field in self.items() if field.scorable]
Returns a list of the names of fields that store field lengths.
Returns a list of the names of fields that store field lengths.
[ "Returns", "a", "list", "of", "the", "names", "of", "fields", "that", "store", "field", "lengths", "." ]
def scorable_names(self): """Returns a list of the names of fields that store field lengths. """ return [name for name, field in self.items() if field.scorable]
[ "def", "scorable_names", "(", "self", ")", ":", "return", "[", "name", "for", "name", ",", "field", "in", "self", ".", "items", "(", ")", "if", "field", ".", "scorable", "]" ]
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/whoosh/src/whoosh/fields.py#L1317-L1322
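A small sketch of the method above, assuming whoosh is installed; TEXT fields store field lengths (scorable) while ID fields do not:

from whoosh.fields import Schema, TEXT, ID

schema = Schema(title=TEXT, docid=ID)
print(schema.scorable_names())  # -> ['title']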
nneonneo/ffsend
4595be598c8397751681f1e9f2811fe716b8f420
ffsend.py
python
FFSendAPI.get_metadata
(self, id, auth_key)
return resp
GET /api/metadata/:id id: file id auth_key: file's auth key The response's json will include raw encrypted file metadata.
GET /api/metadata/:id
[ "GET", "/", "api", "/", "metadata", "/", ":", "id" ]
def get_metadata(self, id, auth_key): ''' GET /api/metadata/:id id: file id auth_key: file's auth key The response's json will include raw encrypted file metadata. ''' # TODO configurable retries for i in range(5): nonce = self._get_nonce(id) resp = requests.get(self.baseurl + "api/metadata/" + id, headers={'Authorization': self._auth_header(auth_key, nonce)}) self._set_nonce(id, resp) if resp.status_code == 401: continue return resp return resp
[ "def", "get_metadata", "(", "self", ",", "id", ",", "auth_key", ")", ":", "# TODO configurable retries", "for", "i", "in", "range", "(", "5", ")", ":", "nonce", "=", "self", ".", "_get_nonce", "(", "id", ")", "resp", "=", "requests", ".", "get", "(", "self", ".", "baseurl", "+", "\"api/metadata/\"", "+", "id", ",", "headers", "=", "{", "'Authorization'", ":", "self", ".", "_auth_header", "(", "auth_key", ",", "nonce", ")", "}", ")", "self", ".", "_set_nonce", "(", "id", ",", "resp", ")", "if", "resp", ".", "status_code", "==", "401", ":", "continue", "return", "resp", "return", "resp" ]
https://github.com/nneonneo/ffsend/blob/4595be598c8397751681f1e9f2811fe716b8f420/ffsend.py#L240-L258
lunixbochs/ActualVim
1f555ce719e49d6584f0e35e9f0db2f216b98fa5
lib/asyncio/selectors.py
python
_BaseSelectorImpl._fileobj_lookup
(self, fileobj)
Return a file descriptor from a file object. This wraps _fileobj_to_fd() to do an exhaustive search in case the object is invalid but we still have it in our map. This is used by unregister() so we can unregister an object that was previously registered even if it is closed. It is also used by _SelectorMapping.
Return a file descriptor from a file object.
[ "Return", "a", "file", "descriptor", "from", "a", "file", "object", "." ]
def _fileobj_lookup(self, fileobj): """Return a file descriptor from a file object. This wraps _fileobj_to_fd() to do an exhaustive search in case the object is invalid but we still have it in our map. This is used by unregister() so we can unregister an object that was previously registered even if it is closed. It is also used by _SelectorMapping. """ try: return _fileobj_to_fd(fileobj) except ValueError: # Do an exhaustive search. for key in self._fd_to_key.values(): if key.fileobj is fileobj: return key.fd # Raise ValueError after all. raise
[ "def", "_fileobj_lookup", "(", "self", ",", "fileobj", ")", ":", "try", ":", "return", "_fileobj_to_fd", "(", "fileobj", ")", "except", "ValueError", ":", "# Do an exhaustive search.", "for", "key", "in", "self", ".", "_fd_to_key", ".", "values", "(", ")", ":", "if", "key", ".", "fileobj", "is", "fileobj", ":", "return", "key", ".", "fd", "# Raise ValueError after all.", "raise" ]
https://github.com/lunixbochs/ActualVim/blob/1f555ce719e49d6584f0e35e9f0db2f216b98fa5/lib/asyncio/selectors.py#L214-L231
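The fallback matters when a registered file object has already been closed; a runnable demonstration against the stdlib selectors module, which this vendored copy mirrors:

import selectors
import socket

sel = selectors.DefaultSelector()
a, b = socket.socketpair()
sel.register(a, selectors.EVENT_READ)
a.close()          # fileno() is now -1, so _fileobj_to_fd raises ValueError...
sel.unregister(a)  # ...and the exhaustive search over _fd_to_key still finds the entry
b.close()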
nortikin/sverchok
7b460f01317c15f2681bfa3e337c5e7346f3711b
nodes/matrix/matrix_track_to.py
python
SvMatrixTrackToNode.split_columns
(self, panel, ratios, aligns)
return cols
Splits the given panel into columns based on the given set of ratios. e.g ratios = [1, 2, 1] or [.2, .3, .2] etc Note: The sum of all ratio numbers doesn't need to be normalized
Splits the given panel into columns based on the given set of ratios. e.g ratios = [1, 2, 1] or [.2, .3, .2] etc Note: The sum of all ratio numbers doesn't need to be normalized
[ "Splits", "the", "given", "panel", "into", "columns", "based", "on", "the", "given", "set", "of", "ratios", ".", "e", ".", "g", "ratios", "=", "[", "1", "2", "1", "]", "or", "[", ".", "2", ".", "3", ".", "2", "]", "etc", "Note", ":", "The", "sum", "of", "all", "ratio", "numbers", "doesn", "t", "need", "to", "be", "normalized" ]
def split_columns(self, panel, ratios, aligns): """ Splits the given panel into columns based on the given set of ratios. e.g ratios = [1, 2, 1] or [.2, .3, .2] etc Note: The sum of all ratio numbers doesn't need to be normalized """ col2 = panel cols = [] ns = len(ratios) - 1 # number of splits for n in range(ns): n1 = ratios[n] # size of the current column n2 = sum(ratios[n + 1:]) # size of all remaining columns p = n1 / (n1 + n2) # percentage split of current vs remaining columns # print("n = ", n, " n1 = ", n1, " n2 = ", n2, " p = ", p) split = col2.split(factor=p, align=aligns[n]) col1 = split.column(align=True) col2 = split.column(align=True) cols.append(col1) cols.append(col2) return cols
[ "def", "split_columns", "(", "self", ",", "panel", ",", "ratios", ",", "aligns", ")", ":", "col2", "=", "panel", "cols", "=", "[", "]", "ns", "=", "len", "(", "ratios", ")", "-", "1", "# number of splits", "for", "n", "in", "range", "(", "ns", ")", ":", "n1", "=", "ratios", "[", "n", "]", "# size of the current column", "n2", "=", "sum", "(", "ratios", "[", "n", "+", "1", ":", "]", ")", "# size of all remaining columns", "p", "=", "n1", "/", "(", "n1", "+", "n2", ")", "# percentage split of current vs remaining columns", "# print(\"n = \", n, \" n1 = \", n1, \" n2 = \", n2, \" p = \", p)", "split", "=", "col2", ".", "split", "(", "factor", "=", "p", ",", "align", "=", "aligns", "[", "n", "]", ")", "col1", "=", "split", ".", "column", "(", "align", "=", "True", ")", "col2", "=", "split", ".", "column", "(", "align", "=", "True", ")", "cols", ".", "append", "(", "col1", ")", "cols", ".", "append", "(", "col2", ")", "return", "cols" ]
https://github.com/nortikin/sverchok/blob/7b460f01317c15f2681bfa3e337c5e7346f3711b/nodes/matrix/matrix_track_to.py#L84-L104
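The ratio arithmetic is independent of Blender's UI API; a plain-Python sketch of the successive factor values that split() would receive for ratios = [1, 2, 1]:

ratios = [1, 2, 1]
factors = []
for n in range(len(ratios) - 1):
    n1 = ratios[n]            # size of the current column
    n2 = sum(ratios[n + 1:])  # size of all remaining columns
    factors.append(n1 / (n1 + n2))
print(factors)  # -> [0.25, 0.666...]: a 1:2:1 layout takes 25%, then 2/3 of the remainder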
naftaliharris/tauthon
5587ceec329b75f7caf6d65a036db61ac1bae214
Lib/threading.py
python
BoundedSemaphore
(*args, **kwargs)
return _BoundedSemaphore(*args, **kwargs)
A factory function that returns a new bounded semaphore. A bounded semaphore checks to make sure its current value doesn't exceed its initial value. If it does, ValueError is raised. In most situations semaphores are used to guard resources with limited capacity. If the semaphore is released too many times it's a sign of a bug. If not given, value defaults to 1. Like regular semaphores, bounded semaphores manage a counter representing the number of release() calls minus the number of acquire() calls, plus an initial value. The acquire() method blocks if necessary until it can return without making the counter negative. If not given, value defaults to 1.
A factory function that returns a new bounded semaphore.
[ "A", "factory", "function", "that", "returns", "a", "new", "bounded", "semaphore", "." ]
def BoundedSemaphore(*args, **kwargs): """A factory function that returns a new bounded semaphore. A bounded semaphore checks to make sure its current value doesn't exceed its initial value. If it does, ValueError is raised. In most situations semaphores are used to guard resources with limited capacity. If the semaphore is released too many times it's a sign of a bug. If not given, value defaults to 1. Like regular semaphores, bounded semaphores manage a counter representing the number of release() calls minus the number of acquire() calls, plus an initial value. The acquire() method blocks if necessary until it can return without making the counter negative. If not given, value defaults to 1. """ return _BoundedSemaphore(*args, **kwargs)
[ "def", "BoundedSemaphore", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_BoundedSemaphore", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/naftaliharris/tauthon/blob/5587ceec329b75f7caf6d65a036db61ac1bae214/Lib/threading.py#L497-L513
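A runnable example of the bounded-ness guarantee: releasing more times than acquiring raises ValueError:

import threading

sem = threading.BoundedSemaphore(2)
sem.acquire()
sem.release()      # counter is back at its initial value of 2
try:
    sem.release()  # would push the counter past the initial value
except ValueError as exc:
    print('caught:', exc)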
buke/GreenOdoo
3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df
runtime/python/lib/python2.7/decimal.py
python
Decimal.__mod__
(self, other, context=None)
return remainder
self % other
self % other
[ "self", "%", "other" ]
def __mod__(self, other, context=None): """ self % other """ other = _convert_other(other) if other is NotImplemented: return other if context is None: context = getcontext() ans = self._check_nans(other, context) if ans: return ans if self._isinfinity(): return context._raise_error(InvalidOperation, 'INF % x') elif not other: if self: return context._raise_error(InvalidOperation, 'x % 0') else: return context._raise_error(DivisionUndefined, '0 % 0') remainder = self._divide(other, context)[1] remainder = remainder._fix(context) return remainder
[ "def", "__mod__", "(", "self", ",", "other", ",", "context", "=", "None", ")", ":", "other", "=", "_convert_other", "(", "other", ")", "if", "other", "is", "NotImplemented", ":", "return", "other", "if", "context", "is", "None", ":", "context", "=", "getcontext", "(", ")", "ans", "=", "self", ".", "_check_nans", "(", "other", ",", "context", ")", "if", "ans", ":", "return", "ans", "if", "self", ".", "_isinfinity", "(", ")", ":", "return", "context", ".", "_raise_error", "(", "InvalidOperation", ",", "'INF % x'", ")", "elif", "not", "other", ":", "if", "self", ":", "return", "context", ".", "_raise_error", "(", "InvalidOperation", ",", "'x % 0'", ")", "else", ":", "return", "context", ".", "_raise_error", "(", "DivisionUndefined", ",", "'0 % 0'", ")", "remainder", "=", "self", ".", "_divide", "(", "other", ",", "context", ")", "[", "1", "]", "remainder", "=", "remainder", ".", "_fix", "(", "context", ")", "return", "remainder" ]
https://github.com/buke/GreenOdoo/blob/3d8c55d426fb41fdb3f2f5a1533cfe05983ba1df/runtime/python/lib/python2.7/decimal.py#L1438-L1463
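A runnable example; note that unlike the integer %, the decimal remainder takes the sign of the dividend, and x % 0 raises InvalidOperation:

from decimal import Decimal, InvalidOperation

print(Decimal('7') % Decimal('3'))   # 1
print(Decimal('-7') % Decimal('3'))  # -1 (int -7 % 3 would give 2)
try:
    Decimal('1') % Decimal('0')
except InvalidOperation as exc:
    print('caught:', exc)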
bruderstein/PythonScript
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
PythonLib/min/fileinput.py
python
filelineno
()
return _state.filelineno()
Return the line number in the current file. Before the first line has been read, returns 0. After the last line of the last file has been read, returns the line number of that line within the file.
Return the line number in the current file. Before the first line has been read, returns 0. After the last line of the last file has been read, returns the line number of that line within the file.
[ "Return", "the", "line", "number", "in", "the", "current", "file", ".", "Before", "the", "first", "line", "has", "been", "read", "returns", "0", ".", "After", "the", "last", "line", "of", "the", "last", "file", "has", "been", "read", "returns", "the", "line", "number", "of", "that", "line", "within", "the", "file", "." ]
def filelineno(): """ Return the line number in the current file. Before the first line has been read, returns 0. After the last line of the last file has been read, returns the line number of that line within the file. """ if not _state: raise RuntimeError("no active input()") return _state.filelineno()
[ "def", "filelineno", "(", ")", ":", "if", "not", "_state", ":", "raise", "RuntimeError", "(", "\"no active input()\"", ")", "return", "_state", ".", "filelineno", "(", ")" ]
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/min/fileinput.py#L134-L142
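A usage sketch; the file names are assumptions and must exist for the loop to run:

import fileinput

for line in fileinput.input(['a.txt', 'b.txt']):
    # filelineno() restarts at 1 for each file; lineno() would keep counting across files
    print(fileinput.filename(), fileinput.filelineno(), line.rstrip())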
kornia/kornia
b12d6611b1c41d47b2c93675f0ea344b5314a688
kornia/feature/loftr/utils/supervision.py
python
spvs_fine
(data, config)
Update: data (dict):{ "expec_f_gt": [M, 2]}
Update: data (dict):{ "expec_f_gt": [M, 2]}
[ "Update", ":", "data", "(", "dict", ")", ":", "{", "expec_f_gt", ":", "[", "M", "2", "]", "}" ]
def spvs_fine(data, config): """ Update: data (dict):{ "expec_f_gt": [M, 2]} """ # 1. misc # w_pt0_i, pt1_i = data.pop('spv_w_pt0_i'), data.pop('spv_pt1_i') w_pt0_i, pt1_i = data['spv_w_pt0_i'], data['spv_pt1_i'] scale = config['LOFTR']['RESOLUTION'][1] radius = config['LOFTR']['FINE_WINDOW_SIZE'] // 2 # 2. get coarse prediction b_ids, i_ids, j_ids = data['b_ids'], data['i_ids'], data['j_ids'] # 3. compute gt scale = scale * data['scale1'][b_ids] if 'scale0' in data else scale # `expec_f_gt` might exceed the window, i.e. abs(*) > 1, which would be filtered later expec_f_gt = (w_pt0_i[b_ids, i_ids] - pt1_i[b_ids, j_ids]) / scale / radius # [M, 2] data.update({"expec_f_gt": expec_f_gt})
[ "def", "spvs_fine", "(", "data", ",", "config", ")", ":", "# 1. misc", "# w_pt0_i, pt1_i = data.pop('spv_w_pt0_i'), data.pop('spv_pt1_i')", "w_pt0_i", ",", "pt1_i", "=", "data", "[", "'spv_w_pt0_i'", "]", ",", "data", "[", "'spv_pt1_i'", "]", "scale", "=", "config", "[", "'LOFTR'", "]", "[", "'RESOLUTION'", "]", "[", "1", "]", "radius", "=", "config", "[", "'LOFTR'", "]", "[", "'FINE_WINDOW_SIZE'", "]", "//", "2", "# 2. get coarse prediction", "b_ids", ",", "i_ids", ",", "j_ids", "=", "data", "[", "'b_ids'", "]", ",", "data", "[", "'i_ids'", "]", ",", "data", "[", "'j_ids'", "]", "# 3. compute gt", "scale", "=", "scale", "*", "data", "[", "'scale1'", "]", "[", "b_ids", "]", "if", "'scale0'", "in", "data", "else", "scale", "# `expec_f_gt` might exceed the window, i.e. abs(*) > 1, which would be filtered later", "expec_f_gt", "=", "(", "w_pt0_i", "[", "b_ids", ",", "i_ids", "]", "-", "pt1_i", "[", "b_ids", ",", "j_ids", "]", ")", "/", "scale", "/", "radius", "# [M, 2]", "data", ".", "update", "(", "{", "\"expec_f_gt\"", ":", "expec_f_gt", "}", ")" ]
https://github.com/kornia/kornia/blob/b12d6611b1c41d47b2c93675f0ea344b5314a688/kornia/feature/loftr/utils/supervision.py#L124-L143
LMFDB/lmfdb
6cf48a4c18a96e6298da6ae43f587f96845bcb43
lmfdb/verify/mf_newforms.py
python
mf_newforms.check_self_twist_type
(self)
return (self.check_non_null(['is_cm', 'is_rm']) + self.check_iff({'self_twist_type':0}, {'is_cm':False, 'is_rm':False}) + self.check_iff({'self_twist_type':1}, {'is_cm':True, 'is_rm':False}) + self.check_iff({'self_twist_type':2}, {'is_cm':False, 'is_rm':True}) + self.check_iff({'self_twist_type':3}, {'is_cm':True, 'is_rm':True}))
check that self_twist_type is in {0,1,2,3} and matches is_cm and is_rm
check that self_twist_type is in {0,1,2,3} and matches is_cm and is_rm
[ "check", "that", "self_twist_type", "is", "in", "{", "0", "1", "2", "3", "}", "and", "matches", "is_cm", "and", "is_rm" ]
def check_self_twist_type(self): """ check that self_twist_type is in {0,1,2,3} and matches is_cm and is_rm """ # TIME about 6s return (self.check_non_null(['is_cm', 'is_rm']) + self.check_iff({'self_twist_type':0}, {'is_cm':False, 'is_rm':False}) + self.check_iff({'self_twist_type':1}, {'is_cm':True, 'is_rm':False}) + self.check_iff({'self_twist_type':2}, {'is_cm':False, 'is_rm':True}) + self.check_iff({'self_twist_type':3}, {'is_cm':True, 'is_rm':True}))
[ "def", "check_self_twist_type", "(", "self", ")", ":", "# TIME about 6s", "return", "(", "self", ".", "check_non_null", "(", "[", "'is_cm'", ",", "'is_rm'", "]", ")", "+", "self", ".", "check_iff", "(", "{", "'self_twist_type'", ":", "0", "}", ",", "{", "'is_cm'", ":", "False", ",", "'is_rm'", ":", "False", "}", ")", "+", "self", ".", "check_iff", "(", "{", "'self_twist_type'", ":", "1", "}", ",", "{", "'is_cm'", ":", "True", ",", "'is_rm'", ":", "False", "}", ")", "+", "self", ".", "check_iff", "(", "{", "'self_twist_type'", ":", "2", "}", ",", "{", "'is_cm'", ":", "False", ",", "'is_rm'", ":", "True", "}", ")", "+", "self", ".", "check_iff", "(", "{", "'self_twist_type'", ":", "3", "}", ",", "{", "'is_cm'", ":", "True", ",", "'is_rm'", ":", "True", "}", ")", ")" ]
https://github.com/LMFDB/lmfdb/blob/6cf48a4c18a96e6298da6ae43f587f96845bcb43/lmfdb/verify/mf_newforms.py#L153-L162
mchristopher/PokemonGo-DesktopMap
ec37575f2776ee7d64456e2a1f6b6b78830b4fe0
app/pywin/Lib/encodings/hex_codec.py
python
hex_decode
(input,errors='strict')
return (output, len(input))
Decodes the object input and returns a tuple (output object, length consumed). input must be an object which provides the bf_getreadbuf buffer slot. Python strings, buffer objects and memory mapped files are examples of objects providing this slot. errors defines the error handling to apply. It defaults to 'strict' handling which is the only currently supported error handling for this codec.
Decodes the object input and returns a tuple (output object, length consumed).
[ "Decodes", "the", "object", "input", "and", "returns", "a", "tuple", "(", "output", "object", "length", "consumed", ")", "." ]
def hex_decode(input,errors='strict'): """ Decodes the object input and returns a tuple (output object, length consumed). input must be an object which provides the bf_getreadbuf buffer slot. Python strings, buffer objects and memory mapped files are examples of objects providing this slot. errors defines the error handling to apply. It defaults to 'strict' handling which is the only currently supported error handling for this codec. """ assert errors == 'strict' output = binascii.a2b_hex(input) return (output, len(input))
[ "def", "hex_decode", "(", "input", ",", "errors", "=", "'strict'", ")", ":", "assert", "errors", "==", "'strict'", "output", "=", "binascii", ".", "a2b_hex", "(", "input", ")", "return", "(", "output", ",", "len", "(", "input", ")", ")" ]
https://github.com/mchristopher/PokemonGo-DesktopMap/blob/ec37575f2776ee7d64456e2a1f6b6b78830b4fe0/app/pywin/Lib/encodings/hex_codec.py#L27-L43
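The codec is a thin wrapper over binascii; a runnable Python 3 equivalent of the call above (on Python 3 the hex codec is reached via codecs.decode rather than str.decode):

import binascii
import codecs

print(binascii.a2b_hex(b'4869'))      # b'Hi' -- the core of hex_decode
print(codecs.decode(b'4869', 'hex'))  # same result through the codec machinery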
pybuilder/pybuilder
12ea2f54e04f97daada375dc3309a3f525f1b5e1
src/main/python/pybuilder/_vendor/filelock/_api.py
python
BaseFileLock.is_locked
(self)
return self._lock_file_fd is not None
:return: A boolean indicating if the lock file is holding the lock currently. .. versionchanged:: 2.0.0 This was previously a method and is now a property.
[]
def is_locked(self) -> bool: """ :return: A boolean indicating if the lock file is holding the lock currently. .. versionchanged:: 2.0.0 This was previously a method and is now a property. """ return self._lock_file_fd is not None
[ "def", "is_locked", "(", "self", ")", "->", "bool", ":", "return", "self", ".", "_lock_file_fd", "is", "not", "None" ]
https://github.com/pybuilder/pybuilder/blob/12ea2f54e04f97daada375dc3309a3f525f1b5e1/src/main/python/pybuilder/_vendor/filelock/_api.py#L99-L108
joxeankoret/diaphora
dcb5a25ac9fe23a285b657e5389cf770de7ac928
pygments/lexer.py
python
RegexLexerMeta._process_new_state
(cls, new_state, unprocessed, processed)
Preprocess the state transition action of a token definition.
Preprocess the state transition action of a token definition.
[ "Preprocess", "the", "state", "transition", "action", "of", "a", "token", "definition", "." ]
def _process_new_state(cls, new_state, unprocessed, processed): """Preprocess the state transition action of a token definition.""" if isinstance(new_state, str): # an existing state if new_state == '#pop': return -1 elif new_state in unprocessed: return (new_state,) elif new_state == '#push': return new_state elif new_state[:5] == '#pop:': return -int(new_state[5:]) else: assert False, 'unknown new state %r' % new_state elif isinstance(new_state, combined): # combine a new state from existing ones tmp_state = '_tmp_%d' % cls._tmpname cls._tmpname += 1 itokens = [] for istate in new_state: assert istate != new_state, 'circular state ref %r' % istate itokens.extend(cls._process_state(unprocessed, processed, istate)) processed[tmp_state] = itokens return (tmp_state,) elif isinstance(new_state, tuple): # push more than one state for istate in new_state: assert (istate in unprocessed or istate in ('#pop', '#push')), \ 'unknown new state ' + istate return new_state else: assert False, 'unknown new state def %r' % new_state
[ "def", "_process_new_state", "(", "cls", ",", "new_state", ",", "unprocessed", ",", "processed", ")", ":", "if", "isinstance", "(", "new_state", ",", "str", ")", ":", "# an existing state", "if", "new_state", "==", "'#pop'", ":", "return", "-", "1", "elif", "new_state", "in", "unprocessed", ":", "return", "(", "new_state", ",", ")", "elif", "new_state", "==", "'#push'", ":", "return", "new_state", "elif", "new_state", "[", ":", "5", "]", "==", "'#pop:'", ":", "return", "-", "int", "(", "new_state", "[", "5", ":", "]", ")", "else", ":", "assert", "False", ",", "'unknown new state %r'", "%", "new_state", "elif", "isinstance", "(", "new_state", ",", "combined", ")", ":", "# combine a new state from existing ones", "tmp_state", "=", "'_tmp_%d'", "%", "cls", ".", "_tmpname", "cls", ".", "_tmpname", "+=", "1", "itokens", "=", "[", "]", "for", "istate", "in", "new_state", ":", "assert", "istate", "!=", "new_state", ",", "'circular state ref %r'", "%", "istate", "itokens", ".", "extend", "(", "cls", ".", "_process_state", "(", "unprocessed", ",", "processed", ",", "istate", ")", ")", "processed", "[", "tmp_state", "]", "=", "itokens", "return", "(", "tmp_state", ",", ")", "elif", "isinstance", "(", "new_state", ",", "tuple", ")", ":", "# push more than one state", "for", "istate", "in", "new_state", ":", "assert", "(", "istate", "in", "unprocessed", "or", "istate", "in", "(", "'#pop'", ",", "'#push'", ")", ")", ",", "'unknown new state '", "+", "istate", "return", "new_state", "else", ":", "assert", "False", ",", "'unknown new state def %r'", "%", "new_state" ]
https://github.com/joxeankoret/diaphora/blob/dcb5a25ac9fe23a285b657e5389cf770de7ac928/pygments/lexer.py#L436-L469
reviewboard/reviewboard
7395902e4c181bcd1d633f61105012ffb1d18e1b
reviewboard/hostingsvcs/service.py
python
HostingService.get_remote_repository
(self, repository_id)
Return the remote repository for the ID. This method should be implemented by subclasses. Args: repository_id (unicode): The repository's identifier. This is unique to each hosting service. Returns: reviewboard.hostingsvcs.repository.RemoteRepository: The remote repository. Raises: django.core.exceptions.ObjectDoesNotExist: If the remote repository does not exist.
Return the remote repository for the ID.
[ "Return", "the", "remote", "repository", "for", "the", "ID", "." ]
def get_remote_repository(self, repository_id): """Return the remote repository for the ID. This method should be implemented by subclasses. Args: repository_id (unicode): The repository's identifier. This is unique to each hosting service. Returns: reviewboard.hostingsvcs.repository.RemoteRepository: The remote repository. Raises: django.core.exceptions.ObjectDoesNotExist: If the remote repository does not exist. """ raise NotImplementedError
[ "def", "get_remote_repository", "(", "self", ",", "repository_id", ")", ":", "raise", "NotImplementedError" ]
https://github.com/reviewboard/reviewboard/blob/7395902e4c181bcd1d633f61105012ffb1d18e1b/reviewboard/hostingsvcs/service.py#L1850-L1868
allenai/allennlp-models
b6923c362095a82829646912353425143f757143
allennlp_models/structured_prediction/predictors/openie.py
python
join_mwp
(tags: List[str])
return ret
Join multi-word predicates to a single predicate ('V') token.
Join multi-word predicates to a single predicate ('V') token.
[ "Join", "multi", "-", "word", "predicates", "to", "a", "single", "predicate", "(", "V", ")", "token", "." ]
def join_mwp(tags: List[str]) -> List[str]: """ Join multi-word predicates to a single predicate ('V') token. """ ret = [] verb_flag = False for tag in tags: if "V" in tag: # Create a continuous 'V' BIO span prefix, _ = tag.split("-", 1) if verb_flag: # Continue a verb label across the different predicate parts prefix = "I" ret.append(f"{prefix}-V") verb_flag = True else: ret.append(tag) verb_flag = False return ret
[ "def", "join_mwp", "(", "tags", ":", "List", "[", "str", "]", ")", "->", "List", "[", "str", "]", ":", "ret", "=", "[", "]", "verb_flag", "=", "False", "for", "tag", "in", "tags", ":", "if", "\"V\"", "in", "tag", ":", "# Create a continuous 'V' BIO span", "prefix", ",", "_", "=", "tag", ".", "split", "(", "\"-\"", ",", "1", ")", "if", "verb_flag", ":", "# Continue a verb label across the different predicate parts", "prefix", "=", "\"I\"", "ret", ".", "append", "(", "f\"{prefix}-V\"", ")", "verb_flag", "=", "True", "else", ":", "ret", ".", "append", "(", "tag", ")", "verb_flag", "=", "False", "return", "ret" ]
https://github.com/allenai/allennlp-models/blob/b6923c362095a82829646912353425143f757143/allennlp_models/structured_prediction/predictors/openie.py#L12-L32
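A sketch assuming join_mwp above is in scope; the tag sequence is made up. Two adjacent predicate parts are merged into one continuous 'V' span:

tags = ['B-ARG0', 'B-V', 'B-V', 'B-ARG1']
print(join_mwp(tags))  # -> ['B-ARG0', 'B-V', 'I-V', 'B-ARG1']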
Alephbet/gimel
e4814f378c749b78f37a4613220d2f909bf30db2
gimel/vendor/redis/client.py
python
StrictRedis.brpoplpush
(self, src, dst, timeout=0)
return self.execute_command('BRPOPLPUSH', src, dst, timeout)
Pop a value off the tail of ``src``, push it on the head of ``dst`` and then return it. This command blocks until a value is in ``src`` or until ``timeout`` seconds elapse, whichever is first. A ``timeout`` value of 0 blocks forever.
Pop a value off the tail of ``src``, push it on the head of ``dst`` and then return it.
[ "Pop", "a", "value", "off", "the", "tail", "of", "src", "push", "it", "on", "the", "head", "of", "dst", "and", "then", "return", "it", "." ]
def brpoplpush(self, src, dst, timeout=0): """ Pop a value off the tail of ``src``, push it on the head of ``dst`` and then return it. This command blocks until a value is in ``src`` or until ``timeout`` seconds elapse, whichever is first. A ``timeout`` value of 0 blocks forever. """ if timeout is None: timeout = 0 return self.execute_command('BRPOPLPUSH', src, dst, timeout)
[ "def", "brpoplpush", "(", "self", ",", "src", ",", "dst", ",", "timeout", "=", "0", ")", ":", "if", "timeout", "is", "None", ":", "timeout", "=", "0", "return", "self", ".", "execute_command", "(", "'BRPOPLPUSH'", ",", "src", ",", "dst", ",", "timeout", ")" ]
https://github.com/Alephbet/gimel/blob/e4814f378c749b78f37a4613220d2f909bf30db2/gimel/vendor/redis/client.py#L1185-L1196
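A usage sketch assuming redis-py and a Redis server on localhost:6379; the key names are arbitrary:

import redis

r = redis.StrictRedis()
r.rpush('src', 'job1')
print(r.brpoplpush('src', 'dst', timeout=1))  # b'job1', atomically moved
print(r.lrange('dst', 0, -1))                 # [b'job1']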
bruderstein/PythonScript
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
PythonLib/full/encodings/cp1140.py
python
Codec.decode
(self,input,errors='strict')
return codecs.charmap_decode(input,errors,decoding_table)
[]
def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table)
[ "def", "decode", "(", "self", ",", "input", ",", "errors", "=", "'strict'", ")", ":", "return", "codecs", ".", "charmap_decode", "(", "input", ",", "errors", ",", "decoding_table", ")" ]
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/full/encodings/cp1140.py#L14-L15
CedricGuillemet/Imogen
ee417b42747ed5b46cb11b02ef0c3630000085b3
bin/Lib/http/cookiejar.py
python
time2netscape
(t=None)
return "%s, %02d-%s-%04d %02d:%02d:%02d GMT" % ( DAYS[dt.weekday()], dt.day, MONTHS[dt.month-1], dt.year, dt.hour, dt.minute, dt.second)
Return a string representing time in seconds since epoch, t. If the function is called without an argument, it will use the current time. The format of the returned string is like this: Wed, DD-Mon-YYYY HH:MM:SS GMT
Return a string representing time in seconds since epoch, t.
[ "Return", "a", "string", "representing", "time", "in", "seconds", "since", "epoch", "t", "." ]
def time2netscape(t=None): """Return a string representing time in seconds since epoch, t. If the function is called without an argument, it will use the current time. The format of the returned string is like this: Wed, DD-Mon-YYYY HH:MM:SS GMT """ if t is None: dt = datetime.datetime.utcnow() else: dt = datetime.datetime.utcfromtimestamp(t) return "%s, %02d-%s-%04d %02d:%02d:%02d GMT" % ( DAYS[dt.weekday()], dt.day, MONTHS[dt.month-1], dt.year, dt.hour, dt.minute, dt.second)
[ "def", "time2netscape", "(", "t", "=", "None", ")", ":", "if", "t", "is", "None", ":", "dt", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", "else", ":", "dt", "=", "datetime", ".", "datetime", ".", "utcfromtimestamp", "(", "t", ")", "return", "\"%s, %02d-%s-%04d %02d:%02d:%02d GMT\"", "%", "(", "DAYS", "[", "dt", ".", "weekday", "(", ")", "]", ",", "dt", ".", "day", ",", "MONTHS", "[", "dt", ".", "month", "-", "1", "]", ",", "dt", ".", "year", ",", "dt", ".", "hour", ",", "dt", ".", "minute", ",", "dt", ".", "second", ")" ]
https://github.com/CedricGuillemet/Imogen/blob/ee417b42747ed5b46cb11b02ef0c3630000085b3/bin/Lib/http/cookiejar.py#L105-L122
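The same helper ships in the standard library; a runnable example via its Python 3 location:

from http.cookiejar import time2netscape

print(time2netscape(0))  # -> Thu, 01-Jan-1970 00:00:00 GMT
print(time2netscape())   # current UTC time in the same Netscape cookie format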
PowerScript/KatanaFramework
0f6ad90a88de865d58ec26941cb4460501e75496
lib/setuptools/setuptools/command/easy_install.py
python
CommandSpec._extract_options
(orig_script)
return options.strip()
Extract any options from the first line of the script.
Extract any options from the first line of the script.
[ "Extract", "any", "options", "from", "the", "first", "line", "of", "the", "script", "." ]
def _extract_options(orig_script): """ Extract any options from the first line of the script. """ first = (orig_script + '\n').splitlines()[0] match = _first_line_re().match(first) options = match.group(1) or '' if match else '' return options.strip()
[ "def", "_extract_options", "(", "orig_script", ")", ":", "first", "=", "(", "orig_script", "+", "'\\n'", ")", ".", "splitlines", "(", ")", "[", "0", "]", "match", "=", "_first_line_re", "(", ")", ".", "match", "(", "first", ")", "options", "=", "match", ".", "group", "(", "1", ")", "or", "''", "if", "match", "else", "''", "return", "options", ".", "strip", "(", ")" ]
https://github.com/PowerScript/KatanaFramework/blob/0f6ad90a88de865d58ec26941cb4460501e75496/lib/setuptools/setuptools/command/easy_install.py#L1970-L1977
jython/frozen-mirror
b8d7aa4cee50c0c0fe2f4b235dd62922dd0f3f99
Lib/inspect.py
python
getargspec
(func)
return ArgSpec(args, varargs, varkw, func.func_defaults)
Get the names and default values of a function's arguments. A tuple of four things is returned: (args, varargs, varkw, defaults). 'args' is a list of the argument names (it may contain nested lists). 'varargs' and 'varkw' are the names of the * and ** arguments or None. 'defaults' is an n-tuple of the default values of the last n arguments.
Get the names and default values of a function's arguments.
[ "Get", "the", "names", "and", "default", "values", "of", "a", "function", "s", "arguments", "." ]
def getargspec(func): """Get the names and default values of a function's arguments. A tuple of four things is returned: (args, varargs, varkw, defaults). 'args' is a list of the argument names (it may contain nested lists). 'varargs' and 'varkw' are the names of the * and ** arguments or None. 'defaults' is an n-tuple of the default values of the last n arguments. """ if ismethod(func): func = func.im_func if not isfunction(func): raise TypeError('{!r} is not a Python function'.format(func)) args, varargs, varkw = getargs(func.func_code) return ArgSpec(args, varargs, varkw, func.func_defaults)
[ "def", "getargspec", "(", "func", ")", ":", "if", "ismethod", "(", "func", ")", ":", "func", "=", "func", ".", "im_func", "if", "not", "isfunction", "(", "func", ")", ":", "raise", "TypeError", "(", "'{!r} is not a Python function'", ".", "format", "(", "func", ")", ")", "args", ",", "varargs", ",", "varkw", "=", "getargs", "(", "func", ".", "func_code", ")", "return", "ArgSpec", "(", "args", ",", "varargs", ",", "varkw", ",", "func", ".", "func_defaults", ")" ]
https://github.com/jython/frozen-mirror/blob/b8d7aa4cee50c0c0fe2f4b235dd62922dd0f3f99/Lib/inspect.py#L815-L829
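A usage example; this Python 2 version (and the deprecated shim that survived until Python 3.10) unpacks as follows for a function with defaults and star-arguments:

import inspect

def f(a, b=1, *args, **kwargs):
    pass

print(inspect.getargspec(f))
# -> ArgSpec(args=['a', 'b'], varargs='args', keywords='kwargs', defaults=(1,))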
SheffieldML/GPy
bb1bc5088671f9316bc92a46d356734e34c2d5c0
GPy/core/gp.py
python
GP.save_model
(self, output_filename, compress=True, save_data=True)
[]
def save_model(self, output_filename, compress=True, save_data=True): self._save_model(output_filename, compress=True, save_data=True)
[ "def", "save_model", "(", "self", ",", "output_filename", ",", "compress", "=", "True", ",", "save_data", "=", "True", ")", ":", "self", ".", "_save_model", "(", "output_filename", ",", "compress", "=", "True", ",", "save_data", "=", "True", ")" ]
https://github.com/SheffieldML/GPy/blob/bb1bc5088671f9316bc92a46d356734e34c2d5c0/GPy/core/gp.py#L185-L186
PaddlePaddle/PaddleFL
583691acd5db0a7ca331cc9a72415017b18669b8
python/paddle_fl/paddle_fl/core/strategy/details/vars_distributed.py
python
VarDistributed.equal
(var1, var2)
return var1.name == var2.name and \ var1.type == var2.type and \ var1.shape == var2.shape and \ var1.dtype == var2.dtype and \ var1.lod_level == var2.lod_level and \ var1.persistable == var2.persistable
Whether the two vars are equal. Returns: bool: True if the vars are equal, else False
Whether the two vars are equal. Returns: bool: True if the vars are equal, else False
[ "the", "two", "var", "is", "equal", "or", "not", ".", "Returns", ":", "bool", ":", "equal", "will", "return", "True", "else", "False" ]
def equal(var1, var2): """ Whether the two vars are equal. Returns: bool: True if the vars are equal, else False """ assert isinstance(var1, VarStruct) and isinstance(var2, VarStruct) return var1.name == var2.name and \ var1.type == var2.type and \ var1.shape == var2.shape and \ var1.dtype == var2.dtype and \ var1.lod_level == var2.lod_level and \ var1.persistable == var2.persistable
[ "def", "equal", "(", "var1", ",", "var2", ")", ":", "assert", "isinstance", "(", "var1", ",", "VarStruct", ")", "and", "isinstance", "(", "var2", ",", "VarStruct", ")", "return", "var1", ".", "name", "==", "var2", ".", "name", "and", "var1", ".", "type", "==", "var2", ".", "type", "and", "var1", ".", "shape", "==", "var2", ".", "shape", "and", "var1", ".", "dtype", "==", "var2", ".", "dtype", "and", "var1", ".", "lod_level", "==", "var2", ".", "lod_level", "and", "var1", ".", "persistable", "==", "var2", ".", "persistable" ]
https://github.com/PaddlePaddle/PaddleFL/blob/583691acd5db0a7ca331cc9a72415017b18669b8/python/paddle_fl/paddle_fl/core/strategy/details/vars_distributed.py#L93-L106
great-expectations/great_expectations
45224cb890aeae725af25905923d0dbbab2d969d
great_expectations/execution_engine/sparkdf_execution_engine.py
python
SparkDFExecutionEngine._split_on_divided_integer
( df, column_name: str, divisor: int, batch_identifiers: dict )
return res
Divide the values in the named column by `divisor`, and split on that
Divide the values in the named column by `divisor`, and split on that
[ "Divide", "the", "values", "in", "the", "named", "column", "by", "divisor", "and", "split", "on", "that" ]
def _split_on_divided_integer( df, column_name: str, divisor: int, batch_identifiers: dict ): """Divide the values in the named column by `divisor`, and split on that""" matching_divisor = batch_identifiers[column_name] res = ( df.withColumn( "div_temp", (F.col(column_name) / divisor).cast(sparktypes.IntegerType()), ) .filter(F.col("div_temp") == matching_divisor) .drop("div_temp") ) return res
[ "def", "_split_on_divided_integer", "(", "df", ",", "column_name", ":", "str", ",", "divisor", ":", "int", ",", "batch_identifiers", ":", "dict", ")", ":", "matching_divisor", "=", "batch_identifiers", "[", "column_name", "]", "res", "=", "(", "df", ".", "withColumn", "(", "\"div_temp\"", ",", "(", "F", ".", "col", "(", "column_name", ")", "/", "divisor", ")", ".", "cast", "(", "sparktypes", ".", "IntegerType", "(", ")", ")", ",", ")", ".", "filter", "(", "F", ".", "col", "(", "\"div_temp\"", ")", "==", "matching_divisor", ")", ".", "drop", "(", "\"div_temp\"", ")", ")", "return", "res" ]
https://github.com/great-expectations/great_expectations/blob/45224cb890aeae725af25905923d0dbbab2d969d/great_expectations/execution_engine/sparkdf_execution_engine.py#L724-L737
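Stripped of the Spark API, the split predicate is integer division; a pure-Python sketch with made-up values (for non-negative inputs the integer cast matches floor division):

values = [3, 7, 12, 14, 25]
divisor, matching_divisor = 5, 2
print([v for v in values if v // divisor == matching_divisor])  # -> [12, 14]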
hatRiot/zarp
2e772350a01c2aeed3f4da9685cd0cc5d6b3ecad
src/lib/libmproxy/flow.py
python
Request.set_url
(self, url)
return True
Parses a URL specification, and updates the Request's information accordingly. Returns False if the URL was invalid, True if the request succeeded.
Parses a URL specification, and updates the Request's information accordingly.
[ "Parses", "a", "URL", "specification", "and", "updates", "the", "Request", "s", "information", "accordingly", "." ]
def set_url(self, url): """ Parses a URL specification, and updates the Request's information accordingly. Returns False if the URL was invalid, True if the request succeeded. """ parts = http.parse_url(url) if not parts: return False self.scheme, self.host, self.port, self.path = parts return True
[ "def", "set_url", "(", "self", ",", "url", ")", ":", "parts", "=", "http", ".", "parse_url", "(", "url", ")", "if", "not", "parts", ":", "return", "False", "self", ".", "scheme", ",", "self", ".", "host", ",", "self", ".", "port", ",", "self", ".", "path", "=", "parts", "return", "True" ]
https://github.com/hatRiot/zarp/blob/2e772350a01c2aeed3f4da9685cd0cc5d6b3ecad/src/lib/libmproxy/flow.py#L492-L503
edgedb/edgedb
872bf5abbb10f7c72df21f57635238ed27b9f280
edb/pgsql/compiler/pathctx.py
python
_find_in_output_tuple
( rel: pgast.Query, path_id: irast.PathId, aspect: str, env: context.Environment)
return None
Try indirecting a source tuple already present as an output. Normally tuple indirections are handled by process_set_as_tuple_indirection, but UNIONing an explicit tuple with a tuple coming from a base relation (like `(Foo.bar UNION (1,2)).0`) can lead to us looking for a tuple path in relations that only have the actual full tuple. (See test_edgeql_coalesce_tuple_{08,09}). We handle this by checking whether some prefix of the tuple path is present in the path_outputs. This is sufficient because the relevant cases are all caused by set ops, and the "fixup" done in set op cases ensures that the tuple will be already present.
Try indirecting a source tuple already present as an output.
[ "Try", "indirecting", "a", "source", "tuple", "already", "present", "as", "an", "output", "." ]
def _find_in_output_tuple( rel: pgast.Query, path_id: irast.PathId, aspect: str, env: context.Environment) -> Optional[pgast.BaseExpr]: """Try indirecting a source tuple already present as an output. Normally tuple indirections are handled by process_set_as_tuple_indirection, but UNIONing an explicit tuple with a tuple coming from a base relation (like `(Foo.bar UNION (1,2)).0`) can lead to us looking for a tuple path in relations that only have the actual full tuple. (See test_edgeql_coalesce_tuple_{08,09}). We handle this by checking whether some prefix of the tuple path is present in the path_outputs. This is sufficient because the relevant cases are all caused by set ops, and the "fixup" done in set op cases ensures that the tuple will be already present. """ steps = [] src_path_id = path_id.src_path() ptrref = path_id.rptr() while ( src_path_id and src_path_id.is_tuple_path() and isinstance(ptrref, irast.TupleIndirectionPointerRef) ): steps.append((ptrref.shortname.name, src_path_id)) if ( (var := rel.path_namespace.get((src_path_id, aspect))) and not isinstance(var, pgast.TupleVarBase) ): for name, src in reversed(steps): var = astutils.tuple_getattr(var, src.target, name) put_path_var(rel, path_id, var, aspect=aspect, env=env) return var ptrref = src_path_id.rptr() src_path_id = src_path_id.src_path() return None
[ "def", "_find_in_output_tuple", "(", "rel", ":", "pgast", ".", "Query", ",", "path_id", ":", "irast", ".", "PathId", ",", "aspect", ":", "str", ",", "env", ":", "context", ".", "Environment", ")", "->", "Optional", "[", "pgast", ".", "BaseExpr", "]", ":", "steps", "=", "[", "]", "src_path_id", "=", "path_id", ".", "src_path", "(", ")", "ptrref", "=", "path_id", ".", "rptr", "(", ")", "while", "(", "src_path_id", "and", "src_path_id", ".", "is_tuple_path", "(", ")", "and", "isinstance", "(", "ptrref", ",", "irast", ".", "TupleIndirectionPointerRef", ")", ")", ":", "steps", ".", "append", "(", "(", "ptrref", ".", "shortname", ".", "name", ",", "src_path_id", ")", ")", "if", "(", "(", "var", ":=", "rel", ".", "path_namespace", ".", "get", "(", "(", "src_path_id", ",", "aspect", ")", ")", ")", "and", "not", "isinstance", "(", "var", ",", "pgast", ".", "TupleVarBase", ")", ")", ":", "for", "name", ",", "src", "in", "reversed", "(", "steps", ")", ":", "var", "=", "astutils", ".", "tuple_getattr", "(", "var", ",", "src", ".", "target", ",", "name", ")", "put_path_var", "(", "rel", ",", "path_id", ",", "var", ",", "aspect", "=", "aspect", ",", "env", "=", "env", ")", "return", "var", "ptrref", "=", "src_path_id", ".", "rptr", "(", ")", "src_path_id", "=", "src_path_id", ".", "src_path", "(", ")", "return", "None" ]
https://github.com/edgedb/edgedb/blob/872bf5abbb10f7c72df21f57635238ed27b9f280/edb/pgsql/compiler/pathctx.py#L456-L500
GoogleCloudPlatform/python-docs-samples
937297c6a31bf4e598c660169d4fb6265eef565a
healthcare/api-client/v1/hl7v2/hl7v2_messages.py
python
create_hl7v2_message
( project_id, location, dataset_id, hl7v2_store_id, hl7v2_message_file )
return response
Creates an HL7v2 message and sends a notification to the Cloud Pub/Sub topic. See https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/healthcare/api-client/v1/hl7v2 before running the sample.
Creates an HL7v2 message and sends a notification to the Cloud Pub/Sub topic.
[ "Creates", "an", "HL7v2", "message", "and", "sends", "a", "notification", "to", "the", "Cloud", "Pub", "/", "Sub", "topic", "." ]
def create_hl7v2_message( project_id, location, dataset_id, hl7v2_store_id, hl7v2_message_file ): """Creates an HL7v2 message and sends a notification to the Cloud Pub/Sub topic. See https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/healthcare/api-client/v1/hl7v2 before running the sample.""" # Imports the Google API Discovery Service. from googleapiclient import discovery # Imports Python's built-in "json" module import json api_version = "v1" service_name = "healthcare" # Returns an authorized API client by discovering the Healthcare API # and using GOOGLE_APPLICATION_CREDENTIALS environment variable. client = discovery.build(service_name, api_version) # TODO(developer): Uncomment these lines and replace with your values. # project_id = 'my-project' # replace with your GCP project ID # location = 'us-central1' # replace with the parent dataset's location # dataset_id = 'my-dataset' # replace with the HL7v2 store's parent dataset ID # hl7v2_store_id = 'my-hl7v2-store' # replace with the HL7v2 store's ID # hl7v2_message_file = 'hl7v2-message.json' # replace with the path to the HL7v2 file hl7v2_parent = "projects/{}/locations/{}".format(project_id, location) hl7v2_store_name = "{}/datasets/{}/hl7V2Stores/{}".format( hl7v2_parent, dataset_id, hl7v2_store_id ) with open(hl7v2_message_file) as hl7v2_message: hl7v2_message_content = json.load(hl7v2_message) request = ( client.projects() .locations() .datasets() .hl7V2Stores() .messages() .create(parent=hl7v2_store_name, body=hl7v2_message_content) ) response = request.execute() print("Created HL7v2 message from file: {}".format(hl7v2_message_file)) return response
[ "def", "create_hl7v2_message", "(", "project_id", ",", "location", ",", "dataset_id", ",", "hl7v2_store_id", ",", "hl7v2_message_file", ")", ":", "# Imports the Google API Discovery Service.", "from", "googleapiclient", "import", "discovery", "# Imports Python's built-in \"json\" module", "import", "json", "api_version", "=", "\"v1\"", "service_name", "=", "\"healthcare\"", "# Returns an authorized API client by discovering the Healthcare API", "# and using GOOGLE_APPLICATION_CREDENTIALS environment variable.", "client", "=", "discovery", ".", "build", "(", "service_name", ",", "api_version", ")", "# TODO(developer): Uncomment these lines and replace with your values.", "# project_id = 'my-project' # replace with your GCP project ID", "# location = 'us-central1' # replace with the parent dataset's location", "# dataset_id = 'my-dataset' # replace with the HL7v2 store's parent dataset ID", "# hl7v2_store_id = 'my-hl7v2-store' # replace with the HL7v2 store's ID", "# hl7v2_message_file = 'hl7v2-message.json' # replace with the path to the HL7v2 file", "hl7v2_parent", "=", "\"projects/{}/locations/{}\"", ".", "format", "(", "project_id", ",", "location", ")", "hl7v2_store_name", "=", "\"{}/datasets/{}/hl7V2Stores/{}\"", ".", "format", "(", "hl7v2_parent", ",", "dataset_id", ",", "hl7v2_store_id", ")", "with", "open", "(", "hl7v2_message_file", ")", "as", "hl7v2_message", ":", "hl7v2_message_content", "=", "json", ".", "load", "(", "hl7v2_message", ")", "request", "=", "(", "client", ".", "projects", "(", ")", ".", "locations", "(", ")", ".", "datasets", "(", ")", ".", "hl7V2Stores", "(", ")", ".", "messages", "(", ")", ".", "create", "(", "parent", "=", "hl7v2_store_name", ",", "body", "=", "hl7v2_message_content", ")", ")", "response", "=", "request", ".", "execute", "(", ")", "print", "(", "\"Created HL7v2 message from file: {}\"", ".", "format", "(", "hl7v2_message_file", ")", ")", "return", "response" ]
https://github.com/GoogleCloudPlatform/python-docs-samples/blob/937297c6a31bf4e598c660169d4fb6265eef565a/healthcare/api-client/v1/hl7v2/hl7v2_messages.py#L20-L65
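A call sketch using the placeholder values from the sample's own TODO block; it only runs against a real project with GOOGLE_APPLICATION_CREDENTIALS set and google-api-python-client installed:

create_hl7v2_message(
    project_id='my-project',
    location='us-central1',
    dataset_id='my-dataset',
    hl7v2_store_id='my-hl7v2-store',
    hl7v2_message_file='hl7v2-message.json',
)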
Source-Python-Dev-Team/Source.Python
d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb
addons/source-python/packages/site-packages/pygments/formatters/img.py
python
FontManager.get_font
(self, bold, oblique)
Get the font based on bold and italic flags.
Get the font based on bold and italic flags.
[ "Get", "the", "font", "based", "on", "bold", "and", "italic", "flags", "." ]
def get_font(self, bold, oblique): """ Get the font based on bold and italic flags. """ if bold and oblique: return self.fonts['BOLDITALIC'] elif bold: return self.fonts['BOLD'] elif oblique: return self.fonts['ITALIC'] else: return self.fonts['NORMAL']
[ "def", "get_font", "(", "self", ",", "bold", ",", "oblique", ")", ":", "if", "bold", "and", "oblique", ":", "return", "self", ".", "fonts", "[", "'BOLDITALIC'", "]", "elif", "bold", ":", "return", "self", ".", "fonts", "[", "'BOLD'", "]", "elif", "oblique", ":", "return", "self", ".", "fonts", "[", "'ITALIC'", "]", "else", ":", "return", "self", ".", "fonts", "[", "'NORMAL'", "]" ]
https://github.com/Source-Python-Dev-Team/Source.Python/blob/d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb/addons/source-python/packages/site-packages/pygments/formatters/img.py#L162-L173
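The dispatch reduces to a two-flag truth table; a standalone sketch with placeholder font handles (the real FontManager loads actual font files):

fonts = {'NORMAL': 'n', 'BOLD': 'b', 'ITALIC': 'i', 'BOLDITALIC': 'bi'}

def style_key(bold, oblique):
    if bold and oblique:
        return 'BOLDITALIC'
    return 'BOLD' if bold else ('ITALIC' if oblique else 'NORMAL')

print([fonts[style_key(b, o)] for b in (False, True) for o in (False, True)])
# -> ['n', 'i', 'b', 'bi']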
charlesq34/frustum-pointnets
2ffdd345e1fce4775ecb508d207e0ad465bcca80
models/pointnet_util.py
python
sample_and_group_all
(xyz, points, use_xyz=True)
return new_xyz, new_points, idx, grouped_xyz
Inputs: xyz: (batch_size, ndataset, 3) TF tensor points: (batch_size, ndataset, channel) TF tensor, if None will just use xyz as points use_xyz: bool, if True concat XYZ with local point features, otherwise just use point features Outputs: new_xyz: (batch_size, 1, 3) as (0,0,0) new_points: (batch_size, 1, ndataset, 3+channel) TF tensor Note: Equivalent to sample_and_group with npoint=1, radius=inf, use (0,0,0) as the centroid
Inputs: xyz: (batch_size, ndataset, 3) TF tensor points: (batch_size, ndataset, channel) TF tensor, if None will just use xyz as points use_xyz: bool, if True concat XYZ with local point features, otherwise just use point features Outputs: new_xyz: (batch_size, 1, 3) as (0,0,0) new_points: (batch_size, 1, ndataset, 3+channel) TF tensor Note: Equivalent to sample_and_group with npoint=1, radius=inf, use (0,0,0) as the centroid
[ "Inputs", ":", "xyz", ":", "(", "batch_size", "ndataset", "3", ")", "TF", "tensor", "points", ":", "(", "batch_size", "ndataset", "channel", ")", "TF", "tensor", "if", "None", "will", "just", "use", "xyz", "as", "points", "use_xyz", ":", "bool", "if", "True", "concat", "XYZ", "with", "local", "point", "features", "otherwise", "just", "use", "point", "features", "Outputs", ":", "new_xyz", ":", "(", "batch_size", "1", "3", ")", "as", "(", "0", "0", "0", ")", "new_points", ":", "(", "batch_size", "1", "ndataset", "3", "+", "channel", ")", "TF", "tensor", "Note", ":", "Equivalent", "to", "sample_and_group", "with", "npoint", "=", "1", "radius", "=", "inf", "use", "(", "0", "0", "0", ")", "as", "the", "centroid" ]
def sample_and_group_all(xyz, points, use_xyz=True): ''' Inputs: xyz: (batch_size, ndataset, 3) TF tensor points: (batch_size, ndataset, channel) TF tensor, if None will just use xyz as points use_xyz: bool, if True concat XYZ with local point features, otherwise just use point features Outputs: new_xyz: (batch_size, 1, 3) as (0,0,0) new_points: (batch_size, 1, ndataset, 3+channel) TF tensor Note: Equivalent to sample_and_group with npoint=1, radius=inf, use (0,0,0) as the centroid ''' batch_size = xyz.get_shape()[0].value nsample = xyz.get_shape()[1].value new_xyz = tf.constant(np.tile(np.array([0,0,0]).reshape((1,1,3)), (batch_size,1,1)),dtype=tf.float32) # (batch_size, 1, 3) idx = tf.constant(np.tile(np.array(range(nsample)).reshape((1,1,nsample)), (batch_size,1,1))) grouped_xyz = tf.reshape(xyz, (batch_size, 1, nsample, 3)) # (batch_size, npoint=1, nsample, 3) if points is not None: if use_xyz: new_points = tf.concat([xyz, points], axis=2) # (batch_size, 16, 259) else: new_points = points new_points = tf.expand_dims(new_points, 1) # (batch_size, 1, 16, 259) else: new_points = grouped_xyz return new_xyz, new_points, idx, grouped_xyz
[ "def", "sample_and_group_all", "(", "xyz", ",", "points", ",", "use_xyz", "=", "True", ")", ":", "batch_size", "=", "xyz", ".", "get_shape", "(", ")", "[", "0", "]", ".", "value", "nsample", "=", "xyz", ".", "get_shape", "(", ")", "[", "1", "]", ".", "value", "new_xyz", "=", "tf", ".", "constant", "(", "np", ".", "tile", "(", "np", ".", "array", "(", "[", "0", ",", "0", ",", "0", "]", ")", ".", "reshape", "(", "(", "1", ",", "1", ",", "3", ")", ")", ",", "(", "batch_size", ",", "1", ",", "1", ")", ")", ",", "dtype", "=", "tf", ".", "float32", ")", "# (batch_size, 1, 3)", "idx", "=", "tf", ".", "constant", "(", "np", ".", "tile", "(", "np", ".", "array", "(", "range", "(", "nsample", ")", ")", ".", "reshape", "(", "(", "1", ",", "1", ",", "nsample", ")", ")", ",", "(", "batch_size", ",", "1", ",", "1", ")", ")", ")", "grouped_xyz", "=", "tf", ".", "reshape", "(", "xyz", ",", "(", "batch_size", ",", "1", ",", "nsample", ",", "3", ")", ")", "# (batch_size, npoint=1, nsample, 3)", "if", "points", "is", "not", "None", ":", "if", "use_xyz", ":", "new_points", "=", "tf", ".", "concat", "(", "[", "xyz", ",", "points", "]", ",", "axis", "=", "2", ")", "# (batch_size, 16, 259)", "else", ":", "new_points", "=", "points", "new_points", "=", "tf", ".", "expand_dims", "(", "new_points", ",", "1", ")", "# (batch_size, 1, 16, 259)", "else", ":", "new_points", "=", "grouped_xyz", "return", "new_xyz", ",", "new_points", ",", "idx", ",", "grouped_xyz" ]
https://github.com/charlesq34/frustum-pointnets/blob/2ffdd345e1fce4775ecb508d207e0ad465bcca80/models/pointnet_util.py#L58-L83
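A minimal usage sketch for this record's function (mine, not from the repo; it assumes TensorFlow 1.x semantics with static shapes, since the body reads get_shape()[...].value, and that sample_and_group_all is importable from the module above):

# Hypothetical usage sketch, assuming TensorFlow 1.x and NumPy are available.
import numpy as np
import tensorflow as tf

xyz = tf.constant(np.random.rand(8, 1024, 3), dtype=tf.float32)      # (batch_size, ndataset, 3)
points = tf.constant(np.random.rand(8, 1024, 64), dtype=tf.float32)  # (batch_size, ndataset, channel)
new_xyz, new_points, idx, grouped_xyz = sample_and_group_all(xyz, points, use_xyz=True)
# new_xyz is all zeros with shape (8, 1, 3); new_points has shape (8, 1, 1024, 3 + 64).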
seemoo-lab/internalblue
ba6ba0b99f835964395d6dd1b1eb7dd850398fd6
internalblue/core.py
python
InternalBlue.fuzzLmp
(self)
return True
Installs a patch inside the sendLmp HCI handler that allows sending arbitrary LMP payloads. Afterwards, use sendLmpPacket as before.

Basically, this ignores LM_LmpInfoTable and LM_LmpInfoTableEsc4 contents, but only via sendLmp HCI and not during normal Link Manager operation.
Installs a patch inside the sendLmp HCI handler that allows sending arbitrary LMP payloads. Afterwards, use sendLmpPacket as before.
[ "Installs", "a", "patch", "inside", "the", "sendLmp", "HCI", "handler", "that", "allows", "sending", "arbitrary", "LMP", "payloads", ".", "Afterwards", "use", "sendLmpPacket", "as", "before", "." ]
def fuzzLmp(self):
    # type: ()-> bool
    """
    Installs a patch inside the sendLmp HCI handler that allows sending
    arbitrary LMP payloads. Afterwards, use sendLmpPacket as before.

    Basically, this ignores LM_LmpInfoTable and LM_LmpInfoTableEsc4 contents,
    but only via sendLmp HCI and not during normal Link Manager operation.
    """
    # Check if constants are defined in fw.py
    for const in [
        "FUZZLMP_CODE_BASE_ADDRESS",
        "FUZZLMP_ASM_CODE",
        "FUZZLMP_HOOK_ADDRESS",
    ]:
        if const not in dir(self.fw):
            self.logger.warning(
                "fuzzLmpPacket: '%s' not in fw.py. FEATURE NOT SUPPORTED!" % const
            )
            return False

    # Assemble the snippet and write it to FUZZLMP_CODE_BASE_ADDRESS
    code = asm(
        self.fw.FUZZLMP_ASM_CODE,
        vma=self.fw.FUZZLMP_CODE_BASE_ADDRESS,
        arch="thumb",
    )
    self.writeMem(self.fw.FUZZLMP_CODE_BASE_ADDRESS, code)

    # Install a patch in the end of the original sendLmpPdu HCI handler
    patch = asm(
        "b 0x%x" % self.fw.FUZZLMP_CODE_BASE_ADDRESS,
        vma=self.fw.FUZZLMP_HOOK_ADDRESS,
    )
    if not self.patchRom(self.fw.FUZZLMP_HOOK_ADDRESS, patch):
        self.logger.warning("Error writing to patchram when installing fuzzLmp patch!")
        return False

    return True
[ "def", "fuzzLmp", "(", "self", ")", ":", "# type: ()-> bool", "# Check if constants are defined in fw.py", "for", "const", "in", "[", "\"FUZZLMP_CODE_BASE_ADDRESS\"", ",", "\"FUZZLMP_ASM_CODE\"", ",", "\"FUZZLMP_HOOK_ADDRESS\"", ",", "]", ":", "if", "const", "not", "in", "dir", "(", "self", ".", "fw", ")", ":", "self", ".", "logger", ".", "warning", "(", "\"fuzzLmpPacket: '%s' not in fw.py. FEATURE NOT SUPPORTED!\"", "%", "const", ")", "return", "False", "# Assemble the snippet and write it to FUZZLMP_CODE_BASE_ADDRESS", "code", "=", "asm", "(", "self", ".", "fw", ".", "FUZZLMP_ASM_CODE", ",", "vma", "=", "self", ".", "fw", ".", "FUZZLMP_CODE_BASE_ADDRESS", ",", "arch", "=", "\"thumb\"", ",", ")", "self", ".", "writeMem", "(", "self", ".", "fw", ".", "FUZZLMP_CODE_BASE_ADDRESS", ",", "code", ")", "# Install a patch in the end of the original sendLmpPdu HCI handler", "patch", "=", "asm", "(", "\"b 0x%x\"", "%", "self", ".", "fw", ".", "FUZZLMP_CODE_BASE_ADDRESS", ",", "vma", "=", "self", ".", "fw", ".", "FUZZLMP_HOOK_ADDRESS", ",", ")", "if", "not", "self", ".", "patchRom", "(", "self", ".", "fw", ".", "FUZZLMP_HOOK_ADDRESS", ",", "patch", ")", ":", "self", ".", "logger", ".", "warning", "(", "\"Error writing to patchram when installing fuzzLmp patch!\"", ")", "return", "False", "return", "True" ]
https://github.com/seemoo-lab/internalblue/blob/ba6ba0b99f835964395d6dd1b1eb7dd850398fd6/internalblue/core.py#L1638-L1677
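A hedged calling sketch, not taken from the internalblue documentation; 'core' is a hypothetical stand-in for an initialized, connected InternalBlue core object (the backend-specific setup is omitted):

# Hypothetical sketch: assumes 'core' is a connected InternalBlue instance whose
# fw module defines the FUZZLMP_* constants checked in the method above.
if core.fuzzLmp():
    # sendLmpPacket can now carry arbitrary opcodes/payloads, bypassing the
    # LM_LmpInfoTable checks inside the sendLmp HCI handler.
    pass
else:
    print("firmware definition does not provide the FUZZLMP_* constants")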
lehrblogger/where-do-you-go
51c1fb3a66d8babe00a9412c72ebbd66fe348d88
httplib2/__init__.py
python
_parse_www_authenticate
(headers, headername='www-authenticate')
return retval
Returns a dictionary of dictionaries, one dict per auth_scheme.
Returns a dictionary of dictionaries, one dict per auth_scheme.
[ "Returns", "a", "dictionary", "of", "dictionaries", "one", "dict", "per", "auth_scheme", "." ]
def _parse_www_authenticate(headers, headername='www-authenticate'):
    """Returns a dictionary of dictionaries, one dict per auth_scheme."""
    retval = {}
    if headers.has_key(headername):
        authenticate = headers[headername].strip()
        www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
        while authenticate:
            # Break off the scheme at the beginning of the line
            if headername == 'authentication-info':
                (auth_scheme, the_rest) = ('digest', authenticate)
            else:
                (auth_scheme, the_rest) = authenticate.split(" ", 1)
            # Now loop over all the key value pairs that come after the scheme,
            # being careful not to roll into the next scheme
            match = www_auth.search(the_rest)
            auth_params = {}
            while match:
                if match and len(match.groups()) == 3:
                    (key, value, the_rest) = match.groups()
                    auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value)  # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
                match = www_auth.search(the_rest)
            retval[auth_scheme.lower()] = auth_params
            authenticate = the_rest.strip()
    return retval
[ "def", "_parse_www_authenticate", "(", "headers", ",", "headername", "=", "'www-authenticate'", ")", ":", "retval", "=", "{", "}", "if", "headers", ".", "has_key", "(", "headername", ")", ":", "authenticate", "=", "headers", "[", "headername", "]", ".", "strip", "(", ")", "www_auth", "=", "USE_WWW_AUTH_STRICT_PARSING", "and", "WWW_AUTH_STRICT", "or", "WWW_AUTH_RELAXED", "while", "authenticate", ":", "# Break off the scheme at the beginning of the line", "if", "headername", "==", "'authentication-info'", ":", "(", "auth_scheme", ",", "the_rest", ")", "=", "(", "'digest'", ",", "authenticate", ")", "else", ":", "(", "auth_scheme", ",", "the_rest", ")", "=", "authenticate", ".", "split", "(", "\" \"", ",", "1", ")", "# Now loop over all the key value pairs that come after the scheme, ", "# being careful not to roll into the next scheme", "match", "=", "www_auth", ".", "search", "(", "the_rest", ")", "auth_params", "=", "{", "}", "while", "match", ":", "if", "match", "and", "len", "(", "match", ".", "groups", "(", ")", ")", "==", "3", ":", "(", "key", ",", "value", ",", "the_rest", ")", "=", "match", ".", "groups", "(", ")", "auth_params", "[", "key", ".", "lower", "(", ")", "]", "=", "UNQUOTE_PAIRS", ".", "sub", "(", "r'\\1'", ",", "value", ")", "# '\\\\'.join([x.replace('\\\\', '') for x in value.split('\\\\\\\\')])", "match", "=", "www_auth", ".", "search", "(", "the_rest", ")", "retval", "[", "auth_scheme", ".", "lower", "(", ")", "]", "=", "auth_params", "authenticate", "=", "the_rest", ".", "strip", "(", ")", "return", "retval" ]
https://github.com/lehrblogger/where-do-you-go/blob/51c1fb3a66d8babe00a9412c72ebbd66fe348d88/httplib2/__init__.py#L242-L266
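A small worked example (mine, not from httplib2's docs); note the body uses dict.has_key, so the sketch assumes a Python 2 interpreter:

# Python 2 example: the parser splits the scheme off, then collects key=value pairs.
headers = {'www-authenticate': 'Digest realm="test", nonce="abc123", qop="auth"'}
info = _parse_www_authenticate(headers)
# info == {'digest': {'realm': 'test', 'nonce': 'abc123', 'qop': 'auth'}}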
census-instrumentation/opencensus-python
15c122dd7e0187b35f956f5d3b77b78455a2aadb
opencensus/tags/validation.py
python
is_valid_tag_value
(value)
return is_legal_chars(value) if len(value) <= 255 else False
Checks if the value is valid

:type value: str
:param value: the value to be checked

:rtype: bool
:returns: True if valid, if not, False.
Checks if the value is valid
[ "Checks", "if", "the", "value", "is", "valid" ]
def is_valid_tag_value(value):
    """Checks if the value is valid

    :type value: str
    :param value: the value to be checked

    :rtype: bool
    :returns: True if valid, if not, False.
    """
    return is_legal_chars(value) if len(value) <= 255 else False
[ "def", "is_valid_tag_value", "(", "value", ")", ":", "return", "is_legal_chars", "(", "value", ")", "if", "len", "(", "value", ")", "<=", "255", "else", "False" ]
https://github.com/census-instrumentation/opencensus-python/blob/15c122dd7e0187b35f956f5d3b77b78455a2aadb/opencensus/tags/validation.py#L32-L42
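Two illustrative calls (assuming is_legal_chars, defined elsewhere in the module, accepts plain printable ASCII):

is_valid_tag_value("frontend")   # True: short and made of legal characters
is_valid_tag_value("x" * 256)    # False: exceeds the 255-character limit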
mouna99/dien
1f314d16aa1700ee02777e6163fb8ca94e3d2810
script/utils.py
python
VecAttGRUCell.state_size
(self)
return self._num_units
[]
def state_size(self):
    return self._num_units
[ "def", "state_size", "(", "self", ")", ":", "return", "self", ".", "_num_units" ]
https://github.com/mouna99/dien/blob/1f314d16aa1700ee02777e6163fb8ca94e3d2810/script/utils.py#L107-L108
mesalock-linux/mesapy
ed546d59a21b36feb93e2309d5c6b75aa0ad95c9
lib-python/2.7/xml/dom/minidom.py
python
ElementInfo.isId
(self, aname)
return False
Returns true iff the named attribute is a DTD-style ID.
Returns true iff the named attribute is a DTD-style ID.
[ "Returns", "true", "iff", "the", "named", "attribute", "is", "a", "DTD", "-", "style", "ID", "." ]
def isId(self, aname):
    """Returns true iff the named attribute is a DTD-style ID."""
    return False
[ "def", "isId", "(", "self", ",", "aname", ")", ":", "return", "False" ]
https://github.com/mesalock-linux/mesapy/blob/ed546d59a21b36feb93e2309d5c6b75aa0ad95c9/lib-python/2.7/xml/dom/minidom.py#L1452-L1454
gwastro/pycbc
1e1c85534b9dba8488ce42df693230317ca63dea
pycbc/distributions/mass.py
python
QfromUniformMass1Mass2._pdf
(self, **kwargs)
Returns the pdf at the given values. The keyword arguments must contain all of parameters in self's params. Unrecognized arguments are ignored.
Returns the pdf at the given values. The keyword arguments must contain all of parameters in self's params. Unrecognized arguments are ignored.
[ "Returns", "the", "pdf", "at", "the", "given", "values", ".", "The", "keyword", "arguments", "must", "contain", "all", "of", "parameters", "in", "self", "s", "params", ".", "Unrecognized", "arguments", "are", "ignored", "." ]
def _pdf(self, **kwargs):
    """Returns the pdf at the given values. The keyword arguments must
    contain all of parameters in self's params. Unrecognized arguments are
    ignored.
    """
    for p in self._params:
        if p not in kwargs.keys():
            raise ValueError(
                'Missing parameter {} to construct pdf.'.format(p))
    if kwargs in self:
        pdf = self._norm * \
              numpy.prod([(1.+kwargs[p])**(2./5)/kwargs[p]**(6./5)
                          for p in self._params])
        return float(pdf)
    else:
        return 0.0
[ "def", "_pdf", "(", "self", ",", "*", "*", "kwargs", ")", ":", "for", "p", "in", "self", ".", "_params", ":", "if", "p", "not", "in", "kwargs", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "'Missing parameter {} to construct pdf.'", ".", "format", "(", "p", ")", ")", "if", "kwargs", "in", "self", ":", "pdf", "=", "self", ".", "_norm", "*", "numpy", ".", "prod", "(", "[", "(", "1.", "+", "kwargs", "[", "p", "]", ")", "**", "(", "2.", "/", "5", ")", "/", "kwargs", "[", "p", "]", "**", "(", "6.", "/", "5", ")", "for", "p", "in", "self", ".", "_params", "]", ")", "return", "float", "(", "pdf", ")", "else", ":", "return", "0.0" ]
https://github.com/gwastro/pycbc/blob/1e1c85534b9dba8488ce42df693230317ca63dea/pycbc/distributions/mass.py#L186-L201
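Reading the body back as math: inside the distribution's bounds (the `kwargs in self` check), the density the code evaluates over the mass-ratio parameters is, up to the stored normalization,

$$ p(q_1, \ldots, q_n) = \mathcal{N} \prod_{i} \frac{(1 + q_i)^{2/5}}{q_i^{6/5}}, $$

where $\mathcal{N}$ is self._norm; outside the bounds the pdf is 0. Consistent with the class name, this is the mass-ratio density implied by component masses drawn uniformly.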
chribsen/simple-machine-learning-examples
dc94e52a4cebdc8bb959ff88b81ff8cfeca25022
venv/lib/python2.7/site-packages/pandas/tools/plotting.py
python
SeriesPlotMethods.hist
(self, bins=10, **kwds)
return self(kind='hist', bins=bins, **kwds)
Histogram

.. versionadded:: 0.17.0

Parameters
----------
bins: integer, default 10
    Number of histogram bins to be used
**kwds : optional
    Keyword arguments to pass on to :py:meth:`pandas.Series.plot`.

Returns
-------
axes : matplotlib.AxesSubplot or np.array of them
Histogram
[ "Histogram" ]
def hist(self, bins=10, **kwds):
    """
    Histogram

    .. versionadded:: 0.17.0

    Parameters
    ----------
    bins: integer, default 10
        Number of histogram bins to be used
    **kwds : optional
        Keyword arguments to pass on to :py:meth:`pandas.Series.plot`.

    Returns
    -------
    axes : matplotlib.AxesSubplot or np.array of them
    """
    return self(kind='hist', bins=bins, **kwds)
[ "def", "hist", "(", "self", ",", "bins", "=", "10", ",", "*", "*", "kwds", ")", ":", "return", "self", "(", "kind", "=", "'hist'", ",", "bins", "=", "bins", ",", "*", "*", "kwds", ")" ]
https://github.com/chribsen/simple-machine-learning-examples/blob/dc94e52a4cebdc8bb959ff88b81ff8cfeca25022/venv/lib/python2.7/site-packages/pandas/tools/plotting.py#L3670-L3687
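A quick usage example (standard pandas accessor behavior, nothing repo-specific):

import pandas as pd

s = pd.Series([1, 2, 2, 3, 3, 3, 4])
ax = s.plot.hist(bins=4)   # equivalent to s.plot(kind='hist', bins=4)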
googlearchive/appengine-flask-skeleton
8c25461d003a0bd99a9ff3b339c2791ee6919242
lib/werkzeug/http.py
python
remove_entity_headers
(headers, allowed=('expires', 'content-location'))
Remove all entity headers from a list or :class:`Headers` object.  This
operation works in-place.  `Expires` and `Content-Location` headers are
by default not removed.  The reason for this is :rfc:`2616` section
10.3.5 which specifies some entity headers that should be sent.

.. versionchanged:: 0.5
   added `allowed` parameter.

:param headers: a list or :class:`Headers` object.
:param allowed: a list of headers that should still be allowed even though
                they are entity headers.
Remove all entity headers from a list or :class:`Headers` object. This operation works in-place. `Expires` and `Content-Location` headers are by default not removed. The reason for this is :rfc:`2616` section 10.3.5 which specifies some entity headers that should be sent.
[ "Remove", "all", "entity", "headers", "from", "a", "list", "or", ":", "class", ":", "Headers", "object", ".", "This", "operation", "works", "in", "-", "place", ".", "Expires", "and", "Content", "-", "Location", "headers", "are", "by", "default", "not", "removed", ".", "The", "reason", "for", "this", "is", ":", "rfc", ":", "2616", "section", "10", ".", "3", ".", "5", "which", "specifies", "some", "entity", "headers", "that", "should", "be", "sent", "." ]
def remove_entity_headers(headers, allowed=('expires', 'content-location')):
    """Remove all entity headers from a list or :class:`Headers` object.
    This operation works in-place.  `Expires` and `Content-Location` headers
    are by default not removed.  The reason for this is :rfc:`2616` section
    10.3.5 which specifies some entity headers that should be sent.

    .. versionchanged:: 0.5
       added `allowed` parameter.

    :param headers: a list or :class:`Headers` object.
    :param allowed: a list of headers that should still be allowed even though
                    they are entity headers.
    """
    allowed = set(x.lower() for x in allowed)
    headers[:] = [(key, value) for key, value in headers if
                  not is_entity_header(key) or key.lower() in allowed]
[ "def", "remove_entity_headers", "(", "headers", ",", "allowed", "=", "(", "'expires'", ",", "'content-location'", ")", ")", ":", "allowed", "=", "set", "(", "x", ".", "lower", "(", ")", "for", "x", "in", "allowed", ")", "headers", "[", ":", "]", "=", "[", "(", "key", ",", "value", ")", "for", "key", ",", "value", "in", "headers", "if", "not", "is_entity_header", "(", "key", ")", "or", "key", ".", "lower", "(", ")", "in", "allowed", "]" ]
https://github.com/googlearchive/appengine-flask-skeleton/blob/8c25461d003a0bd99a9ff3b339c2791ee6919242/lib/werkzeug/http.py#L808-L823
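A worked example of the in-place behavior (the entity-header names come from RFC 2616, which is_entity_header checks against):

headers = [('Content-Type', 'text/html'),   # entity header -> removed
           ('Expires', '0'),                # entity header, but allowed by default
           ('X-Powered-By', 'demo')]        # not an entity header -> kept
remove_entity_headers(headers)
# headers == [('Expires', '0'), ('X-Powered-By', 'demo')]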
google-research/language
61fa7260ac7d690d11ef72ca863e45a37c0bdc80
language/question_answering/bert_joint/run_nq.py
python
convert_examples_to_features
(examples, tokenizer, is_training, output_fn)
return num_spans_to_ids
Converts a list of NqExamples into InputFeatures.
Converts a list of NqExamples into InputFeatures.
[ "Converts", "a", "list", "of", "NqExamples", "into", "InputFeatures", "." ]
def convert_examples_to_features(examples, tokenizer, is_training, output_fn):
    """Converts a list of NqExamples into InputFeatures."""
    num_spans_to_ids = collections.defaultdict(list)

    for example in examples:
        example_index = example.example_id
        features = convert_single_example(example, tokenizer, is_training)
        num_spans_to_ids[len(features)].append(example.qas_id)

        for feature in features:
            feature.example_index = example_index
            feature.unique_id = feature.example_index + feature.doc_span_index
            output_fn(feature)

    return num_spans_to_ids
[ "def", "convert_examples_to_features", "(", "examples", ",", "tokenizer", ",", "is_training", ",", "output_fn", ")", ":", "num_spans_to_ids", "=", "collections", ".", "defaultdict", "(", "list", ")", "for", "example", "in", "examples", ":", "example_index", "=", "example", ".", "example_id", "features", "=", "convert_single_example", "(", "example", ",", "tokenizer", ",", "is_training", ")", "num_spans_to_ids", "[", "len", "(", "features", ")", "]", ".", "append", "(", "example", ".", "qas_id", ")", "for", "feature", "in", "features", ":", "feature", ".", "example_index", "=", "example_index", "feature", ".", "unique_id", "=", "feature", ".", "example_index", "+", "feature", ".", "doc_span_index", "output_fn", "(", "feature", ")", "return", "num_spans_to_ids" ]
https://github.com/google-research/language/blob/61fa7260ac7d690d11ef72ca863e45a37c0bdc80/language/question_answering/bert_joint/run_nq.py#L544-L558
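A hedged calling sketch; the examples and tokenizer here are assumptions standing in for objects produced elsewhere in run_nq.py, not part of this record:

# Hypothetical wiring: 'examples' is an iterable of NqExample objects and
# 'tokenizer' a BERT-style tokenizer, both constructed by other run_nq.py code.
features = []
num_spans_to_ids = convert_examples_to_features(
    examples, tokenizer, is_training=True, output_fn=features.append)
# features now holds one InputFeatures per document span;
# num_spans_to_ids maps span-count -> list of qas_ids with that many spans.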
tomplus/kubernetes_asyncio
f028cc793e3a2c519be6a52a49fb77ff0b014c9b
kubernetes_asyncio/client/models/v1_limit_range_list.py
python
V1LimitRangeList.items
(self, items)
Sets the items of this V1LimitRangeList.

Items is a list of LimitRange objects. More info: https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/  # noqa: E501

:param items: The items of this V1LimitRangeList.  # noqa: E501
:type: list[V1LimitRange]
Sets the items of this V1LimitRangeList.
[ "Sets", "the", "items", "of", "this", "V1LimitRangeList", "." ]
def items(self, items): """Sets the items of this V1LimitRangeList. Items is a list of LimitRange objects. More info: https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/ # noqa: E501 :param items: The items of this V1LimitRangeList. # noqa: E501 :type: list[V1LimitRange] """ if self.local_vars_configuration.client_side_validation and items is None: # noqa: E501 raise ValueError("Invalid value for `items`, must not be `None`") # noqa: E501 self._items = items
[ "def", "items", "(", "self", ",", "items", ")", ":", "if", "self", ".", "local_vars_configuration", ".", "client_side_validation", "and", "items", "is", "None", ":", "# noqa: E501", "raise", "ValueError", "(", "\"Invalid value for `items`, must not be `None`\"", ")", "# noqa: E501", "self", ".", "_items", "=", "items" ]
https://github.com/tomplus/kubernetes_asyncio/blob/f028cc793e3a2c519be6a52a49fb77ff0b014c9b/kubernetes_asyncio/client/models/v1_limit_range_list.py#L104-L115
googleads/google-ads-python
2a1d6062221f6aad1992a6bcca0e7e4a93d2db86
google/ads/googleads/v7/services/services/feed_placeholder_view_service/client.py
python
FeedPlaceholderViewServiceClient.feed_placeholder_view_path
( customer_id: str, placeholder_type: str, )
return "customers/{customer_id}/feedPlaceholderViews/{placeholder_type}".format( customer_id=customer_id, placeholder_type=placeholder_type, )
Return a fully-qualified feed_placeholder_view string.
Return a fully-qualified feed_placeholder_view string.
[ "Return", "a", "fully", "-", "qualified", "feed_placeholder_view", "string", "." ]
def feed_placeholder_view_path(
    customer_id: str, placeholder_type: str,
) -> str:
    """Return a fully-qualified feed_placeholder_view string."""
    return "customers/{customer_id}/feedPlaceholderViews/{placeholder_type}".format(
        customer_id=customer_id, placeholder_type=placeholder_type,
    )
[ "def", "feed_placeholder_view_path", "(", "customer_id", ":", "str", ",", "placeholder_type", ":", "str", ",", ")", "->", "str", ":", "return", "\"customers/{customer_id}/feedPlaceholderViews/{placeholder_type}\"", ".", "format", "(", "customer_id", "=", "customer_id", ",", "placeholder_type", "=", "placeholder_type", ",", ")" ]
https://github.com/googleads/google-ads-python/blob/2a1d6062221f6aad1992a6bcca0e7e4a93d2db86/google/ads/googleads/v7/services/services/feed_placeholder_view_service/client.py#L164-L170
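The path template is fully determined by the body, so the output is predictable (the argument values below are illustrative, not from the source):

path = FeedPlaceholderViewServiceClient.feed_placeholder_view_path(
    "1234567890", "SITELINK")
# path == "customers/1234567890/feedPlaceholderViews/SITELINK"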