Dataset schema (one record per Python function; string columns list min-max lengths):

nwo                 string, 5-91 chars    (GitHub owner/repo)
sha                 string, 40 chars      (commit hash)
path                string, 5-174 chars   (file path within the repo)
language            string, 1 class       (python)
identifier          string, 1-120 chars   (function or method name)
parameters          string, 0-3.15k chars
argument_list       string, 1 class
return_statement    string, 0-24.1k chars
docstring           string, 0-27.3k chars
docstring_summary   string, 0-13.8k chars
docstring_tokens    sequence
function            string, 22-139k chars
function_tokens     sequence
url                 string, 87-283 chars  (GitHub blob URL with line range)

nwo: spotify/luigi
sha: c3b66f4a5fa7eaa52f9a72eb6704b1049035c789
path: luigi/contrib/s3.py
language: python
identifier: S3Client.put_multipart
parameters: (self, local_path, destination_s3_path, part_size=DEFAULT_PART_SIZE, **kwargs)
docstring: Put an object stored locally to an S3 path using S3 multi-part upload (for files > 8 MB). :param local_path: Path to source local file :param destination_s3_path: URL for target S3 location :param part_size: Part size in bytes. Default: 8388608 (8 MB) :param kwargs: Keyword arguments are passed to the boto function `upload_fileobj` as ExtraArgs
function:

def put_multipart(self, local_path, destination_s3_path,
                  part_size=DEFAULT_PART_SIZE, **kwargs):
    """
    Put an object stored locally to an S3 path using S3 multi-part upload
    (for files > 8 MB).

    :param local_path: Path to source local file
    :param destination_s3_path: URL for target S3 location
    :param part_size: Part size in bytes. Default: 8388608 (8 MB)
    :param kwargs: Keyword arguments are passed to the boto function
        `upload_fileobj` as ExtraArgs
    """
    self._check_deprecated_argument(**kwargs)
    from boto3.s3.transfer import TransferConfig
    # default part size for boto3 is 8 MB, changing it to fit part_size
    # provided as a parameter
    transfer_config = TransferConfig(multipart_chunksize=part_size)
    (bucket, key) = self._path_to_bucket_and_key(destination_s3_path)
    self.s3.meta.client.upload_fileobj(
        Fileobj=open(local_path, 'rb'), Bucket=bucket, Key=key,
        Config=transfer_config, ExtraArgs=kwargs)
[ "def", "put_multipart", "(", "self", ",", "local_path", ",", "destination_s3_path", ",", "part_size", "=", "DEFAULT_PART_SIZE", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_check_deprecated_argument", "(", "*", "*", "kwargs", ")", "from", "boto3", ".", "s3", ".", "transfer", "import", "TransferConfig", "# default part size for boto3 is 8Mb, changing it to fit part_size", "# provided as a parameter", "transfer_config", "=", "TransferConfig", "(", "multipart_chunksize", "=", "part_size", ")", "(", "bucket", ",", "key", ")", "=", "self", ".", "_path_to_bucket_and_key", "(", "destination_s3_path", ")", "self", ".", "s3", ".", "meta", ".", "client", ".", "upload_fileobj", "(", "Fileobj", "=", "open", "(", "local_path", ",", "'rb'", ")", ",", "Bucket", "=", "bucket", ",", "Key", "=", "key", ",", "Config", "=", "transfer_config", ",", "ExtraArgs", "=", "kwargs", ")" ]
https://github.com/spotify/luigi/blob/c3b66f4a5fa7eaa52f9a72eb6704b1049035c789/luigi/contrib/s3.py#L258-L277
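
A hedged usage sketch of the client call (the bucket, file names, and ACL value are placeholders; S3Client is imported from the module this record points at, and credentials are resolved from config or the environment):

from luigi.contrib.s3 import S3Client

client = S3Client()
client.put_multipart(
    'big_local_file.bin',
    's3://my-bucket/path/big_local_file.bin',
    part_size=16 * 1024 * 1024,       # 16 MB parts instead of the 8 MB default
    ACL='bucket-owner-full-control',  # forwarded to upload_fileobj as ExtraArgs
)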

nwo: lad1337/XDM
sha: 0c1b7009fe00f06f102a6f67c793478f515e7efe
path: site-packages/cherrypy/lib/cptools.py
language: python
identifier: SessionAuth.do_logout
parameters: (self, from_page='..', **kwargs)
docstring: Logout. May raise redirect, or return True if request handled.
function:

def do_logout(self, from_page='..', **kwargs):
    """Logout. May raise redirect, or return True if request handled."""
    sess = cherrypy.session
    username = sess.get(self.session_key)
    sess[self.session_key] = None
    if username:
        cherrypy.serving.request.login = None
        self.on_logout(username)
    raise cherrypy.HTTPRedirect(from_page)
[ "def", "do_logout", "(", "self", ",", "from_page", "=", "'..'", ",", "*", "*", "kwargs", ")", ":", "sess", "=", "cherrypy", ".", "session", "username", "=", "sess", ".", "get", "(", "self", ".", "session_key", ")", "sess", "[", "self", ".", "session_key", "]", "=", "None", "if", "username", ":", "cherrypy", ".", "serving", ".", "request", ".", "login", "=", "None", "self", ".", "on_logout", "(", "username", ")", "raise", "cherrypy", ".", "HTTPRedirect", "(", "from_page", ")" ]
https://github.com/lad1337/XDM/blob/0c1b7009fe00f06f102a6f67c793478f515e7efe/site-packages/cherrypy/lib/cptools.py#L330-L338
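
A minimal sketch of wiring SessionAuth into an app, assuming the stock session_auth tool routing (requests whose path ends in login_screen, do_login, or do_logout are intercepted, so do_logout above runs when a client hits /do_logout; the credentials are placeholders):

import cherrypy

def check_username_and_password(username, password):
    # Per the cptools convention: return an error string on failure, None on success.
    if (username, password) != ('admin', 'secret'):
        return 'Invalid credentials'

class Root:
    @cherrypy.expose
    def index(self):
        return 'Logged in as %s' % cherrypy.request.login

config = {'/': {
    'tools.sessions.on': True,
    'tools.session_auth.on': True,
    'tools.session_auth.check_username_and_password': check_username_and_password,
}}
# cherrypy.quickstart(Root(), '/', config)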

nwo: yt-project/unyt
sha: ec5b3301c110787c9a67c600a66d8051ab8c831e
path: unyt/_version.py
language: python
identifier: plus_or_dot
parameters: (pieces)
return_statement: return "+"
docstring: Return a + if we don't already have one, else return a .
function:

def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a ."""
    if "+" in pieces.get("closest-tag", ""):
        return "."
    return "+"
[ "def", "plus_or_dot", "(", "pieces", ")", ":", "if", "\"+\"", "in", "pieces", ".", "get", "(", "\"closest-tag\"", ",", "\"\"", ")", ":", "return", "\".\"", "return", "\"+\"" ]
https://github.com/yt-project/unyt/blob/ec5b3301c110787c9a67c600a66d8051ab8c831e/unyt/_version.py#L308-L312
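
A quick sanity check of the behaviour on versioneer-style "pieces" dicts (the dict contents follow the docstring; they are not a full API spec):

assert plus_or_dot({"closest-tag": "1.2.3"}) == "+"
assert plus_or_dot({"closest-tag": "1.2.3+dirty"}) == "."
assert plus_or_dot({}) == "+"  # no closest-tag recorded at all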

nwo: mitmproxy/pdoc
sha: 5d32ea9f1320b39e8de7e28398da1d18eec85834
path: pdoc/search.py
language: python
identifier: precompile_index
parameters: (documents: list[dict], compile_js: Path)
docstring: This method tries to precompile the Elasticlunr.js search index by invoking `nodejs` or `node`. If that fails, an unprocessed index will be returned (which will be compiled locally on the client side). If this happens and the index is rather large (>3MB), a warning with precompile instructions is printed. We currently require nodejs, but we'd welcome PRs that support other JavaScript runtimes or – even better – a Python-based search index generation similar to [elasticlunr-rs](https://github.com/mattico/elasticlunr-rs) that could be shipped as part of pdoc.
function:

def precompile_index(documents: list[dict], compile_js: Path) -> str:
    """
    This method tries to precompile the Elasticlunr.js search index by
    invoking `nodejs` or `node`. If that fails, an unprocessed index will
    be returned (which will be compiled locally on the client side). If
    this happens and the index is rather large (>3MB), a warning with
    precompile instructions is printed.

    We currently require nodejs, but we'd welcome PRs that support other
    JavaScript runtimes or – even better – a Python-based search index
    generation similar to
    [elasticlunr-rs](https://github.com/mattico/elasticlunr-rs)
    that could be shipped as part of pdoc.
    """
    raw = json.dumps(documents)
    try:
        if shutil.which("nodejs"):
            executable = "nodejs"
        else:
            executable = "node"
        out = subprocess.check_output(
            [executable, compile_js],
            input=raw.encode(),
            cwd=Path(__file__).parent / "templates",
            stderr=subprocess.STDOUT,
        )
        index = json.loads(out)
        index["_isPrebuiltIndex"] = True
    except Exception as e:
        if len(raw) > 3 * 1024 * 1024:
            print(
                f"pdoc failed to precompile the search index: {e}\n"
                f"Search will work, but may be slower. "
                f"This error may only show up now because your index has reached a certain size. "
                f"See https://pdoc.dev/docs/pdoc/search.html for details."
            )
            if isinstance(e, subprocess.CalledProcessError):
                print(f"{' Node.js Output ':=^80}")
                print(
                    textwrap.indent(e.output.decode("utf8", "replace"), "    ").rstrip()
                )
                print("=" * 80)
        return raw
    else:
        return json.dumps(index)
[ "def", "precompile_index", "(", "documents", ":", "list", "[", "dict", "]", ",", "compile_js", ":", "Path", ")", "->", "str", ":", "raw", "=", "json", ".", "dumps", "(", "documents", ")", "try", ":", "if", "shutil", ".", "which", "(", "\"nodejs\"", ")", ":", "executable", "=", "\"nodejs\"", "else", ":", "executable", "=", "\"node\"", "out", "=", "subprocess", ".", "check_output", "(", "[", "executable", ",", "compile_js", "]", ",", "input", "=", "raw", ".", "encode", "(", ")", ",", "cwd", "=", "Path", "(", "__file__", ")", ".", "parent", "/", "\"templates\"", ",", "stderr", "=", "subprocess", ".", "STDOUT", ",", ")", "index", "=", "json", ".", "loads", "(", "out", ")", "index", "[", "\"_isPrebuiltIndex\"", "]", "=", "True", "except", "Exception", "as", "e", ":", "if", "len", "(", "raw", ")", ">", "3", "*", "1024", "*", "1024", ":", "print", "(", "f\"pdoc failed to precompile the search index: {e}\\n\"", "f\"Search will work, but may be slower. \"", "f\"This error may only show up now because your index has reached a certain size. \"", "f\"See https://pdoc.dev/docs/pdoc/search.html for details.\"", ")", "if", "isinstance", "(", "e", ",", "subprocess", ".", "CalledProcessError", ")", ":", "print", "(", "f\"{' Node.js Output ':=^80}\"", ")", "print", "(", "textwrap", ".", "indent", "(", "e", ".", "output", ".", "decode", "(", "\"utf8\"", ",", "\"replace\"", ")", ",", "\" \"", ")", ".", "rstrip", "(", ")", ")", "print", "(", "\"=\"", "*", "80", ")", "return", "raw", "else", ":", "return", "json", ".", "dumps", "(", "index", ")" ]
https://github.com/mitmproxy/pdoc/blob/5d32ea9f1320b39e8de7e28398da1d18eec85834/pdoc/search.py#L116-L156
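
A hedged invocation sketch (the document field names and the compile script path are illustrative; inside pdoc the real compile script ships with the package templates):

from pathlib import Path

docs = [{"fullname": "mymod.f", "modulename": "mymod", "doc": "Example entry."}]
index_json = precompile_index(docs, Path("build-index.js"))  # hypothetical script
# Either way a JSON string comes back: a prebuilt index if node ran,
# otherwise the raw documents for client-side compilation.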

nwo: naftaliharris/tauthon
sha: 5587ceec329b75f7caf6d65a036db61ac1bae214
path: Parser/asdl.py
language: python
identifier: ASDLParser.p_definition_1
parameters: (self, (definitions, definition))
return_statement: return definitions + definition
docstring: definitions ::= definition definitions
function:

def p_definition_1(self, (definitions, definition)):
    " definitions ::= definition definitions "
    return definitions + definition
[ "def", "p_definition_1", "(", "self", ",", "(", "definitions", ",", "definition", ")", ")", ":", "return", "definitions", "+", "definition" ]
https://github.com/naftaliharris/tauthon/blob/5587ceec329b75f7caf6d65a036db61ac1bae214/Parser/asdl.py#L136-L138
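
Note the tuple parameter `(definitions, definition)`: that unpacking syntax is Python 2 only (tauthon is a Python 2 continuation) and was removed in Python 3 by PEP 3113. A rough Python 3 equivalent, for illustration only:

def p_definition_1(self, args):
    " definitions ::= definition definitions "
    definitions, definition = args
    return definitions + definition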

nwo: mit-han-lab/data-efficient-gans
sha: 6858275f08f43a33026844c8c2ac4e703e8a07ba
path: DiffAugment-stylegan2/training/networks_stylegan2.py
language: python
identifier: D_stylegan2
parameters: (images_in, num_channels=3, label_size=0, resolution=1024, fmap_base=16384, fmap_decay=1.0, fmap_min=1, fmap_max=512, architecture='resnet', nonlinearity='lrelu', mbstd_group_size=4, mbstd_num_features=1, dtype='float32', resample_kernel=[1, 3, 3, 1], impl='cuda', avg_pooling=False, **_kwargs)
return_statement: return scores_out
function:

def D_stylegan2(
        images_in,                     # First input: Images [minibatch, channel, height, width].
        num_channels=3,                # Number of input color channels. Overridden based on dataset.
        label_size=0,                  # Label dimensionality.
        resolution=1024,               # Input resolution. Overridden based on dataset.
        fmap_base=16384,               # Overall multiplier for the number of feature maps.
        fmap_decay=1.0,                # log2 feature map reduction when doubling the resolution.
        fmap_min=1,                    # Minimum number of feature maps in any layer.
        fmap_max=512,                  # Maximum number of feature maps in any layer.
        architecture='resnet',         # Architecture: 'orig', 'skip', 'resnet'.
        nonlinearity='lrelu',          # Activation function: 'relu', 'lrelu', etc.
        mbstd_group_size=4,            # Group size for the minibatch standard deviation layer, 0 = disable.
        mbstd_num_features=1,          # Number of features for the minibatch standard deviation layer.
        dtype='float32',               # Data type to use for activations and outputs.
        resample_kernel=[1, 3, 3, 1],  # Low-pass filter to apply when resampling activations. None = no filtering.
        impl='cuda',
        avg_pooling=False,
        **_kwargs):                    # Ignore unrecognized keyword args.

    resolution_log2 = int(np.ceil(np.log2(resolution)))
    pad = (2 ** resolution_log2 - resolution) // 2

    def nf(stage):
        return np.clip(int(fmap_base / (2.0 ** (stage * fmap_decay))), fmap_min, fmap_max)
    assert architecture in ['orig', 'skip', 'resnet']
    act = nonlinearity

    images_in.set_shape([None, num_channels, resolution, resolution])
    images_in = tf.cast(images_in, dtype)

    # Building blocks for main layers.
    def fromrgb(x, y, res):  # res = 2..resolution_log2
        with tf.variable_scope('FromRGB'):
            t = apply_bias_act(conv2d_layer(y, fmaps=nf(res - 1), kernel=1, impl=impl), act=act, impl=impl)
            return t if x is None else x + t

    def block(x, res):  # res = 2..resolution_log2
        t = x
        with tf.variable_scope('Conv0'):
            x = apply_bias_act(conv2d_layer(x, fmaps=nf(res - 1), kernel=3, impl=impl), act=act, impl=impl)
        with tf.variable_scope('Conv1_down'):
            x = apply_bias_act(conv2d_layer(x, fmaps=nf(res - 2), kernel=3, down=True, resample_kernel=resample_kernel, impl=impl), act=act, impl=impl)
        if architecture == 'resnet':
            with tf.variable_scope('Skip'):
                t = conv2d_layer(t, fmaps=nf(res - 2), kernel=1, down=True, resample_kernel=resample_kernel, impl=impl)
            x = (x + t) * (1 / np.sqrt(2))
        return x

    def downsample(y):
        with tf.variable_scope('Downsample'):
            return downsample_2d(y, k=resample_kernel, impl=impl)

    # Main layers.
    x = None
    y = tf.pad(images_in, [[0, 0], [0, 0], [pad, pad], [pad, pad]])
    for res in range(resolution_log2, 2, -1):
        with tf.variable_scope('%dx%d' % (2**res, 2**res)):
            if architecture == 'skip' or res == resolution_log2:
                x = fromrgb(x, y, res)
            x = block(x, res)
            if architecture == 'skip':
                y = downsample(y)

    # Final layers.
    with tf.variable_scope('4x4'):
        if architecture == 'skip':
            x = fromrgb(x, y, 2)
        if mbstd_group_size > 1:
            with tf.variable_scope('MinibatchStddev'):
                x = minibatch_stddev_layer(x, mbstd_group_size, mbstd_num_features)
        with tf.variable_scope('Conv'):
            x = apply_bias_act(conv2d_layer(x, fmaps=nf(1), kernel=3, impl=impl), act=act, impl=impl)
        if avg_pooling:
            x = tf.reduce_mean(x, axis=[2, 3])
        with tf.variable_scope('Dense0'):
            x = apply_bias_act(dense_layer(x, fmaps=nf(0)), act=act, impl=impl)

    with tf.variable_scope('Output'):
        if label_size > 0:
            scores_out = apply_bias_act(dense_layer(x, fmaps=label_size), impl=impl)
        else:
            scores_out = tf.squeeze(apply_bias_act(dense_layer(x, fmaps=1), impl=impl), axis=1)
    assert scores_out.dtype == tf.as_dtype(dtype)
    scores_out = tf.identity(scores_out, name='scores_out')
    return scores_out
[ "def", "D_stylegan2", "(", "images_in", ",", "# First input: Images [minibatch, channel, height, width].", "num_channels", "=", "3", ",", "# Number of input color channels. Overridden based on dataset.", "label_size", "=", "0", ",", "# Label dimensionality.", "resolution", "=", "1024", ",", "# Input resolution. Overridden based on dataset.", "fmap_base", "=", "16384", ",", "# Overall multiplier for the number of feature maps.", "fmap_decay", "=", "1.0", ",", "# log2 feature map reduction when doubling the resolution.", "fmap_min", "=", "1", ",", "# Minimum number of feature maps in any layer.", "fmap_max", "=", "512", ",", "# Maximum number of feature maps in any layer.", "architecture", "=", "'resnet'", ",", "# Architecture: 'orig', 'skip', 'resnet'.", "nonlinearity", "=", "'lrelu'", ",", "# Activation function: 'relu', 'lrelu', etc.", "mbstd_group_size", "=", "4", ",", "# Group size for the minibatch standard deviation layer, 0 = disable.", "mbstd_num_features", "=", "1", ",", "# Number of features for the minibatch standard deviation layer.", "dtype", "=", "'float32'", ",", "# Data type to use for activations and outputs.", "resample_kernel", "=", "[", "1", ",", "3", ",", "3", ",", "1", "]", ",", "# Low-pass filter to apply when resampling activations. None = no filtering.", "impl", "=", "'cuda'", ",", "avg_pooling", "=", "False", ",", "*", "*", "_kwargs", ")", ":", "# Ignore unrecognized keyword args.", "resolution_log2", "=", "int", "(", "np", ".", "ceil", "(", "np", ".", "log2", "(", "resolution", ")", ")", ")", "pad", "=", "(", "2", "**", "resolution_log2", "-", "resolution", ")", "//", "2", "def", "nf", "(", "stage", ")", ":", "return", "np", ".", "clip", "(", "int", "(", "fmap_base", "/", "(", "2.0", "**", "(", "stage", "*", "fmap_decay", ")", ")", ")", ",", "fmap_min", ",", "fmap_max", ")", "assert", "architecture", "in", "[", "'orig'", ",", "'skip'", ",", "'resnet'", "]", "act", "=", "nonlinearity", "images_in", ".", "set_shape", "(", "[", "None", ",", "num_channels", ",", "resolution", ",", "resolution", "]", ")", "images_in", "=", "tf", ".", "cast", "(", "images_in", ",", "dtype", ")", "# Building blocks for main layers.", "def", "fromrgb", "(", "x", ",", "y", ",", "res", ")", ":", "# res = 2..resolution_log2", "with", "tf", ".", "variable_scope", "(", "'FromRGB'", ")", ":", "t", "=", "apply_bias_act", "(", "conv2d_layer", "(", "y", ",", "fmaps", "=", "nf", "(", "res", "-", "1", ")", ",", "kernel", "=", "1", ",", "impl", "=", "impl", ")", ",", "act", "=", "act", ",", "impl", "=", "impl", ")", "return", "t", "if", "x", "is", "None", "else", "x", "+", "t", "def", "block", "(", "x", ",", "res", ")", ":", "# res = 2..resolution_log2", "t", "=", "x", "with", "tf", ".", "variable_scope", "(", "'Conv0'", ")", ":", "x", "=", "apply_bias_act", "(", "conv2d_layer", "(", "x", ",", "fmaps", "=", "nf", "(", "res", "-", "1", ")", ",", "kernel", "=", "3", ",", "impl", "=", "impl", ")", ",", "act", "=", "act", ",", "impl", "=", "impl", ")", "with", "tf", ".", "variable_scope", "(", "'Conv1_down'", ")", ":", "x", "=", "apply_bias_act", "(", "conv2d_layer", "(", "x", ",", "fmaps", "=", "nf", "(", "res", "-", "2", ")", ",", "kernel", "=", "3", ",", "down", "=", "True", ",", "resample_kernel", "=", "resample_kernel", ",", "impl", "=", "impl", ")", ",", "act", "=", "act", ",", "impl", "=", "impl", ")", "if", "architecture", "==", "'resnet'", ":", "with", "tf", ".", "variable_scope", "(", "'Skip'", ")", ":", "t", "=", "conv2d_layer", "(", "t", ",", "fmaps", "=", "nf", "(", "res", "-", "2", ")", 
",", "kernel", "=", "1", ",", "down", "=", "True", ",", "resample_kernel", "=", "resample_kernel", ",", "impl", "=", "impl", ")", "x", "=", "(", "x", "+", "t", ")", "*", "(", "1", "/", "np", ".", "sqrt", "(", "2", ")", ")", "return", "x", "def", "downsample", "(", "y", ")", ":", "with", "tf", ".", "variable_scope", "(", "'Downsample'", ")", ":", "return", "downsample_2d", "(", "y", ",", "k", "=", "resample_kernel", ",", "impl", "=", "impl", ")", "# Main layers.", "x", "=", "None", "y", "=", "tf", ".", "pad", "(", "images_in", ",", "[", "[", "0", ",", "0", "]", ",", "[", "0", ",", "0", "]", ",", "[", "pad", ",", "pad", "]", ",", "[", "pad", ",", "pad", "]", "]", ")", "for", "res", "in", "range", "(", "resolution_log2", ",", "2", ",", "-", "1", ")", ":", "with", "tf", ".", "variable_scope", "(", "'%dx%d'", "%", "(", "2", "**", "res", ",", "2", "**", "res", ")", ")", ":", "if", "architecture", "==", "'skip'", "or", "res", "==", "resolution_log2", ":", "x", "=", "fromrgb", "(", "x", ",", "y", ",", "res", ")", "x", "=", "block", "(", "x", ",", "res", ")", "if", "architecture", "==", "'skip'", ":", "y", "=", "downsample", "(", "y", ")", "# Final layers.", "with", "tf", ".", "variable_scope", "(", "'4x4'", ")", ":", "if", "architecture", "==", "'skip'", ":", "x", "=", "fromrgb", "(", "x", ",", "y", ",", "2", ")", "if", "mbstd_group_size", ">", "1", ":", "with", "tf", ".", "variable_scope", "(", "'MinibatchStddev'", ")", ":", "x", "=", "minibatch_stddev_layer", "(", "x", ",", "mbstd_group_size", ",", "mbstd_num_features", ")", "with", "tf", ".", "variable_scope", "(", "'Conv'", ")", ":", "x", "=", "apply_bias_act", "(", "conv2d_layer", "(", "x", ",", "fmaps", "=", "nf", "(", "1", ")", ",", "kernel", "=", "3", ",", "impl", "=", "impl", ")", ",", "act", "=", "act", ",", "impl", "=", "impl", ")", "if", "avg_pooling", ":", "x", "=", "tf", ".", "reduce_mean", "(", "x", ",", "axis", "=", "[", "2", ",", "3", "]", ")", "with", "tf", ".", "variable_scope", "(", "'Dense0'", ")", ":", "x", "=", "apply_bias_act", "(", "dense_layer", "(", "x", ",", "fmaps", "=", "nf", "(", "0", ")", ")", ",", "act", "=", "act", ",", "impl", "=", "impl", ")", "with", "tf", ".", "variable_scope", "(", "'Output'", ")", ":", "if", "label_size", ">", "0", ":", "scores_out", "=", "apply_bias_act", "(", "dense_layer", "(", "x", ",", "fmaps", "=", "label_size", ")", ",", "impl", "=", "impl", ")", "else", ":", "scores_out", "=", "tf", ".", "squeeze", "(", "apply_bias_act", "(", "dense_layer", "(", "x", ",", "fmaps", "=", "1", ")", ",", "impl", "=", "impl", ")", ",", "axis", "=", "1", ")", "assert", "scores_out", ".", "dtype", "==", "tf", ".", "as_dtype", "(", "dtype", ")", "scores_out", "=", "tf", ".", "identity", "(", "scores_out", ",", "name", "=", "'scores_out'", ")", "return", "scores_out" ]
https://github.com/mit-han-lab/data-efficient-gans/blob/6858275f08f43a33026844c8c2ac4e703e8a07ba/DiffAugment-stylegan2/training/networks_stylegan2.py#L422-L505

nwo: mjq11302010044/RRPN_pytorch
sha: a966f6f238c03498514742cde5cd98e51efb440c
path: maskrcnn_benchmark/modeling/rrpn/rrpn.py
language: python
identifier: build_rpn
parameters: (cfg)
return_statement: return RPNModule(cfg)
docstring: This gives the gist of it. Not super important because it doesn't change as much
function:

def build_rpn(cfg):
    """
    This gives the gist of it. Not super important because it doesn't
    change as much
    """
    return RPNModule(cfg)
[ "def", "build_rpn", "(", "cfg", ")", ":", "return", "RPNModule", "(", "cfg", ")" ]
https://github.com/mjq11302010044/RRPN_pytorch/blob/a966f6f238c03498514742cde5cd98e51efb440c/maskrcnn_benchmark/modeling/rrpn/rrpn.py#L143-L147

nwo: SteveDoyle2/pyNastran
sha: eda651ac2d4883d95a34951f8a002ff94f642a1a
path: pyNastran/utils/dict_to_h5py.py
language: python
identifier: load_obj_from_hdf5
parameters: (hdf5_filename: str, custom_types_dict=None, log=None, debug=False)
return_statement: return model
docstring: loads an hdf5 file into an object Parameters ---------- hdf5_filename : str the h5 filename to load custom_types_dict : dict[key] : function() the custom mapper
function:

def load_obj_from_hdf5(hdf5_filename: str, custom_types_dict=None, log=None, debug=False):
    """
    loads an hdf5 file into an object

    Parameters
    ----------
    hdf5_filename : str
        the h5 filename to load
    custom_types_dict : dict[key] : function()
        the custom mapper
    """
    check_path(hdf5_filename, 'hdf5_filename')
    log = get_logger2(log=log, debug=debug, encoding='utf-8')
    log.info('hdf5_filename = %r' % hdf5_filename)
    model = {}
    with h5py.File(hdf5_filename, 'r') as h5_file:
        load_obj_from_hdf5_file(model, h5_file, custom_types_dict=custom_types_dict,
                                log=log, debug=debug)
    return model
[ "def", "load_obj_from_hdf5", "(", "hdf5_filename", ":", "str", ",", "custom_types_dict", "=", "None", ",", "log", "=", "None", ",", "debug", "=", "False", ")", ":", "check_path", "(", "hdf5_filename", ",", "'hdf5_filename'", ")", "log", "=", "get_logger2", "(", "log", "=", "log", ",", "debug", "=", "debug", ",", "encoding", "=", "'utf-8'", ")", "log", ".", "info", "(", "'hdf5_filename = %r'", "%", "hdf5_filename", ")", "model", "=", "{", "}", "with", "h5py", ".", "File", "(", "hdf5_filename", ",", "'r'", ")", "as", "h5_file", ":", "load_obj_from_hdf5_file", "(", "model", ",", "h5_file", ",", "custom_types_dict", "=", "custom_types_dict", ",", "log", "=", "log", ",", "debug", "=", "debug", ")", "return", "model" ]
https://github.com/SteveDoyle2/pyNastran/blob/eda651ac2d4883d95a34951f8a002ff94f642a1a/pyNastran/utils/dict_to_h5py.py#L219-L237
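
A minimal usage sketch (the file name is hypothetical; the function returns a plain dict rebuilt from the file's groups and datasets):

from pyNastran.utils.dict_to_h5py import load_obj_from_hdf5

model = load_obj_from_hdf5('results.h5', debug=True)
print(sorted(model.keys()))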

nwo: caiiiac/Machine-Learning-with-Python
sha: 1a26c4467da41ca4ebc3d5bd789ea942ef79422f
path: MachineLearning/venv/lib/python3.5/site-packages/scipy/sparse/dok.py
language: python
identifier: dok_matrix.__getitem__
parameters: (self, index)
return_statement: return newdok
docstring: If key=(i,j) is a pair of integers, return the corresponding element. If either i or j is a slice or sequence, return a new sparse matrix with just these elements.
function:

def __getitem__(self, index):
    """If key=(i,j) is a pair of integers, return the corresponding
    element.  If either i or j is a slice or sequence, return a new
    sparse matrix with just these elements.
    """
    zero = self.dtype.type(0)
    i, j = self._unpack_index(index)

    i_intlike = isintlike(i)
    j_intlike = isintlike(j)

    if i_intlike and j_intlike:
        # Scalar index case
        i = int(i)
        j = int(j)
        if i < 0:
            i += self.shape[0]
        if i < 0 or i >= self.shape[0]:
            raise IndexError('index out of bounds')
        if j < 0:
            j += self.shape[1]
        if j < 0 or j >= self.shape[1]:
            raise IndexError('index out of bounds')
        return dict.get(self, (i,j), zero)
    elif ((i_intlike or isinstance(i, slice)) and
          (j_intlike or isinstance(j, slice))):
        # Fast path for slicing very sparse matrices
        i_slice = slice(i, i+1) if i_intlike else i
        j_slice = slice(j, j+1) if j_intlike else j
        i_indices = i_slice.indices(self.shape[0])
        j_indices = j_slice.indices(self.shape[1])
        i_seq = xrange(*i_indices)
        j_seq = xrange(*j_indices)
        newshape = (len(i_seq), len(j_seq))
        newsize = _prod(newshape)

        if len(self) < 2*newsize and newsize != 0:
            # Switch to the fast path only when advantageous
            # (count the iterations in the loops, adjust for complexity)
            #
            # We also don't handle newsize == 0 here (if
            # i/j_intlike, it can mean index i or j was out of
            # bounds)
            return self._getitem_ranges(i_indices, j_indices, newshape)

    i, j = self._index_to_arrays(i, j)

    if i.size == 0:
        return dok_matrix(i.shape, dtype=self.dtype)

    min_i = i.min()
    if min_i < -self.shape[0] or i.max() >= self.shape[0]:
        raise IndexError('index (%d) out of range -%d to %d)' %
                         (i.min(), self.shape[0], self.shape[0]-1))
    if min_i < 0:
        i = i.copy()
        i[i < 0] += self.shape[0]

    min_j = j.min()
    if min_j < -self.shape[1] or j.max() >= self.shape[1]:
        raise IndexError('index (%d) out of range -%d to %d)' %
                         (j.min(), self.shape[1], self.shape[1]-1))
    if min_j < 0:
        j = j.copy()
        j[j < 0] += self.shape[1]

    newdok = dok_matrix(i.shape, dtype=self.dtype)

    for a in xrange(i.shape[0]):
        for b in xrange(i.shape[1]):
            v = dict.get(self, (i[a,b], j[a,b]), zero)
            if v != 0:
                dict.__setitem__(newdok, (a, b), v)

    return newdok
[ "def", "__getitem__", "(", "self", ",", "index", ")", ":", "zero", "=", "self", ".", "dtype", ".", "type", "(", "0", ")", "i", ",", "j", "=", "self", ".", "_unpack_index", "(", "index", ")", "i_intlike", "=", "isintlike", "(", "i", ")", "j_intlike", "=", "isintlike", "(", "j", ")", "if", "i_intlike", "and", "j_intlike", ":", "# Scalar index case", "i", "=", "int", "(", "i", ")", "j", "=", "int", "(", "j", ")", "if", "i", "<", "0", ":", "i", "+=", "self", ".", "shape", "[", "0", "]", "if", "i", "<", "0", "or", "i", ">=", "self", ".", "shape", "[", "0", "]", ":", "raise", "IndexError", "(", "'index out of bounds'", ")", "if", "j", "<", "0", ":", "j", "+=", "self", ".", "shape", "[", "1", "]", "if", "j", "<", "0", "or", "j", ">=", "self", ".", "shape", "[", "1", "]", ":", "raise", "IndexError", "(", "'index out of bounds'", ")", "return", "dict", ".", "get", "(", "self", ",", "(", "i", ",", "j", ")", ",", "zero", ")", "elif", "(", "(", "i_intlike", "or", "isinstance", "(", "i", ",", "slice", ")", ")", "and", "(", "j_intlike", "or", "isinstance", "(", "j", ",", "slice", ")", ")", ")", ":", "# Fast path for slicing very sparse matrices", "i_slice", "=", "slice", "(", "i", ",", "i", "+", "1", ")", "if", "i_intlike", "else", "i", "j_slice", "=", "slice", "(", "j", ",", "j", "+", "1", ")", "if", "j_intlike", "else", "j", "i_indices", "=", "i_slice", ".", "indices", "(", "self", ".", "shape", "[", "0", "]", ")", "j_indices", "=", "j_slice", ".", "indices", "(", "self", ".", "shape", "[", "1", "]", ")", "i_seq", "=", "xrange", "(", "*", "i_indices", ")", "j_seq", "=", "xrange", "(", "*", "j_indices", ")", "newshape", "=", "(", "len", "(", "i_seq", ")", ",", "len", "(", "j_seq", ")", ")", "newsize", "=", "_prod", "(", "newshape", ")", "if", "len", "(", "self", ")", "<", "2", "*", "newsize", "and", "newsize", "!=", "0", ":", "# Switch to the fast path only when advantageous", "# (count the iterations in the loops, adjust for complexity)", "#", "# We also don't handle newsize == 0 here (if", "# i/j_intlike, it can mean index i or j was out of", "# bounds)", "return", "self", ".", "_getitem_ranges", "(", "i_indices", ",", "j_indices", ",", "newshape", ")", "i", ",", "j", "=", "self", ".", "_index_to_arrays", "(", "i", ",", "j", ")", "if", "i", ".", "size", "==", "0", ":", "return", "dok_matrix", "(", "i", ".", "shape", ",", "dtype", "=", "self", ".", "dtype", ")", "min_i", "=", "i", ".", "min", "(", ")", "if", "min_i", "<", "-", "self", ".", "shape", "[", "0", "]", "or", "i", ".", "max", "(", ")", ">=", "self", ".", "shape", "[", "0", "]", ":", "raise", "IndexError", "(", "'index (%d) out of range -%d to %d)'", "%", "(", "i", ".", "min", "(", ")", ",", "self", ".", "shape", "[", "0", "]", ",", "self", ".", "shape", "[", "0", "]", "-", "1", ")", ")", "if", "min_i", "<", "0", ":", "i", "=", "i", ".", "copy", "(", ")", "i", "[", "i", "<", "0", "]", "+=", "self", ".", "shape", "[", "0", "]", "min_j", "=", "j", ".", "min", "(", ")", "if", "min_j", "<", "-", "self", ".", "shape", "[", "1", "]", "or", "j", ".", "max", "(", ")", ">=", "self", ".", "shape", "[", "1", "]", ":", "raise", "IndexError", "(", "'index (%d) out of range -%d to %d)'", "%", "(", "j", ".", "min", "(", ")", ",", "self", ".", "shape", "[", "1", "]", ",", "self", ".", "shape", "[", "1", "]", "-", "1", ")", ")", "if", "min_j", "<", "0", ":", "j", "=", "j", ".", "copy", "(", ")", "j", "[", "j", "<", "0", "]", "+=", "self", ".", "shape", "[", "1", "]", "newdok", "=", "dok_matrix", "(", "i", ".", "shape", ",", "dtype", "=", "self", ".", 
"dtype", ")", "for", "a", "in", "xrange", "(", "i", ".", "shape", "[", "0", "]", ")", ":", "for", "b", "in", "xrange", "(", "i", ".", "shape", "[", "1", "]", ")", ":", "v", "=", "dict", ".", "get", "(", "self", ",", "(", "i", "[", "a", ",", "b", "]", ",", "j", "[", "a", ",", "b", "]", ")", ",", "zero", ")", "if", "v", "!=", "0", ":", "dict", ".", "__setitem__", "(", "newdok", ",", "(", "a", ",", "b", ")", ",", "v", ")", "return", "newdok" ]
https://github.com/caiiiac/Machine-Learning-with-Python/blob/1a26c4467da41ca4ebc3d5bd789ea942ef79422f/MachineLearning/venv/lib/python3.5/site-packages/scipy/sparse/dok.py#L142-L216
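
The two index paths are easy to see with the public scipy API (scalar lookups hit the underlying dict directly; slices build a new dok_matrix):

from scipy.sparse import dok_matrix

A = dok_matrix((4, 4), dtype=float)
A[1, 2] = 3.0
print(A[1, 2])     # scalar path: 3.0
print(A[0, 0])     # missing entry returns the dtype's zero: 0.0
sub = A[0:2, 1:3]  # slice path: a new 2x2 dok_matrix
print(sub.shape)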

nwo: ahmetozlu/tensorflow_object_counting_api
sha: 630a1471a1aa19e582d09898f569c5eea031a21d
path: mask_rcnn_counting_api/spaghetti_counter_training/training/mrcnn/model.py
language: python
identifier: build_fpn_mask_graph
parameters: (rois, feature_maps, image_meta, pool_size, num_classes, train_bn=True)
return_statement: return x
docstring: Builds the computation graph of the mask head of Feature Pyramid Network. rois: [batch, num_rois, (y1, x1, y2, x2)] Proposal boxes in normalized coordinates. feature_maps: List of feature maps from different layers of the pyramid, [P2, P3, P4, P5]. Each has a different resolution. image_meta: [batch, (meta data)] Image details. See compose_image_meta() pool_size: The width of the square feature map generated from ROI Pooling. num_classes: number of classes, which determines the depth of the results train_bn: Boolean. Train or freeze Batch Norm layers Returns: Masks [batch, roi_count, height, width, num_classes]
function:

def build_fpn_mask_graph(rois, feature_maps, image_meta,
                         pool_size, num_classes, train_bn=True):
    """Builds the computation graph of the mask head of Feature Pyramid Network.

    rois: [batch, num_rois, (y1, x1, y2, x2)] Proposal boxes in normalized
          coordinates.
    feature_maps: List of feature maps from different layers of the pyramid,
                  [P2, P3, P4, P5]. Each has a different resolution.
    image_meta: [batch, (meta data)] Image details. See compose_image_meta()
    pool_size: The width of the square feature map generated from ROI Pooling.
    num_classes: number of classes, which determines the depth of the results
    train_bn: Boolean. Train or freeze Batch Norm layers

    Returns: Masks [batch, roi_count, height, width, num_classes]
    """
    # ROI Pooling
    # Shape: [batch, boxes, pool_height, pool_width, channels]
    x = PyramidROIAlign([pool_size, pool_size],
                        name="roi_align_mask")([rois, image_meta] + feature_maps)

    # Conv layers
    x = KL.TimeDistributed(KL.Conv2D(256, (3, 3), padding="same"),
                           name="mrcnn_mask_conv1")(x)
    x = KL.TimeDistributed(BatchNorm(),
                           name='mrcnn_mask_bn1')(x, training=train_bn)
    x = KL.Activation('relu')(x)

    x = KL.TimeDistributed(KL.Conv2D(256, (3, 3), padding="same"),
                           name="mrcnn_mask_conv2")(x)
    x = KL.TimeDistributed(BatchNorm(),
                           name='mrcnn_mask_bn2')(x, training=train_bn)
    x = KL.Activation('relu')(x)

    x = KL.TimeDistributed(KL.Conv2D(256, (3, 3), padding="same"),
                           name="mrcnn_mask_conv3")(x)
    x = KL.TimeDistributed(BatchNorm(),
                           name='mrcnn_mask_bn3')(x, training=train_bn)
    x = KL.Activation('relu')(x)

    x = KL.TimeDistributed(KL.Conv2D(256, (3, 3), padding="same"),
                           name="mrcnn_mask_conv4")(x)
    x = KL.TimeDistributed(BatchNorm(),
                           name='mrcnn_mask_bn4')(x, training=train_bn)
    x = KL.Activation('relu')(x)

    x = KL.TimeDistributed(KL.Conv2DTranspose(256, (2, 2), strides=2, activation="relu"),
                           name="mrcnn_mask_deconv")(x)
    x = KL.TimeDistributed(KL.Conv2D(num_classes, (1, 1), strides=1, activation="sigmoid"),
                           name="mrcnn_mask")(x)
    return x
[ "def", "build_fpn_mask_graph", "(", "rois", ",", "feature_maps", ",", "image_meta", ",", "pool_size", ",", "num_classes", ",", "train_bn", "=", "True", ")", ":", "# ROI Pooling", "# Shape: [batch, boxes, pool_height, pool_width, channels]", "x", "=", "PyramidROIAlign", "(", "[", "pool_size", ",", "pool_size", "]", ",", "name", "=", "\"roi_align_mask\"", ")", "(", "[", "rois", ",", "image_meta", "]", "+", "feature_maps", ")", "# Conv layers", "x", "=", "KL", ".", "TimeDistributed", "(", "KL", ".", "Conv2D", "(", "256", ",", "(", "3", ",", "3", ")", ",", "padding", "=", "\"same\"", ")", ",", "name", "=", "\"mrcnn_mask_conv1\"", ")", "(", "x", ")", "x", "=", "KL", ".", "TimeDistributed", "(", "BatchNorm", "(", ")", ",", "name", "=", "'mrcnn_mask_bn1'", ")", "(", "x", ",", "training", "=", "train_bn", ")", "x", "=", "KL", ".", "Activation", "(", "'relu'", ")", "(", "x", ")", "x", "=", "KL", ".", "TimeDistributed", "(", "KL", ".", "Conv2D", "(", "256", ",", "(", "3", ",", "3", ")", ",", "padding", "=", "\"same\"", ")", ",", "name", "=", "\"mrcnn_mask_conv2\"", ")", "(", "x", ")", "x", "=", "KL", ".", "TimeDistributed", "(", "BatchNorm", "(", ")", ",", "name", "=", "'mrcnn_mask_bn2'", ")", "(", "x", ",", "training", "=", "train_bn", ")", "x", "=", "KL", ".", "Activation", "(", "'relu'", ")", "(", "x", ")", "x", "=", "KL", ".", "TimeDistributed", "(", "KL", ".", "Conv2D", "(", "256", ",", "(", "3", ",", "3", ")", ",", "padding", "=", "\"same\"", ")", ",", "name", "=", "\"mrcnn_mask_conv3\"", ")", "(", "x", ")", "x", "=", "KL", ".", "TimeDistributed", "(", "BatchNorm", "(", ")", ",", "name", "=", "'mrcnn_mask_bn3'", ")", "(", "x", ",", "training", "=", "train_bn", ")", "x", "=", "KL", ".", "Activation", "(", "'relu'", ")", "(", "x", ")", "x", "=", "KL", ".", "TimeDistributed", "(", "KL", ".", "Conv2D", "(", "256", ",", "(", "3", ",", "3", ")", ",", "padding", "=", "\"same\"", ")", ",", "name", "=", "\"mrcnn_mask_conv4\"", ")", "(", "x", ")", "x", "=", "KL", ".", "TimeDistributed", "(", "BatchNorm", "(", ")", ",", "name", "=", "'mrcnn_mask_bn4'", ")", "(", "x", ",", "training", "=", "train_bn", ")", "x", "=", "KL", ".", "Activation", "(", "'relu'", ")", "(", "x", ")", "x", "=", "KL", ".", "TimeDistributed", "(", "KL", ".", "Conv2DTranspose", "(", "256", ",", "(", "2", ",", "2", ")", ",", "strides", "=", "2", ",", "activation", "=", "\"relu\"", ")", ",", "name", "=", "\"mrcnn_mask_deconv\"", ")", "(", "x", ")", "x", "=", "KL", ".", "TimeDistributed", "(", "KL", ".", "Conv2D", "(", "num_classes", ",", "(", "1", ",", "1", ")", ",", "strides", "=", "1", ",", "activation", "=", "\"sigmoid\"", ")", ",", "name", "=", "\"mrcnn_mask\"", ")", "(", "x", ")", "return", "x" ]
https://github.com/ahmetozlu/tensorflow_object_counting_api/blob/630a1471a1aa19e582d09898f569c5eea031a21d/mask_rcnn_counting_api/spaghetti_counter_training/training/mrcnn/model.py#L959-L1008

nwo: rowliny/DiffHelper
sha: ab3a96f58f9579d0023aed9ebd785f4edf26f8af
path: Tool/SitePackages/PIL/_binary.py
language: python
identifier: si32le
parameters: (c, o=0)
return_statement: return unpack_from("<i", c, o)[0]
docstring: Converts a 4-bytes (32 bits) string to a signed integer. :param c: string containing bytes to convert :param o: offset of bytes to convert in string
function:

def si32le(c, o=0):
    """
    Converts a 4-bytes (32 bits) string to a signed integer.

    :param c: string containing bytes to convert
    :param o: offset of bytes to convert in string
    """
    return unpack_from("<i", c, o)[0]
[ "def", "si32le", "(", "c", ",", "o", "=", "0", ")", ":", "return", "unpack_from", "(", "\"<i\"", ",", "c", ",", "o", ")", "[", "0", "]" ]
https://github.com/rowliny/DiffHelper/blob/ab3a96f58f9579d0023aed9ebd785f4edf26f8af/Tool/SitePackages/PIL/_binary.py#L70-L77
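
A quick check of the little-endian signed decode (in PIL._binary, unpack_from is struct.unpack_from; the function is restated so the snippet runs standalone):

from struct import unpack_from

def si32le(c, o=0):
    return unpack_from("<i", c, o)[0]

assert si32le(b"\x01\x00\x00\x00") == 1
assert si32le(b"\xff\xff\xff\xff") == -1              # all bits set -> -1
assert si32le(b"\x00\x00\x01\x00\x00\x00", o=2) == 1  # honours the offset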

nwo: galaxyproject/galaxy
sha: 4c03520f05062e0f4a1b3655dc0b7452fda69943
path: lib/galaxy/webapps/galaxy/api/job_files.py
language: python
identifier: JobFilesAPIController.create
parameters: (self, trans, job_id, payload, **kwargs)
return_statement: return {"message": "ok"}
docstring: create( self, trans, job_id, payload, **kwargs ) * POST /api/jobs/{job_id}/files Populate an output file (formal dataset, task split part, working directory file (such as those related to metadata)). This should be a multipart post with a 'file' parameter containing the contents of the actual file to create. :type job_id: str :param job_id: encoded id string of the job :type payload: dict :param payload: dictionary structure containing:: 'job_key' = Key authenticating 'path' = Path to file to create. .. note:: This API method is intended only for consumption by job runners, not end users. :rtype: dict :returns: an okay message
function:

def create(self, trans, job_id, payload, **kwargs):
    """
    create( self, trans, job_id, payload, **kwargs )
    * POST /api/jobs/{job_id}/files
        Populate an output file (formal dataset, task split part, working
        directory file (such as those related to metadata)). This should be
        a multipart post with a 'file' parameter containing the contents of
        the actual file to create.

    :type   job_id: str
    :param  job_id: encoded id string of the job
    :type   payload: dict
    :param  payload: dictionary structure containing::
        'job_key' = Key authenticating
        'path'    = Path to file to create.

    .. note::
        This API method is intended only for consumption by job runners,
        not end users.

    :rtype:   dict
    :returns: an okay message
    """
    job = self.__authorize_job_access(trans, job_id, **payload)
    path = payload.get("path")
    self.__check_job_can_write_to_path(trans, job, path)

    # Is this writing an unneeded file? Should this just copy in Python?
    if '__file_path' in payload:
        file_path = payload.get('__file_path')
        upload_store = trans.app.config.nginx_upload_job_files_store
        assert upload_store, ("Request appears to have been processed by"
                              " nginx_upload_module but Galaxy is not"
                              " configured to recognize it")
        assert file_path.startswith(upload_store), \
            ("Filename provided by nginx (%s) is not in correct"
             " directory (%s)" % (file_path, upload_store))
        input_file = open(file_path)
    else:
        input_file = payload.get("file", payload.get("__file", None)).file
    target_dir = os.path.dirname(path)
    util.safe_makedirs(target_dir)
    try:
        shutil.move(input_file.name, path)
    finally:
        try:
            input_file.close()
        except OSError:
            # Fails to close file if not using nginx upload because the
            # tempfile has moved and Python wants to delete it.
            pass
    return {"message": "ok"}
[ "def", "create", "(", "self", ",", "trans", ",", "job_id", ",", "payload", ",", "*", "*", "kwargs", ")", ":", "job", "=", "self", ".", "__authorize_job_access", "(", "trans", ",", "job_id", ",", "*", "*", "payload", ")", "path", "=", "payload", ".", "get", "(", "\"path\"", ")", "self", ".", "__check_job_can_write_to_path", "(", "trans", ",", "job", ",", "path", ")", "# Is this writing an unneeded file? Should this just copy in Python?", "if", "'__file_path'", "in", "payload", ":", "file_path", "=", "payload", ".", "get", "(", "'__file_path'", ")", "upload_store", "=", "trans", ".", "app", ".", "config", ".", "nginx_upload_job_files_store", "assert", "upload_store", ",", "(", "\"Request appears to have been processed by\"", "\" nginx_upload_module but Galaxy is not\"", "\" configured to recognize it\"", ")", "assert", "file_path", ".", "startswith", "(", "upload_store", ")", ",", "(", "\"Filename provided by nginx (%s) is not in correct\"", "\" directory (%s)\"", "%", "(", "file_path", ",", "upload_store", ")", ")", "input_file", "=", "open", "(", "file_path", ")", "else", ":", "input_file", "=", "payload", ".", "get", "(", "\"file\"", ",", "payload", ".", "get", "(", "\"__file\"", ",", "None", ")", ")", ".", "file", "target_dir", "=", "os", ".", "path", ".", "dirname", "(", "path", ")", "util", ".", "safe_makedirs", "(", "target_dir", ")", "try", ":", "shutil", ".", "move", "(", "input_file", ".", "name", ",", "path", ")", "finally", ":", "try", ":", "input_file", ".", "close", "(", ")", "except", "OSError", ":", "# Fails to close file if not using nginx upload because the", "# tempfile has moved and Python wants to delete it.", "pass", "return", "{", "\"message\"", ":", "\"ok\"", "}" ]
https://github.com/galaxyproject/galaxy/blob/4c03520f05062e0f4a1b3655dc0b7452fda69943/lib/galaxy/webapps/galaxy/api/job_files.py#L62-L114
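
For orientation, a client-side sketch of what the endpoint expects, inferred from the docstring (the URL, job id, and key are placeholders; real job runners obtain these values from Galaxy itself, and this is not a documented end-user API):

import requests

resp = requests.post(
    "https://galaxy.example.org/api/jobs/ENCODED_JOB_ID/files",
    data={"job_key": "SECRET_JOB_KEY", "path": "/jobs/000/42/outputs/out.dat"},
    files={"file": open("out.dat", "rb")},
)
print(resp.json())  # {"message": "ok"} on success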

nwo: OpenMDAO/OpenMDAO1
sha: 791a6fbbb7d266f3dcbc1f7bde3ae03a70dc1317
path: openmdao/core/system.py
language: python
identifier: System._rec_set_param
parameters: (self, name, value)
function:

def _rec_set_param(self, name, value):
    # Split off the first component of a dotted name and recurse into the
    # matching subsystem until the owning system is reached.
    parts = name.split('.', 1)
    if len(parts) == 1:
        self.params[name] = value
    else:
        return self._subsystems[parts[0]]._rec_set_param(parts[1], value)
[ "def", "_rec_set_param", "(", "self", ",", "name", ",", "value", ")", ":", "parts", "=", "name", ".", "split", "(", "'.'", ",", "1", ")", "if", "len", "(", "parts", ")", "==", "1", ":", "self", ".", "params", "[", "name", "]", "=", "value", "else", ":", "return", "self", ".", "_subsystems", "[", "parts", "[", "0", "]", "]", ".", "_rec_set_param", "(", "parts", "[", "1", "]", ",", "value", ")" ]
https://github.com/OpenMDAO/OpenMDAO1/blob/791a6fbbb7d266f3dcbc1f7bde3ae03a70dc1317/openmdao/core/system.py#L266-L271

nwo: NVIDIA/DeepLearningExamples
sha: 589604d49e016cd9ef4525f7abcc9c7b826cfc5e
path: PyTorch/Detection/Efficientdet/effdet/layers/nms_layer.py
language: python
identifier: soft_nms
parameters: (boxes, scores, method_gaussian: bool = True, sigma: float = 0.5, iou_threshold: float = .5, score_threshold: float = 0.005)
return_statement: return idxs_out[:count], scores_out[:count]
docstring: Soft non-max suppression algorithm. Implementation of [Soft-NMS -- Improving Object Detection With One Line of Code] (https://arxiv.org/abs/1704.04503) Args: boxes (Tensor[N, ?]): boxes where NMS will be performed if Boxes, in (x1, y1, x2, y2) format if RotatedBoxes, in (x_ctr, y_ctr, width, height, angle_degrees) format scores (Tensor[N]): scores for each one of the boxes method_gaussian (bool): use gaussian method if True, otherwise linear sigma (float): parameter for Gaussian penalty function iou_threshold (float): iou threshold for applying linear decay. Nt from the paper re-used as threshold for standard "hard" nms score_threshold (float): boxes with scores below this threshold are pruned at each iteration. Dramatically reduces computation time. Authors use values in [10e-4, 10e-2] Returns: tuple(Tensor, Tensor): [0]: int64 tensor with the indices of the elements that have been kept by Soft NMS, sorted in decreasing order of scores [1]: float tensor with the re-scored scores of the elements that were kept
function:

def soft_nms(
    boxes,
    scores,
    method_gaussian: bool = True,
    sigma: float = 0.5,
    iou_threshold: float = .5,
    score_threshold: float = 0.005
):
    """
    Soft non-max suppression algorithm.

    Implementation of [Soft-NMS -- Improving Object Detection With One Line of Code]
    (https://arxiv.org/abs/1704.04503)

    Args:
        boxes (Tensor[N, ?]):
           boxes where NMS will be performed
           if Boxes, in (x1, y1, x2, y2) format
           if RotatedBoxes, in (x_ctr, y_ctr, width, height, angle_degrees) format
        scores (Tensor[N]):
           scores for each one of the boxes
        method_gaussian (bool): use gaussian method if True, otherwise linear
        sigma (float):
           parameter for Gaussian penalty function
        iou_threshold (float):
           iou threshold for applying linear decay. Nt from the paper
           re-used as threshold for standard "hard" nms
        score_threshold (float):
           boxes with scores below this threshold are pruned at each iteration.
           Dramatically reduces computation time. Authors use values in [10e-4, 10e-2]

    Returns:
        tuple(Tensor, Tensor):
            [0]: int64 tensor with the indices of the elements that have been
                 kept by Soft NMS, sorted in decreasing order of scores
            [1]: float tensor with the re-scored scores of the elements that
                 were kept
    """
    # st = time.perf_counter()
    device = boxes.device
    boxes_remain = boxes.clone()
    scores_remain = scores.clone()
    num_elem = scores_remain.size()[0]
    idxs = torch.arange(num_elem)
    idxs_out = torch.zeros(num_elem, dtype=torch.int64, device=device)
    scores_out = torch.zeros(num_elem, dtype=torch.float32, device=device)
    area = (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1])
    boxes_remain = torch.cat((boxes_remain, area.unsqueeze(1)), dim=1)  # [N, 5] BS, x1, y1, x2, y2, area
    count: int = 0
    # print("[SOFTMAX] before loop starts in softnms {}".format(time.perf_counter() - st))
    while scores_remain.numel() > 0:
        # st1 = time.perf_counter()
        top_idx = 0  # torch.argmax(scores_remain)
        idxs_out[count] = idxs[top_idx]
        scores_out[count] = scores_remain[top_idx]
        count += 1

        top_box = boxes_remain[top_idx]
        ious = pairwise_iou(top_box.unsqueeze(0), boxes_remain)[0]
        # st2 = time.perf_counter()
        # print("[SOFTMAX] Before gaussian in softnms {}".format(st2 - st1))
        if method_gaussian:
            decay = torch.exp(-torch.pow(ious, 2) / sigma)
        else:
            decay = torch.ones_like(ious)
            decay_mask = ious > iou_threshold
            decay[decay_mask] = 1 - ious[decay_mask]
        # st3 = time.perf_counter()
        # print("[SOFTMAX] Gaussian in softnms {}".format(st3 - st2))
        scores_remain *= decay
        keep = scores_remain > score_threshold
        keep[top_idx] = torch.tensor(False, device=device)

        boxes_remain = boxes_remain[keep]
        scores_remain = scores_remain[keep]
        idxs = idxs[keep]
        # st4 = time.perf_counter()
        # print("[SOFTMAX] Remaining in softnms {}".format(st4 - st3))
        # print("[SOFTMAX] Entire loop takes in softnms {}".format(st4 - st1))
    # st5 = time.perf_counter()
    # print("[SOFTMAX] Remaining in softnms {}".format(st5 - st))
    return idxs_out[:count], scores_out[:count]
[ "def", "soft_nms", "(", "boxes", ",", "scores", ",", "method_gaussian", ":", "bool", "=", "True", ",", "sigma", ":", "float", "=", "0.5", ",", "iou_threshold", ":", "float", "=", ".5", ",", "score_threshold", ":", "float", "=", "0.005", ")", ":", "# st = time.perf_counter()", "device", "=", "boxes", ".", "device", "boxes_remain", "=", "boxes", ".", "clone", "(", ")", "scores_remain", "=", "scores", ".", "clone", "(", ")", "num_elem", "=", "scores_remain", ".", "size", "(", ")", "[", "0", "]", "idxs", "=", "torch", ".", "arange", "(", "num_elem", ")", "idxs_out", "=", "torch", ".", "zeros", "(", "num_elem", ",", "dtype", "=", "torch", ".", "int64", ",", "device", "=", "device", ")", "scores_out", "=", "torch", ".", "zeros", "(", "num_elem", ",", "dtype", "=", "torch", ".", "float32", ",", "device", "=", "device", ")", "area", "=", "(", "boxes", "[", ":", ",", "2", "]", "-", "boxes", "[", ":", ",", "0", "]", ")", "*", "(", "boxes", "[", ":", ",", "3", "]", "-", "boxes", "[", ":", ",", "1", "]", ")", "boxes_remain", "=", "torch", ".", "cat", "(", "(", "boxes_remain", ",", "area", ".", "unsqueeze", "(", "1", ")", ")", ",", "dim", "=", "1", ")", "# [N, 5] BS, x1, y1, x2, y2, area", "count", ":", "int", "=", "0", "# print(\"[SOFTMAX] before loop starts in softnms {}\".format(time.perf_counter() - st))", "while", "scores_remain", ".", "numel", "(", ")", ">", "0", ":", "# st1 = time.perf_counter()", "top_idx", "=", "0", "# torch.argmax(scores_remain)", "idxs_out", "[", "count", "]", "=", "idxs", "[", "top_idx", "]", "scores_out", "[", "count", "]", "=", "scores_remain", "[", "top_idx", "]", "count", "+=", "1", "top_box", "=", "boxes_remain", "[", "top_idx", "]", "ious", "=", "pairwise_iou", "(", "top_box", ".", "unsqueeze", "(", "0", ")", ",", "boxes_remain", ")", "[", "0", "]", "# st2 = time.perf_counter()", "# print(\"[SOFTMAX] Before gaussian in softnms {}\".format(st2 - st1))", "if", "method_gaussian", ":", "decay", "=", "torch", ".", "exp", "(", "-", "torch", ".", "pow", "(", "ious", ",", "2", ")", "/", "sigma", ")", "else", ":", "decay", "=", "torch", ".", "ones_like", "(", "ious", ")", "decay_mask", "=", "ious", ">", "iou_threshold", "decay", "[", "decay_mask", "]", "=", "1", "-", "ious", "[", "decay_mask", "]", "# st3 = time.perf_counter()", "# print(\"[SOFTMAX] Gaussian in softnms {}\".format(st3 - st2))", "scores_remain", "*=", "decay", "keep", "=", "scores_remain", ">", "score_threshold", "keep", "[", "top_idx", "]", "=", "torch", ".", "tensor", "(", "False", ",", "device", "=", "device", ")", "boxes_remain", "=", "boxes_remain", "[", "keep", "]", "scores_remain", "=", "scores_remain", "[", "keep", "]", "idxs", "=", "idxs", "[", "keep", "]", "# st4 = time.perf_counter()", "# print(\"[SOFTMAX] Remaining in softnms {}\".format(st4 - st3))", "# print(\"[SOFTMAX] Entire loop takes in softnms {}\".format(st4 - st1))", "# st5 = time.perf_counter()", "# print(\"[SOFTMAX] Remaining in softnms {}\".format(st5 - st))", "return", "idxs_out", "[", ":", "count", "]", ",", "scores_out", "[", ":", "count", "]" ]
https://github.com/NVIDIA/DeepLearningExamples/blob/589604d49e016cd9ef4525f7abcc9c7b826cfc5e/PyTorch/Detection/Efficientdet/effdet/layers/nms_layer.py#L56-L134
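
A small driver, assuming scores arrive sorted in descending order (the loop always pops index 0; the argmax is commented out) and that pairwise_iou behaves like the Detectron2 helper, returning an [M, N] IoU matrix. The box values are made up:

import torch

boxes = torch.tensor([
    [0.0, 0.0, 10.0, 10.0],
    [1.0, 1.0, 11.0, 11.0],    # heavy overlap with box 0
    [50.0, 50.0, 60.0, 60.0],  # disjoint
])
scores = torch.tensor([0.9, 0.8, 0.7])
keep_idx, rescored = soft_nms(boxes, scores, method_gaussian=True, sigma=0.5)
# Box 1 survives with a decayed score rather than being discarded outright;
# box 2 is untouched.
print(keep_idx, rescored)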

nwo: barnumbirr/coinmarketcap
sha: 2b951be7cedd7efc484f50f27600ee8a003f3c94
path: coinmarketcap/core.py
language: python
identifier: Market.stats
parameters: (self, **kwargs)
return_statement: return response
docstring: This endpoint displays the global data found at the top of coinmarketcap.com. Optional parameters: (string) convert - return pricing info in terms of another currency. Valid fiat currency values are: "AUD", "BRL", "CAD", "CHF", "CLP", "CNY", "CZK", "DKK", "EUR", "GBP", "HKD", "HUF", "IDR", "ILS", "INR", "JPY", "KRW", "MXN", "MYR", "NOK", "NZD", "PHP", "PKR", "PLN", "RUB", "SEK", "SGD", "THB", "TRY", "TWD", "ZAR" Valid cryptocurrency values are: "BTC", "ETH", "XRP", "LTC", and "BCH"
function:

def stats(self, **kwargs):
    """
    This endpoint displays the global data found at the top of
    coinmarketcap.com.

    Optional parameters:
    (string) convert - return pricing info in terms of another currency.
        Valid fiat currency values are: "AUD", "BRL", "CAD", "CHF", "CLP",
        "CNY", "CZK", "DKK", "EUR", "GBP", "HKD", "HUF", "IDR", "ILS",
        "INR", "JPY", "KRW", "MXN", "MYR", "NOK", "NZD", "PHP", "PKR",
        "PLN", "RUB", "SEK", "SGD", "THB", "TRY", "TWD", "ZAR"
        Valid cryptocurrency values are: "BTC", "ETH", "XRP", "LTC", and "BCH"
    """
    params = {}
    params.update(kwargs)
    response = self.__request('global/', params)
    return response
[ "def", "stats", "(", "self", ",", "*", "*", "kwargs", ")", ":", "params", "=", "{", "}", "params", ".", "update", "(", "kwargs", ")", "response", "=", "self", ".", "__request", "(", "'global/'", ",", "params", ")", "return", "response" ]
https://github.com/barnumbirr/coinmarketcap/blob/2b951be7cedd7efc484f50f27600ee8a003f3c94/coinmarketcap/core.py#L98-L114
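A hedged usage sketch for Market.stats(); it assumes the coinmarketcap package is installed and the (since retired) public v1 API is reachable.

    from coinmarketcap import Market

    m = Market()
    data = m.stats(convert="EUR")  # kwargs are forwarded as query parameters
    print(data)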
collinsctk/PyQYT
7af3673955f94ff1b2df2f94220cd2dab2e252af
ExtentionPackages/Crypto/PublicKey/DSA.py
python
_DSAobj.verify
(self, M, signature)
return pubkey.pubkey.verify(self, M, signature)
Verify the validity of a DSA signature. :Parameter M: The expected message. :Type M: byte string or long :Parameter signature: The DSA signature to verify. :Type signature: A tuple with 2 longs as returned by `sign` :Return: True if the signature is correct, False otherwise.
Verify the validity of a DSA signature.
[ "Verify", "the", "validity", "of", "a", "DSA", "signature", "." ]
def verify(self, M, signature): """Verify the validity of a DSA signature. :Parameter M: The expected message. :Type M: byte string or long :Parameter signature: The DSA signature to verify. :Type signature: A tuple with 2 longs as returned by `sign` :Return: True if the signature is correct, False otherwise. """ return pubkey.pubkey.verify(self, M, signature)
[ "def", "verify", "(", "self", ",", "M", ",", "signature", ")", ":", "return", "pubkey", ".", "pubkey", ".", "verify", "(", "self", ",", "M", ",", "signature", ")" ]
https://github.com/collinsctk/PyQYT/blob/7af3673955f94ff1b2df2f94220cd2dab2e252af/ExtentionPackages/Crypto/PublicKey/DSA.py#L158-L169
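A sketch of how verify pairs with sign in this legacy PyCrypto-style API, assuming the usual flow: hash the message first and draw a fresh random k below q for signing.

    from Crypto.PublicKey import DSA
    from Crypto.Hash import SHA
    from Crypto.Random import random

    key = DSA.generate(1024)
    h = SHA.new(b"message").digest()
    k = random.StrongRandom().randint(1, key.q - 1)  # must never be reused
    sig = key.sign(h, k)        # a (r, s) tuple of longs
    assert key.verify(h, sig)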
jython/jython3
def4f8ec47cb7a9c799ea4c745f12badf92c5769
lib-python/3.5.1/xmlrpc/server.py
python
SimpleXMLRPCDispatcher._dispatch
(self, method, params)
Dispatches the XML-RPC method. XML-RPC calls are forwarded to a registered function that matches the called XML-RPC method name. If no such function exists then the call is forwarded to the registered instance, if available. If the registered instance has a _dispatch method then that method will be called with the name of the XML-RPC method and its parameters as a tuple e.g. instance._dispatch('add',(2,3)) If the registered instance does not have a _dispatch method then the instance will be searched to find a matching method and, if found, will be called. Methods beginning with an '_' are considered private and will not be called.
Dispatches the XML-RPC method.
[ "Dispatches", "the", "XML", "-", "RPC", "method", "." ]
def _dispatch(self, method, params): """Dispatches the XML-RPC method. XML-RPC calls are forwarded to a registered function that matches the called XML-RPC method name. If no such function exists then the call is forwarded to the registered instance, if available. If the registered instance has a _dispatch method then that method will be called with the name of the XML-RPC method and its parameters as a tuple e.g. instance._dispatch('add',(2,3)) If the registered instance does not have a _dispatch method then the instance will be searched to find a matching method and, if found, will be called. Methods beginning with an '_' are considered private and will not be called. """ func = None try: # check to see if a matching function has been registered func = self.funcs[method] except KeyError: if self.instance is not None: # check for a _dispatch method if hasattr(self.instance, '_dispatch'): return self.instance._dispatch(method, params) else: # call instance method directly try: func = resolve_dotted_attribute( self.instance, method, self.allow_dotted_names ) except AttributeError: pass if func is not None: return func(*params) else: raise Exception('method "%s" is not supported' % method)
[ "def", "_dispatch", "(", "self", ",", "method", ",", "params", ")", ":", "func", "=", "None", "try", ":", "# check to see if a matching function has been registered", "func", "=", "self", ".", "funcs", "[", "method", "]", "except", "KeyError", ":", "if", "self", ".", "instance", "is", "not", "None", ":", "# check for a _dispatch method", "if", "hasattr", "(", "self", ".", "instance", ",", "'_dispatch'", ")", ":", "return", "self", ".", "instance", ".", "_dispatch", "(", "method", ",", "params", ")", "else", ":", "# call instance method directly", "try", ":", "func", "=", "resolve_dotted_attribute", "(", "self", ".", "instance", ",", "method", ",", "self", ".", "allow_dotted_names", ")", "except", "AttributeError", ":", "pass", "if", "func", "is", "not", "None", ":", "return", "func", "(", "*", "params", ")", "else", ":", "raise", "Exception", "(", "'method \"%s\" is not supported'", "%", "method", ")" ]
https://github.com/jython/jython3/blob/def4f8ec47cb7a9c799ea4c745f12badf92c5769/lib-python/3.5.1/xmlrpc/server.py#L368-L412
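The resolution order above (the funcs dict first, then the registered instance, with underscore names refused) can be exercised with the standard-library server; a minimal sketch:

    from xmlrpc.server import SimpleXMLRPCServer

    class Calc:
        def add(self, x, y):
            return x + y

    server = SimpleXMLRPCServer(("localhost", 8000))
    server.register_function(lambda x, y: x * y, "mul")  # lands in self.funcs
    server.register_instance(Calc())                     # fallback for other names
    # "mul" resolves from funcs; "add" is found on the instance;
    # "_private" would raise, since leading-underscore names are never dispatched.
    server.serve_forever()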
fooying/3102
0faee38c30b2e24154f41e68457cfd8f7a61c040
thirdparty/dns/rdataset.py
python
Rdataset.to_text
(self, name=None, origin=None, relativize=True, override_rdclass=None, **kw)
return s.getvalue()[:-1]
Convert the rdataset into DNS master file format. @see: L{dns.name.Name.choose_relativity} for more information on how I{origin} and I{relativize} determine the way names are emitted. Any additional keyword arguments are passed on to the rdata to_text() method. @param name: If name is not None, emit RRs with I{name} as the owner name. @type name: dns.name.Name object @param origin: The origin for relative names, or None. @type origin: dns.name.Name object @param relativize: True if names should be relativized @type relativize: bool
Convert the rdataset into DNS master file format.
[ "Convert", "the", "rdataset", "into", "DNS", "master", "file", "format", "." ]
def to_text(self, name=None, origin=None, relativize=True, override_rdclass=None, **kw): """Convert the rdataset into DNS master file format. @see: L{dns.name.Name.choose_relativity} for more information on how I{origin} and I{relativize} determine the way names are emitted. Any additional keyword arguments are passed on to the rdata to_text() method. @param name: If name is not None, emit RRs with I{name} as the owner name. @type name: dns.name.Name object @param origin: The origin for relative names, or None. @type origin: dns.name.Name object @param relativize: True if names should be relativized @type relativize: bool""" if not name is None: name = name.choose_relativity(origin, relativize) ntext = str(name) pad = ' ' else: ntext = '' pad = '' s = StringIO.StringIO() if not override_rdclass is None: rdclass = override_rdclass else: rdclass = self.rdclass if len(self) == 0: # # Empty rdatasets are used for the question section, and in # some dynamic updates, so we don't need to print out the TTL # (which is meaningless anyway). # print >> s, '%s%s%s %s' % (ntext, pad, dns.rdataclass.to_text(rdclass), dns.rdatatype.to_text(self.rdtype)) else: for rd in self: print >> s, '%s%s%d %s %s %s' % \ (ntext, pad, self.ttl, dns.rdataclass.to_text(rdclass), dns.rdatatype.to_text(self.rdtype), rd.to_text(origin=origin, relativize=relativize, **kw)) # # We strip off the final \n for the caller's convenience in printing # return s.getvalue()[:-1]
[ "def", "to_text", "(", "self", ",", "name", "=", "None", ",", "origin", "=", "None", ",", "relativize", "=", "True", ",", "override_rdclass", "=", "None", ",", "*", "*", "kw", ")", ":", "if", "not", "name", "is", "None", ":", "name", "=", "name", ".", "choose_relativity", "(", "origin", ",", "relativize", ")", "ntext", "=", "str", "(", "name", ")", "pad", "=", "' '", "else", ":", "ntext", "=", "''", "pad", "=", "''", "s", "=", "StringIO", ".", "StringIO", "(", ")", "if", "not", "override_rdclass", "is", "None", ":", "rdclass", "=", "override_rdclass", "else", ":", "rdclass", "=", "self", ".", "rdclass", "if", "len", "(", "self", ")", "==", "0", ":", "#", "# Empty rdatasets are used for the question section, and in", "# some dynamic updates, so we don't need to print out the TTL", "# (which is meaningless anyway).", "#", "print", ">>", "s", ",", "'%s%s%s %s'", "%", "(", "ntext", ",", "pad", ",", "dns", ".", "rdataclass", ".", "to_text", "(", "rdclass", ")", ",", "dns", ".", "rdatatype", ".", "to_text", "(", "self", ".", "rdtype", ")", ")", "else", ":", "for", "rd", "in", "self", ":", "print", ">>", "s", ",", "'%s%s%d %s %s %s'", "%", "(", "ntext", ",", "pad", ",", "self", ".", "ttl", ",", "dns", ".", "rdataclass", ".", "to_text", "(", "rdclass", ")", ",", "dns", ".", "rdatatype", ".", "to_text", "(", "self", ".", "rdtype", ")", ",", "rd", ".", "to_text", "(", "origin", "=", "origin", ",", "relativize", "=", "relativize", ",", "*", "*", "kw", ")", ")", "#", "# We strip off the final \\n for the caller's convenience in printing", "#", "return", "s", ".", "getvalue", "(", ")", "[", ":", "-", "1", "]" ]
https://github.com/fooying/3102/blob/0faee38c30b2e24154f41e68457cfd8f7a61c040/thirdparty/dns/rdataset.py#L170-L218
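A hedged sketch using the dnspython API this vendored (Python 2) module mirrors: from_text builds an rdataset and to_text renders the master-file lines.

    import dns.name
    import dns.rdataset

    rds = dns.rdataset.from_text("IN", "A", 300, "10.0.0.1", "10.0.0.2")
    print(rds.to_text(name=dns.name.from_text("www.example.com")))
    # www.example.com. 300 IN A 10.0.0.1
    # www.example.com. 300 IN A 10.0.0.2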
twisted/twisted
dee676b040dd38b847ea6fb112a712cb5e119490
src/twisted/words/protocols/jabber/error.py
python
exceptionFromStanza
(stanza)
return exception
Build an exception object from an error stanza. @param stanza: the error stanza @type stanza: L{domish.Element} @return: the generated exception object @rtype: L{StanzaError}
Build an exception object from an error stanza.
[ "Build", "an", "exception", "object", "from", "an", "error", "stanza", "." ]
def exceptionFromStanza(stanza): """ Build an exception object from an error stanza. @param stanza: the error stanza @type stanza: L{domish.Element} @return: the generated exception object @rtype: L{StanzaError} """ children = [] condition = text = textLang = appCondition = type = code = None for element in stanza.elements(): if element.name == "error" and element.uri == stanza.uri: code = element.getAttribute("code") type = element.getAttribute("type") error = _parseError(element, NS_XMPP_STANZAS) condition = error["condition"] text = error["text"] textLang = error["textLang"] appCondition = error["appCondition"] if not condition and code: condition, type = CODES_TO_CONDITIONS[code] text = str(stanza.error) else: children.append(element) if condition is None: # TODO: raise exception instead? return StanzaError(None) exception = StanzaError(condition, type, text, textLang, appCondition) exception.children = children exception.stanza = stanza return exception
[ "def", "exceptionFromStanza", "(", "stanza", ")", ":", "children", "=", "[", "]", "condition", "=", "text", "=", "textLang", "=", "appCondition", "=", "type", "=", "code", "=", "None", "for", "element", "in", "stanza", ".", "elements", "(", ")", ":", "if", "element", ".", "name", "==", "\"error\"", "and", "element", ".", "uri", "==", "stanza", ".", "uri", ":", "code", "=", "element", ".", "getAttribute", "(", "\"code\"", ")", "type", "=", "element", ".", "getAttribute", "(", "\"type\"", ")", "error", "=", "_parseError", "(", "element", ",", "NS_XMPP_STANZAS", ")", "condition", "=", "error", "[", "\"condition\"", "]", "text", "=", "error", "[", "\"text\"", "]", "textLang", "=", "error", "[", "\"textLang\"", "]", "appCondition", "=", "error", "[", "\"appCondition\"", "]", "if", "not", "condition", "and", "code", ":", "condition", ",", "type", "=", "CODES_TO_CONDITIONS", "[", "code", "]", "text", "=", "str", "(", "stanza", ".", "error", ")", "else", ":", "children", ".", "append", "(", "element", ")", "if", "condition", "is", "None", ":", "# TODO: raise exception instead?", "return", "StanzaError", "(", "None", ")", "exception", "=", "StanzaError", "(", "condition", ",", "type", ",", "text", ",", "textLang", ",", "appCondition", ")", "exception", ".", "children", "=", "children", "exception", ".", "stanza", "=", "stanza", "return", "exception" ]
https://github.com/twisted/twisted/blob/dee676b040dd38b847ea6fb112a712cb5e119490/src/twisted/words/protocols/jabber/error.py#L286-L323
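A hedged sketch of feeding a hand-built error stanza through exceptionFromStanza; per the code above, the error child must share the stanza's URI and carry a condition element in the xmpp-stanzas namespace.

    from twisted.words.xish import domish
    from twisted.words.protocols.jabber.error import exceptionFromStanza

    NS = "jabber:client"
    stanza = domish.Element((NS, "iq"))
    err = stanza.addElement((NS, "error"))
    err["type"] = "cancel"
    err.addElement(("urn:ietf:params:xml:ns:xmpp-stanzas", "item-not-found"))

    exc = exceptionFromStanza(stanza)
    print(exc.condition, exc.type)  # item-not-found cancel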
jliljebl/flowblade
995313a509b80e99eb1ad550d945bdda5995093b
flowblade-trunk/Flowblade/tools/fluxity.py
python
FluxityContextPrivate.error_on_wrong_method
(self, method_name, required_method)
[]
def error_on_wrong_method(self, method_name, required_method): if required_method == self.current_method: return error_str = "'FluxityContext." + method_name + "' has to be called in script method '" + self.method_name[required_method] + "'." _raise_contained_error(error_str)
[ "def", "error_on_wrong_method", "(", "self", ",", "method_name", ",", "required_method", ")", ":", "if", "required_method", "==", "self", ".", "current_method", ":", "return", "error_str", "=", "\"'FluxityContext.\"", "+", "method_name", "+", "\"' has to called in script method '\"", "+", "self", ".", "method_name", "[", "required_method", "]", "+", "\"'.\"", "_raise_contained_error", "(", "error_str", ")" ]
https://github.com/jliljebl/flowblade/blob/995313a509b80e99eb1ad550d945bdda5995093b/flowblade-trunk/Flowblade/tools/fluxity.py#L765-L770
etetoolkit/ete
2b207357dc2a40ccad7bfd8f54964472c72e4726
ete3/nexml/_nexml.py
python
TreeAndNetworkSet.build
(self, node)
[]
def build(self, node): self.buildAttributes(node, node.attrib, []) for child in node: nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] self.buildChildren(child, node, nodeName_)
[ "def", "build", "(", "self", ",", "node", ")", ":", "self", ".", "buildAttributes", "(", "node", ",", "node", ".", "attrib", ",", "[", "]", ")", "for", "child", "in", "node", ":", "nodeName_", "=", "Tag_pattern_", ".", "match", "(", "child", ".", "tag", ")", ".", "groups", "(", ")", "[", "-", "1", "]", "self", ".", "buildChildren", "(", "child", ",", "node", ",", "nodeName_", ")" ]
https://github.com/etetoolkit/ete/blob/2b207357dc2a40ccad7bfd8f54964472c72e4726/ete3/nexml/_nexml.py#L11435-L11439
jameskermode/f90wrap
6a6021d3d8c01125e13ecd0ef8faa52f19e5be3e
f90wrap/transform.py
python
set_intent
(attributes, intent)
return attributes
Remove any current "intent" from attributes and replace with intent given
Remove any current "intent" from attributes and replace with intent given
[ "Remove", "any", "current", "intent", "from", "attributes", "and", "replace", "with", "intent", "given" ]
def set_intent(attributes, intent): """Remove any current "intent" from attributes and replace with intent given""" attributes = [attr for attr in attributes if not attr.startswith('intent')] attributes.append(intent) return attributes
[ "def", "set_intent", "(", "attributes", ",", "intent", ")", ":", "attributes", "=", "[", "attr", "for", "attr", "in", "attributes", "if", "not", "attr", ".", "startswith", "(", "'intent'", ")", "]", "attributes", ".", "append", "(", "intent", ")", "return", "attributes" ]
https://github.com/jameskermode/f90wrap/blob/6a6021d3d8c01125e13ecd0ef8faa52f19e5be3e/f90wrap/transform.py#L444-L448
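For example, a tiny sketch of the attribute rewrite:

    attrs = ["intent(in)", "dimension(3)"]
    print(set_intent(attrs, "intent(out)"))
    # ['dimension(3)', 'intent(out)']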
great-expectations/great_expectations
45224cb890aeae725af25905923d0dbbab2d969d
great_expectations/datasource/batch_kwargs_generator/query_batch_kwargs_generator.py
python
QueryBatchKwargsGenerator._build_batch_kwargs
(self, batch_parameters)
return SqlAlchemyDatasourceQueryBatchKwargs(batch_kwargs)
Build batch kwargs from a partition id.
Build batch kwargs from a partition id.
[ "Build", "batch", "kwargs", "from", "a", "partition", "id", "." ]
def _build_batch_kwargs(self, batch_parameters): """Build batch kwargs from a partition id.""" data_asset_name = batch_parameters.pop("data_asset_name") raw_query = self._get_raw_query(data_asset_name=data_asset_name) partition_id = batch_parameters.pop("partition_id", None) batch_kwargs = self._datasource.process_batch_parameters(**batch_parameters) batch_kwargs["query"] = raw_query if partition_id: if not batch_kwargs["query_parameters"]: batch_kwargs["query_parameters"] = {} batch_kwargs["query_parameters"]["partition_id"] = partition_id return SqlAlchemyDatasourceQueryBatchKwargs(batch_kwargs)
[ "def", "_build_batch_kwargs", "(", "self", ",", "batch_parameters", ")", ":", "data_asset_name", "=", "batch_parameters", ".", "pop", "(", "\"data_asset_name\"", ")", "raw_query", "=", "self", ".", "_get_raw_query", "(", "data_asset_name", "=", "data_asset_name", ")", "partition_id", "=", "batch_parameters", ".", "pop", "(", "\"partition_id\"", ",", "None", ")", "batch_kwargs", "=", "self", ".", "_datasource", ".", "process_batch_parameters", "(", "*", "*", "batch_parameters", ")", "batch_kwargs", "[", "\"query\"", "]", "=", "raw_query", "if", "partition_id", ":", "if", "not", "batch_kwargs", "[", "\"query_parameters\"", "]", ":", "batch_kwargs", "[", "\"query_parameters\"", "]", "=", "{", "}", "batch_kwargs", "[", "\"query_parameters\"", "]", "[", "\"partition_id\"", "]", "=", "partition_id", "return", "SqlAlchemyDatasourceQueryBatchKwargs", "(", "batch_kwargs", ")" ]
https://github.com/great-expectations/great_expectations/blob/45224cb890aeae725af25905923d0dbbab2d969d/great_expectations/datasource/batch_kwargs_generator/query_batch_kwargs_generator.py#L162-L175
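A hedged sketch of the batch_parameters shape this method expects; the generator/datasource wiring and the asset name are placeholders, only the key names come from the body above.

    # generator: a QueryBatchKwargsGenerator wired to a datasource (placeholder)
    kwargs = generator._build_batch_kwargs({
        "data_asset_name": "user_events",  # required: names the stored query
        "partition_id": "2020-01-01",      # optional: injected as a query parameter
    })
    print(kwargs["query"], kwargs.get("query_parameters"))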
seopbo/nlp_classification
21ea6e3f5737e7074bdd8dd190e5f5172f86f6bf
A_Structured_Self-attentive_Sentence_Embedding_cls/model/utils.py
python
Vocab.padding_token
(self)
return self._padding_token
[]
def padding_token(self): return self._padding_token
[ "def", "padding_token", "(", "self", ")", ":", "return", "self", ".", "_padding_token" ]
https://github.com/seopbo/nlp_classification/blob/21ea6e3f5737e7074bdd8dd190e5f5172f86f6bf/A_Structured_Self-attentive_Sentence_Embedding_cls/model/utils.py#L144-L145
cuthbertLab/music21
bd30d4663e52955ed922c10fdf541419d8c67671
music21/bar.py
python
typeToMusicXMLBarStyle
(value)
Convert a music21 barline name into the musicxml name -- essentially just changes the names of 'double' and 'final' to 'light-light' and 'light-heavy' Does not do error checking to make sure it's a valid name, since setting the style on a Barline object already does that. >>> bar.typeToMusicXMLBarStyle('final') 'light-heavy' >>> bar.typeToMusicXMLBarStyle('regular') 'regular'
Convert a music21 barline name into the musicxml name -- essentially just changes the names of 'double' and 'final' to 'light-light' and 'light-heavy'
[ "Convert", "a", "music21", "barline", "name", "into", "the", "musicxml", "name", "--", "essentially", "just", "changes", "the", "names", "of", "double", "and", "final", "to", "light", "-", "light", "and", "light", "-", "heavy" ]
def typeToMusicXMLBarStyle(value): ''' Convert a music21 barline name into the musicxml name -- essentially just changes the names of 'double' and 'final' to 'light-light' and 'light-heavy' Does not do error checking to make sure it's a valid name, since setting the style on a Barline object already does that. >>> bar.typeToMusicXMLBarStyle('final') 'light-heavy' >>> bar.typeToMusicXMLBarStyle('regular') 'regular' ''' if value.lower() in reverseBarTypeDict: return reverseBarTypeDict[value.lower()] else: return value
[ "def", "typeToMusicXMLBarStyle", "(", "value", ")", ":", "if", "value", ".", "lower", "(", ")", "in", "reverseBarTypeDict", ":", "return", "reverseBarTypeDict", "[", "value", ".", "lower", "(", ")", "]", "else", ":", "return", "value" ]
https://github.com/cuthbertLab/music21/blob/bd30d4663e52955ed922c10fdf541419d8c67671/music21/bar.py#L53-L70
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/components/zwave_js/helpers.py
python
get_zwave_value_from_config
(node: ZwaveNode, config: ConfigType)
return node.values[value_id]
Get a Z-Wave JS Value from a config.
Get a Z-Wave JS Value from a config.
[ "Get", "a", "Z", "-", "Wave", "JS", "Value", "from", "a", "config", "." ]
def get_zwave_value_from_config(node: ZwaveNode, config: ConfigType) -> ZwaveValue: """Get a Z-Wave JS Value from a config.""" endpoint = None if config.get(ATTR_ENDPOINT): endpoint = config[ATTR_ENDPOINT] property_key = None if config.get(ATTR_PROPERTY_KEY): property_key = config[ATTR_PROPERTY_KEY] value_id = get_value_id( node, config[ATTR_COMMAND_CLASS], config[ATTR_PROPERTY], endpoint, property_key, ) if value_id not in node.values: raise vol.Invalid(f"Value {value_id} can't be found on node {node}") return node.values[value_id]
[ "def", "get_zwave_value_from_config", "(", "node", ":", "ZwaveNode", ",", "config", ":", "ConfigType", ")", "->", "ZwaveValue", ":", "endpoint", "=", "None", "if", "config", ".", "get", "(", "ATTR_ENDPOINT", ")", ":", "endpoint", "=", "config", "[", "ATTR_ENDPOINT", "]", "property_key", "=", "None", "if", "config", ".", "get", "(", "ATTR_PROPERTY_KEY", ")", ":", "property_key", "=", "config", "[", "ATTR_PROPERTY_KEY", "]", "value_id", "=", "get_value_id", "(", "node", ",", "config", "[", "ATTR_COMMAND_CLASS", "]", ",", "config", "[", "ATTR_PROPERTY", "]", ",", "endpoint", ",", "property_key", ",", ")", "if", "value_id", "not", "in", "node", ".", "values", ":", "raise", "vol", ".", "Invalid", "(", "f\"Value {value_id} can't be found on node {node}\"", ")", "return", "node", ".", "values", "[", "value_id", "]" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/zwave_js/helpers.py#L213-L230
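A hedged sketch of the config shape; the literal strings behind the ATTR_* constants are assumptions, as this record does not show them.

    config = {
        "command_class": 38,         # assumed value of ATTR_COMMAND_CLASS
        "property": "targetValue",   # assumed value of ATTR_PROPERTY
        "endpoint": 1,               # assumed value of ATTR_ENDPOINT
    }
    value = get_zwave_value_from_config(node, config)  # node: a zwave_js ZwaveNode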
f-dangel/backpack
1da7e53ebb2c490e2b7dd9f79116583641f3cca1
backpack/core/derivatives/elementwise.py
python
ElementwiseDerivatives.df
( self, module: Module, g_inp: Tuple[Tensor], g_out: Tuple[Tensor], subsampling: List[int] = None, )
Elementwise first derivative. Args: module: PyTorch activation module. g_inp: Gradients of the module w.r.t. its inputs. g_out: Gradients of the module w.r.t. its outputs. subsampling: Indices of active samples. ``None`` means all samples. Returns: Tensor containing the derivatives `f'(input[i]) ∀ i`.
Elementwise first derivative.
[ "Elementwise", "first", "derivative", "." ]
def df( self, module: Module, g_inp: Tuple[Tensor], g_out: Tuple[Tensor], subsampling: List[int] = None, ): """Elementwise first derivative. Args: module: PyTorch activation module. g_inp: Gradients of the module w.r.t. its inputs. g_out: Gradients of the module w.r.t. its outputs. subsampling: Indices of active samples. ``None`` means all samples. Returns: Tensor containing the derivatives `f'(input[i]) ∀ i`. """ raise NotImplementedError("First derivatives not implemented")
[ "def", "df", "(", "self", ",", "module", ":", "Module", ",", "g_inp", ":", "Tuple", "[", "Tensor", "]", ",", "g_out", ":", "Tuple", "[", "Tensor", "]", ",", "subsampling", ":", "List", "[", "int", "]", "=", "None", ",", ")", ":", "raise", "NotImplementedError", "(", "\"First derivatives not implemented\"", ")" ]
https://github.com/f-dangel/backpack/blob/1da7e53ebb2c490e2b7dd9f79116583641f3cca1/backpack/core/derivatives/elementwise.py#L26-L44
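A hedged sketch of a subclass supplying df for a ReLU-like activation; treating module.input0 as the cached forward input is an assumption about BackPACK's IO bookkeeping.

    class ReLUDerivatives(ElementwiseDerivatives):
        def df(self, module, g_inp, g_out, subsampling=None):
            input0 = module.input0                # assumed: stored by BackPACK's hooks
            if subsampling is not None:
                input0 = input0[subsampling]      # keep only the active samples
            return (input0 > 0).to(input0.dtype)  # ReLU'(x) = 1 for x > 0, else 0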
donnemartin/gitsome
d7c57abc7cb66e9c910a844f15d4536866da3310
gitsome/lib/github3/users.py
python
User.impersonate
(self, scopes=None)
return self._instance_or_null(Authorization, json)
Obtain an impersonation token for the user. The retrieved token will allow impersonation of the user. This is only available for admins of a GitHub Enterprise instance. :param list scopes: (optional), areas you want this token to apply to, i.e., 'gist', 'user' :returns: :class:`Authorization <Authorization>`
Obtain an impersonation token for the user.
[ "Obtain", "an", "impersonation", "token", "for", "the", "user", "." ]
def impersonate(self, scopes=None): """Obtain an impersonation token for the user. The retrieved token will allow impersonation of the user. This is only available for admins of a GitHub Enterprise instance. :param list scopes: (optional), areas you want this token to apply to, i.e., 'gist', 'user' :returns: :class:`Authorization <Authorization>` """ url = self._build_url('admin', 'users', self.id, 'authorizations') data = {} if scopes: data['scopes'] = scopes json = self._json(self._post(url, data=data), 201) return self._instance_or_null(Authorization, json)
[ "def", "impersonate", "(", "self", ",", "scopes", "=", "None", ")", ":", "url", "=", "self", ".", "_build_url", "(", "'admin'", ",", "'users'", ",", "self", ".", "id", ",", "'authorizations'", ")", "data", "=", "{", "}", "if", "scopes", ":", "data", "[", "'scopes'", "]", "=", "scopes", "json", "=", "self", ".", "_json", "(", "self", ".", "_post", "(", "url", ",", "data", "=", "data", ")", ",", "201", ")", "return", "self", ".", "_instance_or_null", "(", "Authorization", ",", "json", ")" ]
https://github.com/donnemartin/gitsome/blob/d7c57abc7cb66e9c910a844f15d4536866da3310/gitsome/lib/github3/users.py#L400-L418
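A hedged usage sketch; impersonation only works for admins of a GitHub Enterprise instance, and the GitHubEnterprise entry point is an assumption about this vendored github3 version.

    from github3 import GitHubEnterprise

    gh = GitHubEnterprise("https://ghe.example.com", token="ADMIN_TOKEN")
    user = gh.user("octocat")
    auth = user.impersonate(scopes=["repo", "user"])
    print(auth.token)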
spulec/moto
a688c0032596a7dfef122b69a08f2bec3be2e481
moto/dynamodb2/comparisons.py
python
ConditionExpressionParser._apply_between
(self, nodes)
return output
Apply condition := operand BETWEEN operand AND operand.
Apply condition := operand BETWEEN operand AND operand.
[ "Apply", "condition", ":", "=", "operand", "BETWEEN", "operand", "AND", "operand", "." ]
def _apply_between(self, nodes): """Apply condition := operand BETWEEN operand AND operand.""" output = deque() while nodes: if self._matches(nodes, ["*", "BETWEEN"]): self._assert( self._matches( nodes, ["OPERAND", "BETWEEN", "OPERAND", "AND", "OPERAND"] ), "Bad BETWEEN expression", list(nodes)[:5], ) lhs = nodes.popleft() between_node = nodes.popleft() low = nodes.popleft() and_node = nodes.popleft() high = nodes.popleft() all_children = [lhs, between_node, low, and_node, high] nodes.appendleft( self.Node( nonterminal=self.Nonterminal.CONDITION, kind=self.Kind.BETWEEN, text=" ".join([t.text for t in all_children]), value=None, children=[lhs, low, high], ) ) else: output.append(nodes.popleft()) return output
[ "def", "_apply_between", "(", "self", ",", "nodes", ")", ":", "output", "=", "deque", "(", ")", "while", "nodes", ":", "if", "self", ".", "_matches", "(", "nodes", ",", "[", "\"*\"", ",", "\"BETWEEN\"", "]", ")", ":", "self", ".", "_assert", "(", "self", ".", "_matches", "(", "nodes", ",", "[", "\"OPERAND\"", ",", "\"BETWEEN\"", ",", "\"OPERAND\"", ",", "\"AND\"", ",", "\"OPERAND\"", "]", ")", ",", "\"Bad BETWEEN expression\"", ",", "list", "(", "nodes", ")", "[", ":", "5", "]", ",", ")", "lhs", "=", "nodes", ".", "popleft", "(", ")", "between_node", "=", "nodes", ".", "popleft", "(", ")", "low", "=", "nodes", ".", "popleft", "(", ")", "and_node", "=", "nodes", ".", "popleft", "(", ")", "high", "=", "nodes", ".", "popleft", "(", ")", "all_children", "=", "[", "lhs", ",", "between_node", ",", "low", ",", "and_node", ",", "high", "]", "nodes", ".", "appendleft", "(", "self", ".", "Node", "(", "nonterminal", "=", "self", ".", "Nonterminal", ".", "CONDITION", ",", "kind", "=", "self", ".", "Kind", ".", "BETWEEN", ",", "text", "=", "\" \"", ".", "join", "(", "[", "t", ".", "text", "for", "t", "in", "all_children", "]", ")", ",", "value", "=", "None", ",", "children", "=", "[", "lhs", ",", "low", ",", "high", "]", ",", ")", ")", "else", ":", "output", ".", "append", "(", "nodes", ".", "popleft", "(", ")", ")", "return", "output" ]
https://github.com/spulec/moto/blob/a688c0032596a7dfef122b69a08f2bec3be2e481/moto/dynamodb2/comparisons.py#L551-L580
ArduPilot/pymavlink
9d6ea618e8d0622bee95fa902b6251882e225afb
quaternion.py
python
QuaternionBase.__eq__
(self, other)
return NotImplemented
Equality test (same orientation, not necessarily same rotation) :param other: a QuaternionBase :returns: true if the quaternions are equal
Equality test (same orientation, not necessarily same rotation)
[ "Equality", "test", "(", "same", "orientation", "not", "necessarily", "same", "rotation", ")" ]
def __eq__(self, other): """ Equality test (same orientation, not necessarily same rotation) :param other: a QuaternionBase :returns: true if the quaternions are equal """ if isinstance(other, QuaternionBase): return abs(self.q.dot(other.q)) > 1 - np.finfo(float).eps return NotImplemented
[ "def", "__eq__", "(", "self", ",", "other", ")", ":", "if", "isinstance", "(", "other", ",", "QuaternionBase", ")", ":", "return", "abs", "(", "self", ".", "q", ".", "dot", "(", "other", ".", "q", ")", ")", ">", "1", "-", "np", ".", "finfo", "(", "float", ")", ".", "eps", "return", "NotImplemented" ]
https://github.com/ArduPilot/pymavlink/blob/9d6ea618e8d0622bee95fa902b6251882e225afb/quaternion.py#L203-L212
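The "same orientation, not necessarily same rotation" point means q and -q compare equal; a small sketch:

    from pymavlink.quaternion import QuaternionBase  # assumed install path

    q1 = QuaternionBase([1, 0, 0, 0])
    q2 = QuaternionBase([-1, 0, 0, 0])  # negated quaternion, same orientation
    assert q1 == q2                     # |q1 . q2| = 1 > 1 - eps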
kbandla/ImmunityDebugger
2abc03fb15c8f3ed0914e1175c4d8933977c73e3
1.85/Libs/immlib.py
python
Debugger.setBreakpointOnName
(self,name)
return debugger.set_breakpoint_on_name(name)
Set a Breakpoint. @type name: STRING @param name: name of the function to set a breakpoint on @rtype: DWORD @return: Address of name
Set a Breakpoint.
[ "Set", "a", "Breakpoint", "." ]
def setBreakpointOnName(self,name): """ Set a Breakpoint. @type name: STRING @param name: name of the function to set a breakpoint on @rtype: DWORD @return: Address of name """ return debugger.set_breakpoint_on_name(name)
[ "def", "setBreakpointOnName", "(", "self", ",", "name", ")", ":", "return", "debugger", ".", "set_breakpoint_on_name", "(", "name", ")" ]
https://github.com/kbandla/ImmunityDebugger/blob/2abc03fb15c8f3ed0914e1175c4d8933977c73e3/1.85/Libs/immlib.py#L1981-L1991
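A sketch in the usual Immunity Debugger PyCommand shape, assuming the script runs inside the debugger:

    from immlib import Debugger

    def main(args):
        imm = Debugger()
        addr = imm.setBreakpointOnName("kernel32.CreateFileW")
        return "Breakpoint set at 0x%08x" % addr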
python-rope/rope
bcdfe6b70b1437d976e21c56b6ec1281b22823aa
rope/base/history.py
python
History.undo
(self, change=None, drop=False, task_handle=taskhandle.NullTaskHandle())
return result
Undo done changes from the history When `change` is `None`, the last done change will be undone. If change is not `None` it should be an item from `self.undo_list`; this change and all changes that depend on it will be undone. In both cases the list of undone changes will be returned. If `drop` is `True`, the undone change will not be appended to the redo list.
Undo done changes from the history
[ "Redo", "done", "changes", "from", "the", "history" ]
def undo(self, change=None, drop=False, task_handle=taskhandle.NullTaskHandle()): """Undo done changes from the history When `change` is `None`, the last done change will be undone. If change is not `None` it should be an item from `self.undo_list`; this change and all changes that depend on it will be undone. In both cases the list of undone changes will be returned. If `drop` is `True`, the undone change will not be appended to the redo list. """ if not self._undo_list: raise exceptions.HistoryError("Undo list is empty") if change is None: change = self.undo_list[-1] dependencies = self._find_dependencies(self.undo_list, change) self._move_front(self.undo_list, dependencies) self._perform_undos(len(dependencies), task_handle) result = self.redo_list[-len(dependencies) :] if drop: del self.redo_list[-len(dependencies) :] return result
[ "def", "undo", "(", "self", ",", "change", "=", "None", ",", "drop", "=", "False", ",", "task_handle", "=", "taskhandle", ".", "NullTaskHandle", "(", ")", ")", ":", "if", "not", "self", ".", "_undo_list", ":", "raise", "exceptions", ".", "HistoryError", "(", "\"Undo list is empty\"", ")", "if", "change", "is", "None", ":", "change", "=", "self", ".", "undo_list", "[", "-", "1", "]", "dependencies", "=", "self", ".", "_find_dependencies", "(", "self", ".", "undo_list", ",", "change", ")", "self", ".", "_move_front", "(", "self", ".", "undo_list", ",", "dependencies", ")", "self", ".", "_perform_undos", "(", "len", "(", "dependencies", ")", ",", "task_handle", ")", "result", "=", "self", ".", "redo_list", "[", "-", "len", "(", "dependencies", ")", ":", "]", "if", "drop", ":", "del", "self", ".", "redo_list", "[", "-", "len", "(", "dependencies", ")", ":", "]", "return", "result" ]
https://github.com/python-rope/rope/blob/bcdfe6b70b1437d976e21c56b6ec1281b22823aa/rope/base/history.py#L55-L78
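A hedged usage sketch through rope's project object, which owns the history; the path and the changes variable are placeholders.

    from rope.base.project import Project

    proj = Project("/path/to/project")  # hypothetical project root
    # ... proj.do(changes) applies some ChangeSet ...
    undone = proj.history.undo()        # undo the last change and its dependents
    proj.history.redo()                 # the undone change landed on the redo list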
nosmokingbandit/watcher
dadacd21a5790ee609058a98a17fcc8954d24439
lib/sqlalchemy/orm/scoping.py
python
scoped_session.__init__
(self, session_factory, scopefunc=None)
Construct a new :class:`.scoped_session`. :param session_factory: a factory to create new :class:`.Session` instances. This is usually, but not necessarily, an instance of :class:`.sessionmaker`. :param scopefunc: optional function which defines the current scope. If not passed, the :class:`.scoped_session` object assumes "thread-local" scope, and will use a Python ``threading.local()`` in order to maintain the current :class:`.Session`. If passed, the function should return a hashable token; this token will be used as the key in a dictionary in order to store and retrieve the current :class:`.Session`.
Construct a new :class:`.scoped_session`.
[ "Construct", "a", "new", ":", "class", ":", ".", "scoped_session", "." ]
def __init__(self, session_factory, scopefunc=None): """Construct a new :class:`.scoped_session`. :param session_factory: a factory to create new :class:`.Session` instances. This is usually, but not necessarily, an instance of :class:`.sessionmaker`. :param scopefunc: optional function which defines the current scope. If not passed, the :class:`.scoped_session` object assumes "thread-local" scope, and will use a Python ``threading.local()`` in order to maintain the current :class:`.Session`. If passed, the function should return a hashable token; this token will be used as the key in a dictionary in order to store and retrieve the current :class:`.Session`. """ self.session_factory = session_factory if scopefunc: self.registry = ScopedRegistry(session_factory, scopefunc) else: self.registry = ThreadLocalRegistry(session_factory)
[ "def", "__init__", "(", "self", ",", "session_factory", ",", "scopefunc", "=", "None", ")", ":", "self", ".", "session_factory", "=", "session_factory", "if", "scopefunc", ":", "self", ".", "registry", "=", "ScopedRegistry", "(", "session_factory", ",", "scopefunc", ")", "else", ":", "self", ".", "registry", "=", "ThreadLocalRegistry", "(", "session_factory", ")" ]
https://github.com/nosmokingbandit/watcher/blob/dadacd21a5790ee609058a98a17fcc8954d24439/lib/sqlalchemy/orm/scoping.py#L30-L51
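The default thread-local scope in action, a minimal sketch:

    from sqlalchemy import create_engine
    from sqlalchemy.orm import scoped_session, sessionmaker

    engine = create_engine("sqlite://")
    Session = scoped_session(sessionmaker(bind=engine))

    s1, s2 = Session(), Session()
    assert s1 is s2   # same thread, same registry slot -> same Session
    Session.remove()  # discard the current scope's Session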
Source-Python-Dev-Team/Source.Python
d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb
addons/source-python/packages/source-python/memory/manager.py
python
CustomType.on_dealloc
(self)
Call the destructor. This method is automatically called when the pointer gets deallocated. It then calls the destructor if it was specified.
Call the destructor.
[ "Call", "the", "destructor", "." ]
def on_dealloc(self): """Call the destructor. This method is automatically called when the pointer gets deallocated. It then calls the destructor if it was specified. """ # Call the destructor if it was specified if self._destructor is not None: self._destructor()
[ "def", "on_dealloc", "(", "self", ")", ":", "# Call the destructor if it was specified", "if", "self", ".", "_destructor", "is", "not", "None", ":", "self", ".", "_destructor", "(", ")" ]
https://github.com/Source-Python-Dev-Team/Source.Python/blob/d0ffd8ccbd1e9923c9bc44936f20613c1c76b7fb/addons/source-python/packages/source-python/memory/manager.py#L135-L143
CalebBell/thermo
572a47d1b03d49fe609b8d5f826fa6a7cde00828
thermo/chemical.py
python
Chemical.calc_S_excess
(self, T, P)
return S_dep
[]
def calc_S_excess(self, T, P): S_dep = 0 if self.phase_ref == 'g' and self.phase == 'g': S_dep += self.eos.to_TP(T, P).S_dep_g - self.S_dep_ref_g elif self.phase_ref == 'l' and self.phase == 'l': try: S_dep += self.eos.to_TP(T, P).S_dep_l - self._eos_T_101325.S_dep_l except: S_dep += 0 elif self.phase_ref == 'g' and self.phase == 'l': S_dep += self.S_dep_Tb_Pb_g - self.S_dep_Tb_P_ref_g S_dep += (self.eos.to_TP(T, P).S_dep_l - self._eos_T_101325.S_dep_l) elif self.phase_ref == 'l' and self.phase == 'g': S_dep += self.S_dep_T_ref_Pb - self.S_dep_ref_l S_dep += (self.eos.to_TP(T, P).S_dep_g - self.S_dep_Tb_Pb_g) return S_dep
[ "def", "calc_S_excess", "(", "self", ",", "T", ",", "P", ")", ":", "S_dep", "=", "0", "if", "self", ".", "phase_ref", "==", "'g'", "and", "self", ".", "phase", "==", "'g'", ":", "S_dep", "+=", "self", ".", "eos", ".", "to_TP", "(", "T", ",", "P", ")", ".", "S_dep_g", "-", "self", ".", "S_dep_ref_g", "elif", "self", ".", "phase_ref", "==", "'l'", "and", "self", ".", "phase", "==", "'l'", ":", "try", ":", "S_dep", "+=", "self", ".", "eos", ".", "to_TP", "(", "T", ",", "P", ")", ".", "S_dep_l", "-", "self", ".", "_eos_T_101325", ".", "S_dep_l", "except", ":", "S_dep", "+=", "0", "elif", "self", ".", "phase_ref", "==", "'g'", "and", "self", ".", "phase", "==", "'l'", ":", "S_dep", "+=", "self", ".", "S_dep_Tb_Pb_g", "-", "self", ".", "S_dep_Tb_P_ref_g", "S_dep", "+=", "(", "self", ".", "eos", ".", "to_TP", "(", "T", ",", "P", ")", ".", "S_dep_l", "-", "self", ".", "_eos_T_101325", ".", "S_dep_l", ")", "elif", "self", ".", "phase_ref", "==", "'l'", "and", "self", ".", "phase", "==", "'g'", ":", "S_dep", "+=", "self", ".", "S_dep_T_ref_Pb", "-", "self", ".", "S_dep_ref_l", "S_dep", "+=", "(", "self", ".", "eos", ".", "to_TP", "(", "T", ",", "P", ")", ".", "S_dep_g", "-", "self", ".", "S_dep_Tb_Pb_g", ")", "return", "S_dep" ]
https://github.com/CalebBell/thermo/blob/572a47d1b03d49fe609b8d5f826fa6a7cde00828/thermo/chemical.py#L1400-L1418
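A hedged usage sketch through the Chemical front end; the phase bookkeeping the method relies on is set up by the constructor.

    from thermo.chemical import Chemical

    w = Chemical("water", T=300.0, P=1e5)
    print(w.calc_S_excess(300.0, 1e5))  # entropy departure at (T, P)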
onnx/sklearn-onnx
8e19d19b8a9bcae7f17d5b7cc2514cf6b89f8199
skl2onnx/operator_converters/label_binariser.py
python
convert_sklearn_label_binariser
(scope: Scope, operator: Operator, container: ModelComponentContainer)
Converts Scikit Label Binariser model to onnx format.
Converts Scikit Label Binariser model to onnx format.
[ "Converts", "Scikit", "Label", "Binariser", "model", "to", "onnx", "format", "." ]
def convert_sklearn_label_binariser(scope: Scope, operator: Operator, container: ModelComponentContainer): """Converts Scikit Label Binariser model to onnx format.""" binariser_op = operator.raw_operator classes = binariser_op.classes_ if (hasattr(binariser_op, 'sparse_input_') and binariser_op.sparse_input_): raise RuntimeError("sparse is not supported for LabelBinarizer.") if (hasattr(binariser_op, 'y_type_') and binariser_op.y_type_ == "multilabel-indicator"): if binariser_op.pos_label != 1: raise RuntimeError("pos_label != 1 is not supported " "for LabelBinarizer.") if list(classes) != list(range(len(classes))): raise RuntimeError("classes != [0, 1, ..., n_classes] is not " "supported for LabelBinarizer.") container.add_node('Identity', operator.inputs[0].full_name, operator.output_full_names, name=scope.get_unique_operator_name('identity')) else: zeros_tensor = np.full((1, len(classes)), binariser_op.neg_label, dtype=np.float) unit_tensor = np.full((1, len(classes)), binariser_op.pos_label, dtype=np.float) classes_tensor_name = scope.get_unique_variable_name('classes_tensor') equal_condition_tensor_name = scope.get_unique_variable_name( 'equal_condition_tensor') zeros_tensor_name = scope.get_unique_variable_name('zero_tensor') unit_tensor_name = scope.get_unique_variable_name('unit_tensor') where_result_name = scope.get_unique_variable_name('where_result') class_dtype = onnx_proto.TensorProto.STRING if np.issubdtype(binariser_op.classes_.dtype, np.signedinteger): class_dtype = onnx_proto.TensorProto.INT64 else: classes = np.array([s.encode('utf-8') for s in classes]) container.add_initializer(classes_tensor_name, class_dtype, [len(classes)], classes) container.add_initializer( zeros_tensor_name, onnx_proto.TensorProto.FLOAT, zeros_tensor.shape, zeros_tensor.ravel()) container.add_initializer( unit_tensor_name, onnx_proto.TensorProto.FLOAT, unit_tensor.shape, unit_tensor.ravel()) reshaped_input_name = scope.get_unique_variable_name('reshaped_input') apply_reshape(scope, operator.inputs[0].full_name, reshaped_input_name, container, desired_shape=[-1, 1]) # Models with classes_/inputs of string type would fail in the # following step as Equal op does not support string comparison. container.add_node('Equal', [classes_tensor_name, reshaped_input_name], equal_condition_tensor_name, name=scope.get_unique_operator_name('equal')) container.add_node( 'Where', [equal_condition_tensor_name, unit_tensor_name, zeros_tensor_name], where_result_name, name=scope.get_unique_operator_name('where')) where_res = where_result_name if len(binariser_op.classes_) == 2: array_f_name = scope.get_unique_variable_name( 'array_feature_extractor_result') pos_class_index_name = scope.get_unique_variable_name( 'pos_class_index') container.add_initializer( pos_class_index_name, onnx_proto.TensorProto.INT64, [], [1]) container.add_node( 'ArrayFeatureExtractor', [where_result_name, pos_class_index_name], array_f_name, op_domain='ai.onnx.ml', name=scope.get_unique_operator_name('ArrayFeatureExtractor')) where_res = array_f_name apply_cast(scope, where_res, operator.output_full_names, container, to=onnx_proto.TensorProto.INT64)
[ "def", "convert_sklearn_label_binariser", "(", "scope", ":", "Scope", ",", "operator", ":", "Operator", ",", "container", ":", "ModelComponentContainer", ")", ":", "binariser_op", "=", "operator", ".", "raw_operator", "classes", "=", "binariser_op", ".", "classes_", "if", "(", "hasattr", "(", "binariser_op", ",", "'sparse_input_'", ")", "and", "binariser_op", ".", "sparse_input_", ")", ":", "raise", "RuntimeError", "(", "\"sparse is not supported for LabelBinarizer.\"", ")", "if", "(", "hasattr", "(", "binariser_op", ",", "'y_type_'", ")", "and", "binariser_op", ".", "y_type_", "==", "\"multilabel-indicator\"", ")", ":", "if", "binariser_op", ".", "pos_label", "!=", "1", ":", "raise", "RuntimeError", "(", "\"pos_label != 1 is not supported \"", "\"for LabelBinarizer.\"", ")", "if", "list", "(", "classes", ")", "!=", "list", "(", "range", "(", "len", "(", "classes", ")", ")", ")", ":", "raise", "RuntimeError", "(", "\"classes != [0, 1, ..., n_classes] is not \"", "\"supported for LabelBinarizer.\"", ")", "container", ".", "add_node", "(", "'Identity'", ",", "operator", ".", "inputs", "[", "0", "]", ".", "full_name", ",", "operator", ".", "output_full_names", ",", "name", "=", "scope", ".", "get_unique_operator_name", "(", "'identity'", ")", ")", "else", ":", "zeros_tensor", "=", "np", ".", "full", "(", "(", "1", ",", "len", "(", "classes", ")", ")", ",", "binariser_op", ".", "neg_label", ",", "dtype", "=", "np", ".", "float", ")", "unit_tensor", "=", "np", ".", "full", "(", "(", "1", ",", "len", "(", "classes", ")", ")", ",", "binariser_op", ".", "pos_label", ",", "dtype", "=", "np", ".", "float", ")", "classes_tensor_name", "=", "scope", ".", "get_unique_variable_name", "(", "'classes_tensor'", ")", "equal_condition_tensor_name", "=", "scope", ".", "get_unique_variable_name", "(", "'equal_condition_tensor'", ")", "zeros_tensor_name", "=", "scope", ".", "get_unique_variable_name", "(", "'zero_tensor'", ")", "unit_tensor_name", "=", "scope", ".", "get_unique_variable_name", "(", "'unit_tensor'", ")", "where_result_name", "=", "scope", ".", "get_unique_variable_name", "(", "'where_result'", ")", "class_dtype", "=", "onnx_proto", ".", "TensorProto", ".", "STRING", "if", "np", ".", "issubdtype", "(", "binariser_op", ".", "classes_", ".", "dtype", ",", "np", ".", "signedinteger", ")", ":", "class_dtype", "=", "onnx_proto", ".", "TensorProto", ".", "INT64", "else", ":", "classes", "=", "np", ".", "array", "(", "[", "s", ".", "encode", "(", "'utf-8'", ")", "for", "s", "in", "classes", "]", ")", "container", ".", "add_initializer", "(", "classes_tensor_name", ",", "class_dtype", ",", "[", "len", "(", "classes", ")", "]", ",", "classes", ")", "container", ".", "add_initializer", "(", "zeros_tensor_name", ",", "onnx_proto", ".", "TensorProto", ".", "FLOAT", ",", "zeros_tensor", ".", "shape", ",", "zeros_tensor", ".", "ravel", "(", ")", ")", "container", ".", "add_initializer", "(", "unit_tensor_name", ",", "onnx_proto", ".", "TensorProto", ".", "FLOAT", ",", "unit_tensor", ".", "shape", ",", "unit_tensor", ".", "ravel", "(", ")", ")", "reshaped_input_name", "=", "scope", ".", "get_unique_variable_name", "(", "'reshaped_input'", ")", "apply_reshape", "(", "scope", ",", "operator", ".", "inputs", "[", "0", "]", ".", "full_name", ",", "reshaped_input_name", ",", "container", ",", "desired_shape", "=", "[", "-", "1", ",", "1", "]", ")", "# Models with classes_/inputs of string type would fail in the", "# following step as Equal op does not support string comparison.", "container", ".", "add_node", "(", 
"'Equal'", ",", "[", "classes_tensor_name", ",", "reshaped_input_name", "]", ",", "equal_condition_tensor_name", ",", "name", "=", "scope", ".", "get_unique_operator_name", "(", "'equal'", ")", ")", "container", ".", "add_node", "(", "'Where'", ",", "[", "equal_condition_tensor_name", ",", "unit_tensor_name", ",", "zeros_tensor_name", "]", ",", "where_result_name", ",", "name", "=", "scope", ".", "get_unique_operator_name", "(", "'where'", ")", ")", "where_res", "=", "where_result_name", "if", "len", "(", "binariser_op", ".", "classes_", ")", "==", "2", ":", "array_f_name", "=", "scope", ".", "get_unique_variable_name", "(", "'array_feature_extractor_result'", ")", "pos_class_index_name", "=", "scope", ".", "get_unique_variable_name", "(", "'pos_class_index'", ")", "container", ".", "add_initializer", "(", "pos_class_index_name", ",", "onnx_proto", ".", "TensorProto", ".", "INT64", ",", "[", "]", ",", "[", "1", "]", ")", "container", ".", "add_node", "(", "'ArrayFeatureExtractor'", ",", "[", "where_result_name", ",", "pos_class_index_name", "]", ",", "array_f_name", ",", "op_domain", "=", "'ai.onnx.ml'", ",", "name", "=", "scope", ".", "get_unique_operator_name", "(", "'ArrayFeatureExtractor'", ")", ")", "where_res", "=", "array_f_name", "apply_cast", "(", "scope", ",", "where_res", ",", "operator", ".", "output_full_names", ",", "container", ",", "to", "=", "onnx_proto", ".", "TensorProto", ".", "INT64", ")" ]
https://github.com/onnx/sklearn-onnx/blob/8e19d19b8a9bcae7f17d5b7cc2514cf6b89f8199/skl2onnx/operator_converters/label_binariser.py#L12-L91
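A hedged end-to-end sketch that exercises this converter through skl2onnx's public entry point:

    import numpy as np
    from sklearn.preprocessing import LabelBinarizer
    from skl2onnx import convert_sklearn
    from skl2onnx.common.data_types import Int64TensorType

    lb = LabelBinarizer().fit(np.array([0, 1, 2], dtype=np.int64))
    onx = convert_sklearn(lb, initial_types=[("input", Int64TensorType([None]))])
    with open("label_binarizer.onnx", "wb") as f:
        f.write(onx.SerializeToString())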
bcbio/bcbio-nextgen
c80f9b6b1be3267d1f981b7035e3b72441d258f2
bcbio/variation/multi.py
python
_diff_dict
(orig, new)
return final
Diff a nested dictionary, returning only key/values that differ.
Diff a nested dictionary, returning only key/values that differ.
[ "Diff", "a", "nested", "dictionary", "returning", "only", "key", "/", "values", "that", "differ", "." ]
def _diff_dict(orig, new): """Diff a nested dictionary, returning only key/values that differ. """ final = {} for k, v in new.items(): if isinstance(v, dict): v = _diff_dict(orig.get(k, {}), v) if len(v) > 0: final[k] = v elif v != orig.get(k): final[k] = v for k, v in orig.items(): if k not in new: final[k] = None return final
[ "def", "_diff_dict", "(", "orig", ",", "new", ")", ":", "final", "=", "{", "}", "for", "k", ",", "v", "in", "new", ".", "items", "(", ")", ":", "if", "isinstance", "(", "v", ",", "dict", ")", ":", "v", "=", "_diff_dict", "(", "orig", ".", "get", "(", "k", ",", "{", "}", ")", ",", "v", ")", "if", "len", "(", "v", ")", ">", "0", ":", "final", "[", "k", "]", "=", "v", "elif", "v", "!=", "orig", ".", "get", "(", "k", ")", ":", "final", "[", "k", "]", "=", "v", "for", "k", ",", "v", "in", "orig", ".", "items", "(", ")", ":", "if", "k", "not", "in", "new", ":", "final", "[", "k", "]", "=", "None", "return", "final" ]
https://github.com/bcbio/bcbio-nextgen/blob/c80f9b6b1be3267d1f981b7035e3b72441d258f2/bcbio/variation/multi.py#L183-L197
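For example:

    orig = {"a": 1, "b": {"c": 2, "d": 3}}
    new = {"a": 1, "b": {"c": 2, "d": 4}, "e": 5}
    print(_diff_dict(orig, new))
    # {'b': {'d': 4}, 'e': 5}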
onejgordon/flow-dashboard
993320e2eb0f86d89b9904a3d5415c7479c5918e
tools.py
python
dt_from_ts
(ms)
Convert timestamp in ms to datetime >>> dt_from_ts(1494269497212) datetime.datetime(2017, 5, 8, 18, 51, 37, 212000)
Convert timestamp in ms to datetime
[ "Convert", "timestamp", "in", "ms", "to", "datetime" ]
def dt_from_ts(ms): ''' Convert timestamp in ms to datetime >>> dt_from_ts(1494269497212) datetime.datetime(2017, 5, 8, 18, 51, 37, 212000) ''' if ms == 0: return None else: return datetime.utcfromtimestamp(float(ms) / 1000)
[ "def", "dt_from_ts", "(", "ms", ")", ":", "if", "ms", "==", "0", ":", "return", "None", "else", ":", "return", "datetime", ".", "utcfromtimestamp", "(", "float", "(", "ms", ")", "/", "1000", ")" ]
https://github.com/onejgordon/flow-dashboard/blob/993320e2eb0f86d89b9904a3d5415c7479c5918e/tools.py#L219-L230
Delgan/loguru
3d5234541c81318e7f6f725eca7bab294fe09c23
loguru/_logger.py
python
Logger.parse
(file, pattern, *, cast={}, chunk=2 ** 16)
Parse raw logs and extract each entry as a |dict|. The logging format has to be specified as the regex ``pattern``, it will then be used to parse the ``file`` and retrieve each entry based on the named groups present in the regex. Parameters ---------- file : |str|, |Path| or |file-like object|_ The path of the log file to be parsed, or an already opened file object. pattern : |str| or |re.Pattern|_ The regex to use for logs parsing, it should contain named groups which will be included in the returned dict. cast : |callable|_ or |dict|, optional A function that should convert in-place the regex groups parsed (a dict of string values) to more appropriate types. If a dict is passed, it should be a mapping between keys of parsed log dict and the function that should be used to convert the associated value. chunk : |int|, optional The number of bytes read while iterating through the logs, this avoids having to load the whole file in memory. Yields ------ :class:`dict` The dict mapping regex named groups to matched values, as returned by |match.groupdict| and optionally converted according to ``cast`` argument. Examples -------- >>> reg = r"(?P<lvl>[0-9]+): (?P<msg>.*)" # If log format is "{level.no} - {message}" >>> for e in logger.parse("file.log", reg): # A file line could be "10 - A debug message" ... print(e) # => {'lvl': '10', 'msg': 'A debug message'} >>> caster = dict(lvl=int) # Parse 'lvl' key as an integer >>> for e in logger.parse("file.log", reg, cast=caster): ... print(e) # => {'lvl': 10, 'msg': 'A debug message'} >>> def cast(groups): ... if "date" in groups: ... groups["date"] = datetime.strptime(groups["date"], "%Y-%m-%d %H:%M:%S") ... >>> with open("file.log") as file: ... for log in logger.parse(file, reg, cast=cast): ... print(log["date"], log["something_else"])
Parse raw logs and extract each entry as a |dict|.
[ "Parse", "raw", "logs", "and", "extract", "each", "entry", "as", "a", "|dict|", "." ]
def parse(file, pattern, *, cast={}, chunk=2 ** 16): """Parse raw logs and extract each entry as a |dict|. The logging format has to be specified as the regex ``pattern``, it will then be used to parse the ``file`` and retrieve each entry based on the named groups present in the regex. Parameters ---------- file : |str|, |Path| or |file-like object|_ The path of the log file to be parsed, or an already opened file object. pattern : |str| or |re.Pattern|_ The regex to use for logs parsing, it should contain named groups which will be included in the returned dict. cast : |callable|_ or |dict|, optional A function that should convert in-place the regex groups parsed (a dict of string values) to more appropriate types. If a dict is passed, it should be a mapping between keys of parsed log dict and the function that should be used to convert the associated value. chunk : |int|, optional The number of bytes read while iterating through the logs, this avoids having to load the whole file in memory. Yields ------ :class:`dict` The dict mapping regex named groups to matched values, as returned by |match.groupdict| and optionally converted according to ``cast`` argument. Examples -------- >>> reg = r"(?P<lvl>[0-9]+): (?P<msg>.*)" # If log format is "{level.no} - {message}" >>> for e in logger.parse("file.log", reg): # A file line could be "10 - A debug message" ... print(e) # => {'lvl': '10', 'msg': 'A debug message'} >>> caster = dict(lvl=int) # Parse 'lvl' key as an integer >>> for e in logger.parse("file.log", reg, cast=caster): ... print(e) # => {'lvl': 10, 'msg': 'A debug message'} >>> def cast(groups): ... if "date" in groups: ... groups["date"] = datetime.strptime(groups["date"], "%Y-%m-%d %H:%M:%S") ... >>> with open("file.log") as file: ... for log in logger.parse(file, reg, cast=cast): ... print(log["date"], log["something_else"]) """ if isinstance(file, (str, PathLike)): should_close = True fileobj = open(str(file)) elif hasattr(file, "read") and callable(file.read): should_close = False fileobj = file else: raise TypeError( "Invalid file, it should be a string path or a file object, not: '%s'" % type(file).__name__ ) if isinstance(cast, dict): def cast_function(groups): for key, converter in cast.items(): if key in groups: groups[key] = converter(groups[key]) elif callable(cast): cast_function = cast else: raise TypeError( "Invalid cast, it should be a function or a dict, not: '%s'" % type(cast).__name__ ) try: regex = re.compile(pattern) except TypeError: raise TypeError( "Invalid pattern, it should be a string or a compiled regex, not: '%s'" % type(pattern).__name__ ) from None matches = Logger._find_iter(fileobj, regex, chunk) for match in matches: groups = match.groupdict() cast_function(groups) yield groups if should_close: fileobj.close()
[ "def", "parse", "(", "file", ",", "pattern", ",", "*", ",", "cast", "=", "{", "}", ",", "chunk", "=", "2", "**", "16", ")", ":", "if", "isinstance", "(", "file", ",", "(", "str", ",", "PathLike", ")", ")", ":", "should_close", "=", "True", "fileobj", "=", "open", "(", "str", "(", "file", ")", ")", "elif", "hasattr", "(", "file", ",", "\"read\"", ")", "and", "callable", "(", "file", ".", "read", ")", ":", "should_close", "=", "False", "fileobj", "=", "file", "else", ":", "raise", "TypeError", "(", "\"Invalid file, it should be a string path or a file object, not: '%s'\"", "%", "type", "(", "file", ")", ".", "__name__", ")", "if", "isinstance", "(", "cast", ",", "dict", ")", ":", "def", "cast_function", "(", "groups", ")", ":", "for", "key", ",", "converter", "in", "cast", ".", "items", "(", ")", ":", "if", "key", "in", "groups", ":", "groups", "[", "key", "]", "=", "converter", "(", "groups", "[", "key", "]", ")", "elif", "callable", "(", "cast", ")", ":", "cast_function", "=", "cast", "else", ":", "raise", "TypeError", "(", "\"Invalid cast, it should be a function or a dict, not: '%s'\"", "%", "type", "(", "cast", ")", ".", "__name__", ")", "try", ":", "regex", "=", "re", ".", "compile", "(", "pattern", ")", "except", "TypeError", ":", "raise", "TypeError", "(", "\"Invalid pattern, it should be a string or a compiled regex, not: '%s'\"", "%", "type", "(", "pattern", ")", ".", "__name__", ")", "from", "None", "matches", "=", "Logger", ".", "_find_iter", "(", "fileobj", ",", "regex", ",", "chunk", ")", "for", "match", "in", "matches", ":", "groups", "=", "match", ".", "groupdict", "(", ")", "cast_function", "(", "groups", ")", "yield", "groups", "if", "should_close", ":", "fileobj", ".", "close", "(", ")" ]
https://github.com/Delgan/loguru/blob/3d5234541c81318e7f6f725eca7bab294fe09c23/loguru/_logger.py#L1735-L1824
TencentCloud/tencentcloud-sdk-python
3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2
tencentcloud/iai/v20200303/iai_client.py
python
IaiClient.SearchFaces
(self, request)
Given a face image to be recognized, this API identifies the TopK most similar persons in one or more person groups, with results sorted by similarity in descending order. Up to 10 faces in an image can be recognized at a time, and up to 100 person groups (Group) can be searched in one request. The total number of faces in the groups searched by a single request depends on the groups' algorithm model version (FaceModelVersion): for version 2.0 groups the total must not exceed 1 million faces; for version 3.0 groups it must not exceed 3 million faces. Unlike the [person search](https://cloud.tencent.com/document/product/867/44992) and [person search with results by group](https://cloud.tencent.com/document/product/867/44991) APIs, this API matches each face (Face) under a person (Person) as a separate individual, whereas those APIs fuse the features of all faces (Face) under a person (Person): if a Person has 4 Faces, their features are fused into a single feature for that Person, which makes the search more accurate. This API must be used together with the [person group management APIs](https://cloud.tencent.com/document/product/867/45015). > - Please use the V3 signature method in the common parameters, i.e., set the SignatureMethod parameter to TC3-HMAC-SHA256. > - Groups with different algorithm model versions (FaceModelVersion) cannot be searched in the same request. :param request: Request instance for SearchFaces. :type request: :class:`tencentcloud.iai.v20200303.models.SearchFacesRequest` :rtype: :class:`tencentcloud.iai.v20200303.models.SearchFacesResponse`
Given a face image to be recognized, this API identifies the TopK most similar persons in one or more person groups, with results sorted by similarity in descending order.
[ "用于对一张待识别的人脸图片,在一个或多个人员库中识别出最相似的", "TopK", "人员,识别结果按照相似度从大到小排序。" ]
def SearchFaces(self, request): """Given a face image to be recognized, this API identifies the TopK most similar persons in one or more person groups, with results sorted by similarity in descending order. Up to 10 faces in an image can be recognized at a time, and up to 100 person groups (Group) can be searched in one request. The total number of faces in the groups searched by a single request depends on the groups' algorithm model version (FaceModelVersion): for version 2.0 groups the total must not exceed 1 million faces; for version 3.0 groups it must not exceed 3 million faces. Unlike the [person search](https://cloud.tencent.com/document/product/867/44992) and [person search with results by group](https://cloud.tencent.com/document/product/867/44991) APIs, this API matches each face (Face) under a person (Person) as a separate individual, whereas those APIs fuse the features of all faces (Face) under a person (Person): if a Person has 4 Faces, their features are fused into a single feature for that Person, which makes the search more accurate. This API must be used together with the [person group management APIs](https://cloud.tencent.com/document/product/867/45015). > - Please use the V3 signature method in the common parameters, i.e., set the SignatureMethod parameter to TC3-HMAC-SHA256. > - Groups with different algorithm model versions (FaceModelVersion) cannot be searched in the same request. :param request: Request instance for SearchFaces. :type request: :class:`tencentcloud.iai.v20200303.models.SearchFacesRequest` :rtype: :class:`tencentcloud.iai.v20200303.models.SearchFacesResponse` """ try: params = request._serialize() body = self.call("SearchFaces", params) response = json.loads(body) if "Error" not in response["Response"]: model = models.SearchFacesResponse() model._deserialize(response["Response"]) return model else: code = response["Response"]["Error"]["Code"] message = response["Response"]["Error"]["Message"] reqid = response["Response"]["RequestId"] raise TencentCloudSDKException(code, message, reqid) except Exception as e: if isinstance(e, TencentCloudSDKException): raise else: raise TencentCloudSDKException(e.message, e.message)
[ "def", "SearchFaces", "(", "self", ",", "request", ")", ":", "try", ":", "params", "=", "request", ".", "_serialize", "(", ")", "body", "=", "self", ".", "call", "(", "\"SearchFaces\"", ",", "params", ")", "response", "=", "json", ".", "loads", "(", "body", ")", "if", "\"Error\"", "not", "in", "response", "[", "\"Response\"", "]", ":", "model", "=", "models", ".", "SearchFacesResponse", "(", ")", "model", ".", "_deserialize", "(", "response", "[", "\"Response\"", "]", ")", "return", "model", "else", ":", "code", "=", "response", "[", "\"Response\"", "]", "[", "\"Error\"", "]", "[", "\"Code\"", "]", "message", "=", "response", "[", "\"Response\"", "]", "[", "\"Error\"", "]", "[", "\"Message\"", "]", "reqid", "=", "response", "[", "\"Response\"", "]", "[", "\"RequestId\"", "]", "raise", "TencentCloudSDKException", "(", "code", ",", "message", ",", "reqid", ")", "except", "Exception", "as", "e", ":", "if", "isinstance", "(", "e", ",", "TencentCloudSDKException", ")", ":", "raise", "else", ":", "raise", "TencentCloudSDKException", "(", "e", ".", "message", ",", "e", ".", "message", ")" ]
https://github.com/TencentCloud/tencentcloud-sdk-python/blob/3677fd1cdc8c5fd626ce001c13fd3b59d1f279d2/tencentcloud/iai/v20200303/iai_client.py#L1016-L1056
aquario-crypto/Book_on_Python_Algorithms_and_Data_Structure
234b4b1fc84faf4a06843c1fba1d05ccc18f80e6
book/ebook_src/searching_and_sorting/sorting/count_sort.py
python
count_sort_dict
(a)
return b
an example of counting sort using default dictionaries
an example of counting sort using default dictionaries
[ "an", "example", "of", "counting", "sort", "using", "default", "dictionaries" ]
def count_sort_dict(a):
    ''' an example of counting sort using default dictionaries '''
    b, c = [], defaultdict(list)
    for x in a:
        c[x].append(x)  # we could have used key = lambda x:x
    for k in range(min(c), max(c) + 1):
        b.extend(c[k])
    return b
[ "def", "count_sort_dict", "(", "a", ")", ":", "b", ",", "c", "=", "[", "]", ",", "defaultdict", "(", "list", ")", "for", "x", "in", "a", ":", "c", "[", "x", "]", ".", "append", "(", "x", ")", "# we could have used key = lambda x:x", "for", "k", "in", "range", "(", "min", "(", "c", ")", ",", "max", "(", "c", ")", "+", "1", ")", ":", "b", ".", "extend", "(", "c", "[", "k", "]", ")", "return", "b" ]
https://github.com/aquario-crypto/Book_on_Python_Algorithms_and_Data_Structure/blob/234b4b1fc84faf4a06843c1fba1d05ccc18f80e6/book/ebook_src/searching_and_sorting/sorting/count_sort.py#L9-L16
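A quick sanity check of count_sort_dict (the input list is illustrative; the function is repeated so the snippet runs on its own). Note that the range(min(c), max(c) + 1) walk assumes integer keys:

from collections import defaultdict

def count_sort_dict(a):
    b, c = [], defaultdict(list)
    for x in a:
        c[x].append(x)
    for k in range(min(c), max(c) + 1):
        b.extend(c[k])
    return b

print(count_sort_dict([4, 1, 3, 4, 2, 1]))  # [1, 1, 2, 3, 4, 4]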
biopython/biopython
2dd97e71762af7b046d7f7f8a4f1e38db6b06c86
Bio/Nexus/Nexus.py
python
_seqmatrix2strmatrix
(matrix)
return {t: str(matrix[t]) for t in matrix}
Convert a Seq-object matrix to a plain sequence-string matrix (PRIVATE).
Convert a Seq-object matrix to a plain sequence-string matrix (PRIVATE).
[ "Convert", "a", "Seq", "-", "object", "matrix", "to", "a", "plain", "sequence", "-", "string", "matrix", "(", "PRIVATE", ")", "." ]
def _seqmatrix2strmatrix(matrix):
    """Convert a Seq-object matrix to a plain sequence-string matrix (PRIVATE)."""
    return {t: str(matrix[t]) for t in matrix}
[ "def", "_seqmatrix2strmatrix", "(", "matrix", ")", ":", "return", "{", "t", ":", "str", "(", "matrix", "[", "t", "]", ")", "for", "t", "in", "matrix", "}" ]
https://github.com/biopython/biopython/blob/2dd97e71762af7b046d7f7f8a4f1e38db6b06c86/Bio/Nexus/Nexus.py#L332-L334
idanr1986/cuckoo-droid
1350274639473d3d2b0ac740cae133ca53ab7444
analyzer/android/lib/api/androguard/dvm.py
python
ProtoIdItem.get_return_type_idx
(self)
return self.return_type_idx
Return the index into the type_ids list for the return type of this prototype :rtype: int
Return the index into the type_ids list for the return type of this prototype
[ "Return", "the", "index", "into", "the", "type_ids", "list", "for", "the", "return", "type", "of", "this", "prototype" ]
def get_return_type_idx(self) :
        """
            Return the index into the type_ids list for the return type of this prototype

            :rtype: int
        """
        return self.return_type_idx
[ "def", "get_return_type_idx", "(", "self", ")", ":", "return", "self", ".", "return_type_idx" ]
https://github.com/idanr1986/cuckoo-droid/blob/1350274639473d3d2b0ac740cae133ca53ab7444/analyzer/android/lib/api/androguard/dvm.py#L1995-L2001
CouchPotato/CouchPotatoServer
7260c12f72447ddb6f062367c6dfbda03ecd4e9c
libs/xmpp/protocol.py
python
JID.__init__
(self, jid=None, node='', domain='', resource='')
Constructor. JID can be specified as string (jid argument) or as separate parts. Examples: JID('node@domain/resource') JID(node='node',domain='domain.org')
Constructor. JID can be specified as string (jid argument) or as separate parts. Examples: JID('node@domain/resource') JID(node='node',domain='domain.org')
[ "Constructor", ".", "JID", "can", "be", "specified", "as", "string", "(", "jid", "argument", ")", "or", "as", "separate", "parts", ".", "Examples", ":", "JID", "(", "node" ]
def __init__(self, jid=None, node='', domain='', resource=''):
        """ Constructor. JID can be specified as string (jid argument) or as separate parts.
            Examples:
            JID('node@domain/resource')
            JID(node='node',domain='domain.org')
        """
        if not jid and not domain: raise ValueError('JID must contain at least domain name')
        elif type(jid)==type(self): self.node,self.domain,self.resource=jid.node,jid.domain,jid.resource
        elif domain: self.node,self.domain,self.resource=node,domain,resource
        else:
            if jid.find('@')+1: self.node,jid=jid.split('@',1)
            else: self.node=''
            if jid.find('/')+1: self.domain,self.resource=jid.split('/',1)
            else: self.domain,self.resource=jid,''
[ "def", "__init__", "(", "self", ",", "jid", "=", "None", ",", "node", "=", "''", ",", "domain", "=", "''", ",", "resource", "=", "''", ")", ":", "if", "not", "jid", "and", "not", "domain", ":", "raise", "ValueError", "(", "'JID must contain at least domain name'", ")", "elif", "type", "(", "jid", ")", "==", "type", "(", "self", ")", ":", "self", ".", "node", ",", "self", ".", "domain", ",", "self", ".", "resource", "=", "jid", ".", "node", ",", "jid", ".", "domain", ",", "jid", ".", "resource", "elif", "domain", ":", "self", ".", "node", ",", "self", ".", "domain", ",", "self", ".", "resource", "=", "node", ",", "domain", ",", "resource", "else", ":", "if", "jid", ".", "find", "(", "'@'", ")", "+", "1", ":", "self", ".", "node", ",", "jid", "=", "jid", ".", "split", "(", "'@'", ",", "1", ")", "else", ":", "self", ".", "node", "=", "''", "if", "jid", ".", "find", "(", "'/'", ")", "+", "1", ":", "self", ".", "domain", ",", "self", ".", "resource", "=", "jid", ".", "split", "(", "'/'", ",", "1", ")", "else", ":", "self", ".", "domain", ",", "self", ".", "resource", "=", "jid", ",", "''" ]
https://github.com/CouchPotato/CouchPotatoServer/blob/7260c12f72447ddb6f062367c6dfbda03ecd4e9c/libs/xmpp/protocol.py#L269-L282
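The string branch of the constructor splits on the first '@' and the first '/'. A standalone re-implementation of just that branch makes the rules easy to verify (illustrative, not the library API):

def split_jid(jid):
    # node@domain/resource, where node and resource are optional
    node, rest = jid.split('@', 1) if '@' in jid else ('', jid)
    domain, resource = rest.split('/', 1) if '/' in rest else (rest, '')
    return node, domain, resource

print(split_jid('node@domain/resource'))  # ('node', 'domain', 'resource')
print(split_jid('domain.org'))            # ('', 'domain.org', '')
print(split_jid('room@conf/nick'))        # ('room', 'conf', 'nick')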
gkrizek/bash-lambda-layer
703b0ade8174022d44779d823172ab7ac33a5505
bin/docutils/utils/math/math2html.py
python
ContainerSize.set
(self, width = None, height = None)
return self
Set the proper size with width and height.
Set the proper size with width and height.
[ "Set", "the", "proper", "size", "with", "width", "and", "height", "." ]
def set(self, width = None, height = None):
        "Set the proper size with width and height."
        self.setvalue('width', width)
        self.setvalue('height', height)
        return self
[ "def", "set", "(", "self", ",", "width", "=", "None", ",", "height", "=", "None", ")", ":", "self", ".", "setvalue", "(", "'width'", ",", "width", ")", "self", ".", "setvalue", "(", "'height'", ",", "height", ")", "return", "self" ]
https://github.com/gkrizek/bash-lambda-layer/blob/703b0ade8174022d44779d823172ab7ac33a5505/bin/docutils/utils/math/math2html.py#L3423-L3427
openstack/swift
b8d7c3dcb817504dcc0959ba52cc4ed2cf66c100
swift/common/http.py
python
is_redirection
(status)
return 300 <= status <= 399
Check if HTTP status code is redirection. :param status: http status code :returns: True if status is redirection, else False
Check if HTTP status code is redirection.
[ "Check", "if", "HTTP", "status", "code", "is", "redirection", "." ]
def is_redirection(status):
    """
    Check if HTTP status code is redirection.

    :param status: http status code
    :returns: True if status is redirection, else False
    """
    return 300 <= status <= 399
[ "def", "is_redirection", "(", "status", ")", ":", "return", "300", "<=", "status", "<=", "399" ]
https://github.com/openstack/swift/blob/b8d7c3dcb817504dcc0959ba52cc4ed2cf66c100/swift/common/http.py#L37-L44
spyder-ide/spyder
55da47c032dfcf519600f67f8b30eab467f965e7
spyder/plugins/plots/widgets/figurebrowser.py
python
ThumbnailScrollBar.set_figureviewer
(self, figure_viewer)
Set the namespace for the FigureViewer.
Set the namespace for the FigureViewer.
[ "Set", "the", "namespace", "for", "the", "FigureViewer", "." ]
def set_figureviewer(self, figure_viewer):
        """Set the namespace for the FigureViewer."""
        self.figure_viewer = figure_viewer
[ "def", "set_figureviewer", "(", "self", ",", "figure_viewer", ")", ":", "self", ".", "figure_viewer", "=", "figure_viewer" ]
https://github.com/spyder-ide/spyder/blob/55da47c032dfcf519600f67f8b30eab467f965e7/spyder/plugins/plots/widgets/figurebrowser.py#L625-L627
google/apitools
31cad2d904f356872d2965687e84b2d87ee2cdd3
apitools/base/py/encoding_helper.py
python
PyValueToMessage
(message_type, value)
return JsonToMessage(message_type, json.dumps(value))
Convert the given python value to a message of type message_type.
Convert the given python value to a message of type message_type.
[ "Convert", "the", "given", "python", "value", "to", "a", "message", "of", "type", "message_type", "." ]
def PyValueToMessage(message_type, value):
    """Convert the given python value to a message of type message_type."""
    return JsonToMessage(message_type, json.dumps(value))
[ "def", "PyValueToMessage", "(", "message_type", ",", "value", ")", ":", "return", "JsonToMessage", "(", "message_type", ",", "json", ".", "dumps", "(", "value", ")", ")" ]
https://github.com/google/apitools/blob/31cad2d904f356872d2965687e84b2d87ee2cdd3/apitools/base/py/encoding_helper.py#L150-L152
garrickbrazil/M3D-RPN
bf204e3f95f647d73a132535385119b12c8d6c36
lib/util.py
python
init_log_file
(folder_path, suffix=None, log_level=logging.INFO)
return file_path
This function inits a log file given a folder to write the log to. It automatically adds a timestamp and optional suffix to the log. Anything written to the log will automatically write to console too. Example: import logging init_log_file('output/logs/') logging.info('this will show up in both the log AND console!')
This function inits a log file given a folder to write the log to. It automatically adds a timestamp and optional suffix to the log. Anything written to the log will automatically write to console too.
[ "This", "function", "inits", "a", "log", "file", "given", "a", "folder", "to", "write", "the", "log", "to", ".", "It", "automatically", "adds", "a", "timestamp", "and", "optional", "suffix", "to", "the", "log", ".", "Anything", "written", "to", "the", "log", "will", "automatically", "write", "to", "console", "too", "." ]
def init_log_file(folder_path, suffix=None, log_level=logging.INFO):
    """
    This function inits a log file given a folder to write the log to.
    It automatically adds a timestamp and optional suffix to the log.
    Anything written to the log will automatically write to console too.

    Example:
        import logging
        init_log_file('output/logs/')
        logging.info('this will show up in both the log AND console!')
    """

    timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
    log_format = '[%(levelname)s]: %(asctime)s %(message)s'

    if suffix is not None:
        file_name = timestamp + '_' + suffix
    else:
        file_name = timestamp

    file_path = os.path.join(folder_path, file_name)
    logging.basicConfig(filename=file_path, level=log_level, format=log_format)
    logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))

    return file_path
[ "def", "init_log_file", "(", "folder_path", ",", "suffix", "=", "None", ",", "log_level", "=", "logging", ".", "INFO", ")", ":", "timestamp", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "\"%Y%m%d_%H%M%S\"", ")", "log_format", "=", "'[%(levelname)s]: %(asctime)s %(message)s'", "if", "suffix", "is", "not", "None", ":", "file_name", "=", "timestamp", "+", "'_'", "+", "suffix", "else", ":", "file_name", "=", "timestamp", "file_path", "=", "os", ".", "path", ".", "join", "(", "folder_path", ",", "file_name", ")", "logging", ".", "basicConfig", "(", "filename", "=", "file_path", ",", "level", "=", "log_level", ",", "format", "=", "log_format", ")", "logging", ".", "getLogger", "(", ")", ".", "addHandler", "(", "logging", ".", "StreamHandler", "(", "sys", ".", "stdout", ")", ")", "return", "file_path" ]
https://github.com/garrickbrazil/M3D-RPN/blob/bf204e3f95f647d73a132535385119b12c8d6c36/lib/util.py#L103-L128
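The dual console-and-file behaviour comes from combining basicConfig(filename=...) with an extra StreamHandler on the root logger. A self-contained sketch of that pattern (the temporary file path is illustrative):

import logging
import os
import sys
import tempfile

log_path = os.path.join(tempfile.mkdtemp(), "demo.log")
logging.basicConfig(filename=log_path, level=logging.INFO,
                    format='[%(levelname)s]: %(asctime)s %(message)s')
logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))

logging.info('this will show up in both the log AND console!')
print(open(log_path).read())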
deanishe/alfred-convert
97407f4ec8dbca5abbc6952b2b56cf3918624177
src/workflow/workflow.py
python
Workflow.delete_password
(self, account, service=None)
Delete the password stored at ``service/account``. Raise :class:`PasswordNotFound` if account is unknown. :param account: name of the account the password is for, e.g. "Pinboard" :type account: ``unicode`` :param service: Name of the service. By default, this is the workflow's bundle ID :type service: ``unicode``
Delete the password stored at ``service/account``.
[ "Delete", "the", "password", "stored", "at", "service", "/", "account", "." ]
def delete_password(self, account, service=None):
        """Delete the password stored at ``service/account``.

        Raise :class:`PasswordNotFound` if account is unknown.

        :param account: name of the account the password is for, e.g.
            "Pinboard"
        :type account: ``unicode``
        :param service: Name of the service. By default, this is the
            workflow's bundle ID
        :type service: ``unicode``

        """
        if not service:
            service = self.bundleid

        self._call_security('delete-generic-password', service, account)

        self.logger.debug('deleted password : %s:%s', service, account)
[ "def", "delete_password", "(", "self", ",", "account", ",", "service", "=", "None", ")", ":", "if", "not", "service", ":", "service", "=", "self", ".", "bundleid", "self", ".", "_call_security", "(", "'delete-generic-password'", ",", "service", ",", "account", ")", "self", ".", "logger", ".", "debug", "(", "'deleted password : %s:%s'", ",", "service", ",", "account", ")" ]
https://github.com/deanishe/alfred-convert/blob/97407f4ec8dbca5abbc6952b2b56cf3918624177/src/workflow/workflow.py#L2465-L2483
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_hxb2/lib/python3.5/site-packages/django/db/backends/base/introspection.py
python
BaseDatabaseIntrospection.installed_models
(self, tables)
return { m for m in all_models if self.table_name_converter(m._meta.db_table) in tables }
Returns a set of all models represented by the provided list of table names.
Returns a set of all models represented by the provided list of table names.
[ "Returns", "a", "set", "of", "all", "models", "represented", "by", "the", "provided", "list", "of", "table", "names", "." ]
def installed_models(self, tables):
        "Returns a set of all models represented by the provided list of table names."
        from django.apps import apps
        from django.db import router
        all_models = []
        for app_config in apps.get_app_configs():
            all_models.extend(router.get_migratable_models(app_config, self.connection.alias))
        tables = list(map(self.table_name_converter, tables))
        return {
            m for m in all_models
            if self.table_name_converter(m._meta.db_table) in tables
        }
[ "def", "installed_models", "(", "self", ",", "tables", ")", ":", "from", "django", ".", "apps", "import", "apps", "from", "django", ".", "db", "import", "router", "all_models", "=", "[", "]", "for", "app_config", "in", "apps", ".", "get_app_configs", "(", ")", ":", "all_models", ".", "extend", "(", "router", ".", "get_migratable_models", "(", "app_config", ",", "self", ".", "connection", ".", "alias", ")", ")", "tables", "=", "list", "(", "map", "(", "self", ".", "table_name_converter", ",", "tables", ")", ")", "return", "{", "m", "for", "m", "in", "all_models", "if", "self", ".", "table_name_converter", "(", "m", ".", "_meta", ".", "db_table", ")", "in", "tables", "}" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_hxb2/lib/python3.5/site-packages/django/db/backends/base/introspection.py#L96-L107
adamewing/bamsurgeon
826921a48cb4aa91e419a0e0946255e17223ea39
bamsurgeon/common.py
python
minorbase
(basepile)
returns tuple: (minor base, count)
returns tuple: (minor base, count)
[ "returns", "tuple", ":", "(", "minor", "base", "count", ")" ]
def minorbase(basepile):
    """returns tuple: (minor base, count)
    """
    c = Counter(basepile)
    if len(list(c.elements())) > 1:
        return c.most_common(2)[-1]
    else:
        return c.most_common()[0]
[ "def", "minorbase", "(", "basepile", ")", ":", "c", "=", "Counter", "(", "basepile", ")", "if", "len", "(", "list", "(", "c", ".", "elements", "(", ")", ")", ")", ">", "1", ":", "return", "c", ".", "most_common", "(", "2", ")", "[", "-", "1", "]", "else", ":", "return", "c", ".", "most_common", "(", ")", "[", "0", "]" ]
https://github.com/adamewing/bamsurgeon/blob/826921a48cb4aa91e419a0e0946255e17223ea39/bamsurgeon/common.py#L37-L44
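Counter.most_common drives the selection: most_common(2)[-1] is the runner-up whenever the pile holds more than one element. A quick check with an illustrative pileup:

from collections import Counter

basepile = ['A', 'A', 'A', 'T']
c = Counter(basepile)
print(c.most_common(2)[-1])              # ('T', 1): the minor base and its count
print(Counter(['G']).most_common()[0])   # ('G', 1): single-element fallback branch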
linuxscout/mishkal
4f4ae0ebc2d6acbeb3de3f0303151ec7b54d2f76
interfaces/web/lib/paste/urlmap.py
python
URLMap.sort_apps
(self)
Make sure applications are sorted with longest URLs first
Make sure applications are sorted with longest URLs first
[ "Make", "sure", "applications", "are", "sorted", "with", "longest", "URLs", "first" ]
def sort_apps(self):
        """
        Make sure applications are sorted with longest URLs first
        """
        def key(app_desc):
            (domain, url), app = app_desc
            if not domain:
                # Make sure empty domains sort last:
                return '\xff', -len(url)
            else:
                return domain, -len(url)
        apps = [(key(desc), desc) for desc in self.applications]
        apps.sort()
        self.applications = [desc for (sortable, desc) in apps]
[ "def", "sort_apps", "(", "self", ")", ":", "def", "key", "(", "app_desc", ")", ":", "(", "domain", ",", "url", ")", ",", "app", "=", "app_desc", "if", "not", "domain", ":", "# Make sure empty domains sort last:", "return", "'\\xff'", ",", "-", "len", "(", "url", ")", "else", ":", "return", "domain", ",", "-", "len", "(", "url", ")", "apps", "=", "[", "(", "key", "(", "desc", ")", ",", "desc", ")", "for", "desc", "in", "self", ".", "applications", "]", "apps", ".", "sort", "(", ")", "self", ".", "applications", "=", "[", "desc", "for", "(", "sortable", ",", "desc", ")", "in", "apps", "]" ]
https://github.com/linuxscout/mishkal/blob/4f4ae0ebc2d6acbeb3de3f0303151ec7b54d2f76/interfaces/web/lib/paste/urlmap.py#L134-L147
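The sort key orders by (domain, -len(url)), substituting '\xff' for empty domains so they land after every real domain, and negating the length so longer URL prefixes come first. A standalone check of that ordering (the application descriptors are illustrative):

apps = [(('', '/'), 'root'),
        (('', '/api/v1'), 'api_v1'),
        (('example.com', '/'), 'host_root')]

def key(app_desc):
    (domain, url), app = app_desc
    # Empty domains sort last; longer URL prefixes match first.
    return ('\xff', -len(url)) if not domain else (domain, -len(url))

ordered = [desc for _, desc in sorted((key(d), d) for d in apps)]
print([app for (_, app) in ordered])  # ['host_root', 'api_v1', 'root']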
PyCQA/astroid
a815443f62faae05249621a396dcf0afd884a619
astroid/rebuilder.py
python
TreeRebuilder.visit_lambda
(self, node: "ast.Lambda", parent: NodeNG)
return newnode
visit a Lambda node by returning a fresh instance of it
visit a Lambda node by returning a fresh instance of it
[ "visit", "a", "Lambda", "node", "by", "returning", "a", "fresh", "instance", "of", "it" ]
def visit_lambda(self, node: "ast.Lambda", parent: NodeNG) -> nodes.Lambda:
        """visit a Lambda node by returning a fresh instance of it"""
        if sys.version_info >= (3, 8):
            newnode = nodes.Lambda(
                lineno=node.lineno,
                col_offset=node.col_offset,
                end_lineno=node.end_lineno,
                end_col_offset=node.end_col_offset,
                parent=parent,
            )
        else:
            newnode = nodes.Lambda(node.lineno, node.col_offset, parent)
        newnode.postinit(self.visit(node.args, newnode), self.visit(node.body, newnode))
        return newnode
[ "def", "visit_lambda", "(", "self", ",", "node", ":", "\"ast.Lambda\"", ",", "parent", ":", "NodeNG", ")", "->", "nodes", ".", "Lambda", ":", "if", "sys", ".", "version_info", ">=", "(", "3", ",", "8", ")", ":", "newnode", "=", "nodes", ".", "Lambda", "(", "lineno", "=", "node", ".", "lineno", ",", "col_offset", "=", "node", ".", "col_offset", ",", "end_lineno", "=", "node", ".", "end_lineno", ",", "end_col_offset", "=", "node", ".", "end_col_offset", ",", "parent", "=", "parent", ",", ")", "else", ":", "newnode", "=", "nodes", ".", "Lambda", "(", "node", ".", "lineno", ",", "node", ".", "col_offset", ",", "parent", ")", "newnode", ".", "postinit", "(", "self", ".", "visit", "(", "node", ".", "args", ",", "newnode", ")", ",", "self", ".", "visit", "(", "node", ".", "body", ",", "newnode", ")", ")", "return", "newnode" ]
https://github.com/PyCQA/astroid/blob/a815443f62faae05249621a396dcf0afd884a619/astroid/rebuilder.py#L1798-L1811
BerkeleyAutomation/dex-net
cccf93319095374b0eefc24b8b6cd40bc23966d2
src/dexnet/learning/discrete_selection_policies.py
python
DiscreteSelectionPolicy.choose_next
(self)
Choose the next index of the model to sample
Choose the next index of the model to sample
[ "Choose", "the", "next", "index", "of", "the", "model", "to", "sample" ]
def choose_next(self):
        """
        Choose the next index of the model to sample
        """
        pass
[ "def", "choose_next", "(", "self", ")", ":", "pass" ]
https://github.com/BerkeleyAutomation/dex-net/blob/cccf93319095374b0eefc24b8b6cd40bc23966d2/src/dexnet/learning/discrete_selection_policies.py#L44-L48
IntelAI/models
1d7a53ccfad3e6f0e7378c9e3c8840895d63df8c
models/language_translation/tensorflow/transformer_mlperf/inference/int8/transformer/utils/tokenizer.py
python
_list_to_index_dict
(lst)
return {item: n for n, item in enumerate(lst)}
Create dictionary mapping list items to their indices in the list.
Create dictionary mapping list items to their indices in the list.
[ "Create", "dictionary", "mapping", "list", "items", "to", "their", "indices", "in", "the", "list", "." ]
def _list_to_index_dict(lst):
  """Create dictionary mapping list items to their indices in the list."""
  return {item: n for n, item in enumerate(lst)}
[ "def", "_list_to_index_dict", "(", "lst", ")", ":", "return", "{", "item", ":", "n", "for", "n", ",", "item", "in", "enumerate", "(", "lst", ")", "}" ]
https://github.com/IntelAI/models/blob/1d7a53ccfad3e6f0e7378c9e3c8840895d63df8c/models/language_translation/tensorflow/transformer_mlperf/inference/int8/transformer/utils/tokenizer.py#L367-L369
nltk/nltk
3f74ac55681667d7ef78b664557487145f51eb02
nltk/parse/projectivedependencyparser.py
python
projective_prob_parse_demo
()
A demo showing the training and use of a projective dependency parser.
A demo showing the training and use of a projective dependency parser.
[ "A", "demo", "showing", "the", "training", "and", "use", "of", "a", "projective", "dependency", "parser", "." ]
def projective_prob_parse_demo():
    """
    A demo showing the training and use of a projective dependency parser.
    """
    from nltk.parse.dependencygraph import conll_data2

    graphs = [DependencyGraph(entry) for entry in conll_data2.split("\n\n") if entry]
    ppdp = ProbabilisticProjectiveDependencyParser()
    print("Training Probabilistic Projective Dependency Parser...")
    ppdp.train(graphs)

    sent = ["Cathy", "zag", "hen", "wild", "zwaaien", "."]
    print("Parsing '", " ".join(sent), "'...")
    print("Parse:")
    for tree in ppdp.parse(sent):
        print(tree)
[ "def", "projective_prob_parse_demo", "(", ")", ":", "from", "nltk", ".", "parse", ".", "dependencygraph", "import", "conll_data2", "graphs", "=", "[", "DependencyGraph", "(", "entry", ")", "for", "entry", "in", "conll_data2", ".", "split", "(", "\"\\n\\n\"", ")", "if", "entry", "]", "ppdp", "=", "ProbabilisticProjectiveDependencyParser", "(", ")", "print", "(", "\"Training Probabilistic Projective Dependency Parser...\"", ")", "ppdp", ".", "train", "(", "graphs", ")", "sent", "=", "[", "\"Cathy\"", ",", "\"zag\"", ",", "\"hen\"", ",", "\"wild\"", ",", "\"zwaaien\"", ",", "\".\"", "]", "print", "(", "\"Parsing '\"", ",", "\" \"", ".", "join", "(", "sent", ")", ",", "\"'...\"", ")", "print", "(", "\"Parse:\"", ")", "for", "tree", "in", "ppdp", ".", "parse", "(", "sent", ")", ":", "print", "(", "tree", ")" ]
https://github.com/nltk/nltk/blob/3f74ac55681667d7ef78b664557487145f51eb02/nltk/parse/projectivedependencyparser.py#L696-L712
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/xml/etree/ElementTree.py
python
Element.findtext
(self, path, default=None, namespaces=None)
return ElementPath.findtext(self, path, default, namespaces)
Find text for first matching element by tag name or path. *path* is a string having either an element tag or an XPath, *default* is the value to return if the element was not found, *namespaces* is an optional mapping from namespace prefix to full name. Return text content of first matching element, or default value if none was found. Note that if an element is found having no text content, the empty string is returned.
Find text for first matching element by tag name or path.
[ "Find", "text", "for", "first", "matching", "element", "by", "tag", "name", "or", "path", "." ]
def findtext(self, path, default=None, namespaces=None):
        """Find text for first matching element by tag name or path.

        *path* is a string having either an element tag or an XPath,
        *default* is the value to return if the element was not found,
        *namespaces* is an optional mapping from namespace prefix to full name.

        Return text content of first matching element, or default value if
        none was found.  Note that if an element is found having no text
        content, the empty string is returned.

        """
        return ElementPath.findtext(self, path, default, namespaces)
[ "def", "findtext", "(", "self", ",", "path", ",", "default", "=", "None", ",", "namespaces", "=", "None", ")", ":", "return", "ElementPath", ".", "findtext", "(", "self", ",", "path", ",", "default", ",", "namespaces", ")" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/xml/etree/ElementTree.py#L301-L313
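The three documented outcomes (text found, default on a miss, empty string for an element with no text) are easy to confirm with the standard library:

import xml.etree.ElementTree as ET

root = ET.fromstring('<doc><a>hello</a><b/></doc>')
print(root.findtext('a'))                # hello
print(root.findtext('missing', 'n/a'))   # n/a (default: element not found)
print(repr(root.findtext('b')))          # '' (element exists, no text content)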
phonopy/phonopy
816586d0ba8177482ecf40e52f20cbdee2260d51
phonopy/phonon/modulation.py
python
Modulation._get_phase_factor
(self, modulation, argument)
return phase_factor
[]
def _get_phase_factor(self, modulation, argument):
        u = np.ravel(modulation)
        index_max_elem = np.argmax(abs(u))
        max_elem = u[index_max_elem]
        phase_for_zero = max_elem / abs(max_elem)
        phase_factor = np.exp(1j * np.pi * argument / 180) / phase_for_zero
        return phase_factor
[ "def", "_get_phase_factor", "(", "self", ",", "modulation", ",", "argument", ")", ":", "u", "=", "np", ".", "ravel", "(", "modulation", ")", "index_max_elem", "=", "np", ".", "argmax", "(", "abs", "(", "u", ")", ")", "max_elem", "=", "u", "[", "index_max_elem", "]", "phase_for_zero", "=", "max_elem", "/", "abs", "(", "max_elem", ")", "phase_factor", "=", "np", ".", "exp", "(", "1j", "*", "np", ".", "pi", "*", "argument", "/", "180", ")", "/", "phase_for_zero", "return", "phase_factor" ]
https://github.com/phonopy/phonopy/blob/816586d0ba8177482ecf40e52f20cbdee2260d51/phonopy/phonon/modulation.py#L178-L185
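The helper divides out the phase of the largest-amplitude component, so at argument=0 that component is rotated onto the positive real axis. A numpy check with an illustrative modulation vector:

import numpy as np

u = np.array([0.1 + 0.2j, -0.3 + 0.4j])   # largest |u| is the second entry (0.5)
max_elem = u[np.argmax(abs(u))]
phase_for_zero = max_elem / abs(max_elem)
phase_factor = np.exp(1j * np.pi * 0 / 180) / phase_for_zero
print(np.round(u[1] * phase_factor, 10))   # (0.5+0j): real and positive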
missionpinball/mpf
8e6b74cff4ba06d2fec9445742559c1068b88582
mpf/core/placeholder_manager.py
python
TextTemplate.__init__
(self, machine: "MachineController", text: str)
Initialise placeholder.
Initialise placeholder.
[ "Initialise", "placeholder", "." ]
def __init__(self, machine: "MachineController", text: str) -> None:
        """Initialise placeholder."""
        self.machine = machine
        self.text = str(text)
        self._change_callback = None
[ "def", "__init__", "(", "self", ",", "machine", ":", "\"MachineController\"", ",", "text", ":", "str", ")", "->", "None", ":", "self", ".", "machine", "=", "machine", "self", ".", "text", "=", "str", "(", "text", ")", "self", ".", "_change_callback", "=", "None" ]
https://github.com/missionpinball/mpf/blob/8e6b74cff4ba06d2fec9445742559c1068b88582/mpf/core/placeholder_manager.py#L261-L265
mcneel/rhinoscriptsyntax
c49bd0bf24c2513bdcb84d1bf307144489600fd9
Scripts/rhinoscript/toolbar.py
python
CloseToolbarCollection
(name, prompt=False)
return False
Closes a currently open toolbar collection Parameters: name (str): name of a currently open toolbar collection prompt (bool, optional): if True, user will be prompted to save the collection file if it has been modified prior to closing Returns: bool: True or False indicating success or failure Example: import rhinoscriptsyntax as rs names = rs.ToolbarCollectionNames() if names: for name in names: rs.CloseToolbarCollection( name, True ) See Also: IsToolbarCollection OpenToolbarCollection ToolbarCollectionCount ToolbarCollectionNames ToolbarCollectionPath
Closes a currently open toolbar collection Parameters: name (str): name of a currently open toolbar collection prompt (bool, optional): if True, user will be prompted to save the collection file if it has been modified prior to closing Returns: bool: True or False indicating success or failure Example: import rhinoscriptsyntax as rs names = rs.ToolbarCollectionNames() if names: for name in names: rs.CloseToolbarCollection( name, True ) See Also: IsToolbarCollection OpenToolbarCollection ToolbarCollectionCount ToolbarCollectionNames ToolbarCollectionPath
[ "Closes", "a", "currently", "open", "toolbar", "collection", "Parameters", ":", "name", "(", "str", ")", ":", "name", "of", "a", "currently", "open", "toolbar", "collection", "prompt", "(", "bool", "optional", ")", ":", "if", "True", "user", "will", "be", "prompted", "to", "save", "the", "collection", "file", "if", "it", "has", "been", "modified", "prior", "to", "closing", "Returns", ":", "bool", ":", "True", "or", "False", "indicating", "success", "or", "failure", "Example", ":", "import", "rhinoscriptsyntax", "as", "rs", "names", "=", "rs", ".", "ToolbarCollectionNames", "()", "if", "names", ":", "for", "name", "in", "names", ":", "rs", ".", "CloseToolbarCollection", "(", "name", "True", ")", "See", "Also", ":", "IsToolbarCollection", "OpenToolbarCollection", "ToolbarCollectionCount", "ToolbarCollectionNames", "ToolbarCollectionPath" ]
def CloseToolbarCollection(name, prompt=False):
    """Closes a currently open toolbar collection
    Parameters:
      name (str): name of a currently open toolbar collection
      prompt (bool, optional): if True, user will be prompted to save the
        collection file if it has been modified prior to closing
    Returns:
      bool: True or False indicating success or failure
    Example:
      import rhinoscriptsyntax as rs
      names = rs.ToolbarCollectionNames()
      if names:
          for name in names: rs.CloseToolbarCollection( name, True )
    See Also:
      IsToolbarCollection
      OpenToolbarCollection
      ToolbarCollectionCount
      ToolbarCollectionNames
      ToolbarCollectionPath
    """
    tbfile = Rhino.RhinoApp.ToolbarFiles.FindByName(name, True)
    if tbfile: return tbfile.Close(prompt)
    return False
[ "def", "CloseToolbarCollection", "(", "name", ",", "prompt", "=", "False", ")", ":", "tbfile", "=", "Rhino", ".", "RhinoApp", ".", "ToolbarFiles", ".", "FindByName", "(", "name", ",", "True", ")", "if", "tbfile", ":", "return", "tbfile", ".", "Close", "(", "prompt", ")", "return", "False" ]
https://github.com/mcneel/rhinoscriptsyntax/blob/c49bd0bf24c2513bdcb84d1bf307144489600fd9/Scripts/rhinoscript/toolbar.py#L3-L25
bendmorris/static-python
2e0f8c4d7ed5b359dc7d8a75b6fb37e6b6c5c473
Mac/Tools/bundlebuilder.py
python
BundleBuilder.build
(self)
Build the bundle.
Build the bundle.
[ "Build", "the", "bundle", "." ]
def build(self):
        """Build the bundle."""
        builddir = self.builddir
        if builddir and not os.path.exists(builddir):
            os.mkdir(builddir)
        self.message("Building %s" % repr(self.bundlepath), 1)
        if os.path.exists(self.bundlepath):
            shutil.rmtree(self.bundlepath)
        if os.path.exists(self.bundlepath + '~'):
            shutil.rmtree(self.bundlepath + '~')
        bp = self.bundlepath

        # Create the app bundle in a temporary location and then
        # rename the completed bundle. This way the Finder will
        # never see an incomplete bundle (where it might pick up
        # and cache the wrong meta data)
        self.bundlepath = bp + '~'
        try:
            os.mkdir(self.bundlepath)
            self.preProcess()
            self._copyFiles()
            self._addMetaFiles()
            self.postProcess()
            os.rename(self.bundlepath, bp)
        finally:
            self.bundlepath = bp
        self.message("Done.", 1)
[ "def", "build", "(", "self", ")", ":", "builddir", "=", "self", ".", "builddir", "if", "builddir", "and", "not", "os", ".", "path", ".", "exists", "(", "builddir", ")", ":", "os", ".", "mkdir", "(", "builddir", ")", "self", ".", "message", "(", "\"Building %s\"", "%", "repr", "(", "self", ".", "bundlepath", ")", ",", "1", ")", "if", "os", ".", "path", ".", "exists", "(", "self", ".", "bundlepath", ")", ":", "shutil", ".", "rmtree", "(", "self", ".", "bundlepath", ")", "if", "os", ".", "path", ".", "exists", "(", "self", ".", "bundlepath", "+", "'~'", ")", ":", "shutil", ".", "rmtree", "(", "self", ".", "bundlepath", "+", "'~'", ")", "bp", "=", "self", ".", "bundlepath", "# Create the app bundle in a temporary location and then", "# rename the completed bundle. This way the Finder will", "# never see an incomplete bundle (where it might pick up", "# and cache the wrong meta data)", "self", ".", "bundlepath", "=", "bp", "+", "'~'", "try", ":", "os", ".", "mkdir", "(", "self", ".", "bundlepath", ")", "self", ".", "preProcess", "(", ")", "self", ".", "_copyFiles", "(", ")", "self", ".", "_addMetaFiles", "(", ")", "self", ".", "postProcess", "(", ")", "os", ".", "rename", "(", "self", ".", "bundlepath", ",", "bp", ")", "finally", ":", "self", ".", "bundlepath", "=", "bp", "self", ".", "message", "(", "\"Done.\"", ",", "1", ")" ]
https://github.com/bendmorris/static-python/blob/2e0f8c4d7ed5b359dc7d8a75b6fb37e6b6c5c473/Mac/Tools/bundlebuilder.py#L140-L166
fluentpython/example-code
d5133ad6e4a48eac0980d2418ed39d7ff693edbe
04-text-byte/sanitize.py
python
dewinize
(txt)
return txt.translate(multi_map)
Replace Win1252 symbols with ASCII chars or sequences
Replace Win1252 symbols with ASCII chars or sequences
[ "Replace", "Win1252", "symbols", "with", "ASCII", "chars", "or", "sequences" ]
def dewinize(txt):
    """Replace Win1252 symbols with ASCII chars or sequences"""
    return txt.translate(multi_map)
[ "def", "dewinize", "(", "txt", ")", ":", "return", "txt", ".", "translate", "(", "multi_map", ")" ]
https://github.com/fluentpython/example-code/blob/d5133ad6e4a48eac0980d2418ed39d7ff693edbe/04-text-byte/sanitize.py#L78-L80
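dewinize relies on a module-level multi_map built for str.translate. A self-contained sketch with an illustrative subset of the Win1252 replacements (the real table in the book maps more symbols):

multi_map = str.maketrans({
    '\u20ac': '<EUR>',  # euro sign
    '\u2018': "'",      # left single quotation mark
    '\u2019': "'",      # right single quotation mark
    '\u201c': '"',      # left double quotation mark
    '\u201d': '"',      # right double quotation mark
    '\u2013': '-',      # en dash
})

def dewinize(txt):
    """Replace Win1252 symbols with ASCII chars or sequences"""
    return txt.translate(multi_map)

print(dewinize('\u201cHerr Voss\u201d paid \u20ac5'))  # "Herr Voss" paid <EUR>5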
tkipf/pygcn
1600b5b748b3976413d1e307540ccc62605b4d6d
pygcn/models.py
python
GCN.forward
(self, x, adj)
return F.log_softmax(x, dim=1)
[]
def forward(self, x, adj):
        x = F.relu(self.gc1(x, adj))
        x = F.dropout(x, self.dropout, training=self.training)
        x = self.gc2(x, adj)
        return F.log_softmax(x, dim=1)
[ "def", "forward", "(", "self", ",", "x", ",", "adj", ")", ":", "x", "=", "F", ".", "relu", "(", "self", ".", "gc1", "(", "x", ",", "adj", ")", ")", "x", "=", "F", ".", "dropout", "(", "x", ",", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "x", "=", "self", ".", "gc2", "(", "x", ",", "adj", ")", "return", "F", ".", "log_softmax", "(", "x", ",", "dim", "=", "1", ")" ]
https://github.com/tkipf/pygcn/blob/1600b5b748b3976413d1e307540ccc62605b4d6d/pygcn/models.py#L14-L18
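A minimal standalone version of the same two-layer pass, with plain matrix products standing in for pygcn's GraphConvolution layers (all shapes and the identity adjacency are illustrative):

import torch
import torch.nn.functional as F

n_nodes, in_dim, hid, n_cls = 4, 3, 8, 2
x = torch.randn(n_nodes, in_dim)
adj = torch.eye(n_nodes)                  # stand-in for a normalized adjacency matrix
w1 = torch.randn(in_dim, hid)
w2 = torch.randn(hid, n_cls)

h = F.relu(adj @ x @ w1)                  # gc1 + ReLU: aggregate neighbours, transform
h = F.dropout(h, p=0.5, training=True)    # dropout is active only during training
out = F.log_softmax(adj @ h @ w2, dim=1)  # gc2 + per-node class log-probabilities
print(out.shape)                          # torch.Size([4, 2])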
Chaffelson/nipyapi
d3b186fd701ce308c2812746d98af9120955e810
nipyapi/nifi/models/remote_process_group_port_dto.py
python
RemoteProcessGroupPortDTO.target_id
(self)
return self._target_id
Gets the target_id of this RemoteProcessGroupPortDTO. The id of the target port. :return: The target_id of this RemoteProcessGroupPortDTO. :rtype: str
Gets the target_id of this RemoteProcessGroupPortDTO. The id of the target port.
[ "Gets", "the", "target_id", "of", "this", "RemoteProcessGroupPortDTO", ".", "The", "id", "of", "the", "target", "port", "." ]
def target_id(self):
        """
        Gets the target_id of this RemoteProcessGroupPortDTO.
        The id of the target port.

        :return: The target_id of this RemoteProcessGroupPortDTO.
        :rtype: str
        """
        return self._target_id
[ "def", "target_id", "(", "self", ")", ":", "return", "self", ".", "_target_id" ]
https://github.com/Chaffelson/nipyapi/blob/d3b186fd701ce308c2812746d98af9120955e810/nipyapi/nifi/models/remote_process_group_port_dto.py#L135-L143
rpm-software-management/dnf
a96abad2cde8c46f7f36d7774d9d86f2d94715db
dnf/exceptions.py
python
MarkingErrors.__init__
(self, no_match_group_specs=(), error_group_specs=(), no_match_pkg_specs=(), error_pkg_specs=(), module_depsolv_errors=())
Initialize the marking error instance.
Initialize the marking error instance.
[ "Initialize", "the", "marking", "error", "instance", "." ]
def __init__(self, no_match_group_specs=(), error_group_specs=(), no_match_pkg_specs=(),
             error_pkg_specs=(), module_depsolv_errors=()):
        """Initialize the marking error instance."""
        msg = _("Problems in request:")
        if (no_match_pkg_specs):
            msg += "\n" + _("missing packages: ") + ", ".join(no_match_pkg_specs)
        if (error_pkg_specs):
            msg += "\n" + _("broken packages: ") + ", ".join(error_pkg_specs)
        if (no_match_group_specs):
            msg += "\n" + _("missing groups or modules: ") + ", ".join(no_match_group_specs)
        if (error_group_specs):
            msg += "\n" + _("broken groups or modules: ") + ", ".join(error_group_specs)
        if (module_depsolv_errors):
            msg_mod = dnf.util._format_resolve_problems(module_depsolv_errors[0])
            if module_depsolv_errors[1] == \
                    libdnf.module.ModulePackageContainer.ModuleErrorType_ERROR_IN_DEFAULTS:
                msg += "\n" + "\n".join([P_('Modular dependency problem with Defaults:',
                                            'Modular dependency problems with Defaults:',
                                            len(module_depsolv_errors)),
                                         msg_mod])
            else:
                msg += "\n" + "\n".join([P_('Modular dependency problem:',
                                            'Modular dependency problems:',
                                            len(module_depsolv_errors)),
                                         msg_mod])
        super(MarkingErrors, self).__init__(msg)
        self.no_match_group_specs = no_match_group_specs
        self.error_group_specs = error_group_specs
        self.no_match_pkg_specs = no_match_pkg_specs
        self.error_pkg_specs = error_pkg_specs
        self.module_depsolv_errors = module_depsolv_errors
[ "def", "__init__", "(", "self", ",", "no_match_group_specs", "=", "(", ")", ",", "error_group_specs", "=", "(", ")", ",", "no_match_pkg_specs", "=", "(", ")", ",", "error_pkg_specs", "=", "(", ")", ",", "module_depsolv_errors", "=", "(", ")", ")", ":", "msg", "=", "_", "(", "\"Problems in request:\"", ")", "if", "(", "no_match_pkg_specs", ")", ":", "msg", "+=", "\"\\n\"", "+", "_", "(", "\"missing packages: \"", ")", "+", "\", \"", ".", "join", "(", "no_match_pkg_specs", ")", "if", "(", "error_pkg_specs", ")", ":", "msg", "+=", "\"\\n\"", "+", "_", "(", "\"broken packages: \"", ")", "+", "\", \"", ".", "join", "(", "error_pkg_specs", ")", "if", "(", "no_match_group_specs", ")", ":", "msg", "+=", "\"\\n\"", "+", "_", "(", "\"missing groups or modules: \"", ")", "+", "\", \"", ".", "join", "(", "no_match_group_specs", ")", "if", "(", "error_group_specs", ")", ":", "msg", "+=", "\"\\n\"", "+", "_", "(", "\"broken groups or modules: \"", ")", "+", "\", \"", ".", "join", "(", "error_group_specs", ")", "if", "(", "module_depsolv_errors", ")", ":", "msg_mod", "=", "dnf", ".", "util", ".", "_format_resolve_problems", "(", "module_depsolv_errors", "[", "0", "]", ")", "if", "module_depsolv_errors", "[", "1", "]", "==", "libdnf", ".", "module", ".", "ModulePackageContainer", ".", "ModuleErrorType_ERROR_IN_DEFAULTS", ":", "msg", "+=", "\"\\n\"", "+", "\"\\n\"", ".", "join", "(", "[", "P_", "(", "'Modular dependency problem with Defaults:'", ",", "'Modular dependency problems with Defaults:'", ",", "len", "(", "module_depsolv_errors", ")", ")", ",", "msg_mod", "]", ")", "else", ":", "msg", "+=", "\"\\n\"", "+", "\"\\n\"", ".", "join", "(", "[", "P_", "(", "'Modular dependency problem:'", ",", "'Modular dependency problems:'", ",", "len", "(", "module_depsolv_errors", ")", ")", ",", "msg_mod", "]", ")", "super", "(", "MarkingErrors", ",", "self", ")", ".", "__init__", "(", "msg", ")", "self", ".", "no_match_group_specs", "=", "no_match_group_specs", "self", ".", "error_group_specs", "=", "error_group_specs", "self", ".", "no_match_pkg_specs", "=", "no_match_pkg_specs", "self", ".", "error_pkg_specs", "=", "error_pkg_specs", "self", ".", "module_depsolv_errors", "=", "module_depsolv_errors" ]
https://github.com/rpm-software-management/dnf/blob/a96abad2cde8c46f7f36d7774d9d86f2d94715db/dnf/exceptions.py#L110-L140
dylanaraps/pywal
236aa48e741ff8d65c4c3826db2813bf2ee6f352
pywal/__main__.py
python
parse_args
(parser)
Process args.
Process args.
[ "Process", "args", "." ]
def parse_args(parser):
    """Process args."""
    args = parser.parse_args()

    if args.q:
        logging.getLogger().disabled = True
        sys.stdout = sys.stderr = open(os.devnull, "w")

    if args.a:
        util.Color.alpha_num = args.a

    if args.i:
        image_file = image.get(args.i, iterative=args.iterative,
                               recursive=args.recursive)
        colors_plain = colors.get(image_file, args.l, args.backend,
                                  sat=args.saturate)

    if args.theme:
        colors_plain = theme.file(args.theme, args.l)

    if args.R:
        colors_plain = theme.file(os.path.join(CACHE_DIR, "colors.json"))

    if args.w:
        cached_wallpaper = util.read_file(os.path.join(CACHE_DIR, "wal"))
        colors_plain = colors.get(cached_wallpaper[0], args.l, args.backend,
                                  sat=args.saturate)

    if args.b:
        args.b = "#%s" % (args.b.strip("#"))
        colors_plain["special"]["background"] = args.b
        colors_plain["colors"]["color0"] = args.b

    if not args.n:
        wallpaper.change(colors_plain["wallpaper"])

    if args.p:
        theme.save(colors_plain, args.p, args.l)

    sequences.send(colors_plain, to_send=not args.s, vte_fix=args.vte)

    if sys.stdout.isatty():
        colors.palette()

    export.every(colors_plain)

    if not args.e:
        reload.env(tty_reload=not args.t)

    if args.o:
        for cmd in args.o:
            util.disown([cmd])

    if not args.e:
        reload.gtk()
[ "def", "parse_args", "(", "parser", ")", ":", "args", "=", "parser", ".", "parse_args", "(", ")", "if", "args", ".", "q", ":", "logging", ".", "getLogger", "(", ")", ".", "disabled", "=", "True", "sys", ".", "stdout", "=", "sys", ".", "stderr", "=", "open", "(", "os", ".", "devnull", ",", "\"w\"", ")", "if", "args", ".", "a", ":", "util", ".", "Color", ".", "alpha_num", "=", "args", ".", "a", "if", "args", ".", "i", ":", "image_file", "=", "image", ".", "get", "(", "args", ".", "i", ",", "iterative", "=", "args", ".", "iterative", ",", "recursive", "=", "args", ".", "recursive", ")", "colors_plain", "=", "colors", ".", "get", "(", "image_file", ",", "args", ".", "l", ",", "args", ".", "backend", ",", "sat", "=", "args", ".", "saturate", ")", "if", "args", ".", "theme", ":", "colors_plain", "=", "theme", ".", "file", "(", "args", ".", "theme", ",", "args", ".", "l", ")", "if", "args", ".", "R", ":", "colors_plain", "=", "theme", ".", "file", "(", "os", ".", "path", ".", "join", "(", "CACHE_DIR", ",", "\"colors.json\"", ")", ")", "if", "args", ".", "w", ":", "cached_wallpaper", "=", "util", ".", "read_file", "(", "os", ".", "path", ".", "join", "(", "CACHE_DIR", ",", "\"wal\"", ")", ")", "colors_plain", "=", "colors", ".", "get", "(", "cached_wallpaper", "[", "0", "]", ",", "args", ".", "l", ",", "args", ".", "backend", ",", "sat", "=", "args", ".", "saturate", ")", "if", "args", ".", "b", ":", "args", ".", "b", "=", "\"#%s\"", "%", "(", "args", ".", "b", ".", "strip", "(", "\"#\"", ")", ")", "colors_plain", "[", "\"special\"", "]", "[", "\"background\"", "]", "=", "args", ".", "b", "colors_plain", "[", "\"colors\"", "]", "[", "\"color0\"", "]", "=", "args", ".", "b", "if", "not", "args", ".", "n", ":", "wallpaper", ".", "change", "(", "colors_plain", "[", "\"wallpaper\"", "]", ")", "if", "args", ".", "p", ":", "theme", ".", "save", "(", "colors_plain", ",", "args", ".", "p", ",", "args", ".", "l", ")", "sequences", ".", "send", "(", "colors_plain", ",", "to_send", "=", "not", "args", ".", "s", ",", "vte_fix", "=", "args", ".", "vte", ")", "if", "sys", ".", "stdout", ".", "isatty", "(", ")", ":", "colors", ".", "palette", "(", ")", "export", ".", "every", "(", "colors_plain", ")", "if", "not", "args", ".", "e", ":", "reload", ".", "env", "(", "tty_reload", "=", "not", "args", ".", "t", ")", "if", "args", ".", "o", ":", "for", "cmd", "in", "args", ".", "o", ":", "util", ".", "disown", "(", "[", "cmd", "]", ")", "if", "not", "args", ".", "e", ":", "reload", ".", "gtk", "(", ")" ]
https://github.com/dylanaraps/pywal/blob/236aa48e741ff8d65c4c3826db2813bf2ee6f352/pywal/__main__.py#L164-L218
JiYou/openstack
8607dd488bde0905044b303eb6e52bdea6806923
packages/source/cinder/cinder/volume/drivers/netapp/iscsi.py
python
NetAppISCSIDriver.delete_volume
(self, volume)
Driver entry point for destroying existing volumes.
Driver entry point for destroying existing volumes.
[ "Driver", "entry", "point", "for", "destroying", "existing", "volumes", "." ]
def delete_volume(self, volume):
        """Driver entry point for destroying existing volumes."""
        name = volume['name']
        project = volume['project_id']
        self._remove_destroy(name, project)
[ "def", "delete_volume", "(", "self", ",", "volume", ")", ":", "name", "=", "volume", "[", "'name'", "]", "project", "=", "volume", "[", "'project_id'", "]", "self", ".", "_remove_destroy", "(", "name", ",", "project", ")" ]
https://github.com/JiYou/openstack/blob/8607dd488bde0905044b303eb6e52bdea6806923/packages/source/cinder/cinder/volume/drivers/netapp/iscsi.py#L541-L545
nlloyd/SubliminalCollaborator
5c619e17ddbe8acb9eea8996ec038169ddcd50a1
libs/twisted/words/protocols/irc.py
python
IRCClient._safeMaximumLineLength
(self, command)
return MAX_COMMAND_LENGTH - len(theoretical) - fudge
Estimate a safe maximum line length for the given command. This is done by assuming the maximum values for nickname length, realname and hostname combined with the command that needs to be sent and some guessing. A theoretical maximum value is used because it is possible that our nickname, username or hostname changes (on the server side) while the length is still being calculated.
Estimate a safe maximum line length for the given command.
[ "Estimate", "a", "safe", "maximum", "line", "length", "for", "the", "given", "command", "." ]
def _safeMaximumLineLength(self, command):
        """
        Estimate a safe maximum line length for the given command.

        This is done by assuming the maximum values for nickname length,
        realname and hostname combined with the command that needs to be sent
        and some guessing. A theoretical maximum value is used because it is
        possible that our nickname, username or hostname changes (on the server
        side) while the length is still being calculated.
        """
        # :nickname!realname@hostname COMMAND ...
        theoretical = ':%s!%s@%s %s' % (
            'a' * self.supported.getFeature('NICKLEN'),
            # This value is based on observation.
            'b' * 10,
            # See <http://tools.ietf.org/html/rfc2812#section-2.3.1>.
            'c' * 63,
            command)
        # Fingers crossed.
        fudge = 10
        return MAX_COMMAND_LENGTH - len(theoretical) - fudge
[ "def", "_safeMaximumLineLength", "(", "self", ",", "command", ")", ":", "# :nickname!realname@hostname COMMAND ...", "theoretical", "=", "':%s!%s@%s %s'", "%", "(", "'a'", "*", "self", ".", "supported", ".", "getFeature", "(", "'NICKLEN'", ")", ",", "# This value is based on observation.", "'b'", "*", "10", ",", "# See <http://tools.ietf.org/html/rfc2812#section-2.3.1>.", "'c'", "*", "63", ",", "command", ")", "# Fingers crossed.", "fudge", "=", "10", "return", "MAX_COMMAND_LENGTH", "-", "len", "(", "theoretical", ")", "-", "fudge" ]
https://github.com/nlloyd/SubliminalCollaborator/blob/5c619e17ddbe8acb9eea8996ec038169ddcd50a1/libs/twisted/words/protocols/irc.py#L1555-L1575
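Plugging in concrete numbers shows the budget: with twisted's MAX_COMMAND_LENGTH of 512 bytes and an illustrative NICKLEN of 9, a PRIVMSG leaves roughly 409 bytes for parameters and text:

MAX_COMMAND_LENGTH = 512  # RFC 2812 line limit, as used by the class above
nicklen = 9               # illustrative NICKLEN feature value
theoretical = ':%s!%s@%s %s' % ('a' * nicklen, 'b' * 10, 'c' * 63, 'PRIVMSG')
print(MAX_COMMAND_LENGTH - len(theoretical) - 10)  # 409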
zzzeek/sqlalchemy
fc5c54fcd4d868c2a4c7ac19668d72f506fe821e
lib/sqlalchemy/engine/base.py
python
Connection.invalidate
(self, exception=None)
Invalidate the underlying DBAPI connection associated with this :class:`_engine.Connection`. An attempt will be made to close the underlying DBAPI connection immediately; however if this operation fails, the error is logged but not raised. The connection is then discarded whether or not close() succeeded. Upon the next use (where "use" typically means using the :meth:`_engine.Connection.execute` method or similar), this :class:`_engine.Connection` will attempt to procure a new DBAPI connection using the services of the :class:`_pool.Pool` as a source of connectivity (e.g. a "reconnection"). If a transaction was in progress (e.g. the :meth:`_engine.Connection.begin` method has been called) when :meth:`_engine.Connection.invalidate` method is called, at the DBAPI level all state associated with this transaction is lost, as the DBAPI connection is closed. The :class:`_engine.Connection` will not allow a reconnection to proceed until the :class:`.Transaction` object is ended, by calling the :meth:`.Transaction.rollback` method; until that point, any attempt at continuing to use the :class:`_engine.Connection` will raise an :class:`~sqlalchemy.exc.InvalidRequestError`. This is to prevent applications from accidentally continuing an ongoing transactional operations despite the fact that the transaction has been lost due to an invalidation. The :meth:`_engine.Connection.invalidate` method, just like auto-invalidation, will at the connection pool level invoke the :meth:`_events.PoolEvents.invalidate` event. :param exception: an optional ``Exception`` instance that's the reason for the invalidation. is passed along to event handlers and logging functions. .. seealso:: :ref:`pool_connection_invalidation`
Invalidate the underlying DBAPI connection associated with this :class:`_engine.Connection`.
[ "Invalidate", "the", "underlying", "DBAPI", "connection", "associated", "with", "this", ":", "class", ":", "_engine", ".", "Connection", "." ]
def invalidate(self, exception=None):
        """Invalidate the underlying DBAPI connection associated with
        this :class:`_engine.Connection`.

        An attempt will be made to close the underlying DBAPI connection
        immediately; however if this operation fails, the error is logged
        but not raised.  The connection is then discarded whether or not
        close() succeeded.

        Upon the next use (where "use" typically means using the
        :meth:`_engine.Connection.execute` method or similar),
        this :class:`_engine.Connection` will attempt to
        procure a new DBAPI connection using the services of the
        :class:`_pool.Pool` as a source of connectivity (e.g. a
        "reconnection").

        If a transaction was in progress (e.g. the
        :meth:`_engine.Connection.begin` method has been called) when
        :meth:`_engine.Connection.invalidate` method is called, at the DBAPI
        level all state associated with this transaction is lost, as
        the DBAPI connection is closed.  The :class:`_engine.Connection`
        will not allow a reconnection to proceed until the
        :class:`.Transaction` object is ended, by calling the
        :meth:`.Transaction.rollback` method; until that point, any attempt at
        continuing to use the :class:`_engine.Connection` will raise an
        :class:`~sqlalchemy.exc.InvalidRequestError`.
        This is to prevent applications from accidentally
        continuing an ongoing transactional operations despite the
        fact that the transaction has been lost due to an
        invalidation.

        The :meth:`_engine.Connection.invalidate` method, just like
        auto-invalidation, will at the connection pool level invoke the
        :meth:`_events.PoolEvents.invalidate` event.

        :param exception: an optional ``Exception`` instance that's the
         reason for the invalidation.  is passed along to event handlers
         and logging functions.

        .. seealso::

            :ref:`pool_connection_invalidation`

        """

        if self.__branch_from:
            return self.__branch_from.invalidate(exception=exception)

        if self.invalidated:
            return

        if self.closed:
            raise exc.ResourceClosedError("This Connection is closed")

        if self._still_open_and_dbapi_connection_is_valid:
            self._dbapi_connection.invalidate(exception)
        self._dbapi_connection = None
[ "def", "invalidate", "(", "self", ",", "exception", "=", "None", ")", ":", "if", "self", ".", "__branch_from", ":", "return", "self", ".", "__branch_from", ".", "invalidate", "(", "exception", "=", "exception", ")", "if", "self", ".", "invalidated", ":", "return", "if", "self", ".", "closed", ":", "raise", "exc", ".", "ResourceClosedError", "(", "\"This Connection is closed\"", ")", "if", "self", ".", "_still_open_and_dbapi_connection_is_valid", ":", "self", ".", "_dbapi_connection", ".", "invalidate", "(", "exception", ")", "self", ".", "_dbapi_connection", "=", "None" ]
https://github.com/zzzeek/sqlalchemy/blob/fc5c54fcd4d868c2a4c7ac19668d72f506fe821e/lib/sqlalchemy/engine/base.py#L612-L669
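The documented recovery path (invalidate during a transaction, then end the transaction before reusing the connection) can be exercised end to end. A sketch assuming SQLAlchemy with an in-memory SQLite engine:

from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")
conn = engine.connect()
trans = conn.begin()
conn.execute(text("SELECT 1"))
conn.invalidate()    # the DBAPI connection is discarded; transaction state is lost
trans.rollback()     # the Transaction must be ended before a reconnect is allowed
print(conn.execute(text("SELECT 1")).scalar())  # procures a fresh pooled connection
conn.close()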
SheffieldML/GPy
bb1bc5088671f9316bc92a46d356734e34c2d5c0
GPy/util/input_warping_functions.py
python
KumarWarping.__init__
(self, X, warping_indices=None, epsilon=None, Xmin=None, Xmax=None)
[]
def __init__(self, X, warping_indices=None, epsilon=None, Xmin=None, Xmax=None):
        super(KumarWarping, self).__init__(name='input_warp_kumar')

        if warping_indices is not None and np.max(warping_indices) > X.shape[1] - 1:
            raise ValueError("Kumar warping indices exceed feature dimension")

        if warping_indices is not None and np.min(warping_indices) < 0:
            raise ValueError("Kumar warping indices should be larger than 0")

        if warping_indices is not None and np.any(list(map(lambda x: not isinstance(x, int), warping_indices))):
            raise ValueError("Kumar warping indices should be integer")

        if Xmin is None and Xmax is None:
            Xmin = X.min(axis=0)
            Xmax = X.max(axis=0)
        else:
            if Xmin is None or Xmax is None:
                raise ValueError("Xmin and Xmax need to be provide at the same time!")
            if len(Xmin) != X.shape[1] or len(Xmax) != X.shape[1]:
                raise ValueError("Xmin and Xmax should have n_feature values!")

        if epsilon is None:
            epsilon = 1e-6
        self.epsilon = epsilon

        self.Xmin = Xmin - self.epsilon
        self.Xmax = Xmax + self.epsilon
        self.scaling = 1.0 / (self.Xmax - self.Xmin)
        self.X_normalized = (X - self.Xmin) / (self.Xmax - self.Xmin)

        if warping_indices is None:
            warping_indices = range(X.shape[1])

        self.warping_indices = warping_indices
        self.warping_dim = len(self.warping_indices)
        self.num_parameters = 2 * self.warping_dim

        # create parameters
        self.params = [[Param('a%d' % i, 1.0), Param('b%d' % i, 1.0)] for i in range(self.warping_dim)]

        # add constraints
        for i in range(self.warping_dim):
            self.params[i][0].constrain_bounded(0.0, 10.0)
            self.params[i][1].constrain_bounded(0.0, 10.0)

        # set priors and add them into handler
        for i in range(self.warping_dim):
            self.params[i][0].set_prior(LogGaussian(0.0, 0.75))
            self.params[i][1].set_prior(LogGaussian(0.0, 0.75))
            self.link_parameter(self.params[i][0])
            self.link_parameter(self.params[i][1])
[ "def", "__init__", "(", "self", ",", "X", ",", "warping_indices", "=", "None", ",", "epsilon", "=", "None", ",", "Xmin", "=", "None", ",", "Xmax", "=", "None", ")", ":", "super", "(", "KumarWarping", ",", "self", ")", ".", "__init__", "(", "name", "=", "'input_warp_kumar'", ")", "if", "warping_indices", "is", "not", "None", "and", "np", ".", "max", "(", "warping_indices", ")", ">", "X", ".", "shape", "[", "1", "]", "-", "1", ":", "raise", "ValueError", "(", "\"Kumar warping indices exceed feature dimension\"", ")", "if", "warping_indices", "is", "not", "None", "and", "np", ".", "min", "(", "warping_indices", ")", "<", "0", ":", "raise", "ValueError", "(", "\"Kumar warping indices should be larger than 0\"", ")", "if", "warping_indices", "is", "not", "None", "and", "np", ".", "any", "(", "list", "(", "map", "(", "lambda", "x", ":", "not", "isinstance", "(", "x", ",", "int", ")", ",", "warping_indices", ")", ")", ")", ":", "raise", "ValueError", "(", "\"Kumar warping indices should be integer\"", ")", "if", "Xmin", "is", "None", "and", "Xmax", "is", "None", ":", "Xmin", "=", "X", ".", "min", "(", "axis", "=", "0", ")", "Xmax", "=", "X", ".", "max", "(", "axis", "=", "0", ")", "else", ":", "if", "Xmin", "is", "None", "or", "Xmax", "is", "None", ":", "raise", "ValueError", "(", "\"Xmin and Xmax need to be provide at the same time!\"", ")", "if", "len", "(", "Xmin", ")", "!=", "X", ".", "shape", "[", "1", "]", "or", "len", "(", "Xmax", ")", "!=", "X", ".", "shape", "[", "1", "]", ":", "raise", "ValueError", "(", "\"Xmin and Xmax should have n_feature values!\"", ")", "if", "epsilon", "is", "None", ":", "epsilon", "=", "1e-6", "self", ".", "epsilon", "=", "epsilon", "self", ".", "Xmin", "=", "Xmin", "-", "self", ".", "epsilon", "self", ".", "Xmax", "=", "Xmax", "+", "self", ".", "epsilon", "self", ".", "scaling", "=", "1.0", "/", "(", "self", ".", "Xmax", "-", "self", ".", "Xmin", ")", "self", ".", "X_normalized", "=", "(", "X", "-", "self", ".", "Xmin", ")", "/", "(", "self", ".", "Xmax", "-", "self", ".", "Xmin", ")", "if", "warping_indices", "is", "None", ":", "warping_indices", "=", "range", "(", "X", ".", "shape", "[", "1", "]", ")", "self", ".", "warping_indices", "=", "warping_indices", "self", ".", "warping_dim", "=", "len", "(", "self", ".", "warping_indices", ")", "self", ".", "num_parameters", "=", "2", "*", "self", ".", "warping_dim", "# create parameters", "self", ".", "params", "=", "[", "[", "Param", "(", "'a%d'", "%", "i", ",", "1.0", ")", ",", "Param", "(", "'b%d'", "%", "i", ",", "1.0", ")", "]", "for", "i", "in", "range", "(", "self", ".", "warping_dim", ")", "]", "# add constraints", "for", "i", "in", "range", "(", "self", ".", "warping_dim", ")", ":", "self", ".", "params", "[", "i", "]", "[", "0", "]", ".", "constrain_bounded", "(", "0.0", ",", "10.0", ")", "self", ".", "params", "[", "i", "]", "[", "1", "]", ".", "constrain_bounded", "(", "0.0", ",", "10.0", ")", "# set priors and add them into handler", "for", "i", "in", "range", "(", "self", ".", "warping_dim", ")", ":", "self", ".", "params", "[", "i", "]", "[", "0", "]", ".", "set_prior", "(", "LogGaussian", "(", "0.0", ",", "0.75", ")", ")", "self", ".", "params", "[", "i", "]", "[", "1", "]", ".", "set_prior", "(", "LogGaussian", "(", "0.0", ",", "0.75", ")", ")", "self", ".", "link_parameter", "(", "self", ".", "params", "[", "i", "]", "[", "0", "]", ")", "self", ".", "link_parameter", "(", "self", ".", "params", "[", "i", "]", "[", "1", "]", ")" ]
https://github.com/SheffieldML/GPy/blob/bb1bc5088671f9316bc92a46d356734e34c2d5c0/GPy/util/input_warping_functions.py#L118-L169
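Note: the constructor in this record only wires up the per-dimension a/b parameters, their (0, 10] bound constraints, and log-Gaussian priors; the transform those parameters feed is the Kumaraswamy CDF applied to each normalized input column. A minimal sketch of that transform, written as a hypothetical standalone helper rather than the GPy method itself:

import numpy as np

def kumaraswamy_warp(x_normalized, a, b):
    # Kumaraswamy CDF: a monotone map of [0, 1] onto [0, 1]
    # a, b > 0 control the shape; a = b = 1 gives the identity
    return 1.0 - (1.0 - x_normalized ** a) ** b

With both parameters initialized to 1.0 as above, the warp starts at the identity and is free to bend toward the data during optimization.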
google/trax
d6cae2067dedd0490b78d831033607357e975015
trax/rl/space_serializer.py
python
create
(space, vocab_size)
return { gym.spaces.Box: BoxSpaceSerializer, gym.spaces.Discrete: DiscreteSpaceSerializer, gym.spaces.MultiDiscrete: MultiDiscreteSpaceSerializer, }[type(space)](space, vocab_size)
Creates a SpaceSerializer for the given Gym space.
Creates a SpaceSerializer for the given Gym space.
[ "Creates", "a", "SpaceSerializer", "for", "the", "given", "Gym", "space", "." ]
def create(space, vocab_size): """Creates a SpaceSerializer for the given Gym space.""" return { gym.spaces.Box: BoxSpaceSerializer, gym.spaces.Discrete: DiscreteSpaceSerializer, gym.spaces.MultiDiscrete: MultiDiscreteSpaceSerializer, }[type(space)](space, vocab_size)
[ "def", "create", "(", "space", ",", "vocab_size", ")", ":", "return", "{", "gym", ".", "spaces", ".", "Box", ":", "BoxSpaceSerializer", ",", "gym", ".", "spaces", ".", "Discrete", ":", "DiscreteSpaceSerializer", ",", "gym", ".", "spaces", ".", "MultiDiscrete", ":", "MultiDiscreteSpaceSerializer", ",", "}", "[", "type", "(", "space", ")", "]", "(", "space", ",", "vocab_size", ")" ]
https://github.com/google/trax/blob/d6cae2067dedd0490b78d831033607357e975015/trax/rl/space_serializer.py#L88-L94
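Note: `create` is a plain dispatch-by-type table, so a space whose type is not Box, Discrete, or MultiDiscrete raises KeyError. A usage sketch (the space and vocab size here are made up):

import gym

# pick a serializer for a 16-way discrete action space
serializer = create(gym.spaces.Discrete(16), vocab_size=32)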
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/kombu/transport/redis.py
python
_after_fork_cleanup_channel
(channel)
[]
def _after_fork_cleanup_channel(channel): channel._after_fork()
[ "def", "_after_fork_cleanup_channel", "(", "channel", ")", ":", "channel", ".", "_after_fork", "(", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/kombu/transport/redis.py#L132-L133
apache/libcloud
90971e17bfd7b6bb97b2489986472c531cc8e140
libcloud/common/upcloud.py
python
UpcloudNodeOperations.destroy_node
(self, node_id)
Destroys the node. :param node_id: Id of the Node :type node_id: ``int``
Destroys the node.
[ "Destroys", "the", "node", "." ]
def destroy_node(self, node_id): """ Destroys the node. :param node_id: Id of the Node :type node_id: ``int`` """ self.connection.request("1.2/server/{0}".format(node_id), method="DELETE")
[ "def", "destroy_node", "(", "self", ",", "node_id", ")", ":", "self", ".", "connection", ".", "request", "(", "\"1.2/server/{0}\"", ".", "format", "(", "node_id", ")", ",", "method", "=", "\"DELETE\"", ")" ]
https://github.com/apache/libcloud/blob/90971e17bfd7b6bb97b2489986472c531cc8e140/libcloud/common/upcloud.py#L194-L201
demisto/content
5c664a65b992ac8ca90ac3f11b1b2cdf11ee9b07
Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.py
python
MicrosoftClient._get_self_deployed_token_auth_code
( self, refresh_token: str = '', resource: str = '', scope: Optional[str] = None)
return access_token, expires_in, refresh_token
Gets a token by authorizing a self-deployed Azure application. Returns: tuple: An access token, its expiry and refresh token.
Gets a token by authorizing a self-deployed Azure application. Returns: tuple: An access token, its expiry and refresh token.
[ "Gets", "a", "token", "by", "authorizing", "a", "self", "deployed", "Azure", "application", ".", "Returns", ":", "tuple", ":", "An", "access", "token", "its", "expiry", "and", "refresh", "token", "." ]
def _get_self_deployed_token_auth_code( self, refresh_token: str = '', resource: str = '', scope: Optional[str] = None) -> Tuple[str, int, str]: """ Gets a token by authorizing a self deployed Azure application. Returns: tuple: An access token, its expiry and refresh token. """ data = assign_params( client_id=self.client_id, client_secret=self.client_secret, resource=self.resource if not resource else resource, redirect_uri=self.redirect_uri ) if scope: data['scope'] = scope refresh_token = refresh_token or self._get_refresh_token_from_auth_code_param() if refresh_token: data['grant_type'] = REFRESH_TOKEN data['refresh_token'] = refresh_token else: if SESSION_STATE in self.auth_code: raise ValueError('Malformed auth_code parameter: Please copy the auth code from the redirected uri ' 'without any additional info and without the "session_state" query parameter.') data['grant_type'] = AUTHORIZATION_CODE data['code'] = self.auth_code response_json: dict = {} try: response = requests.post(self.token_retrieval_url, data, verify=self.verify) if response.status_code not in {200, 201}: return_error(f'Error in Microsoft authorization. Status: {response.status_code},' f' body: {self.error_parser(response)}') response_json = response.json() except Exception as e: return_error(f'Error in Microsoft authorization: {str(e)}') access_token = response_json.get('access_token', '') expires_in = int(response_json.get('expires_in', 3595)) refresh_token = response_json.get('refresh_token', '') return access_token, expires_in, refresh_token
[ "def", "_get_self_deployed_token_auth_code", "(", "self", ",", "refresh_token", ":", "str", "=", "''", ",", "resource", ":", "str", "=", "''", ",", "scope", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "Tuple", "[", "str", ",", "int", ",", "str", "]", ":", "data", "=", "assign_params", "(", "client_id", "=", "self", ".", "client_id", ",", "client_secret", "=", "self", ".", "client_secret", ",", "resource", "=", "self", ".", "resource", "if", "not", "resource", "else", "resource", ",", "redirect_uri", "=", "self", ".", "redirect_uri", ")", "if", "scope", ":", "data", "[", "'scope'", "]", "=", "scope", "refresh_token", "=", "refresh_token", "or", "self", ".", "_get_refresh_token_from_auth_code_param", "(", ")", "if", "refresh_token", ":", "data", "[", "'grant_type'", "]", "=", "REFRESH_TOKEN", "data", "[", "'refresh_token'", "]", "=", "refresh_token", "else", ":", "if", "SESSION_STATE", "in", "self", ".", "auth_code", ":", "raise", "ValueError", "(", "'Malformed auth_code parameter: Please copy the auth code from the redirected uri '", "'without any additional info and without the \"session_state\" query parameter.'", ")", "data", "[", "'grant_type'", "]", "=", "AUTHORIZATION_CODE", "data", "[", "'code'", "]", "=", "self", ".", "auth_code", "response_json", ":", "dict", "=", "{", "}", "try", ":", "response", "=", "requests", ".", "post", "(", "self", ".", "token_retrieval_url", ",", "data", ",", "verify", "=", "self", ".", "verify", ")", "if", "response", ".", "status_code", "not", "in", "{", "200", ",", "201", "}", ":", "return_error", "(", "f'Error in Microsoft authorization. Status: {response.status_code},'", "f' body: {self.error_parser(response)}'", ")", "response_json", "=", "response", ".", "json", "(", ")", "except", "Exception", "as", "e", ":", "return_error", "(", "f'Error in Microsoft authorization: {str(e)}'", ")", "access_token", "=", "response_json", ".", "get", "(", "'access_token'", ",", "''", ")", "expires_in", "=", "int", "(", "response_json", ".", "get", "(", "'expires_in'", ",", "3595", ")", ")", "refresh_token", "=", "response_json", ".", "get", "(", "'refresh_token'", ",", "''", ")", "return", "access_token", ",", "expires_in", ",", "refresh_token" ]
https://github.com/demisto/content/blob/5c664a65b992ac8ca90ac3f11b1b2cdf11ee9b07/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.py#L372-L414
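Note: the branch structure above is the standard OAuth2 token-endpoint contract: prefer the refresh_token grant when a refresh token exists, otherwise exchange the authorization code. A generic sketch of the same flow, with a placeholder endpoint and without the demisto helpers (assign_params, return_error):

import requests

def fetch_token(token_url, client_id, client_secret,
                refresh_token=None, auth_code=None, redirect_uri=None):
    data = {'client_id': client_id, 'client_secret': client_secret,
            'redirect_uri': redirect_uri}
    if refresh_token:
        data['grant_type'] = 'refresh_token'
        data['refresh_token'] = refresh_token
    else:
        data['grant_type'] = 'authorization_code'
        data['code'] = auth_code
    resp = requests.post(token_url, data=data, timeout=30)
    resp.raise_for_status()
    payload = resp.json()
    # mirror the defaults used above: empty strings and a ~1h expiry
    return (payload.get('access_token', ''),
            int(payload.get('expires_in', 3595)),
            payload.get('refresh_token', ''))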
krintoxi/NoobSec-Toolkit
38738541cbc03cedb9a3b3ed13b629f781ad64f6
NoobSecToolkit /tools/sqli/waf/denyall.py
python
detect
(get_page)
return retval
[]
def detect(get_page): retval = False for vector in WAF_ATTACK_VECTORS: page, headers, code = get_page(get=vector) retval = re.search(r"\Asessioncookie=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None retval |= code == 200 and re.search(r"\ACondition Intercepted", page, re.I) is not None if retval: break return retval
[ "def", "detect", "(", "get_page", ")", ":", "retval", "=", "False", "for", "vector", "in", "WAF_ATTACK_VECTORS", ":", "page", ",", "headers", ",", "code", "=", "get_page", "(", "get", "=", "vector", ")", "retval", "=", "re", ".", "search", "(", "r\"\\Asessioncookie=\"", ",", "headers", ".", "get", "(", "HTTP_HEADER", ".", "SET_COOKIE", ",", "\"\"", ")", ",", "re", ".", "I", ")", "is", "not", "None", "retval", "|=", "code", "==", "200", "and", "re", ".", "search", "(", "r\"\\ACondition Intercepted\"", ",", "page", ",", "re", ".", "I", ")", "is", "not", "None", "if", "retval", ":", "break", "return", "retval" ]
https://github.com/krintoxi/NoobSec-Toolkit/blob/38738541cbc03cedb9a3b3ed13b629f781ad64f6/NoobSecToolkit /tools/sqli/waf/denyall.py#L15-L25
s-leger/archipack
5a6243bf1edf08a6b429661ce291dacb551e5f8a
pygeos/op_buffer.py
python
BufferSubGraph.add
(self, node, nodeStack: list)
Adds the argument node and all its out edges to the subgraph. @param node the node to add @param nodeStack the current set of nodes being traversed
Adds the argument node and all its out edges to the subgraph.
[ "*", "Adds", "the", "argument", "node", "and", "all", "its", "out", "edges", "to", "the", "subgraph", "*" ]
def add(self, node, nodeStack: list) -> None: """ * Adds the argument node and all its out edges to the subgraph * @param node the node to add * @param nodeStack the current set of nodes being traversed """ node.isVisited = True self.nodes.append(node) star = node.star for de in star.edges: self._edgeEnds.append(de) sym = de.sym symNode = sym.node """ * NOTE: this is a depth-first traversal of the graph. * This will cause a large depth of recursion. * It might be better to do a breadth-first traversal. """ if not symNode.isVisited: nodeStack.append(symNode)
[ "def", "add", "(", "self", ",", "node", ",", "nodeStack", ":", "list", ")", "->", "None", ":", "node", ".", "isVisited", "=", "True", "self", ".", "nodes", ".", "append", "(", "node", ")", "star", "=", "node", ".", "star", "for", "de", "in", "star", ".", "edges", ":", "self", ".", "_edgeEnds", ".", "append", "(", "de", ")", "sym", "=", "de", ".", "sym", "symNode", "=", "sym", ".", "node", "\"\"\"\n * NOTE: this is a depth-first traversal of the graph.\n * This will cause a large depth of recursion.\n * It might be better to do a breadth-first traversal.\n \"\"\"", "if", "not", "symNode", ".", "isVisited", ":", "nodeStack", ".", "append", "(", "symNode", ")" ]
https://github.com/s-leger/archipack/blob/5a6243bf1edf08a6b429661ce291dacb551e5f8a/pygeos/op_buffer.py#L659-L678
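Note: the NOTE embedded in the body explains why `add` pushes neighbours onto a caller-supplied container instead of recursing: the traversal order lives in the caller, so switching from the current depth-first order to the suggested breadth-first one is just a data-structure swap. A sketch of such a driver loop (the caller shown here is hypothetical, not the archipack original):

from collections import deque

def visit_from(start_node, subgraph):
    # deque.popleft() gives breadth-first; use frontier.pop() for depth-first
    frontier = deque([start_node])
    while frontier:
        node = frontier.popleft()
        if not node.isVisited:
            subgraph.add(node, frontier)  # add() appends unvisited neighbours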
digidotcom/xbee-python
0757f4be0017530c205175fbee8f9f61be9614d1
digi/xbee/devices.py
python
XBeeDevice._send_data_64_16
(self, x64addr, x16addr, data, transmit_options=TransmitOptions.NONE.value)
return self.send_packet_sync_and_get_response(packet)
Blocking method. This method sends data to the remote XBee with the given 64-bit/16-bit address. This method waits for the packet response. The default timeout is :attr:`.XBeeDevice._DEFAULT_TIMEOUT_SYNC_OPERATIONS`. Args: x64addr (:class:`.XBee64BitAddress`): 64-bit address of the destination XBee. x16addr (:class:`.XBee16BitAddress`): 16-bit address of the destination XBee, :attr:`.XBee16BitAddress.UNKNOWN_ADDRESS` if unknown. data (String or Bytearray): Raw data to send. transmit_options (Integer, optional): Transmit options, bitfield of :class:`.TransmitOptions`. Default to `TransmitOptions.NONE.value`. Returns: :class:`.XBeePacket`: The response. Raises: ValueError: If `x64addr`, `x16addr` or `data` is `None`. TimeoutException: If response is not received before the read timeout expires. InvalidOperatingModeException: If the XBee's operating mode is not API or ESCAPED API. This method only checks the cached value of the operating mode. TransmitException: If the status of the response received is not OK. XBeeException: If the XBee's communication interface is closed. .. seealso:: | :class:`.XBee64BitAddress` | :class:`.XBee16BitAddress` | :class:`.XBeePacket`
Blocking method. This method sends data to the remote XBee with the given 64-bit/16-bit address.
[ "Blocking", "method", ".", "This", "method", "sends", "data", "to", "the", "remote", "XBee", "with", "the", "given", "64", "-", "bit", "/", "16", "-", "bit", "address", "." ]
def _send_data_64_16(self, x64addr, x16addr, data, transmit_options=TransmitOptions.NONE.value): """ Blocking method. This method sends data to the remote XBee with the given 64-bit/16-bit address. This method waits for the packet response. The default timeout is :attr:`.XBeeDevice._DEFAULT_TIMEOUT_SYNC_OPERATIONS`. Args: x64addr (:class:`.XBee64BitAddress`): 64-bit address of the destination XBee. x16addr (:class:`.XBee16BitAddress`): 16-bit address of the destination XBee, :attr:`.XBee16BitAddress.UNKNOWN_ADDRESS` if unknown. data (String or Bytearray): Raw data to send. transmit_options (Integer, optional): Transmit options, bitfield of :class:`.TransmitOptions`. Default to `TransmitOptions.NONE.value`. Returns: :class:`.XBeePacket`: The response. Raises: ValueError: If `x64addr`, `x16addr` or `data` is `None`. TimeoutException: If response is not received before the read timeout expires. InvalidOperatingModeException: If the XBee's operating mode is not API or ESCAPED API. This method only checks the cached value of the operating mode. TransmitException: If the status of the response received is not OK. XBeeException: If the XBee's communication interface is closed. .. seealso:: | :class:`.XBee64BitAddress` | :class:`.XBee16BitAddress` | :class:`.XBeePacket` """ if x64addr is None: raise ValueError("64-bit address cannot be None") if x16addr is None: raise ValueError("16-bit address cannot be None") if not isinstance(data, (str, bytearray, bytes)): raise ValueError("Data must be a string or bytearray") if self.is_remote(): raise OperationNotSupportedException( message="Cannot send data to a remote device from a remote device") if isinstance(data, str): data = data.encode(encoding="utf8", errors="ignore") packet = TransmitPacket(self.get_next_frame_id(), x64addr, x16addr, 0, transmit_options, rf_data=data) return self.send_packet_sync_and_get_response(packet)
[ "def", "_send_data_64_16", "(", "self", ",", "x64addr", ",", "x16addr", ",", "data", ",", "transmit_options", "=", "TransmitOptions", ".", "NONE", ".", "value", ")", ":", "if", "x64addr", "is", "None", ":", "raise", "ValueError", "(", "\"64-bit address cannot be None\"", ")", "if", "x16addr", "is", "None", ":", "raise", "ValueError", "(", "\"16-bit address cannot be None\"", ")", "if", "not", "isinstance", "(", "data", ",", "(", "str", ",", "bytearray", ",", "bytes", ")", ")", ":", "raise", "ValueError", "(", "\"Data must be a string or bytearray\"", ")", "if", "self", ".", "is_remote", "(", ")", ":", "raise", "OperationNotSupportedException", "(", "message", "=", "\"Cannot send data to a remote device from a remote device\"", ")", "if", "isinstance", "(", "data", ",", "str", ")", ":", "data", "=", "data", ".", "encode", "(", "encoding", "=", "\"utf8\"", ",", "errors", "=", "\"ignore\"", ")", "packet", "=", "TransmitPacket", "(", "self", ".", "get_next_frame_id", "(", ")", ",", "x64addr", ",", "x16addr", ",", "0", ",", "transmit_options", ",", "rf_data", "=", "data", ")", "return", "self", ".", "send_packet_sync_and_get_response", "(", "packet", ")" ]
https://github.com/digidotcom/xbee-python/blob/0757f4be0017530c205175fbee8f9f61be9614d1/digi/xbee/devices.py#L2622-L2674
SpaceNetChallenge/SpaceNet_Off_Nadir_Solutions
014c4ca27a70b5907a183e942228004c989dcbe4
selim_sef/training/losses.py
python
lovasz_hinge_flat
(logits, labels)
return loss
Binary Lovasz hinge loss logits: [P] Variable, logits at each prediction (between -\infty and +\infty) labels: [P] Tensor, binary ground truth labels (0 or 1)
Binary Lovasz hinge loss logits: [P] Variable, logits at each prediction (between -\infty and +\infty) labels: [P] Tensor, binary ground truth labels (0 or 1)
[ "Binary", "Lovasz", "hinge", "loss", "logits", ":", "[", "P", "]", "Variable", "logits", "at", "each", "prediction", "(", "between", "-", "\\", "infty", "and", "+", "\\", "infty", ")", "labels", ":", "[", "P", "]", "Tensor", "binary", "ground", "truth", "labels", "(", "0", "or", "1", ")", "ignore", ":", "label", "to", "ignore" ]
def lovasz_hinge_flat(logits, labels): """ Binary Lovasz hinge loss logits: [P] Variable, logits at each prediction (between -\infty and +\infty) labels: [P] Tensor, binary ground truth labels (0 or 1) ignore: label to ignore """ if len(labels) == 0: # only void pixels, the gradients should be 0 return logits.sum() * 0. signs = 2. * labels.float() - 1. errors = (1. - logits * Variable(signs)) errors_sorted, perm = torch.sort(errors, dim=0, descending=True) perm = perm.data gt_sorted = labels[perm] grad = lovasz_grad(gt_sorted) loss = torch.dot(F.relu(errors_sorted), Variable(grad)) return loss
[ "def", "lovasz_hinge_flat", "(", "logits", ",", "labels", ")", ":", "if", "len", "(", "labels", ")", "==", "0", ":", "# only void pixels, the gradients should be 0", "return", "logits", ".", "sum", "(", ")", "*", "0.", "signs", "=", "2.", "*", "labels", ".", "float", "(", ")", "-", "1.", "errors", "=", "(", "1.", "-", "logits", "*", "Variable", "(", "signs", ")", ")", "errors_sorted", ",", "perm", "=", "torch", ".", "sort", "(", "errors", ",", "dim", "=", "0", ",", "descending", "=", "True", ")", "perm", "=", "perm", ".", "data", "gt_sorted", "=", "labels", "[", "perm", "]", "grad", "=", "lovasz_grad", "(", "gt_sorted", ")", "loss", "=", "torch", ".", "dot", "(", "F", ".", "relu", "(", "errors_sorted", ")", ",", "Variable", "(", "grad", ")", ")", "return", "loss" ]
https://github.com/SpaceNetChallenge/SpaceNet_Off_Nadir_Solutions/blob/014c4ca27a70b5907a183e942228004c989dcbe4/selim_sef/training/losses.py#L177-L194
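Note: the loss above leans on a `lovasz_grad` helper that is not part of this record. In the reference code for the Lovász-Softmax paper (Berman et al.), that helper computes the gradient of the Jaccard extension over the sorted ground truth; a sketch, assuming this repo's helper matches the reference:

import torch

def lovasz_grad(gt_sorted):
    # gradient of the Lovasz extension of the Jaccard loss
    # w.r.t. sorted errors (Berman et al., "The Lovasz-Softmax loss")
    p = len(gt_sorted)
    gts = gt_sorted.sum()
    intersection = gts - gt_sorted.float().cumsum(0)
    union = gts + (1 - gt_sorted).float().cumsum(0)
    jaccard = 1. - intersection / union
    if p > 1:
        jaccard[1:p] = jaccard[1:p] - jaccard[0:-1]
    return jaccard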
twisted/twisted
dee676b040dd38b847ea6fb112a712cb5e119490
src/twisted/conch/insults/insults.py
python
ITerminalTransport.singleWidthLine
()
Make the current line a single-width, single-height line.
Make the current line a single-width, single-height line.
[ "Make", "the", "current", "line", "a", "single", "-", "width", "single", "-", "height", "line", "." ]
def singleWidthLine(): """ Make the current line a single-width, single-height line. """
[ "def", "singleWidthLine", "(", ")", ":" ]
https://github.com/twisted/twisted/blob/dee676b040dd38b847ea6fb112a712cb5e119490/src/twisted/conch/insults/insults.py#L233-L236
scipy-lectures/scipy-lecture-notes
7c91eb4afc4b7f0c77bd022e3bd0c33a4c9a1f50
intro/solutions/path_site.py
python
find_module
(module)
return result
[]
def find_module(module): result = [] # Loop over the list of paths in sys.path for subdir in sys.path: # Join the subdir path with the module we're searching for pth = os.path.join(subdir, module) # Use glob to test if the pth is exists res = glob.glob(pth) # glob returns a list, if it is not empty, the pth exists if len(res) > 0: result.append(res) return result
[ "def", "find_module", "(", "module", ")", ":", "result", "=", "[", "]", "# Loop over the list of paths in sys.path", "for", "subdir", "in", "sys", ".", "path", ":", "# Join the subdir path with the module we're searching for", "pth", "=", "os", ".", "path", ".", "join", "(", "subdir", ",", "module", ")", "# Use glob to test if the pth is exists", "res", "=", "glob", ".", "glob", "(", "pth", ")", "# glob returns a list, if it is not empty, the pth exists", "if", "len", "(", "res", ")", ">", "0", ":", "result", ".", "append", "(", "res", ")", "return", "result" ]
https://github.com/scipy-lectures/scipy-lecture-notes/blob/7c91eb4afc4b7f0c77bd022e3bd0c33a4c9a1f50/intro/solutions/path_site.py#L7-L18
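Note: because the membership test goes through glob, the module argument may carry shell wildcards. A quick usage sketch (module names are illustrative):

# exact name: directories or files named 'json' on sys.path
find_module('json')

# wildcard: every entry starting with 'num' (numpy, numbers.py, ...)
find_module('num*')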
microsoft/nni
31f11f51249660930824e888af0d4e022823285c
nni/algorithms/hpo/metis_tuner/metis_tuner.py
python
MetisTuner.update_search_space
(self, search_space)
Update self.x_bounds and self.x_types from the search_space.json Parameters ---------- search_space : dict
Update self.x_bounds and self.x_types from the search_space.json
[ "Update", "the", "self", ".", "x_bounds", "and", "self", ".", "x_types", "by", "the", "search_space", ".", "json" ]
def update_search_space(self, search_space): """ Update the self.x_bounds and self.x_types by the search_space.json Parameters ---------- search_space : dict """ validate_search_space(search_space, ['choice', 'randint', 'uniform', 'quniform']) self.x_bounds = [[] for i in range(len(search_space))] self.x_types = [NONE_TYPE for i in range(len(search_space))] for key in search_space: self.key_order.append(key) key_type = {} if isinstance(search_space, dict): for key in search_space: key_type = search_space[key]['_type'] key_range = search_space[key]['_value'] idx = self.key_order.index(key) if key_type == 'quniform': if key_range[2] == 1 and key_range[0].is_integer( ) and key_range[1].is_integer(): self.x_bounds[idx] = [key_range[0], key_range[1] + 1] self.x_types[idx] = 'range_int' else: low, high, q = key_range bounds = np.clip( np.arange( np.round( low / q), np.round( high / q) + 1) * q, low, high) self.x_bounds[idx] = bounds self.x_types[idx] = 'discrete_int' elif key_type == 'randint': self.x_bounds[idx] = [key_range[0], key_range[1]] self.x_types[idx] = 'range_int' elif key_type == 'uniform': self.x_bounds[idx] = [key_range[0], key_range[1]] self.x_types[idx] = 'range_continuous' elif key_type == 'choice': self.x_bounds[idx] = key_range for key_value in key_range: if not isinstance(key_value, (int, float)): raise RuntimeError( "Metis Tuner only support numerical choice.") self.x_types[idx] = 'discrete_int' else: logger.info( "Metis Tuner doesn't support this kind of variable: %s", str(key_type)) raise RuntimeError( "Metis Tuner doesn't support this kind of variable: %s" % str(key_type)) else: logger.info("The format of search space is not a dict.") raise RuntimeError("The format of search space is not a dict.") self.minimize_starting_points = _rand_init( self.x_bounds, self.x_types, self.selection_num_starting_points)
[ "def", "update_search_space", "(", "self", ",", "search_space", ")", ":", "validate_search_space", "(", "search_space", ",", "[", "'choice'", ",", "'randint'", ",", "'uniform'", ",", "'quniform'", "]", ")", "self", ".", "x_bounds", "=", "[", "[", "]", "for", "i", "in", "range", "(", "len", "(", "search_space", ")", ")", "]", "self", ".", "x_types", "=", "[", "NONE_TYPE", "for", "i", "in", "range", "(", "len", "(", "search_space", ")", ")", "]", "for", "key", "in", "search_space", ":", "self", ".", "key_order", ".", "append", "(", "key", ")", "key_type", "=", "{", "}", "if", "isinstance", "(", "search_space", ",", "dict", ")", ":", "for", "key", "in", "search_space", ":", "key_type", "=", "search_space", "[", "key", "]", "[", "'_type'", "]", "key_range", "=", "search_space", "[", "key", "]", "[", "'_value'", "]", "idx", "=", "self", ".", "key_order", ".", "index", "(", "key", ")", "if", "key_type", "==", "'quniform'", ":", "if", "key_range", "[", "2", "]", "==", "1", "and", "key_range", "[", "0", "]", ".", "is_integer", "(", ")", "and", "key_range", "[", "1", "]", ".", "is_integer", "(", ")", ":", "self", ".", "x_bounds", "[", "idx", "]", "=", "[", "key_range", "[", "0", "]", ",", "key_range", "[", "1", "]", "+", "1", "]", "self", ".", "x_types", "[", "idx", "]", "=", "'range_int'", "else", ":", "low", ",", "high", ",", "q", "=", "key_range", "bounds", "=", "np", ".", "clip", "(", "np", ".", "arange", "(", "np", ".", "round", "(", "low", "/", "q", ")", ",", "np", ".", "round", "(", "high", "/", "q", ")", "+", "1", ")", "*", "q", ",", "low", ",", "high", ")", "self", ".", "x_bounds", "[", "idx", "]", "=", "bounds", "self", ".", "x_types", "[", "idx", "]", "=", "'discrete_int'", "elif", "key_type", "==", "'randint'", ":", "self", ".", "x_bounds", "[", "idx", "]", "=", "[", "key_range", "[", "0", "]", ",", "key_range", "[", "1", "]", "]", "self", ".", "x_types", "[", "idx", "]", "=", "'range_int'", "elif", "key_type", "==", "'uniform'", ":", "self", ".", "x_bounds", "[", "idx", "]", "=", "[", "key_range", "[", "0", "]", ",", "key_range", "[", "1", "]", "]", "self", ".", "x_types", "[", "idx", "]", "=", "'range_continuous'", "elif", "key_type", "==", "'choice'", ":", "self", ".", "x_bounds", "[", "idx", "]", "=", "key_range", "for", "key_value", "in", "key_range", ":", "if", "not", "isinstance", "(", "key_value", ",", "(", "int", ",", "float", ")", ")", ":", "raise", "RuntimeError", "(", "\"Metis Tuner only support numerical choice.\"", ")", "self", ".", "x_types", "[", "idx", "]", "=", "'discrete_int'", "else", ":", "logger", ".", "info", "(", "\"Metis Tuner doesn't support this kind of variable: %s\"", ",", "str", "(", "key_type", ")", ")", "raise", "RuntimeError", "(", "\"Metis Tuner doesn't support this kind of variable: %s\"", "%", "str", "(", "key_type", ")", ")", "else", ":", "logger", ".", "info", "(", "\"The format of search space is not a dict.\"", ")", "raise", "RuntimeError", "(", "\"The format of search space is not a dict.\"", ")", "self", ".", "minimize_starting_points", "=", "_rand_init", "(", "self", ".", "x_bounds", ",", "self", ".", "x_types", ",", "self", ".", "selection_num_starting_points", ")" ]
https://github.com/microsoft/nni/blob/31f11f51249660930824e888af0d4e022823285c/nni/algorithms/hpo/metis_tuner/metis_tuner.py#L148-L214
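Note: a search space that exercises each `_type` this tuner accepts; the keys and ranges are illustrative, `tuner` is assumed to be an instantiated MetisTuner, and `choice` values must be numerical per the check above:

search_space = {
    'batch_size': {'_type': 'choice', '_value': [16, 32, 64]},
    'hidden_size': {'_type': 'randint', '_value': [64, 256]},
    'learning_rate': {'_type': 'uniform', '_value': [1e-4, 1e-1]},
    'dropout_rate': {'_type': 'quniform', '_value': [0.1, 0.9, 0.1]},
}
tuner.update_search_space(search_space)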
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/multiprocessing/resource_sharer.py
python
_ResourceSharer._start
(self)
[]
def _start(self): from .connection import Listener assert self._listener is None util.debug('starting listener and thread for sending handles') self._listener = Listener(authkey=process.current_process().authkey) self._address = self._listener.address t = threading.Thread(target=self._serve) t.daemon = True t.start() self._thread = t
[ "def", "_start", "(", "self", ")", ":", "from", ".", "connection", "import", "Listener", "assert", "self", ".", "_listener", "is", "None", "util", ".", "debug", "(", "'starting listener and thread for sending handles'", ")", "self", ".", "_listener", "=", "Listener", "(", "authkey", "=", "process", ".", "current_process", "(", ")", ".", "authkey", ")", "self", ".", "_address", "=", "self", ".", "_listener", ".", "address", "t", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_serve", ")", "t", ".", "daemon", "=", "True", "t", ".", "start", "(", ")", "self", ".", "_thread", "=", "t" ]
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/multiprocessing/resource_sharer.py#L126-L135
passiomatic/coldsweat
f48961e21d192b9b19415e9290307314df3820f3
coldsweat/frontend.py
python
FrontendApp._redirect
(self, klass, location)
return response
Return a temporary or permanent redirect response object. Caller may return it or raise it.
Return a temporary or permanent redirect response object. Caller may return it or raise it.
[ "Return", "a", "temporary", "or", "permanent", "redirect", "response", "object", ".", "Caller", "may", "return", "it", "or", "raise", "it", "." ]
def _redirect(self, klass, location): ''' Return a temporary or permament redirect response object. Caller may return it or raise it. ''' response = klass(location=location) if self.alert_message: response.set_cookie('alert_message', self.alert_message) return response
[ "def", "_redirect", "(", "self", ",", "klass", ",", "location", ")", ":", "response", "=", "klass", "(", "location", "=", "location", ")", "if", "self", ".", "alert_message", ":", "response", ".", "set_cookie", "(", "'alert_message'", ",", "self", ".", "alert_message", ")", "return", "response" ]
https://github.com/passiomatic/coldsweat/blob/f48961e21d192b9b19415e9290307314df3820f3/coldsweat/frontend.py#L536-L544
JDAI-CV/fast-reid
31d99b793fe0937461b9c9bc8a8a11f88bf5642c
fastreid/evaluation/roc.py
python
evaluate_roc
( distmat, q_pids, g_pids, q_camids, g_camids, use_cython=True )
Evaluates ROC. Args: distmat (numpy.ndarray): distance matrix of shape (num_query, num_gallery). q_pids (numpy.ndarray): 1-D array containing person identities of each query instance. g_pids (numpy.ndarray): 1-D array containing person identities of each gallery instance. q_camids (numpy.ndarray): 1-D array containing camera views under which each query instance is captured. g_camids (numpy.ndarray): 1-D array containing camera views under which each gallery instance is captured. use_cython (bool, optional): use cython code for evaluation. Default is True. This is highly recommended as the cython code can speed up the ROC computation by more than 10x. This requires Cython to be installed.
Evaluates ROC. Args: distmat (numpy.ndarray): distance matrix of shape (num_query, num_gallery). q_pids (numpy.ndarray): 1-D array containing person identities of each query instance. g_pids (numpy.ndarray): 1-D array containing person identities of each gallery instance. q_camids (numpy.ndarray): 1-D array containing camera views under which each query instance is captured. g_camids (numpy.ndarray): 1-D array containing camera views under which each gallery instance is captured. use_cython (bool, optional): use cython code for evaluation. Default is True. This is highly recommended as the cython code can speed up the ROC computation by more than 10x. This requires Cython to be installed.
[ "Evaluates", "CMC", "rank", ".", "Args", ":", "distmat", "(", "numpy", ".", "ndarray", ")", ":", "distance", "matrix", "of", "shape", "(", "num_query", "num_gallery", ")", ".", "q_pids", "(", "numpy", ".", "ndarray", ")", ":", "1", "-", "D", "array", "containing", "person", "identities", "of", "each", "query", "instance", ".", "g_pids", "(", "numpy", ".", "ndarray", ")", ":", "1", "-", "D", "array", "containing", "person", "identities", "of", "each", "gallery", "instance", ".", "q_camids", "(", "numpy", ".", "ndarray", ")", ":", "1", "-", "D", "array", "containing", "camera", "views", "under", "which", "each", "query", "instance", "is", "captured", ".", "g_camids", "(", "numpy", ".", "ndarray", ")", ":", "1", "-", "D", "array", "containing", "camera", "views", "under", "which", "each", "gallery", "instance", "is", "captured", ".", "use_cython", "(", "bool", "optional", ")", ":", "use", "cython", "code", "for", "evaluation", ".", "Default", "is", "True", ".", "This", "is", "highly", "recommended", "as", "the", "cython", "code", "can", "speed", "up", "the", "cmc", "computation", "by", "more", "than", "10x", ".", "This", "requires", "Cython", "to", "be", "installed", "." ]
def evaluate_roc( distmat, q_pids, g_pids, q_camids, g_camids, use_cython=True ): """Evaluates CMC rank. Args: distmat (numpy.ndarray): distance matrix of shape (num_query, num_gallery). q_pids (numpy.ndarray): 1-D array containing person identities of each query instance. g_pids (numpy.ndarray): 1-D array containing person identities of each gallery instance. q_camids (numpy.ndarray): 1-D array containing camera views under which each query instance is captured. g_camids (numpy.ndarray): 1-D array containing camera views under which each gallery instance is captured. use_cython (bool, optional): use cython code for evaluation. Default is True. This is highly recommended as the cython code can speed up the cmc computation by more than 10x. This requires Cython to be installed. """ if use_cython and IS_CYTHON_AVAI: return evaluate_roc_cy(distmat, q_pids, g_pids, q_camids, g_camids) else: return evaluate_roc_py(distmat, q_pids, g_pids, q_camids, g_camids)
[ "def", "evaluate_roc", "(", "distmat", ",", "q_pids", ",", "g_pids", ",", "q_camids", ",", "g_camids", ",", "use_cython", "=", "True", ")", ":", "if", "use_cython", "and", "IS_CYTHON_AVAI", ":", "return", "evaluate_roc_cy", "(", "distmat", ",", "q_pids", ",", "g_pids", ",", "q_camids", ",", "g_camids", ")", "else", ":", "return", "evaluate_roc_py", "(", "distmat", ",", "q_pids", ",", "g_pids", ",", "q_camids", ",", "g_camids", ")" ]
https://github.com/JDAI-CV/fast-reid/blob/31d99b793fe0937461b9c9bc8a8a11f88bf5642c/fastreid/evaluation/roc.py#L64-L90
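Note: a usage sketch with synthetic inputs; shapes follow the Args section above, and the return value is whatever evaluate_roc_py/evaluate_roc_cy produce (their bodies are not part of this record):

import numpy as np

num_q, num_g = 10, 50
distmat = np.random.rand(num_q, num_g)
q_pids = np.random.randint(0, 5, size=num_q)
g_pids = np.random.randint(0, 5, size=num_g)
q_camids = np.zeros(num_q, dtype=np.int64)   # query camera ids
g_camids = np.ones(num_g, dtype=np.int64)    # gallery camera ids
result = evaluate_roc(distmat, q_pids, g_pids, q_camids, g_camids,
                      use_cython=False)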
out0fmemory/GoAgent-Always-Available
c4254984fea633ce3d1893fe5901debd9f22c2a9
server/lib/google/appengine/ext/bulkload/bulkloader_config.py
python
GenericImporter.__reserve_entity_key
(self, entity)
Collect entity key to be reserved if it has a numeric id in its path. Keys to reserve are stored in self.keys_to_reserve. They are not tracked if self.reserve_keys is None. Args: entity: An entity with a key.
Collect entity key to be reserved if it has a numeric id in its path.
[ "Collect", "entity", "key", "to", "be", "reserved", "if", "it", "has", "a", "numeric", "id", "in", "its", "path", "." ]
def __reserve_entity_key(self, entity): """Collect entity key to be reserved if it has a numeric id in its path. Keys to reserve are stored in self.keys_to_reserve. They are not tracked if self.reserve_keys is None. Args: entity: An entity with a key. """ if not self.reserve_keys: return if isinstance(entity, datastore.Entity): if not entity.key(): return elif not entity.has_key(): return key = entity.key() if not key.has_id_or_name(): return for id_or_name in key.to_path()[1::2]: if isinstance(id_or_name, (int, long)): self.keys_to_reserve.append(key) return
[ "def", "__reserve_entity_key", "(", "self", ",", "entity", ")", ":", "if", "not", "self", ".", "reserve_keys", ":", "return", "if", "isinstance", "(", "entity", ",", "datastore", ".", "Entity", ")", ":", "if", "not", "entity", ".", "key", "(", ")", ":", "return", "elif", "not", "entity", ".", "has_key", "(", ")", ":", "return", "key", "=", "entity", ".", "key", "(", ")", "if", "not", "key", ".", "has_id_or_name", "(", ")", ":", "return", "for", "id_or_name", "in", "key", ".", "to_path", "(", ")", "[", "1", ":", ":", "2", "]", ":", "if", "isinstance", "(", "id_or_name", ",", "(", "int", ",", "long", ")", ")", ":", "self", ".", "keys_to_reserve", ".", "append", "(", "key", ")", "return" ]
https://github.com/out0fmemory/GoAgent-Always-Available/blob/c4254984fea633ce3d1893fe5901debd9f22c2a9/server/lib/google/appengine/ext/bulkload/bulkloader_config.py#L412-L440
openhatch/oh-mainline
ce29352a034e1223141dcc2f317030bbc3359a51
vendor/packages/twisted/twisted/conch/ui/ansi.py
python
AnsiParser.parseString
(self, str)
Turn a string input into a list of L{ColorText} elements.
Turn a string input into a list of L{ColorText} elements.
[ "Turn", "a", "string", "input", "into", "a", "list", "of", "L", "{", "ColorText", "}", "elements", "." ]
def parseString(self, str): """ Turn a string input into a list of L{ColorText} elements. """ if self.prepend: str = self.prepend + str self.prepend = '' parts = str.split('\x1B') if len(parts) == 1: self.writeString(self.formatText(parts[0])) else: self.writeString(self.formatText(parts[0])) for s in parts[1:]: L = len(s) i = 0 type = None while i < L: if s[i] not in string.digits+'[;?': break i+=1 if not s: self.prepend = '\x1b' return if s[0]!='[': self.writeString(self.formatText(s[i+1:])) continue else: s=s[1:] i-=1 if i==L-1: self.prepend = '\x1b[' return type = _setmap.get(s[i], None) if type is None: continue if type == AnsiParser.COLOR_SET: self.parseColor(s[:i + 1]) s = s[i + 1:] self.writeString(self.formatText(s)) elif type == AnsiParser.CURSOR_SET: cursor, s = s[:i+1], s[i+1:] self.parseCursor(cursor) self.writeString(self.formatText(s)) elif type == AnsiParser.ERASE_SET: erase, s = s[:i+1], s[i+1:] self.parseErase(erase) self.writeString(self.formatText(s)) elif type == AnsiParser.MODE_SET: mode, s = s[:i+1], s[i+1:] #self.parseErase('2J') self.writeString(self.formatText(s)) elif i == L: self.prepend = '\x1B[' + s else: log.msg('Unhandled ANSI control type: %c' % (s[i],)) s = s[i + 1:] self.writeString(self.formatText(s))
[ "def", "parseString", "(", "self", ",", "str", ")", ":", "if", "self", ".", "prepend", ":", "str", "=", "self", ".", "prepend", "+", "str", "self", ".", "prepend", "=", "''", "parts", "=", "str", ".", "split", "(", "'\\x1B'", ")", "if", "len", "(", "parts", ")", "==", "1", ":", "self", ".", "writeString", "(", "self", ".", "formatText", "(", "parts", "[", "0", "]", ")", ")", "else", ":", "self", ".", "writeString", "(", "self", ".", "formatText", "(", "parts", "[", "0", "]", ")", ")", "for", "s", "in", "parts", "[", "1", ":", "]", ":", "L", "=", "len", "(", "s", ")", "i", "=", "0", "type", "=", "None", "while", "i", "<", "L", ":", "if", "s", "[", "i", "]", "not", "in", "string", ".", "digits", "+", "'[;?'", ":", "break", "i", "+=", "1", "if", "not", "s", ":", "self", ".", "prepend", "=", "'\\x1b'", "return", "if", "s", "[", "0", "]", "!=", "'['", ":", "self", ".", "writeString", "(", "self", ".", "formatText", "(", "s", "[", "i", "+", "1", ":", "]", ")", ")", "continue", "else", ":", "s", "=", "s", "[", "1", ":", "]", "i", "-=", "1", "if", "i", "==", "L", "-", "1", ":", "self", ".", "prepend", "=", "'\\x1b['", "return", "type", "=", "_setmap", ".", "get", "(", "s", "[", "i", "]", ",", "None", ")", "if", "type", "is", "None", ":", "continue", "if", "type", "==", "AnsiParser", ".", "COLOR_SET", ":", "self", ".", "parseColor", "(", "s", "[", ":", "i", "+", "1", "]", ")", "s", "=", "s", "[", "i", "+", "1", ":", "]", "self", ".", "writeString", "(", "self", ".", "formatText", "(", "s", ")", ")", "elif", "type", "==", "AnsiParser", ".", "CURSOR_SET", ":", "cursor", ",", "s", "=", "s", "[", ":", "i", "+", "1", "]", ",", "s", "[", "i", "+", "1", ":", "]", "self", ".", "parseCursor", "(", "cursor", ")", "self", ".", "writeString", "(", "self", ".", "formatText", "(", "s", ")", ")", "elif", "type", "==", "AnsiParser", ".", "ERASE_SET", ":", "erase", ",", "s", "=", "s", "[", ":", "i", "+", "1", "]", ",", "s", "[", "i", "+", "1", ":", "]", "self", ".", "parseErase", "(", "erase", ")", "self", ".", "writeString", "(", "self", ".", "formatText", "(", "s", ")", ")", "elif", "type", "==", "AnsiParser", ".", "MODE_SET", ":", "mode", ",", "s", "=", "s", "[", ":", "i", "+", "1", "]", ",", "s", "[", "i", "+", "1", ":", "]", "#self.parseErase('2J')", "self", ".", "writeString", "(", "self", ".", "formatText", "(", "s", ")", ")", "elif", "i", "==", "L", ":", "self", ".", "prepend", "=", "'\\x1B['", "+", "s", "else", ":", "log", ".", "msg", "(", "'Unhandled ANSI control type: %c'", "%", "(", "s", "[", "i", "]", ",", ")", ")", "s", "=", "s", "[", "i", "+", "1", ":", "]", "self", ".", "writeString", "(", "self", ".", "formatText", "(", "s", ")", ")" ]
https://github.com/openhatch/oh-mainline/blob/ce29352a034e1223141dcc2f317030bbc3359a51/vendor/packages/twisted/twisted/conch/ui/ansi.py#L97-L156
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_hxb2/lib/python3.5/site-packages/pip/_vendor/requests/packages/urllib3/request.py
python
RequestMethods.request_encode_body
(self, method, url, fields=None, headers=None, encode_multipart=True, multipart_boundary=None, **urlopen_kw)
return self.urlopen(method, url, **extra_kw)
Make a request using :meth:`urlopen` with the ``fields`` encoded in the body. This is useful for request methods like POST, PUT, PATCH, etc. When ``encode_multipart=True`` (default), then :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode the payload with the appropriate content type. Otherwise :meth:`urllib.urlencode` is used with the 'application/x-www-form-urlencoded' content type. Multipart encoding must be used when posting files, and it's reasonably safe to use it at other times too. However, it may break request signing, such as with OAuth. Supports an optional ``fields`` parameter of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) tuple where the MIME type is optional. For example:: fields = { 'foo': 'bar', 'fakefile': ('foofile.txt', 'contents of foofile'), 'realfile': ('barfile.txt', open('realfile').read()), 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), 'nonamefile': 'contents of nonamefile field', } When uploading a file, providing a filename (the first parameter of the tuple) is optional but recommended to best mimic the behavior of browsers. Note that if ``headers`` are supplied, the 'Content-Type' header will be overwritten because it depends on the dynamic random boundary string which is used to compose the body of the request. The random boundary string can be explicitly set with the ``multipart_boundary`` parameter.
Make a request using :meth:`urlopen` with the ``fields`` encoded in the body. This is useful for request methods like POST, PUT, PATCH, etc.
[ "Make", "a", "request", "using", ":", "meth", ":", "urlopen", "with", "the", "fields", "encoded", "in", "the", "body", ".", "This", "is", "useful", "for", "request", "methods", "like", "POST", "PUT", "PATCH", "etc", "." ]
def request_encode_body(self, method, url, fields=None, headers=None, encode_multipart=True, multipart_boundary=None, **urlopen_kw): """ Make a request using :meth:`urlopen` with the ``fields`` encoded in the body. This is useful for request methods like POST, PUT, PATCH, etc. When ``encode_multipart=True`` (default), then :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode the payload with the appropriate content type. Otherwise :meth:`urllib.urlencode` is used with the 'application/x-www-form-urlencoded' content type. Multipart encoding must be used when posting files, and it's reasonably safe to use it in other times too. However, it may break request signing, such as with OAuth. Supports an optional ``fields`` parameter of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) tuple where the MIME type is optional. For example:: fields = { 'foo': 'bar', 'fakefile': ('foofile.txt', 'contents of foofile'), 'realfile': ('barfile.txt', open('realfile').read()), 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), 'nonamefile': 'contents of nonamefile field', } When uploading a file, providing a filename (the first parameter of the tuple) is optional but recommended to best mimick behavior of browsers. Note that if ``headers`` are supplied, the 'Content-Type' header will be overwritten because it depends on the dynamic random boundary string which is used to compose the body of the request. The random boundary string can be explicitly set with the ``multipart_boundary`` parameter. """ if headers is None: headers = self.headers extra_kw = {'headers': {}} if fields: if 'body' in urlopen_kw: raise TypeError( "request got values for both 'fields' and 'body', can only specify one.") if encode_multipart: body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary) else: body, content_type = urlencode(fields), 'application/x-www-form-urlencoded' extra_kw['body'] = body extra_kw['headers'] = {'Content-Type': content_type} extra_kw['headers'].update(headers) extra_kw.update(urlopen_kw) return self.urlopen(method, url, **extra_kw)
[ "def", "request_encode_body", "(", "self", ",", "method", ",", "url", ",", "fields", "=", "None", ",", "headers", "=", "None", ",", "encode_multipart", "=", "True", ",", "multipart_boundary", "=", "None", ",", "*", "*", "urlopen_kw", ")", ":", "if", "headers", "is", "None", ":", "headers", "=", "self", ".", "headers", "extra_kw", "=", "{", "'headers'", ":", "{", "}", "}", "if", "fields", ":", "if", "'body'", "in", "urlopen_kw", ":", "raise", "TypeError", "(", "\"request got values for both 'fields' and 'body', can only specify one.\"", ")", "if", "encode_multipart", ":", "body", ",", "content_type", "=", "encode_multipart_formdata", "(", "fields", ",", "boundary", "=", "multipart_boundary", ")", "else", ":", "body", ",", "content_type", "=", "urlencode", "(", "fields", ")", ",", "'application/x-www-form-urlencoded'", "extra_kw", "[", "'body'", "]", "=", "body", "extra_kw", "[", "'headers'", "]", "=", "{", "'Content-Type'", ":", "content_type", "}", "extra_kw", "[", "'headers'", "]", ".", "update", "(", "headers", ")", "extra_kw", ".", "update", "(", "urlopen_kw", ")", "return", "self", ".", "urlopen", "(", "method", ",", "url", ",", "*", "*", "extra_kw", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_hxb2/lib/python3.5/site-packages/pip/_vendor/requests/packages/urllib3/request.py#L92-L151
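Note: urllib3's PoolManager mixes in RequestMethods, so the documented fields contract can be exercised directly; a sketch against a placeholder URL:

import urllib3

http = urllib3.PoolManager()
resp = http.request_encode_body(
    'POST', 'http://httpbin.org/post',
    fields={
        'foo': 'bar',
        'fakefile': ('foofile.txt', 'contents of foofile'),
    })
# multipart/form-data by default; pass encode_multipart=False to send
# application/x-www-form-urlencoded instead (no file tuples allowed then)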
hakril/PythonForWindows
61e027a678d5b87aa64fcf8a37a6661a86236589
windows/alpc.py
python
MessageAttribute._extract_alpc_attributes_values
(self, value)
return [KNOWN_ALPC_ATTRIBUTES_MAPPING[x] for x in attrs]
[]
def _extract_alpc_attributes_values(self, value): attrs = [] for mask in (1 << i for i in range(64)): if value & mask: attrs.append(mask) return [KNOWN_ALPC_ATTRIBUTES_MAPPING[x] for x in attrs]
[ "def", "_extract_alpc_attributes_values", "(", "self", ",", "value", ")", ":", "attrs", "=", "[", "]", "for", "mask", "in", "(", "1", "<<", "i", "for", "i", "in", "range", "(", "64", ")", ")", ":", "if", "value", "&", "mask", ":", "attrs", ".", "append", "(", "mask", ")", "return", "[", "KNOWN_ALPC_ATTRIBUTES_MAPPING", "[", "x", "]", "for", "x", "in", "attrs", "]" ]
https://github.com/hakril/PythonForWindows/blob/61e027a678d5b87aa64fcf8a37a6661a86236589/windows/alpc.py#L272-L277
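Note: the method is a generic walk over all 64 single-bit masks; the same pattern in isolation, with a hypothetical flag table standing in for KNOWN_ALPC_ATTRIBUTES_MAPPING:

KNOWN_FLAGS = {0x1: 'FLAG_A', 0x2: 'FLAG_B', 0x8: 'FLAG_C'}  # hypothetical

def extract_flags(value, mapping=KNOWN_FLAGS):
    # keep every set bit; like the original, an unknown bit raises KeyError
    return [mapping[1 << i] for i in range(64) if value & (1 << i)]

extract_flags(0x9)  # -> ['FLAG_A', 'FLAG_C']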
nltk/nltk_contrib
c9da2c29777ca9df650740145f1f4a375ccac961
nltk_contrib/toolbox/text.py
python
Text.set_file
(self, file)
Change file path set upon initialization.
Change file path set upon initialization.
[ "Change", "file", "path", "set", "upon", "initialization", "." ]
def set_file(self, file): """Change file path set upon initialization.""" self._file = file
[ "def", "set_file", "(", "self", ",", "file", ")", ":", "self", ".", "_file", "=", "file" ]
https://github.com/nltk/nltk_contrib/blob/c9da2c29777ca9df650740145f1f4a375ccac961/nltk_contrib/toolbox/text.py#L489-L491
devitocodes/devito
6abd441e3f5f091775ad332be6b95e017b8cbd16
devito/types/basic.py
python
AbstractFunction.__shape_setup__
(cls, **kwargs)
return ()
Extract the object shape from ``kwargs``.
Extract the object shape from ``kwargs``.
[ "Extract", "the", "object", "shape", "from", "kwargs", "." ]
def __shape_setup__(cls, **kwargs): """Extract the object shape from ``kwargs``.""" return ()
[ "def", "__shape_setup__", "(", "cls", ",", "*", "*", "kwargs", ")", ":", "return", "(", ")" ]
https://github.com/devitocodes/devito/blob/6abd441e3f5f091775ad332be6b95e017b8cbd16/devito/types/basic.py#L806-L808
agoragames/kairos
0b062d543b0f4a46df460fa0eb6ec281232ab179
kairos/redis_backend.py
python
RedisBackend.delete
(self, name)
return len(keys)
Delete all the data in a named timeseries.
Delete all the data in a named timeseries.
[ "Delete", "all", "the", "data", "in", "a", "named", "timeseries", "." ]
def delete(self, name): ''' Delete all the data in a named timeseries. ''' keys = self._client.keys('%s%s:*'%(self._prefix,name)) pipe = self._client.pipeline(transaction=False) for key in keys: pipe.delete( key ) pipe.execute() # Could be not technically the exact number of keys deleted, but is a close # enough approximation return len(keys)
[ "def", "delete", "(", "self", ",", "name", ")", ":", "keys", "=", "self", ".", "_client", ".", "keys", "(", "'%s%s:*'", "%", "(", "self", ".", "_prefix", ",", "name", ")", ")", "pipe", "=", "self", ".", "_client", ".", "pipeline", "(", "transaction", "=", "False", ")", "for", "key", "in", "keys", ":", "pipe", ".", "delete", "(", "key", ")", "pipe", ".", "execute", "(", ")", "# Could be not technically the exact number of keys deleted, but is a close", "# enough approximation", "return", "len", "(", "keys", ")" ]
https://github.com/agoragames/kairos/blob/0b062d543b0f4a46df460fa0eb6ec281232ab179/kairos/redis_backend.py#L175-L188
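Note: KEYS is O(keyspace) and blocks the Redis server, which is why large deployments usually prefer SCAN. A non-blocking variant of the same delete, sketched against redis-py's scan_iter (not how kairos itself does it):

def delete_scan(client, prefix, name):
    # cursor-based SCAN instead of KEYS: same pattern, no server stall
    count = 0
    pipe = client.pipeline(transaction=False)
    for key in client.scan_iter('%s%s:*' % (prefix, name)):
        pipe.delete(key)
        count += 1
    pipe.execute()
    return count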
ddbourgin/numpy-ml
b0359af5285fbf9699d64fd5ec059493228af03e
numpy_ml/neural_nets/modules/modules.py
python
MultiHeadedAttentionModule.gradients
(self)
return { "components": { "Q": self.projections["Q"].gradients, "K": self.projections["K"].gradients, "V": self.projections["V"].gradients, "O": self.projections["O"].gradients, "attention": self.attention.gradients, } }
A dictionary of the accumulated module parameter gradients.
A dictionary of the accumulated module parameter gradients.
[ "A", "dictionary", "of", "the", "accumulated", "module", "parameter", "gradients", "." ]
def gradients(self): """A dictionary of the accumulated module parameter gradients.""" return { "components": { "Q": self.projections["Q"].gradients, "K": self.projections["K"].gradients, "V": self.projections["V"].gradients, "O": self.projections["O"].gradients, "attention": self.attention.gradients, } }
[ "def", "gradients", "(", "self", ")", ":", "return", "{", "\"components\"", ":", "{", "\"Q\"", ":", "self", ".", "projections", "[", "\"Q\"", "]", ".", "gradients", ",", "\"K\"", ":", "self", ".", "projections", "[", "\"K\"", "]", ".", "gradients", ",", "\"V\"", ":", "self", ".", "projections", "[", "\"V\"", "]", ".", "gradients", ",", "\"O\"", ":", "self", ".", "projections", "[", "\"O\"", "]", ".", "gradients", ",", "\"attention\"", ":", "self", ".", "attention", ".", "gradients", ",", "}", "}" ]
https://github.com/ddbourgin/numpy-ml/blob/b0359af5285fbf9699d64fd5ec059493228af03e/numpy_ml/neural_nets/modules/modules.py#L1384-L1394
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/jupyter_client/threaded.py
python
IOLoopThread.run
(self)
Run my loop, ignoring EINTR events in the poller
Run my loop, ignoring EINTR events in the poller
[ "Run", "my", "loop", "ignoring", "EINTR", "events", "in", "the", "poller" ]
def run(self): """Run my loop, ignoring EINTR events in the poller""" if 'asyncio' in sys.modules: # tornado may be using asyncio, # ensure an eventloop exists for this thread import asyncio asyncio.set_event_loop(asyncio.new_event_loop()) self.ioloop = ioloop.IOLoop() # signal that self.ioloop is defined self._start_event.set() while True: try: self.ioloop.start() except ZMQError as e: if e.errno == errno.EINTR: continue else: raise except Exception: if self._exiting: break else: raise else: break
[ "def", "run", "(", "self", ")", ":", "if", "'asyncio'", "in", "sys", ".", "modules", ":", "# tornado may be using asyncio,", "# ensure an eventloop exists for this thread", "import", "asyncio", "asyncio", ".", "set_event_loop", "(", "asyncio", ".", "new_event_loop", "(", ")", ")", "self", ".", "ioloop", "=", "ioloop", ".", "IOLoop", "(", ")", "# signal that self.ioloop is defined", "self", ".", "_start_event", ".", "set", "(", ")", "while", "True", ":", "try", ":", "self", ".", "ioloop", ".", "start", "(", ")", "except", "ZMQError", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "EINTR", ":", "continue", "else", ":", "raise", "except", "Exception", ":", "if", "self", ".", "_exiting", ":", "break", "else", ":", "raise", "else", ":", "break" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/jupyter_client/threaded.py#L176-L200
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/Python-2.7.9/Lib/hotshot/__init__.py
python
Profile.stop
(self)
Stop the profiler.
Stop the profiler.
[ "Stop", "the", "profiler", "." ]
def stop(self): """Stop the profiler.""" self._prof.stop()
[ "def", "stop", "(", "self", ")", ":", "self", ".", "_prof", ".", "stop", "(", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/Python-2.7.9/Lib/hotshot/__init__.py#L38-L40
bruderstein/PythonScript
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
PythonLib/min/modulefinder.py
python
ModuleFinder.ensure_fromlist
(self, m, fromlist, recursive=0)
[]
def ensure_fromlist(self, m, fromlist, recursive=0): self.msg(4, "ensure_fromlist", m, fromlist, recursive) for sub in fromlist: if sub == "*": if not recursive: all = self.find_all_submodules(m) if all: self.ensure_fromlist(m, all, 1) elif not hasattr(m, sub): subname = "%s.%s" % (m.__name__, sub) submod = self.import_module(sub, subname, m) if not submod: raise ImportError("No module named " + subname)
[ "def", "ensure_fromlist", "(", "self", ",", "m", ",", "fromlist", ",", "recursive", "=", "0", ")", ":", "self", ".", "msg", "(", "4", ",", "\"ensure_fromlist\"", ",", "m", ",", "fromlist", ",", "recursive", ")", "for", "sub", "in", "fromlist", ":", "if", "sub", "==", "\"*\"", ":", "if", "not", "recursive", ":", "all", "=", "self", ".", "find_all_submodules", "(", "m", ")", "if", "all", ":", "self", ".", "ensure_fromlist", "(", "m", ",", "all", ",", "1", ")", "elif", "not", "hasattr", "(", "m", ",", "sub", ")", ":", "subname", "=", "\"%s.%s\"", "%", "(", "m", ".", "__name__", ",", "sub", ")", "submod", "=", "self", ".", "import_module", "(", "sub", ",", "subname", ",", "m", ")", "if", "not", "submod", ":", "raise", "ImportError", "(", "\"No module named \"", "+", "subname", ")" ]
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/min/modulefinder.py#L258-L270
OCA/l10n-spain
99050907670a70307fcd8cdfb6f3400d9e120df4
l10n_es_aeat_mod115/models/mod115.py
python
L10nEsAeatMod115Report._check_tipo_declaracion
(self)
[]
def _check_tipo_declaracion(self): for rec in self: if rec.casilla_05 <= 0.0 and rec.tipo_declaracion != "N": raise ValidationError( _( "The result of the declaration is negative. " "You should select another Result type" ) ) elif rec.casilla_05 > 0.0 and rec.tipo_declaracion == "N": raise ValidationError( _( "The result of the declaration is positive. " "You should select another Result type" ) )
[ "def", "_check_tipo_declaracion", "(", "self", ")", ":", "for", "rec", "in", "self", ":", "if", "rec", ".", "casilla_05", "<=", "0.0", "and", "rec", ".", "tipo_declaracion", "!=", "\"N\"", ":", "raise", "ValidationError", "(", "_", "(", "\"The result of the declaration is negative. \"", "\"You should select another Result type\"", ")", ")", "elif", "rec", ".", "casilla_05", ">", "0.0", "and", "rec", ".", "tipo_declaracion", "==", "\"N\"", ":", "raise", "ValidationError", "(", "_", "(", "\"The result of the declaration is positive. \"", "\"You should select another Result type\"", ")", ")" ]
https://github.com/OCA/l10n-spain/blob/99050907670a70307fcd8cdfb6f3400d9e120df4/l10n_es_aeat_mod115/models/mod115.py#L87-L102
sethmlarson/virtualbox-python
984a6e2cb0e8996f4df40f4444c1528849f1c70d
virtualbox/library.py
python
IUnattended.product_key
(self, value)
return self._set_attr("productKey", value)
[]
def product_key(self, value): if not isinstance(value, basestring): raise TypeError("value is not an instance of basestring") return self._set_attr("productKey", value)
[ "def", "product_key", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "basestring", ")", ":", "raise", "TypeError", "(", "\"value is not an instance of basestring\"", ")", "return", "self", ".", "_set_attr", "(", "\"productKey\"", ",", "value", ")" ]
https://github.com/sethmlarson/virtualbox-python/blob/984a6e2cb0e8996f4df40f4444c1528849f1c70d/virtualbox/library.py#L12235-L12238
mars-project/mars
6afd7ed86db77f29cc9470485698ef192ecc6d33
mars/dataframe/sort/sort_values.py
python
series_sort_values
( series, axis=0, ascending=True, inplace=False, kind="quicksort", na_position="last", ignore_index=False, parallel_kind="PSRS", psrs_kinds=None, )
Sort by the values. Sort a Series in ascending or descending order by some criterion. Parameters ---------- series : input Series. axis : {0 or 'index'}, default 0 Axis to direct sorting. The value 'index' is accepted for compatibility with DataFrame.sort_values. ascending : bool, default True If True, sort values in ascending order, otherwise descending. inplace : bool, default False If True, perform operation in-place. kind : {'quicksort', 'mergesort' or 'heapsort'}, default 'quicksort' Choice of sorting algorithm. See also :func:`numpy.sort` for more information. 'mergesort' is the only stable algorithm. na_position : {'first' or 'last'}, default 'last' Argument 'first' puts NaNs at the beginning, 'last' puts NaNs at the end. ignore_index : bool, default False If True, the resulting axis will be labeled 0, 1, …, n - 1. Returns ------- Series Series ordered by values. Examples -------- >>> import mars.dataframe as md >>> raw = pd.Series([np.nan, 1, 3, 10, 5]) >>> s = md.Series(raw) >>> s.execute() 0 NaN 1 1.0 2 3.0 3 10.0 4 5.0 dtype: float64 Sort values ascending order (default behaviour) >>> s.sort_values(ascending=True).execute() 1 1.0 2 3.0 4 5.0 3 10.0 0 NaN dtype: float64 Sort values descending order >>> s.sort_values(ascending=False).execute() 3 10.0 4 5.0 2 3.0 1 1.0 0 NaN dtype: float64 Sort values inplace >>> s.sort_values(ascending=False, inplace=True) >>> s.execute() 3 10.0 4 5.0 2 3.0 1 1.0 0 NaN dtype: float64 Sort values putting NAs first
Sort by the values.
[ "Sort", "by", "the", "values", "." ]
def series_sort_values( series, axis=0, ascending=True, inplace=False, kind="quicksort", na_position="last", ignore_index=False, parallel_kind="PSRS", psrs_kinds=None, ): """ Sort by the values. Sort a Series in ascending or descending order by some criterion. Parameters ---------- series : input Series. axis : {0 or 'index'}, default 0 Axis to direct sorting. The value 'index' is accepted for compatibility with DataFrame.sort_values. ascending : bool, default True If True, sort values in ascending order, otherwise descending. inplace : bool, default False If True, perform operation in-place. kind : {'quicksort', 'mergesort' or 'heapsort'}, default 'quicksort' Choice of sorting algorithm. See also :func:`numpy.sort` for more information. 'mergesort' is the only stable algorithm. na_position : {'first' or 'last'}, default 'last' Argument 'first' puts NaNs at the beginning, 'last' puts NaNs at the end. ignore_index : bool, default False If True, the resulting axis will be labeled 0, 1, …, n - 1. Returns ------- Series Series ordered by values. Examples -------- >>> import mars.dataframe as md >>> raw = pd.Series([np.nan, 1, 3, 10, 5]) >>> s = md.Series(raw) >>> s.execute() 0 NaN 1 1.0 2 3.0 3 10.0 4 5.0 dtype: float64 Sort values ascending order (default behaviour) >>> s.sort_values(ascending=True).execute() 1 1.0 2 3.0 4 5.0 3 10.0 0 NaN dtype: float64 Sort values descending order >>> s.sort_values(ascending=False).execute() 3 10.0 4 5.0 2 3.0 1 1.0 0 NaN dtype: float64 Sort values inplace >>> s.sort_values(ascending=False, inplace=True) >>> s.execute() 3 10.0 4 5.0 2 3.0 1 1.0 0 NaN dtype: float64 Sort values putting NAs first """ if na_position not in ["last", "first"]: # pragma: no cover raise TypeError(f"invalid na_position: {na_position}") axis = validate_axis(axis, series) if axis != 0: raise NotImplementedError("Only support sort on axis 0") psrs_kinds = _validate_sort_psrs_kinds(psrs_kinds) op = DataFrameSortValues( axis=axis, ascending=ascending, inplace=inplace, kind=kind, na_position=na_position, ignore_index=ignore_index, parallel_kind=parallel_kind, psrs_kinds=psrs_kinds, output_types=[OutputType.series], gpu=series.op.is_gpu(), ) sorted_series = op(series) if inplace: series.data = sorted_series.data else: return sorted_series
[ "def", "series_sort_values", "(", "series", ",", "axis", "=", "0", ",", "ascending", "=", "True", ",", "inplace", "=", "False", ",", "kind", "=", "\"quicksort\"", ",", "na_position", "=", "\"last\"", ",", "ignore_index", "=", "False", ",", "parallel_kind", "=", "\"PSRS\"", ",", "psrs_kinds", "=", "None", ",", ")", ":", "if", "na_position", "not", "in", "[", "\"last\"", ",", "\"first\"", "]", ":", "# pragma: no cover", "raise", "TypeError", "(", "f\"invalid na_position: {na_position}\"", ")", "axis", "=", "validate_axis", "(", "axis", ",", "series", ")", "if", "axis", "!=", "0", ":", "raise", "NotImplementedError", "(", "\"Only support sort on axis 0\"", ")", "psrs_kinds", "=", "_validate_sort_psrs_kinds", "(", "psrs_kinds", ")", "op", "=", "DataFrameSortValues", "(", "axis", "=", "axis", ",", "ascending", "=", "ascending", ",", "inplace", "=", "inplace", ",", "kind", "=", "kind", ",", "na_position", "=", "na_position", ",", "ignore_index", "=", "ignore_index", ",", "parallel_kind", "=", "parallel_kind", ",", "psrs_kinds", "=", "psrs_kinds", ",", "output_types", "=", "[", "OutputType", ".", "series", "]", ",", "gpu", "=", "series", ".", "op", ".", "is_gpu", "(", ")", ",", ")", "sorted_series", "=", "op", "(", "series", ")", "if", "inplace", ":", "series", ".", "data", "=", "sorted_series", ".", "data", "else", ":", "return", "sorted_series" ]
https://github.com/mars-project/mars/blob/6afd7ed86db77f29cc9470485698ef192ecc6d33/mars/dataframe/sort/sort_values.py#L270-L379
ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework
cb692f527e4e819b6c228187c5702d990a180043
external/Scripting Engine/Xenotix Python Scripting Engine/bin/x86/Debug/Lib/logging/__init__.py
python
Logger.removeHandler
(self, hdlr)
Remove the specified handler from this logger.
Remove the specified handler from this logger.
[ "Remove", "the", "specified", "handler", "from", "this", "logger", "." ]
def removeHandler(self, hdlr): """ Remove the specified handler from this logger. """ _acquireLock() try: if hdlr in self.handlers: self.handlers.remove(hdlr) finally: _releaseLock()
[ "def", "removeHandler", "(", "self", ",", "hdlr", ")", ":", "_acquireLock", "(", ")", "try", ":", "if", "hdlr", "in", "self", ".", "handlers", ":", "self", ".", "handlers", ".", "remove", "(", "hdlr", ")", "finally", ":", "_releaseLock", "(", ")" ]
https://github.com/ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework/blob/cb692f527e4e819b6c228187c5702d990a180043/external/Scripting Engine/Xenotix Python Scripting Engine/bin/x86/Debug/Lib/logging/__init__.py#L1273-L1282