nwo
stringlengths 5
86
| sha
stringlengths 40
40
| path
stringlengths 4
189
| language
stringclasses 1
value | identifier
stringlengths 1
94
| parameters
stringlengths 2
4.03k
| argument_list
stringclasses 1
value | return_statement
stringlengths 0
11.5k
| docstring
stringlengths 1
33.2k
| docstring_summary
stringlengths 0
5.15k
| docstring_tokens
sequence | function
stringlengths 34
151k
| function_tokens
sequence | url
stringlengths 90
278
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/arrays/_ranges.py | python | _generate_range_overflow_safe_signed | (
endpoint: int, periods: int, stride: int, side: str
) | A special case for _generate_range_overflow_safe where `periods * stride`
can be calculated without overflowing int64 bounds. | A special case for _generate_range_overflow_safe where `periods * stride`
can be calculated without overflowing int64 bounds. | [
"A",
"special",
"case",
"for",
"_generate_range_overflow_safe",
"where",
"periods",
"*",
"stride",
"can",
"be",
"calculated",
"without",
"overflowing",
"int64",
"bounds",
"."
] | def _generate_range_overflow_safe_signed(
endpoint: int, periods: int, stride: int, side: str
) -> int:
"""
A special case for _generate_range_overflow_safe where `periods * stride`
can be calculated without overflowing int64 bounds.
"""
assert side in ["start", "end"]
if side == "end":
stride *= -1
with np.errstate(over="raise"):
addend = np.int64(periods) * np.int64(stride)
try:
# easy case with no overflows
return np.int64(endpoint) + addend
except (FloatingPointError, OverflowError):
# with endpoint negative and addend positive we risk
# FloatingPointError; with reversed signed we risk OverflowError
pass
# if stride and endpoint had opposite signs, then endpoint + addend
# should never overflow. so they must have the same signs
assert (stride > 0 and endpoint >= 0) or (stride < 0 and endpoint <= 0)
if stride > 0:
# watch out for very special case in which we just slightly
# exceed implementation bounds, but when passing the result to
# np.arange will get a result slightly within the bounds
result = np.uint64(endpoint) + np.uint64(addend)
i64max = np.uint64(np.iinfo(np.int64).max)
assert result > i64max
if result <= i64max + np.uint64(stride):
return result
raise OutOfBoundsDatetime(
f"Cannot generate range with {side}={endpoint} and periods={periods}"
) | [
"def",
"_generate_range_overflow_safe_signed",
"(",
"endpoint",
":",
"int",
",",
"periods",
":",
"int",
",",
"stride",
":",
"int",
",",
"side",
":",
"str",
")",
"->",
"int",
":",
"assert",
"side",
"in",
"[",
"\"start\"",
",",
"\"end\"",
"]",
"if",
"side",
"==",
"\"end\"",
":",
"stride",
"*=",
"-",
"1",
"with",
"np",
".",
"errstate",
"(",
"over",
"=",
"\"raise\"",
")",
":",
"addend",
"=",
"np",
".",
"int64",
"(",
"periods",
")",
"*",
"np",
".",
"int64",
"(",
"stride",
")",
"try",
":",
"# easy case with no overflows",
"return",
"np",
".",
"int64",
"(",
"endpoint",
")",
"+",
"addend",
"except",
"(",
"FloatingPointError",
",",
"OverflowError",
")",
":",
"# with endpoint negative and addend positive we risk",
"# FloatingPointError; with reversed signed we risk OverflowError",
"pass",
"# if stride and endpoint had opposite signs, then endpoint + addend",
"# should never overflow. so they must have the same signs",
"assert",
"(",
"stride",
">",
"0",
"and",
"endpoint",
">=",
"0",
")",
"or",
"(",
"stride",
"<",
"0",
"and",
"endpoint",
"<=",
"0",
")",
"if",
"stride",
">",
"0",
":",
"# watch out for very special case in which we just slightly",
"# exceed implementation bounds, but when passing the result to",
"# np.arange will get a result slightly within the bounds",
"result",
"=",
"np",
".",
"uint64",
"(",
"endpoint",
")",
"+",
"np",
".",
"uint64",
"(",
"addend",
")",
"i64max",
"=",
"np",
".",
"uint64",
"(",
"np",
".",
"iinfo",
"(",
"np",
".",
"int64",
")",
".",
"max",
")",
"assert",
"result",
">",
"i64max",
"if",
"result",
"<=",
"i64max",
"+",
"np",
".",
"uint64",
"(",
"stride",
")",
":",
"return",
"result",
"raise",
"OutOfBoundsDatetime",
"(",
"f\"Cannot generate range with {side}={endpoint} and periods={periods}\"",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/arrays/_ranges.py#L153-L190 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/mailbox.py | python | Maildir._lookup | (self, key) | Use TOC to return subpath for given key, or raise a KeyError. | Use TOC to return subpath for given key, or raise a KeyError. | [
"Use",
"TOC",
"to",
"return",
"subpath",
"for",
"given",
"key",
"or",
"raise",
"a",
"KeyError",
"."
] | def _lookup(self, key):
"""Use TOC to return subpath for given key, or raise a KeyError."""
try:
if os.path.exists(os.path.join(self._path, self._toc[key])):
return self._toc[key]
except KeyError:
pass
self._refresh()
try:
return self._toc[key]
except KeyError:
raise KeyError('No message with key: %s' % key) from None | [
"def",
"_lookup",
"(",
"self",
",",
"key",
")",
":",
"try",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_path",
",",
"self",
".",
"_toc",
"[",
"key",
"]",
")",
")",
":",
"return",
"self",
".",
"_toc",
"[",
"key",
"]",
"except",
"KeyError",
":",
"pass",
"self",
".",
"_refresh",
"(",
")",
"try",
":",
"return",
"self",
".",
"_toc",
"[",
"key",
"]",
"except",
"KeyError",
":",
"raise",
"KeyError",
"(",
"'No message with key: %s'",
"%",
"key",
")",
"from",
"None"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/mailbox.py#L547-L558 |
||
FreeCAD/FreeCAD | ba42231b9c6889b89e064d6d563448ed81e376ec | src/Mod/Arch/importIFCHelper.py | python | buildRelGroups | (ifcfile) | return groups | Build the groups relation table. | Build the groups relation table. | [
"Build",
"the",
"groups",
"relation",
"table",
"."
] | def buildRelGroups(ifcfile):
"""Build the groups relation table."""
groups = {} # { host:[child,...], ... } # used in structural IFC
for r in ifcfile.by_type("IfcRelAssignsToGroup"):
groups.setdefault(r.RelatingGroup.id(), []).extend([e.id() for e in r.RelatedObjects])
return groups | [
"def",
"buildRelGroups",
"(",
"ifcfile",
")",
":",
"groups",
"=",
"{",
"}",
"# { host:[child,...], ... } # used in structural IFC",
"for",
"r",
"in",
"ifcfile",
".",
"by_type",
"(",
"\"IfcRelAssignsToGroup\"",
")",
":",
"groups",
".",
"setdefault",
"(",
"r",
".",
"RelatingGroup",
".",
"id",
"(",
")",
",",
"[",
"]",
")",
".",
"extend",
"(",
"[",
"e",
".",
"id",
"(",
")",
"for",
"e",
"in",
"r",
".",
"RelatedObjects",
"]",
")",
"return",
"groups"
] | https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Arch/importIFCHelper.py#L219-L226 |
|
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib2to3/fixer_util.py | python | is_import | (node) | return node.type in (syms.import_name, syms.import_from) | Returns true if the node is an import statement. | Returns true if the node is an import statement. | [
"Returns",
"true",
"if",
"the",
"node",
"is",
"an",
"import",
"statement",
"."
] | def is_import(node):
"""Returns true if the node is an import statement."""
return node.type in (syms.import_name, syms.import_from) | [
"def",
"is_import",
"(",
"node",
")",
":",
"return",
"node",
".",
"type",
"in",
"(",
"syms",
".",
"import_name",
",",
"syms",
".",
"import_from",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib2to3/fixer_util.py#L290-L292 |
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/propgrid.py | python | PropertyGridManager.GetCurrentPage | (*args, **kwargs) | return _propgrid.PropertyGridManager_GetCurrentPage(*args, **kwargs) | GetCurrentPage(self) -> PropertyGridPage | GetCurrentPage(self) -> PropertyGridPage | [
"GetCurrentPage",
"(",
"self",
")",
"-",
">",
"PropertyGridPage"
] | def GetCurrentPage(*args, **kwargs):
"""GetCurrentPage(self) -> PropertyGridPage"""
return _propgrid.PropertyGridManager_GetCurrentPage(*args, **kwargs) | [
"def",
"GetCurrentPage",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_propgrid",
".",
"PropertyGridManager_GetCurrentPage",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/propgrid.py#L3477-L3479 |
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/setuptools/py3/setuptools/command/build_ext.py | python | get_abi3_suffix | () | Return the file extension for an abi3-compliant Extension() | Return the file extension for an abi3-compliant Extension() | [
"Return",
"the",
"file",
"extension",
"for",
"an",
"abi3",
"-",
"compliant",
"Extension",
"()"
] | def get_abi3_suffix():
"""Return the file extension for an abi3-compliant Extension()"""
for suffix in EXTENSION_SUFFIXES:
if '.abi3' in suffix: # Unix
return suffix
elif suffix == '.pyd': # Windows
return suffix | [
"def",
"get_abi3_suffix",
"(",
")",
":",
"for",
"suffix",
"in",
"EXTENSION_SUFFIXES",
":",
"if",
"'.abi3'",
"in",
"suffix",
":",
"# Unix",
"return",
"suffix",
"elif",
"suffix",
"==",
"'.pyd'",
":",
"# Windows",
"return",
"suffix"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/setuptools/py3/setuptools/command/build_ext.py#L66-L72 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/_core.py | python | Image.SaveFile | (*args, **kwargs) | return _core_.Image_SaveFile(*args, **kwargs) | SaveFile(self, String name, int type) -> bool
Saves an image in the named file. | SaveFile(self, String name, int type) -> bool | [
"SaveFile",
"(",
"self",
"String",
"name",
"int",
"type",
")",
"-",
">",
"bool"
] | def SaveFile(*args, **kwargs):
"""
SaveFile(self, String name, int type) -> bool
Saves an image in the named file.
"""
return _core_.Image_SaveFile(*args, **kwargs) | [
"def",
"SaveFile",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"Image_SaveFile",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L3195-L3201 |
|
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/distutils/filelist.py | python | FileList.include_pattern | (self, pattern, anchor=1, prefix=None, is_regex=0) | return files_found | Select strings (presumably filenames) from 'self.files' that
match 'pattern', a Unix-style wildcard (glob) pattern.
Patterns are not quite the same as implemented by the 'fnmatch'
module: '*' and '?' match non-special characters, where "special"
is platform-dependent: slash on Unix; colon, slash, and backslash on
DOS/Windows; and colon on Mac OS.
If 'anchor' is true (the default), then the pattern match is more
stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
'anchor' is false, both of these will match.
If 'prefix' is supplied, then only filenames starting with 'prefix'
(itself a pattern) and ending with 'pattern', with anything in between
them, will match. 'anchor' is ignored in this case.
If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
'pattern' is assumed to be either a string containing a regex or a
regex object -- no translation is done, the regex is just compiled
and used as-is.
Selected strings will be added to self.files.
Return 1 if files are found. | Select strings (presumably filenames) from 'self.files' that
match 'pattern', a Unix-style wildcard (glob) pattern. | [
"Select",
"strings",
"(",
"presumably",
"filenames",
")",
"from",
"self",
".",
"files",
"that",
"match",
"pattern",
"a",
"Unix",
"-",
"style",
"wildcard",
"(",
"glob",
")",
"pattern",
"."
] | def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
"""Select strings (presumably filenames) from 'self.files' that
match 'pattern', a Unix-style wildcard (glob) pattern.
Patterns are not quite the same as implemented by the 'fnmatch'
module: '*' and '?' match non-special characters, where "special"
is platform-dependent: slash on Unix; colon, slash, and backslash on
DOS/Windows; and colon on Mac OS.
If 'anchor' is true (the default), then the pattern match is more
stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
'anchor' is false, both of these will match.
If 'prefix' is supplied, then only filenames starting with 'prefix'
(itself a pattern) and ending with 'pattern', with anything in between
them, will match. 'anchor' is ignored in this case.
If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
'pattern' is assumed to be either a string containing a regex or a
regex object -- no translation is done, the regex is just compiled
and used as-is.
Selected strings will be added to self.files.
Return 1 if files are found.
"""
# XXX docstring lying about what the special chars are?
files_found = 0
pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
self.debug_print("include_pattern: applying regex r'%s'" %
pattern_re.pattern)
# delayed loading of allfiles list
if self.allfiles is None:
self.findall()
for name in self.allfiles:
if pattern_re.search(name):
self.debug_print(" adding " + name)
self.files.append(name)
files_found = 1
return files_found | [
"def",
"include_pattern",
"(",
"self",
",",
"pattern",
",",
"anchor",
"=",
"1",
",",
"prefix",
"=",
"None",
",",
"is_regex",
"=",
"0",
")",
":",
"# XXX docstring lying about what the special chars are?",
"files_found",
"=",
"0",
"pattern_re",
"=",
"translate_pattern",
"(",
"pattern",
",",
"anchor",
",",
"prefix",
",",
"is_regex",
")",
"self",
".",
"debug_print",
"(",
"\"include_pattern: applying regex r'%s'\"",
"%",
"pattern_re",
".",
"pattern",
")",
"# delayed loading of allfiles list",
"if",
"self",
".",
"allfiles",
"is",
"None",
":",
"self",
".",
"findall",
"(",
")",
"for",
"name",
"in",
"self",
".",
"allfiles",
":",
"if",
"pattern_re",
".",
"search",
"(",
"name",
")",
":",
"self",
".",
"debug_print",
"(",
"\" adding \"",
"+",
"name",
")",
"self",
".",
"files",
".",
"append",
"(",
"name",
")",
"files_found",
"=",
"1",
"return",
"files_found"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/distutils/filelist.py#L187-L229 |
|
fatih/subvim | 241b6d170597857105da219c9b7d36059e9f11fb | vim/base/YouCompleteMe/third_party/requests/requests/utils.py | python | dict_from_cookiejar | (cj) | return cookie_dict | Returns a key/value dictionary from a CookieJar.
:param cj: CookieJar object to extract cookies from. | Returns a key/value dictionary from a CookieJar. | [
"Returns",
"a",
"key",
"/",
"value",
"dictionary",
"from",
"a",
"CookieJar",
"."
] | def dict_from_cookiejar(cj):
"""Returns a key/value dictionary from a CookieJar.
:param cj: CookieJar object to extract cookies from.
"""
cookie_dict = {}
for cookie in cj:
cookie_dict[cookie.name] = cookie.value
return cookie_dict | [
"def",
"dict_from_cookiejar",
"(",
"cj",
")",
":",
"cookie_dict",
"=",
"{",
"}",
"for",
"cookie",
"in",
"cj",
":",
"cookie_dict",
"[",
"cookie",
".",
"name",
"]",
"=",
"cookie",
".",
"value",
"return",
"cookie_dict"
] | https://github.com/fatih/subvim/blob/241b6d170597857105da219c9b7d36059e9f11fb/vim/base/YouCompleteMe/third_party/requests/requests/utils.py#L236-L247 |
|
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/training/quantize_training.py | python | do_quantize_training_on_graphdef | (input_graph, num_bits) | return graph | A general quantization scheme is being developed in `tf.contrib.quantize`.
Consider using that instead, though since it is in the tf.contrib namespace,
it is not subject to backward compatibility guarantees.
Args:
input_graph: A `GraphDef`.
num_bits: The number of bits for quantize training.
Returns:
The graph with quantize training done. | A general quantization scheme is being developed in `tf.contrib.quantize`. | [
"A",
"general",
"quantization",
"scheme",
"is",
"being",
"developed",
"in",
"tf",
".",
"contrib",
".",
"quantize",
"."
] | def do_quantize_training_on_graphdef(input_graph, num_bits):
"""A general quantization scheme is being developed in `tf.contrib.quantize`.
Consider using that instead, though since it is in the tf.contrib namespace,
it is not subject to backward compatibility guarantees.
Args:
input_graph: A `GraphDef`.
num_bits: The number of bits for quantize training.
Returns:
The graph with quantize training done.
"""
graph = graph_pb2.GraphDef()
result_graph_string = DoQuantizeTrainingOnGraphDefHelper(
input_graph.SerializeToString(), num_bits)
graph.ParseFromString(result_graph_string)
return graph | [
"def",
"do_quantize_training_on_graphdef",
"(",
"input_graph",
",",
"num_bits",
")",
":",
"graph",
"=",
"graph_pb2",
".",
"GraphDef",
"(",
")",
"result_graph_string",
"=",
"DoQuantizeTrainingOnGraphDefHelper",
"(",
"input_graph",
".",
"SerializeToString",
"(",
")",
",",
"num_bits",
")",
"graph",
".",
"ParseFromString",
"(",
"result_graph_string",
")",
"return",
"graph"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/training/quantize_training.py#L27-L46 |
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py2/scipy/special/orthogonal.py | python | chebys | (n, monic=False) | return p | r"""Chebyshev polynomial of the second kind on :math:`[-2, 2]`.
Defined as :math:`S_n(x) = U_n(x/2)` where :math:`U_n` is the
nth Chebychev polynomial of the second kind.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
S : orthopoly1d
Chebyshev polynomial of the second kind on :math:`[-2, 2]`.
Notes
-----
The polynomials :math:`S_n(x)` are orthogonal over :math:`[-2, 2]`
with weight function :math:`\sqrt{1 - (x/2)}^2`.
See Also
--------
chebyu : Chebyshev polynomial of the second kind
References
----------
.. [1] Abramowitz and Stegun, "Handbook of Mathematical Functions"
Section 22. National Bureau of Standards, 1972. | r"""Chebyshev polynomial of the second kind on :math:`[-2, 2]`. | [
"r",
"Chebyshev",
"polynomial",
"of",
"the",
"second",
"kind",
"on",
":",
"math",
":",
"[",
"-",
"2",
"2",
"]",
"."
] | def chebys(n, monic=False):
r"""Chebyshev polynomial of the second kind on :math:`[-2, 2]`.
Defined as :math:`S_n(x) = U_n(x/2)` where :math:`U_n` is the
nth Chebychev polynomial of the second kind.
Parameters
----------
n : int
Degree of the polynomial.
monic : bool, optional
If `True`, scale the leading coefficient to be 1. Default is
`False`.
Returns
-------
S : orthopoly1d
Chebyshev polynomial of the second kind on :math:`[-2, 2]`.
Notes
-----
The polynomials :math:`S_n(x)` are orthogonal over :math:`[-2, 2]`
with weight function :math:`\sqrt{1 - (x/2)}^2`.
See Also
--------
chebyu : Chebyshev polynomial of the second kind
References
----------
.. [1] Abramowitz and Stegun, "Handbook of Mathematical Functions"
Section 22. National Bureau of Standards, 1972.
"""
if n < 0:
raise ValueError("n must be nonnegative.")
if n == 0:
n1 = n + 1
else:
n1 = n
x, w, mu0 = roots_chebys(n1, mu=True)
if n == 0:
x, w = [], []
hn = pi
kn = 1.0
p = orthopoly1d(x, w, hn, kn,
wfunc=lambda x: sqrt(1 - x * x / 4.0),
limits=(-2, 2), monic=monic)
if not monic:
factor = (n + 1.0) / p(2)
p._scale(factor)
p.__dict__['_eval_func'] = lambda x: eval_chebys(n, x)
return p | [
"def",
"chebys",
"(",
"n",
",",
"monic",
"=",
"False",
")",
":",
"if",
"n",
"<",
"0",
":",
"raise",
"ValueError",
"(",
"\"n must be nonnegative.\"",
")",
"if",
"n",
"==",
"0",
":",
"n1",
"=",
"n",
"+",
"1",
"else",
":",
"n1",
"=",
"n",
"x",
",",
"w",
",",
"mu0",
"=",
"roots_chebys",
"(",
"n1",
",",
"mu",
"=",
"True",
")",
"if",
"n",
"==",
"0",
":",
"x",
",",
"w",
"=",
"[",
"]",
",",
"[",
"]",
"hn",
"=",
"pi",
"kn",
"=",
"1.0",
"p",
"=",
"orthopoly1d",
"(",
"x",
",",
"w",
",",
"hn",
",",
"kn",
",",
"wfunc",
"=",
"lambda",
"x",
":",
"sqrt",
"(",
"1",
"-",
"x",
"*",
"x",
"/",
"4.0",
")",
",",
"limits",
"=",
"(",
"-",
"2",
",",
"2",
")",
",",
"monic",
"=",
"monic",
")",
"if",
"not",
"monic",
":",
"factor",
"=",
"(",
"n",
"+",
"1.0",
")",
"/",
"p",
"(",
"2",
")",
"p",
".",
"_scale",
"(",
"factor",
")",
"p",
".",
"__dict__",
"[",
"'_eval_func'",
"]",
"=",
"lambda",
"x",
":",
"eval_chebys",
"(",
"n",
",",
"x",
")",
"return",
"p"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/special/orthogonal.py#L1678-L1731 |
|
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/util/decorator_utils.py | python | _normalize_docstring | (docstring) | return '\n'.join(trimmed) | Normalizes the docstring.
Replaces tabs with spaces, removes leading and trailing blanks lines, and
removes any indentation.
Copied from PEP-257:
https://www.python.org/dev/peps/pep-0257/#handling-docstring-indentation
Args:
docstring: the docstring to normalize
Returns:
The normalized docstring | Normalizes the docstring. | [
"Normalizes",
"the",
"docstring",
"."
] | def _normalize_docstring(docstring):
"""Normalizes the docstring.
Replaces tabs with spaces, removes leading and trailing blanks lines, and
removes any indentation.
Copied from PEP-257:
https://www.python.org/dev/peps/pep-0257/#handling-docstring-indentation
Args:
docstring: the docstring to normalize
Returns:
The normalized docstring
"""
if not docstring:
return ''
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
# (we use sys.maxsize because sys.maxint doesn't exist in Python 3)
indent = sys.maxsize
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
if indent < sys.maxsize:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join(trimmed) | [
"def",
"_normalize_docstring",
"(",
"docstring",
")",
":",
"if",
"not",
"docstring",
":",
"return",
"''",
"# Convert tabs to spaces (following the normal Python rules)",
"# and split into a list of lines:",
"lines",
"=",
"docstring",
".",
"expandtabs",
"(",
")",
".",
"splitlines",
"(",
")",
"# Determine minimum indentation (first line doesn't count):",
"# (we use sys.maxsize because sys.maxint doesn't exist in Python 3)",
"indent",
"=",
"sys",
".",
"maxsize",
"for",
"line",
"in",
"lines",
"[",
"1",
":",
"]",
":",
"stripped",
"=",
"line",
".",
"lstrip",
"(",
")",
"if",
"stripped",
":",
"indent",
"=",
"min",
"(",
"indent",
",",
"len",
"(",
"line",
")",
"-",
"len",
"(",
"stripped",
")",
")",
"# Remove indentation (first line is special):",
"trimmed",
"=",
"[",
"lines",
"[",
"0",
"]",
".",
"strip",
"(",
")",
"]",
"if",
"indent",
"<",
"sys",
".",
"maxsize",
":",
"for",
"line",
"in",
"lines",
"[",
"1",
":",
"]",
":",
"trimmed",
".",
"append",
"(",
"line",
"[",
"indent",
":",
"]",
".",
"rstrip",
"(",
")",
")",
"# Strip off trailing and leading blank lines:",
"while",
"trimmed",
"and",
"not",
"trimmed",
"[",
"-",
"1",
"]",
":",
"trimmed",
".",
"pop",
"(",
")",
"while",
"trimmed",
"and",
"not",
"trimmed",
"[",
"0",
"]",
":",
"trimmed",
".",
"pop",
"(",
"0",
")",
"# Return a single string:",
"return",
"'\\n'",
".",
"join",
"(",
"trimmed",
")"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/util/decorator_utils.py#L31-L69 |
|
rachitiitr/DataStructures-Algorithms | ca63481f86eb6d3fdcb42e20ea87de24d60eccf3 | Library/Math/projecteuelr/Euler.py | python | n2words | (num,join=True) | return words | words = {} convert an integer number into words | words = {} convert an integer number into words | [
"words",
"=",
"{}",
"convert",
"an",
"integer",
"number",
"into",
"words"
] | def n2words(num,join=True):
'''words = {} convert an integer number into words'''
units = ['','One','Two','Three','Four','Five','Six','Seven','Eight','Nine']
teens = ['','Eleven','Twelve','Thirteen','Fourteen','Fifteen','Sixteen', \
'Seventeen','Eighteen','Nineteen']
tens = ['','Ten','Twenty','Thirty','Forty','Fifty','Sixty','Seventy', \
'Eighty','Ninety']
thousands = ['','Thousand','Million','Billion','Trillion','Quadrillion', \
'Quintillion','Sextillion','Septillion','Octillion', \
'Nonillion','Decillion','Undecillion','Duodecillion', \
'Tredecillion','Quattuordecillion','Sexdecillion', \
'Septendecillion','Octodecillion','Novemdecillion', \
'Vigintillion']
words = []
if num==0: words.append('zero')
else:
numStr = '%d'%num
numStrLen = len(numStr)
groups = (numStrLen+2)/3
numStr = numStr.zfill(groups*3)
for i in range(0,groups*3,3):
h,t,u = int(numStr[i]),int(numStr[i+1]),int(numStr[i+2])
g = groups-(i/3+1)
if h>=1:
words.append(units[h])
words.append('Hundred')
if t>1:
words.append(tens[t])
if u>=1: words.append(units[u])
elif t==1:
if u>=1: words.append(teens[u])
else: words.append(tens[t])
else:
if u>=1: words.append(units[u])
if (g>=1) and ((h+t+u)>0): words.append(thousands[g]+'')
if join: return ' '.join(words)
return words | [
"def",
"n2words",
"(",
"num",
",",
"join",
"=",
"True",
")",
":",
"units",
"=",
"[",
"''",
",",
"'One'",
",",
"'Two'",
",",
"'Three'",
",",
"'Four'",
",",
"'Five'",
",",
"'Six'",
",",
"'Seven'",
",",
"'Eight'",
",",
"'Nine'",
"]",
"teens",
"=",
"[",
"''",
",",
"'Eleven'",
",",
"'Twelve'",
",",
"'Thirteen'",
",",
"'Fourteen'",
",",
"'Fifteen'",
",",
"'Sixteen'",
",",
"'Seventeen'",
",",
"'Eighteen'",
",",
"'Nineteen'",
"]",
"tens",
"=",
"[",
"''",
",",
"'Ten'",
",",
"'Twenty'",
",",
"'Thirty'",
",",
"'Forty'",
",",
"'Fifty'",
",",
"'Sixty'",
",",
"'Seventy'",
",",
"'Eighty'",
",",
"'Ninety'",
"]",
"thousands",
"=",
"[",
"''",
",",
"'Thousand'",
",",
"'Million'",
",",
"'Billion'",
",",
"'Trillion'",
",",
"'Quadrillion'",
",",
"'Quintillion'",
",",
"'Sextillion'",
",",
"'Septillion'",
",",
"'Octillion'",
",",
"'Nonillion'",
",",
"'Decillion'",
",",
"'Undecillion'",
",",
"'Duodecillion'",
",",
"'Tredecillion'",
",",
"'Quattuordecillion'",
",",
"'Sexdecillion'",
",",
"'Septendecillion'",
",",
"'Octodecillion'",
",",
"'Novemdecillion'",
",",
"'Vigintillion'",
"]",
"words",
"=",
"[",
"]",
"if",
"num",
"==",
"0",
":",
"words",
".",
"append",
"(",
"'zero'",
")",
"else",
":",
"numStr",
"=",
"'%d'",
"%",
"num",
"numStrLen",
"=",
"len",
"(",
"numStr",
")",
"groups",
"=",
"(",
"numStrLen",
"+",
"2",
")",
"/",
"3",
"numStr",
"=",
"numStr",
".",
"zfill",
"(",
"groups",
"*",
"3",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"groups",
"*",
"3",
",",
"3",
")",
":",
"h",
",",
"t",
",",
"u",
"=",
"int",
"(",
"numStr",
"[",
"i",
"]",
")",
",",
"int",
"(",
"numStr",
"[",
"i",
"+",
"1",
"]",
")",
",",
"int",
"(",
"numStr",
"[",
"i",
"+",
"2",
"]",
")",
"g",
"=",
"groups",
"-",
"(",
"i",
"/",
"3",
"+",
"1",
")",
"if",
"h",
">=",
"1",
":",
"words",
".",
"append",
"(",
"units",
"[",
"h",
"]",
")",
"words",
".",
"append",
"(",
"'Hundred'",
")",
"if",
"t",
">",
"1",
":",
"words",
".",
"append",
"(",
"tens",
"[",
"t",
"]",
")",
"if",
"u",
">=",
"1",
":",
"words",
".",
"append",
"(",
"units",
"[",
"u",
"]",
")",
"elif",
"t",
"==",
"1",
":",
"if",
"u",
">=",
"1",
":",
"words",
".",
"append",
"(",
"teens",
"[",
"u",
"]",
")",
"else",
":",
"words",
".",
"append",
"(",
"tens",
"[",
"t",
"]",
")",
"else",
":",
"if",
"u",
">=",
"1",
":",
"words",
".",
"append",
"(",
"units",
"[",
"u",
"]",
")",
"if",
"(",
"g",
">=",
"1",
")",
"and",
"(",
"(",
"h",
"+",
"t",
"+",
"u",
")",
">",
"0",
")",
":",
"words",
".",
"append",
"(",
"thousands",
"[",
"g",
"]",
"+",
"''",
")",
"if",
"join",
":",
"return",
"' '",
".",
"join",
"(",
"words",
")",
"return",
"words"
] | https://github.com/rachitiitr/DataStructures-Algorithms/blob/ca63481f86eb6d3fdcb42e20ea87de24d60eccf3/Library/Math/projecteuelr/Euler.py#L292-L328 |
|
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/distribute/distribute_coordinator.py | python | _WorkerContext.task_type | (self) | return self._task_type | Returns the role of the corresponing task. | Returns the role of the corresponing task. | [
"Returns",
"the",
"role",
"of",
"the",
"corresponing",
"task",
"."
] | def task_type(self):
"""Returns the role of the corresponing task."""
return self._task_type | [
"def",
"task_type",
"(",
"self",
")",
":",
"return",
"self",
".",
"_task_type"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/distribute/distribute_coordinator.py#L286-L288 |
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | demo/Ticker.py | python | TestPanel.OnText | (self, evt) | Live update of the ticker text | Live update of the ticker text | [
"Live",
"update",
"of",
"the",
"ticker",
"text"
] | def OnText(self, evt):
"""Live update of the ticker text"""
self.ticker.SetText(self.txt.GetValue()) | [
"def",
"OnText",
"(",
"self",
",",
"evt",
")",
":",
"self",
".",
"ticker",
".",
"SetText",
"(",
"self",
".",
"txt",
".",
"GetValue",
"(",
")",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/demo/Ticker.py#L112-L114 |
||
weolar/miniblink49 | 1c4678db0594a4abde23d3ebbcc7cd13c3170777 | third_party/WebKit/Source/bindings/scripts/aggregate_generated_bindings.py | python | extract_conditional | (idl_contents) | return match.group(1) | Find [Conditional] interface extended attribute. | Find [Conditional] interface extended attribute. | [
"Find",
"[",
"Conditional",
"]",
"interface",
"extended",
"attribute",
"."
] | def extract_conditional(idl_contents):
"""Find [Conditional] interface extended attribute."""
match = CONDITIONAL_PATTERN.search(idl_contents)
if not match:
return None
return match.group(1) | [
"def",
"extract_conditional",
"(",
"idl_contents",
")",
":",
"match",
"=",
"CONDITIONAL_PATTERN",
".",
"search",
"(",
"idl_contents",
")",
"if",
"not",
"match",
":",
"return",
"None",
"return",
"match",
".",
"group",
"(",
"1",
")"
] | https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/third_party/WebKit/Source/bindings/scripts/aggregate_generated_bindings.py#L103-L109 |
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py2/scipy/io/mmio.py | python | MMFile._init_attrs | (self, **kwargs) | Initialize each attributes with the corresponding keyword arg value
or a default of None | Initialize each attributes with the corresponding keyword arg value
or a default of None | [
"Initialize",
"each",
"attributes",
"with",
"the",
"corresponding",
"keyword",
"arg",
"value",
"or",
"a",
"default",
"of",
"None"
] | def _init_attrs(self, **kwargs):
"""
Initialize each attributes with the corresponding keyword arg value
or a default of None
"""
attrs = self.__class__.__slots__
public_attrs = [attr[1:] for attr in attrs]
invalid_keys = set(kwargs.keys()) - set(public_attrs)
if invalid_keys:
raise ValueError('''found %s invalid keyword arguments, please only
use %s''' % (tuple(invalid_keys),
public_attrs))
for attr in attrs:
setattr(self, attr, kwargs.get(attr[1:], None)) | [
"def",
"_init_attrs",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"attrs",
"=",
"self",
".",
"__class__",
".",
"__slots__",
"public_attrs",
"=",
"[",
"attr",
"[",
"1",
":",
"]",
"for",
"attr",
"in",
"attrs",
"]",
"invalid_keys",
"=",
"set",
"(",
"kwargs",
".",
"keys",
"(",
")",
")",
"-",
"set",
"(",
"public_attrs",
")",
"if",
"invalid_keys",
":",
"raise",
"ValueError",
"(",
"'''found %s invalid keyword arguments, please only\n use %s'''",
"%",
"(",
"tuple",
"(",
"invalid_keys",
")",
",",
"public_attrs",
")",
")",
"for",
"attr",
"in",
"attrs",
":",
"setattr",
"(",
"self",
",",
"attr",
",",
"kwargs",
".",
"get",
"(",
"attr",
"[",
"1",
":",
"]",
",",
"None",
")",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/io/mmio.py#L459-L475 |
||
PJunhyuk/people-counting-pose | 8cdaab5281847c296b305643842053d496e2e4e8 | sort.py | python | Sort.__init__ | (self, max_age=1, min_hits=3) | Sets key parameters for SORT | Sets key parameters for SORT | [
"Sets",
"key",
"parameters",
"for",
"SORT"
] | def __init__(self, max_age=1, min_hits=3):
"""
Sets key parameters for SORT
"""
self.max_age = max_age
self.min_hits = min_hits
self.trackers = []
self.frame_count = 0 | [
"def",
"__init__",
"(",
"self",
",",
"max_age",
"=",
"1",
",",
"min_hits",
"=",
"3",
")",
":",
"self",
".",
"max_age",
"=",
"max_age",
"self",
".",
"min_hits",
"=",
"min_hits",
"self",
".",
"trackers",
"=",
"[",
"]",
"self",
".",
"frame_count",
"=",
"0"
] | https://github.com/PJunhyuk/people-counting-pose/blob/8cdaab5281847c296b305643842053d496e2e4e8/sort.py#L178-L185 |
||
adobe/chromium | cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7 | chrome/tools/extract_actions.py | python | AddChromeOSActions | (actions) | Add actions reported by non-Chrome processes in Chrome OS.
Arguments:
actions: set of actions to add to. | Add actions reported by non-Chrome processes in Chrome OS. | [
"Add",
"actions",
"reported",
"by",
"non",
"-",
"Chrome",
"processes",
"in",
"Chrome",
"OS",
"."
] | def AddChromeOSActions(actions):
"""Add actions reported by non-Chrome processes in Chrome OS.
Arguments:
actions: set of actions to add to.
"""
# Actions sent by the Chrome OS window manager.
actions.add('Accel_NextWindow_Tab')
actions.add('Accel_PrevWindow_Tab')
actions.add('Accel_NextWindow_F5')
actions.add('Accel_PrevWindow_F5')
# Actions sent by the Chrome OS power manager.
actions.add('Accel_BrightnessDown_F6')
actions.add('Accel_BrightnessUp_F7')
# Actions sent by Chrome OS update engine.
actions.add('Updater.ServerCertificateChanged')
actions.add('Updater.ServerCertificateFailed')
# Actions sent by Chrome OS cryptohome.
actions.add('Cryptohome.PKCS11InitFail') | [
"def",
"AddChromeOSActions",
"(",
"actions",
")",
":",
"# Actions sent by the Chrome OS window manager.",
"actions",
".",
"add",
"(",
"'Accel_NextWindow_Tab'",
")",
"actions",
".",
"add",
"(",
"'Accel_PrevWindow_Tab'",
")",
"actions",
".",
"add",
"(",
"'Accel_NextWindow_F5'",
")",
"actions",
".",
"add",
"(",
"'Accel_PrevWindow_F5'",
")",
"# Actions sent by the Chrome OS power manager.",
"actions",
".",
"add",
"(",
"'Accel_BrightnessDown_F6'",
")",
"actions",
".",
"add",
"(",
"'Accel_BrightnessUp_F7'",
")",
"# Actions sent by Chrome OS update engine.",
"actions",
".",
"add",
"(",
"'Updater.ServerCertificateChanged'",
")",
"actions",
".",
"add",
"(",
"'Updater.ServerCertificateFailed'",
")",
"# Actions sent by Chrome OS cryptohome.",
"actions",
".",
"add",
"(",
"'Cryptohome.PKCS11InitFail'",
")"
] | https://github.com/adobe/chromium/blob/cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7/chrome/tools/extract_actions.py#L203-L224 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/stc.py | python | StyledTextCtrl.BraceHighlightIndicator | (*args, **kwargs) | return _stc.StyledTextCtrl_BraceHighlightIndicator(*args, **kwargs) | BraceHighlightIndicator(self, bool useBraceHighlightIndicator, int indicator) | BraceHighlightIndicator(self, bool useBraceHighlightIndicator, int indicator) | [
"BraceHighlightIndicator",
"(",
"self",
"bool",
"useBraceHighlightIndicator",
"int",
"indicator",
")"
] | def BraceHighlightIndicator(*args, **kwargs):
"""BraceHighlightIndicator(self, bool useBraceHighlightIndicator, int indicator)"""
return _stc.StyledTextCtrl_BraceHighlightIndicator(*args, **kwargs) | [
"def",
"BraceHighlightIndicator",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_stc",
".",
"StyledTextCtrl_BraceHighlightIndicator",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/stc.py#L4807-L4809 |
|
vslavik/poedit | f7a9daa0a10037e090aa0a86f5ce0f24ececdf6a | deps/boost/tools/build/src/build/type.py | python | __register_features | () | Register features need by this module. | Register features need by this module. | [
"Register",
"features",
"need",
"by",
"this",
"module",
"."
] | def __register_features ():
""" Register features need by this module.
"""
# The feature is optional so that it is never implicitly added.
# It's used only for internal purposes, and in all cases we
# want to explicitly use it.
feature.feature ('target-type', [], ['composite', 'optional'])
feature.feature ('main-target-type', [], ['optional', 'incidental'])
feature.feature ('base-target-type', [], ['composite', 'optional', 'free']) | [
"def",
"__register_features",
"(",
")",
":",
"# The feature is optional so that it is never implicitly added.",
"# It's used only for internal purposes, and in all cases we",
"# want to explicitly use it.",
"feature",
".",
"feature",
"(",
"'target-type'",
",",
"[",
"]",
",",
"[",
"'composite'",
",",
"'optional'",
"]",
")",
"feature",
".",
"feature",
"(",
"'main-target-type'",
",",
"[",
"]",
",",
"[",
"'optional'",
",",
"'incidental'",
"]",
")",
"feature",
".",
"feature",
"(",
"'base-target-type'",
",",
"[",
"]",
",",
"[",
"'composite'",
",",
"'optional'",
",",
"'free'",
"]",
")"
] | https://github.com/vslavik/poedit/blob/f7a9daa0a10037e090aa0a86f5ce0f24ececdf6a/deps/boost/tools/build/src/build/type.py#L22-L30 |
||
root-project/root | fcd3583bb14852bf2e8cd2415717cbaac0e75896 | interpreter/llvm/src/utils/benchmark/tools/gbench/util.py | python | run_or_load_benchmark | (filename, benchmark_flags) | Get the results for a specified benchmark. If 'filename' specifies
an executable benchmark then the results are generated by running the
benchmark. Otherwise 'filename' must name a valid JSON output file,
which is loaded and the result returned. | Get the results for a specified benchmark. If 'filename' specifies
an executable benchmark then the results are generated by running the
benchmark. Otherwise 'filename' must name a valid JSON output file,
which is loaded and the result returned. | [
"Get",
"the",
"results",
"for",
"a",
"specified",
"benchmark",
".",
"If",
"filename",
"specifies",
"an",
"executable",
"benchmark",
"then",
"the",
"results",
"are",
"generated",
"by",
"running",
"the",
"benchmark",
".",
"Otherwise",
"filename",
"must",
"name",
"a",
"valid",
"JSON",
"output",
"file",
"which",
"is",
"loaded",
"and",
"the",
"result",
"returned",
"."
] | def run_or_load_benchmark(filename, benchmark_flags):
"""
Get the results for a specified benchmark. If 'filename' specifies
an executable benchmark then the results are generated by running the
benchmark. Otherwise 'filename' must name a valid JSON output file,
which is loaded and the result returned.
"""
ftype = check_input_file(filename)
if ftype == IT_JSON:
return load_benchmark_results(filename)
elif ftype == IT_Executable:
return run_benchmark(filename, benchmark_flags)
else:
assert False | [
"def",
"run_or_load_benchmark",
"(",
"filename",
",",
"benchmark_flags",
")",
":",
"ftype",
"=",
"check_input_file",
"(",
"filename",
")",
"if",
"ftype",
"==",
"IT_JSON",
":",
"return",
"load_benchmark_results",
"(",
"filename",
")",
"elif",
"ftype",
"==",
"IT_Executable",
":",
"return",
"run_benchmark",
"(",
"filename",
",",
"benchmark_flags",
")",
"else",
":",
"assert",
"False"
] | https://github.com/root-project/root/blob/fcd3583bb14852bf2e8cd2415717cbaac0e75896/interpreter/llvm/src/utils/benchmark/tools/gbench/util.py#L146-L159 |
||
google/or-tools | 2cb85b4eead4c38e1c54b48044f92087cf165bce | examples/python/cover_rectangle_sat.py | python | cover_rectangle | (num_squares) | return status == cp_model.FEASIBLE | Try to fill the rectangle with a given number of squares. | Try to fill the rectangle with a given number of squares. | [
"Try",
"to",
"fill",
"the",
"rectangle",
"with",
"a",
"given",
"number",
"of",
"squares",
"."
] | def cover_rectangle(num_squares):
"""Try to fill the rectangle with a given number of squares."""
size_x = 72
size_y = 37
model = cp_model.CpModel()
areas = []
sizes = []
x_intervals = []
y_intervals = []
x_starts = []
y_starts = []
# Creates intervals for the NoOverlap2D and size variables.
for i in range(num_squares):
size = model.NewIntVar(1, size_y, 'size_%i' % i)
start_x = model.NewIntVar(0, size_x, 'sx_%i' % i)
end_x = model.NewIntVar(0, size_x, 'ex_%i' % i)
start_y = model.NewIntVar(0, size_y, 'sy_%i' % i)
end_y = model.NewIntVar(0, size_y, 'ey_%i' % i)
interval_x = model.NewIntervalVar(start_x, size, end_x, 'ix_%i' % i)
interval_y = model.NewIntervalVar(start_y, size, end_y, 'iy_%i' % i)
area = model.NewIntVar(1, size_y * size_y, 'area_%i' % i)
model.AddProdEquality(area, [size, size])
areas.append(area)
x_intervals.append(interval_x)
y_intervals.append(interval_y)
sizes.append(size)
x_starts.append(start_x)
y_starts.append(start_y)
# Main constraint.
model.AddNoOverlap2D(x_intervals, y_intervals)
# Redundant constraints.
model.AddCumulative(x_intervals, sizes, size_y)
model.AddCumulative(y_intervals, sizes, size_x)
# Forces the rectangle to be exactly covered.
model.Add(sum(areas) == size_x * size_y)
# Symmetry breaking 1: sizes are ordered.
for i in range(num_squares - 1):
model.Add(sizes[i] <= sizes[i + 1])
# Define same to be true iff sizes[i] == sizes[i + 1]
same = model.NewBoolVar('')
model.Add(sizes[i] == sizes[i + 1]).OnlyEnforceIf(same)
model.Add(sizes[i] < sizes[i + 1]).OnlyEnforceIf(same.Not())
# Tie break with starts.
model.Add(x_starts[i] <= x_starts[i + 1]).OnlyEnforceIf(same)
# Symmetry breaking 2: first square in one quadrant.
model.Add(x_starts[0] < 36)
model.Add(y_starts[0] < 19)
# Creates a solver and solves.
solver = cp_model.CpSolver()
status = solver.Solve(model)
print('%s found in %0.2fs' % (solver.StatusName(status), solver.WallTime()))
# Prints solution.
if status == cp_model.OPTIMAL:
display = [[' ' for _ in range(size_x)] for _ in range(size_y)]
for i in range(num_squares):
sol_x = solver.Value(x_starts[i])
sol_y = solver.Value(y_starts[i])
sol_s = solver.Value(sizes[i])
char = format(i, '01x')
for j in range(sol_s):
for k in range(sol_s):
if display[sol_y + j][sol_x + k] != ' ':
print('ERROR between %s and %s' %
(display[sol_y + j][sol_x + k], char))
display[sol_y + j][sol_x + k] = char
for line in range(size_y):
print(' '.join(display[line]))
return status == cp_model.FEASIBLE | [
"def",
"cover_rectangle",
"(",
"num_squares",
")",
":",
"size_x",
"=",
"72",
"size_y",
"=",
"37",
"model",
"=",
"cp_model",
".",
"CpModel",
"(",
")",
"areas",
"=",
"[",
"]",
"sizes",
"=",
"[",
"]",
"x_intervals",
"=",
"[",
"]",
"y_intervals",
"=",
"[",
"]",
"x_starts",
"=",
"[",
"]",
"y_starts",
"=",
"[",
"]",
"# Creates intervals for the NoOverlap2D and size variables.",
"for",
"i",
"in",
"range",
"(",
"num_squares",
")",
":",
"size",
"=",
"model",
".",
"NewIntVar",
"(",
"1",
",",
"size_y",
",",
"'size_%i'",
"%",
"i",
")",
"start_x",
"=",
"model",
".",
"NewIntVar",
"(",
"0",
",",
"size_x",
",",
"'sx_%i'",
"%",
"i",
")",
"end_x",
"=",
"model",
".",
"NewIntVar",
"(",
"0",
",",
"size_x",
",",
"'ex_%i'",
"%",
"i",
")",
"start_y",
"=",
"model",
".",
"NewIntVar",
"(",
"0",
",",
"size_y",
",",
"'sy_%i'",
"%",
"i",
")",
"end_y",
"=",
"model",
".",
"NewIntVar",
"(",
"0",
",",
"size_y",
",",
"'ey_%i'",
"%",
"i",
")",
"interval_x",
"=",
"model",
".",
"NewIntervalVar",
"(",
"start_x",
",",
"size",
",",
"end_x",
",",
"'ix_%i'",
"%",
"i",
")",
"interval_y",
"=",
"model",
".",
"NewIntervalVar",
"(",
"start_y",
",",
"size",
",",
"end_y",
",",
"'iy_%i'",
"%",
"i",
")",
"area",
"=",
"model",
".",
"NewIntVar",
"(",
"1",
",",
"size_y",
"*",
"size_y",
",",
"'area_%i'",
"%",
"i",
")",
"model",
".",
"AddProdEquality",
"(",
"area",
",",
"[",
"size",
",",
"size",
"]",
")",
"areas",
".",
"append",
"(",
"area",
")",
"x_intervals",
".",
"append",
"(",
"interval_x",
")",
"y_intervals",
".",
"append",
"(",
"interval_y",
")",
"sizes",
".",
"append",
"(",
"size",
")",
"x_starts",
".",
"append",
"(",
"start_x",
")",
"y_starts",
".",
"append",
"(",
"start_y",
")",
"# Main constraint.",
"model",
".",
"AddNoOverlap2D",
"(",
"x_intervals",
",",
"y_intervals",
")",
"# Redundant constraints.",
"model",
".",
"AddCumulative",
"(",
"x_intervals",
",",
"sizes",
",",
"size_y",
")",
"model",
".",
"AddCumulative",
"(",
"y_intervals",
",",
"sizes",
",",
"size_x",
")",
"# Forces the rectangle to be exactly covered.",
"model",
".",
"Add",
"(",
"sum",
"(",
"areas",
")",
"==",
"size_x",
"*",
"size_y",
")",
"# Symmetry breaking 1: sizes are ordered.",
"for",
"i",
"in",
"range",
"(",
"num_squares",
"-",
"1",
")",
":",
"model",
".",
"Add",
"(",
"sizes",
"[",
"i",
"]",
"<=",
"sizes",
"[",
"i",
"+",
"1",
"]",
")",
"# Define same to be true iff sizes[i] == sizes[i + 1]",
"same",
"=",
"model",
".",
"NewBoolVar",
"(",
"''",
")",
"model",
".",
"Add",
"(",
"sizes",
"[",
"i",
"]",
"==",
"sizes",
"[",
"i",
"+",
"1",
"]",
")",
".",
"OnlyEnforceIf",
"(",
"same",
")",
"model",
".",
"Add",
"(",
"sizes",
"[",
"i",
"]",
"<",
"sizes",
"[",
"i",
"+",
"1",
"]",
")",
".",
"OnlyEnforceIf",
"(",
"same",
".",
"Not",
"(",
")",
")",
"# Tie break with starts.",
"model",
".",
"Add",
"(",
"x_starts",
"[",
"i",
"]",
"<=",
"x_starts",
"[",
"i",
"+",
"1",
"]",
")",
".",
"OnlyEnforceIf",
"(",
"same",
")",
"# Symmetry breaking 2: first square in one quadrant.",
"model",
".",
"Add",
"(",
"x_starts",
"[",
"0",
"]",
"<",
"36",
")",
"model",
".",
"Add",
"(",
"y_starts",
"[",
"0",
"]",
"<",
"19",
")",
"# Creates a solver and solves.",
"solver",
"=",
"cp_model",
".",
"CpSolver",
"(",
")",
"status",
"=",
"solver",
".",
"Solve",
"(",
"model",
")",
"print",
"(",
"'%s found in %0.2fs'",
"%",
"(",
"solver",
".",
"StatusName",
"(",
"status",
")",
",",
"solver",
".",
"WallTime",
"(",
")",
")",
")",
"# Prints solution.",
"if",
"status",
"==",
"cp_model",
".",
"OPTIMAL",
":",
"display",
"=",
"[",
"[",
"' '",
"for",
"_",
"in",
"range",
"(",
"size_x",
")",
"]",
"for",
"_",
"in",
"range",
"(",
"size_y",
")",
"]",
"for",
"i",
"in",
"range",
"(",
"num_squares",
")",
":",
"sol_x",
"=",
"solver",
".",
"Value",
"(",
"x_starts",
"[",
"i",
"]",
")",
"sol_y",
"=",
"solver",
".",
"Value",
"(",
"y_starts",
"[",
"i",
"]",
")",
"sol_s",
"=",
"solver",
".",
"Value",
"(",
"sizes",
"[",
"i",
"]",
")",
"char",
"=",
"format",
"(",
"i",
",",
"'01x'",
")",
"for",
"j",
"in",
"range",
"(",
"sol_s",
")",
":",
"for",
"k",
"in",
"range",
"(",
"sol_s",
")",
":",
"if",
"display",
"[",
"sol_y",
"+",
"j",
"]",
"[",
"sol_x",
"+",
"k",
"]",
"!=",
"' '",
":",
"print",
"(",
"'ERROR between %s and %s'",
"%",
"(",
"display",
"[",
"sol_y",
"+",
"j",
"]",
"[",
"sol_x",
"+",
"k",
"]",
",",
"char",
")",
")",
"display",
"[",
"sol_y",
"+",
"j",
"]",
"[",
"sol_x",
"+",
"k",
"]",
"=",
"char",
"for",
"line",
"in",
"range",
"(",
"size_y",
")",
":",
"print",
"(",
"' '",
".",
"join",
"(",
"display",
"[",
"line",
"]",
")",
")",
"return",
"status",
"==",
"cp_model",
".",
"FEASIBLE"
] | https://github.com/google/or-tools/blob/2cb85b4eead4c38e1c54b48044f92087cf165bce/examples/python/cover_rectangle_sat.py#L19-L102 |
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/importlib-metadata/py3/importlib_metadata/_text.py | python | FoldedCase.in_ | (self, other) | return self in FoldedCase(other) | Does self appear in other? | Does self appear in other? | [
"Does",
"self",
"appear",
"in",
"other?"
] | def in_(self, other):
"Does self appear in other?"
return self in FoldedCase(other) | [
"def",
"in_",
"(",
"self",
",",
"other",
")",
":",
"return",
"self",
"in",
"FoldedCase",
"(",
"other",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/importlib-metadata/py3/importlib_metadata/_text.py#L85-L87 |
|
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/python/client/timeline.py | python | Timeline._assign_lanes | (self) | Assigns non-overlapping lanes for the activities on each device. | Assigns non-overlapping lanes for the activities on each device. | [
"Assigns",
"non",
"-",
"overlapping",
"lanes",
"for",
"the",
"activities",
"on",
"each",
"device",
"."
] | def _assign_lanes(self):
"""Assigns non-overlapping lanes for the activities on each device."""
for device_stats in self._step_stats.dev_stats:
# TODO(pbar): Genuine thread IDs in NodeExecStats might be helpful.
lanes = [0]
for ns in device_stats.node_stats:
l = -1
for (i, lts) in enumerate(lanes):
if ns.all_start_micros > lts:
l = i
lanes[l] = ns.all_start_micros + ns.all_end_rel_micros
break
if l < 0:
l = len(lanes)
lanes.append(ns.all_start_micros + ns.all_end_rel_micros)
ns.thread_id = l | [
"def",
"_assign_lanes",
"(",
"self",
")",
":",
"for",
"device_stats",
"in",
"self",
".",
"_step_stats",
".",
"dev_stats",
":",
"# TODO(pbar): Genuine thread IDs in NodeExecStats might be helpful.",
"lanes",
"=",
"[",
"0",
"]",
"for",
"ns",
"in",
"device_stats",
".",
"node_stats",
":",
"l",
"=",
"-",
"1",
"for",
"(",
"i",
",",
"lts",
")",
"in",
"enumerate",
"(",
"lanes",
")",
":",
"if",
"ns",
".",
"all_start_micros",
">",
"lts",
":",
"l",
"=",
"i",
"lanes",
"[",
"l",
"]",
"=",
"ns",
".",
"all_start_micros",
"+",
"ns",
".",
"all_end_rel_micros",
"break",
"if",
"l",
"<",
"0",
":",
"l",
"=",
"len",
"(",
"lanes",
")",
"lanes",
".",
"append",
"(",
"ns",
".",
"all_start_micros",
"+",
"ns",
".",
"all_end_rel_micros",
")",
"ns",
".",
"thread_id",
"=",
"l"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/client/timeline.py#L399-L414 |
||
scanner-research/scanner | 04a0c4b4196341995985acd729c0788aab823e1c | python/scannerpy/storage.py | python | StoredStream.len | (self) | Get the number of elements in this stream. | Get the number of elements in this stream. | [
"Get",
"the",
"number",
"of",
"elements",
"in",
"this",
"stream",
"."
] | def len(self) -> int:
"""Get the number of elements in this stream."""
raise NotImplementedError | [
"def",
"len",
"(",
"self",
")",
"->",
"int",
":",
"raise",
"NotImplementedError"
] | https://github.com/scanner-research/scanner/blob/04a0c4b4196341995985acd729c0788aab823e1c/python/scannerpy/storage.py#L118-L120 |
||
neopenx/Dragon | 0e639a7319035ddc81918bd3df059230436ee0a1 | Dragon/python/dragon/core/tensor.py | python | Tensor.__sub__ | (self, other) | return output | Calculate x - y.
Parameters
----------
other : Tensor
The y.
Returns
-------
Tensor
The output tensor. | Calculate x - y. | [
"Calculate",
"x",
"-",
"y",
"."
] | def __sub__(self, other):
"""Calculate x - y.
Parameters
----------
other : Tensor
The y.
Returns
-------
Tensor
The output tensor.
"""
if not isinstance(other, Tensor):
if not isinstance(other, np.ndarray):
if not isinstance(other, list): other = [other]
other = np.array(other, dtype=np.float32)
tensor = Tensor(GetTensorName())
ws.FeedTensor(tensor, other)
other = tensor
output = self.CreateOperator(inputs=[self, other], nout=1, op_type='Sub')
if self.shape is not None:
output.shape = self.shape[:]
return output | [
"def",
"__sub__",
"(",
"self",
",",
"other",
")",
":",
"if",
"not",
"isinstance",
"(",
"other",
",",
"Tensor",
")",
":",
"if",
"not",
"isinstance",
"(",
"other",
",",
"np",
".",
"ndarray",
")",
":",
"if",
"not",
"isinstance",
"(",
"other",
",",
"list",
")",
":",
"other",
"=",
"[",
"other",
"]",
"other",
"=",
"np",
".",
"array",
"(",
"other",
",",
"dtype",
"=",
"np",
".",
"float32",
")",
"tensor",
"=",
"Tensor",
"(",
"GetTensorName",
"(",
")",
")",
"ws",
".",
"FeedTensor",
"(",
"tensor",
",",
"other",
")",
"other",
"=",
"tensor",
"output",
"=",
"self",
".",
"CreateOperator",
"(",
"inputs",
"=",
"[",
"self",
",",
"other",
"]",
",",
"nout",
"=",
"1",
",",
"op_type",
"=",
"'Sub'",
")",
"if",
"self",
".",
"shape",
"is",
"not",
"None",
":",
"output",
".",
"shape",
"=",
"self",
".",
"shape",
"[",
":",
"]",
"return",
"output"
] | https://github.com/neopenx/Dragon/blob/0e639a7319035ddc81918bd3df059230436ee0a1/Dragon/python/dragon/core/tensor.py#L512-L536 |
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/numpy/py3/numpy/core/getlimits.py | python | _get_machar | (ftype) | return _discovered_machar(ftype) | Get MachAr instance or MachAr-like instance
Get parameters for floating point type, by first trying signatures of
various known floating point types, then, if none match, attempting to
identify parameters by analysis.
Parameters
----------
ftype : class
Numpy floating point type class (e.g. ``np.float64``)
Returns
-------
ma_like : instance of :class:`MachAr` or :class:`MachArLike`
Object giving floating point parameters for `ftype`.
Warns
-----
UserWarning
If the binary signature of the float type is not in the dictionary of
known float types. | Get MachAr instance or MachAr-like instance | [
"Get",
"MachAr",
"instance",
"or",
"MachAr",
"-",
"like",
"instance"
] | def _get_machar(ftype):
""" Get MachAr instance or MachAr-like instance
Get parameters for floating point type, by first trying signatures of
various known floating point types, then, if none match, attempting to
identify parameters by analysis.
Parameters
----------
ftype : class
Numpy floating point type class (e.g. ``np.float64``)
Returns
-------
ma_like : instance of :class:`MachAr` or :class:`MachArLike`
Object giving floating point parameters for `ftype`.
Warns
-----
UserWarning
If the binary signature of the float type is not in the dictionary of
known float types.
"""
params = _MACHAR_PARAMS.get(ftype)
if params is None:
raise ValueError(repr(ftype))
# Detect known / suspected types
key = ftype('-0.1').newbyteorder('<').tobytes()
ma_like = None
if ftype == ntypes.longdouble:
# Could be 80 bit == 10 byte extended precision, where last bytes can
# be random garbage.
# Comparing first 10 bytes to pattern first to avoid branching on the
# random garbage.
ma_like = _KNOWN_TYPES.get(key[:10])
if ma_like is None:
ma_like = _KNOWN_TYPES.get(key)
if ma_like is not None:
return ma_like
# Fall back to parameter discovery
warnings.warn(
'Signature {} for {} does not match any known type: '
'falling back to type probe function'.format(key, ftype),
UserWarning, stacklevel=2)
return _discovered_machar(ftype) | [
"def",
"_get_machar",
"(",
"ftype",
")",
":",
"params",
"=",
"_MACHAR_PARAMS",
".",
"get",
"(",
"ftype",
")",
"if",
"params",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"repr",
"(",
"ftype",
")",
")",
"# Detect known / suspected types",
"key",
"=",
"ftype",
"(",
"'-0.1'",
")",
".",
"newbyteorder",
"(",
"'<'",
")",
".",
"tobytes",
"(",
")",
"ma_like",
"=",
"None",
"if",
"ftype",
"==",
"ntypes",
".",
"longdouble",
":",
"# Could be 80 bit == 10 byte extended precision, where last bytes can",
"# be random garbage.",
"# Comparing first 10 bytes to pattern first to avoid branching on the",
"# random garbage.",
"ma_like",
"=",
"_KNOWN_TYPES",
".",
"get",
"(",
"key",
"[",
":",
"10",
"]",
")",
"if",
"ma_like",
"is",
"None",
":",
"ma_like",
"=",
"_KNOWN_TYPES",
".",
"get",
"(",
"key",
")",
"if",
"ma_like",
"is",
"not",
"None",
":",
"return",
"ma_like",
"# Fall back to parameter discovery",
"warnings",
".",
"warn",
"(",
"'Signature {} for {} does not match any known type: '",
"'falling back to type probe function'",
".",
"format",
"(",
"key",
",",
"ftype",
")",
",",
"UserWarning",
",",
"stacklevel",
"=",
"2",
")",
"return",
"_discovered_machar",
"(",
"ftype",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py3/numpy/core/getlimits.py#L237-L281 |
|
RamadhanAmizudin/malware | 2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1 | Fuzzbunch/fuzzbunch/pyreadline/rlmain.py | python | GetOutputFile | () | return rl.console | Return the console object used by readline so that it can be used for printing in color. | Return the console object used by readline so that it can be used for printing in color. | [
"Return",
"the",
"console",
"object",
"used",
"by",
"readline",
"so",
"that",
"it",
"can",
"be",
"used",
"for",
"printing",
"in",
"color",
"."
] | def GetOutputFile():
'''Return the console object used by readline so that it can be used for printing in color.'''
return rl.console | [
"def",
"GetOutputFile",
"(",
")",
":",
"return",
"rl",
".",
"console"
] | https://github.com/RamadhanAmizudin/malware/blob/2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1/Fuzzbunch/fuzzbunch/pyreadline/rlmain.py#L431-L433 |
|
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/keras/metrics.py | python | _ConfusionMatrixConditionCount.update_state | (self, y_true, y_pred, sample_weight=None) | return metrics_utils.update_confusion_matrix_variables(
{self._confusion_matrix_cond: self.accumulator},
y_true,
y_pred,
thresholds=self.thresholds,
sample_weight=sample_weight) | Accumulates the given confusion matrix condition statistics.
Args:
y_true: The ground truth values.
y_pred: The predicted values.
sample_weight: Optional weighting of each example. Defaults to 1. Can be a
`Tensor` whose rank is either 0, or the same rank as `y_true`, and must
be broadcastable to `y_true`.
Returns:
Update op. | Accumulates the given confusion matrix condition statistics. | [
"Accumulates",
"the",
"given",
"confusion",
"matrix",
"condition",
"statistics",
"."
] | def update_state(self, y_true, y_pred, sample_weight=None):
"""Accumulates the given confusion matrix condition statistics.
Args:
y_true: The ground truth values.
y_pred: The predicted values.
sample_weight: Optional weighting of each example. Defaults to 1. Can be a
`Tensor` whose rank is either 0, or the same rank as `y_true`, and must
be broadcastable to `y_true`.
Returns:
Update op.
"""
return metrics_utils.update_confusion_matrix_variables(
{self._confusion_matrix_cond: self.accumulator},
y_true,
y_pred,
thresholds=self.thresholds,
sample_weight=sample_weight) | [
"def",
"update_state",
"(",
"self",
",",
"y_true",
",",
"y_pred",
",",
"sample_weight",
"=",
"None",
")",
":",
"return",
"metrics_utils",
".",
"update_confusion_matrix_variables",
"(",
"{",
"self",
".",
"_confusion_matrix_cond",
":",
"self",
".",
"accumulator",
"}",
",",
"y_true",
",",
"y_pred",
",",
"thresholds",
"=",
"self",
".",
"thresholds",
",",
"sample_weight",
"=",
"sample_weight",
")"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/keras/metrics.py#L866-L884 |
|
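A hedged usage sketch: `_ConfusionMatrixConditionCount` is private, but it underlies public metrics such as `tf.keras.metrics.TruePositives`, whose `update_state` follows the contract documented above:

```python
import tensorflow as tf

# At threshold 0.5 the predictions [0.2, 0.8, 0.6, 0.4] binarize to
# [0, 1, 1, 0]; compared with y_true that yields two true positives.
m = tf.keras.metrics.TruePositives(thresholds=0.5)
m.update_state(y_true=[0, 1, 1, 1], y_pred=[0.2, 0.8, 0.6, 0.4])
print(m.result().numpy())  # 2.0
```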
gnuradio/gnuradio | 09c3c4fa4bfb1a02caac74cb5334dfe065391e3b | gr-digital/python/digital/qa_diff_encoder_nrzi.py | python | test_nrzi.test_encode | (self) | Performs NRZI encode and checks the result | Performs NRZI encode and checks the result | [
"Performs",
"NRZI",
"encode",
"and",
"checks",
"the",
"result"
] | def test_encode(self):
"""Performs NRZI encode and checks the result"""
encoder = digital.diff_encoder_bb(2, digital.DIFF_NRZI)
self.tb.connect(self.source, encoder, self.sink)
self.tb.start()
self.tb.wait()
expected = np.cumsum((1 ^ self.data) & 1) & 1
np.testing.assert_equal(
self.sink.data(), expected,
'NRZI encode output does not match expected result') | [
"def",
"test_encode",
"(",
"self",
")",
":",
"encoder",
"=",
"digital",
".",
"diff_encoder_bb",
"(",
"2",
",",
"digital",
".",
"DIFF_NRZI",
")",
"self",
".",
"tb",
".",
"connect",
"(",
"self",
".",
"source",
",",
"encoder",
",",
"self",
".",
"sink",
")",
"self",
".",
"tb",
".",
"start",
"(",
")",
"self",
".",
"tb",
".",
"wait",
"(",
")",
"expected",
"=",
"np",
".",
"cumsum",
"(",
"(",
"1",
"^",
"self",
".",
"data",
")",
"&",
"1",
")",
"&",
"1",
"np",
".",
"testing",
".",
"assert_equal",
"(",
"self",
".",
"sink",
".",
"data",
"(",
")",
",",
"expected",
",",
"'NRZI encode output does not match expected result'",
")"
] | https://github.com/gnuradio/gnuradio/blob/09c3c4fa4bfb1a02caac74cb5334dfe065391e3b/gr-digital/python/digital/qa_diff_encoder_nrzi.py#L29-L41 |
||
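The `expected` vector in the test is the running parity of the inverted input bits; a standalone check of that expression, with example data chosen here:

```python
import numpy as np

data = np.array([1, 0, 1, 1, 0], dtype=np.uint8)  # illustrative bits
# With this polarity the NRZI output toggles on every 0 in the input.
expected = np.cumsum((1 ^ data) & 1) & 1
print(expected)  # [0 1 1 1 0]
```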
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/distutils/command/config.py | python | config.try_run | (self, body, headers=None, include_dirs=None, libraries=None,
library_dirs=None, lang="c") | return ok | Try to compile, link to an executable, and run a program
built from 'body' and 'headers'. Return true on success, false
otherwise. | Try to compile, link to an executable, and run a program
built from 'body' and 'headers'. Return true on success, false
otherwise. | [
"Try",
"to",
"compile",
"link",
"to",
"an",
"executable",
"and",
"run",
"a",
"program",
"built",
"from",
"body",
"and",
"headers",
".",
"Return",
"true",
"on",
"success",
"false",
"otherwise",
"."
] | def try_run(self, body, headers=None, include_dirs=None, libraries=None,
library_dirs=None, lang="c"):
"""Try to compile, link to an executable, and run a program
built from 'body' and 'headers'. Return true on success, false
otherwise.
"""
from distutils.ccompiler import CompileError, LinkError
self._check_compiler()
try:
src, obj, exe = self._link(body, headers, include_dirs,
libraries, library_dirs, lang)
self.spawn([exe])
ok = 1
except (CompileError, LinkError, DistutilsExecError):
ok = 0
log.info(ok and "success!" or "failure.")
self._clean()
return ok | [
"def",
"try_run",
"(",
"self",
",",
"body",
",",
"headers",
"=",
"None",
",",
"include_dirs",
"=",
"None",
",",
"libraries",
"=",
"None",
",",
"library_dirs",
"=",
"None",
",",
"lang",
"=",
"\"c\"",
")",
":",
"from",
"distutils",
".",
"ccompiler",
"import",
"CompileError",
",",
"LinkError",
"self",
".",
"_check_compiler",
"(",
")",
"try",
":",
"src",
",",
"obj",
",",
"exe",
"=",
"self",
".",
"_link",
"(",
"body",
",",
"headers",
",",
"include_dirs",
",",
"libraries",
",",
"library_dirs",
",",
"lang",
")",
"self",
".",
"spawn",
"(",
"[",
"exe",
"]",
")",
"ok",
"=",
"1",
"except",
"(",
"CompileError",
",",
"LinkError",
",",
"DistutilsExecError",
")",
":",
"ok",
"=",
"0",
"log",
".",
"info",
"(",
"ok",
"and",
"\"success!\"",
"or",
"\"failure.\"",
")",
"self",
".",
"_clean",
"(",
")",
"return",
"ok"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/distutils/command/config.py#L260-L278 |
|
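A sketch of typical use from inside a custom `config` command subclass; the probe body, header, and announce message are illustrative:

```python
from distutils.command.config import config

class probe(config):
    def run(self):
        # try_run returns 1 if the snippet compiles, links, and runs,
        # and 0 on any CompileError/LinkError/DistutilsExecError.
        have_clock = self.try_run(
            body='int main(void) { clock(); return 0; }',
            headers=['time.h'])
        self.announce('clock() usable: %d' % have_clock)
```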
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/python/debug/cli/evaluator.py | python | ExpressionEvaluator.__init__ | (self, dump) | Constructor of ExpressionEvaluator.
Args:
dump: an instance of `DebugDumpDir`. | Constructor of ExpressionEvaluator. | [
"Constructor",
"of",
"ExpressionEvaluator",
"."
] | def __init__(self, dump):
"""Constructor of ExpressionEvaluator.
Args:
dump: an instance of `DebugDumpDir`.
"""
self._dump = dump
self._cached_tensor_values = dict() | [
"def",
"__init__",
"(",
"self",
",",
"dump",
")",
":",
"self",
".",
"_dump",
"=",
"dump",
"self",
".",
"_cached_tensor_values",
"=",
"dict",
"(",
")"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/debug/cli/evaluator.py#L109-L116 |
||
zerotier/libzt | 41eb9aebc80a5f1c816fa26a06cefde9de906676 | src/bindings/python/sockets.py | python | socket.listen | (self, backlog=None) | listen([backlog])
Put the socket in a listening state. Backlog specifies the number of
outstanding connections the OS will queue without being accepted. If
less than 0, it is set to 0. If not specified, a reasonable default
will be used. | listen([backlog]) | [
"listen",
"(",
"[",
"backlog",
"]",
")"
] | def listen(self, backlog=None):
"""listen([backlog])
Put the socket in a listening state. Backlog specifies the number of
outstanding connections the OS will queue without being accepted. If
less than 0, it is set to 0. If not specified, a reasonable default
will be used."""
if backlog is not None and backlog < 0:
backlog = 0
if backlog is None:
backlog = -1 # Lower-level code picks default
err = libzt.zts_bsd_listen(self._fd, backlog)
if err < 0:
handle_error(err) | [
"def",
"listen",
"(",
"self",
",",
"backlog",
"=",
"None",
")",
":",
"if",
"backlog",
"is",
"not",
"None",
"and",
"backlog",
"<",
"0",
":",
"backlog",
"=",
"0",
"if",
"backlog",
"is",
"None",
":",
"backlog",
"=",
"-",
"1",
"# Lower-level code picks default",
"err",
"=",
"libzt",
".",
"zts_bsd_listen",
"(",
"self",
".",
"_fd",
",",
"backlog",
")",
"if",
"err",
"<",
"0",
":",
"handle_error",
"(",
"err",
")"
] | https://github.com/zerotier/libzt/blob/41eb9aebc80a5f1c816fa26a06cefde9de906676/src/bindings/python/sockets.py#L311-L326 |
||
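A minimal server-side sketch. Everything except the `listen` calls is assumed: the constructor arguments, constants, and `bind`/`accept` follow the usual BSD-socket shape of this wrapper, and ZeroTier node setup is omitted entirely:

```python
srv = socket(AF_INET, SOCK_STREAM)  # names assumed from the module
srv.bind(('0.0.0.0', 8080))
srv.listen()     # backlog None -> -1, so the lower level picks a default
srv.listen(-3)   # a negative backlog is clamped to 0 before the call
conn, addr = srv.accept()
```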
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | src/external/coremltools_wrap/coremltools/coremltools/models/neural_network/builder.py | python | NeuralNetworkBuilder.add_expand_dims | (self, name, input_name, output_name, axes) | return spec_layer | Add an expand dims layer to the model that increases the rank of the
input tensor by adding unit dimensions.
Refer to the **ExpandDimsLayerParams** message in specification
(NeuralNetwork.proto) for more details.
Parameters
----------
name: str
The name of this layer.
input_name: str
The input blob name of this layer.
output_name: str
The output blob name of this layer.
axes: list of int or tuple of int
Dimensions at which the operation inserts the new unit axes.
See Also
--------
add_squeeze | Add an expand dims layer to the model that increases the rank of the
input tensor by adding unit dimensions.
Refer to the **ExpandDimsLayerParams** message in specification
(NeuralNetwork.proto) for more details. | [
"Add",
"an",
"expand",
"dims",
"layer",
"to",
"the",
"model",
"that",
"increases",
"the",
"rank",
"of",
"the",
"input",
"tensor",
"by",
"adding",
"unit",
"dimensions",
".",
"Refer",
"to",
"the",
"**",
"ExpandDimsLayerParams",
"**",
"message",
"in",
"specification",
"(",
"NeuralNetwork",
".",
"proto",
")",
"for",
"more",
"details",
"."
] | def add_expand_dims(self, name, input_name, output_name, axes):
"""
Add an expand dims layer to the model that increases the rank of the
input tensor by adding unit dimensions.
Refer to the **ExpandDimsLayerParams** message in specification
(NeuralNetwork.proto) for more details.
Parameters
----------
name: str
The name of this layer.
input_name: str
The input blob name of this layer.
output_name: str
The output blob name of this layer.
axes: list of int or tuple of int
Dimensions at which the operation inserts the new unit axes.
See Also
--------
add_squeeze
"""
spec_layer = self._add_generic_layer(name, [input_name], [output_name])
spec_layer_params = spec_layer.expandDims
spec_layer_params.axes.extend(axes)
self.rank_dict[output_name] = self._get_rank(input_name) + len(axes)
return spec_layer | [
"def",
"add_expand_dims",
"(",
"self",
",",
"name",
",",
"input_name",
",",
"output_name",
",",
"axes",
")",
":",
"spec_layer",
"=",
"self",
".",
"_add_generic_layer",
"(",
"name",
",",
"[",
"input_name",
"]",
",",
"[",
"output_name",
"]",
")",
"spec_layer_params",
"=",
"spec_layer",
".",
"expandDims",
"spec_layer_params",
".",
"axes",
".",
"extend",
"(",
"axes",
")",
"self",
".",
"rank_dict",
"[",
"output_name",
"]",
"=",
"self",
".",
"_get_rank",
"(",
"input_name",
")",
"+",
"len",
"(",
"axes",
")",
"return",
"spec_layer"
] | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/external/coremltools_wrap/coremltools/coremltools/models/neural_network/builder.py#L7033-L7060 |
|
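A hedged sketch, assuming a `NeuralNetworkBuilder` instance with a rank-2 blob named `'data'` already in the network:

```python
# Each entry in `axes` inserts one unit dimension; the output rank is
# input rank + len(axes), which is also how rank_dict is updated above.
builder.add_expand_dims(
    name='expand_dims_0',
    input_name='data',       # rank 2 in this sketch
    output_name='data_4d',   # rank 4 after axes=[0, -1]
    axes=[0, -1],
)
```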
BVLC/caffe | 9b891540183ddc834a02b2bd81b31afae71b2153 | python/caffe/io.py | python | Transformer.set_transpose | (self, in_, order) | Set the order of dimensions, e.g. to convert OpenCV's HxWxC images
into CxHxW.
Parameters
----------
in_ : which input to assign this dimension order
order : the order to transpose the dimensions
for example (2,0,1) changes HxWxC into CxHxW and (1,2,0) reverts | Set the order of dimensions, e.g. to convert OpenCV's HxWxC images
into CxHxW. | [
"Set",
"the",
"order",
"of",
"dimensions",
"e",
".",
"g",
".",
"to",
"convert",
"OpenCV",
"s",
"HxWxC",
"images",
"into",
"CxHxW",
"."
] | def set_transpose(self, in_, order):
"""
Set the order of dimensions, e.g. to convert OpenCV's HxWxC images
into CxHxW.
Parameters
----------
in_ : which input to assign this dimension order
order : the order to transpose the dimensions
for example (2,0,1) changes HxWxC into CxHxW and (1,2,0) reverts
"""
self.__check_input(in_)
if len(order) != len(self.inputs[in_]) - 1:
raise Exception('Transpose order needs to have the same number of '
'dimensions as the input.')
self.transpose[in_] = order | [
"def",
"set_transpose",
"(",
"self",
",",
"in_",
",",
"order",
")",
":",
"self",
".",
"__check_input",
"(",
"in_",
")",
"if",
"len",
"(",
"order",
")",
"!=",
"len",
"(",
"self",
".",
"inputs",
"[",
"in_",
"]",
")",
"-",
"1",
":",
"raise",
"Exception",
"(",
"'Transpose order needs to have the same number of '",
"'dimensions as the input.'",
")",
"self",
".",
"transpose",
"[",
"in_",
"]",
"=",
"order"
] | https://github.com/BVLC/caffe/blob/9b891540183ddc834a02b2bd81b31afae71b2153/python/caffe/io.py#L187-L202 |
||
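Typical use with an image input; the transformer construction in the comment is the conventional Caffe pattern and is assumed here:

```python
# Assuming: transformer = caffe.io.Transformer({'data': (1, 3, 227, 227)})
transformer.set_transpose('data', (2, 0, 1))  # HxWxC (OpenCV) -> CxHxW
# An order tuple that does not cover all non-batch dimensions raises.
```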
lmb-freiburg/flownet2 | b92e198b56b0e52e1ba0a5a98dc0e39fa5ae70cc | scripts/cpp_lint.py | python | RemoveMultiLineCommentsFromRange | (lines, begin, end) | Clears a range of lines for multi-line comments. | Clears a range of lines for multi-line comments. | [
"Clears",
"a",
"range",
"of",
"lines",
"for",
"multi",
"-",
"line",
"comments",
"."
] | def RemoveMultiLineCommentsFromRange(lines, begin, end):
"""Clears a range of lines for multi-line comments."""
# Having // dummy comments makes the lines non-empty, so we will not get
# unnecessary blank line warnings later in the code.
for i in range(begin, end):
lines[i] = '// dummy' | [
"def",
"RemoveMultiLineCommentsFromRange",
"(",
"lines",
",",
"begin",
",",
"end",
")",
":",
"# Having // dummy comments makes the lines non-empty, so we will not get",
"# unnecessary blank line warnings later in the code.",
"for",
"i",
"in",
"range",
"(",
"begin",
",",
"end",
")",
":",
"lines",
"[",
"i",
"]",
"=",
"'// dummy'"
] | https://github.com/lmb-freiburg/flownet2/blob/b92e198b56b0e52e1ba0a5a98dc0e39fa5ae70cc/scripts/cpp_lint.py#L1143-L1148 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/distlib/version.py | python | _suggest_normalized_version | (s) | return rs | Suggest a normalized version close to the given version string.
If you have a version string that isn't rational (i.e. NormalizedVersion
doesn't like it) then you might be able to get an equivalent (or close)
rational version from this function.
This does a number of simple normalizations to the given string, based
on observation of versions currently in use on PyPI. Given a dump of
those version during PyCon 2009, 4287 of them:
- 2312 (53.93%) match NormalizedVersion without change
with the automatic suggestion
- 3474 (81.04%) match when using this suggestion method
@param s {str} An irrational version string.
@returns A rational version string, or None if one couldn't be determined.
"Suggest",
"a",
"normalized",
"version",
"close",
"to",
"the",
"given",
"version",
"string",
"."
] | def _suggest_normalized_version(s):
"""Suggest a normalized version close to the given version string.
If you have a version string that isn't rational (i.e. NormalizedVersion
doesn't like it) then you might be able to get an equivalent (or close)
rational version from this function.
This does a number of simple normalizations to the given string, based
on observation of versions currently in use on PyPI. Given a dump of
those version during PyCon 2009, 4287 of them:
- 2312 (53.93%) match NormalizedVersion without change
with the automatic suggestion
- 3474 (81.04%) match when using this suggestion method
@param s {str} An irrational version string.
@returns A rational version string, or None if one couldn't be determined.
"""
try:
_normalized_key(s)
return s # already rational
except UnsupportedVersionError:
pass
rs = s.lower()
# part of this could use maketrans
for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
('beta', 'b'), ('rc', 'c'), ('-final', ''),
('-pre', 'c'),
('-release', ''), ('.release', ''), ('-stable', ''),
('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
('final', '')):
rs = rs.replace(orig, repl)
# if something ends with dev or pre, we add a 0
rs = re.sub(r"pre$", r"pre0", rs)
rs = re.sub(r"dev$", r"dev0", rs)
# if we have something like "b-2" or "a.2" at the end of the
# version, that is probably beta, alpha, etc
# let's remove the dash or dot
rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)
# 1.0-dev-r371 -> 1.0.dev371
# 0.1-dev-r79 -> 0.1.dev79
rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)
# Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)
# Clean: v0.3, v1.0
if rs.startswith('v'):
rs = rs[1:]
# Clean leading '0's on numbers.
#TODO: unintended side-effect on, e.g., "2003.05.09"
# PyPI stats: 77 (~2%) better
rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)
# Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
# zero.
# PyPI stats: 245 (7.56%) better
rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)
# the 'dev-rNNN' tag is a dev tag
rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)
# clean the - when used as a pre delimiter
rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)
# a terminal "dev" or "devel" can be changed into ".dev0"
rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)
# a terminal "dev" can be changed into ".dev0"
rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)
# a terminal "final" or "stable" can be removed
rs = re.sub(r"(final|stable)$", "", rs)
# The 'r' and the '-' tags are post release tags
# 0.4a1.r10 -> 0.4a1.post10
# 0.9.33-17222 -> 0.9.33.post17222
# 0.9.33-r17222 -> 0.9.33.post17222
rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)
# Clean 'r' instead of 'dev' usage:
# 0.9.33+r17222 -> 0.9.33.dev17222
# 1.0dev123 -> 1.0.dev123
# 1.0.git123 -> 1.0.dev123
# 1.0.bzr123 -> 1.0.dev123
# 0.1a0dev.123 -> 0.1a0.dev123
# PyPI stats: ~150 (~4%) better
rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)
# Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
# 0.2.pre1 -> 0.2c1
# 0.2-c1 -> 0.2c1
# 1.0preview123 -> 1.0c123
# PyPI stats: ~21 (0.62%) better
rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)
# Tcl/Tk uses "px" for their post release markers
rs = re.sub(r"p(\d+)$", r".post\1", rs)
try:
_normalized_key(rs)
except UnsupportedVersionError:
rs = None
return rs | [
"def",
"_suggest_normalized_version",
"(",
"s",
")",
":",
"try",
":",
"_normalized_key",
"(",
"s",
")",
"return",
"s",
"# already rational",
"except",
"UnsupportedVersionError",
":",
"pass",
"rs",
"=",
"s",
".",
"lower",
"(",
")",
"# part of this could use maketrans",
"for",
"orig",
",",
"repl",
"in",
"(",
"(",
"'-alpha'",
",",
"'a'",
")",
",",
"(",
"'-beta'",
",",
"'b'",
")",
",",
"(",
"'alpha'",
",",
"'a'",
")",
",",
"(",
"'beta'",
",",
"'b'",
")",
",",
"(",
"'rc'",
",",
"'c'",
")",
",",
"(",
"'-final'",
",",
"''",
")",
",",
"(",
"'-pre'",
",",
"'c'",
")",
",",
"(",
"'-release'",
",",
"''",
")",
",",
"(",
"'.release'",
",",
"''",
")",
",",
"(",
"'-stable'",
",",
"''",
")",
",",
"(",
"'+'",
",",
"'.'",
")",
",",
"(",
"'_'",
",",
"'.'",
")",
",",
"(",
"' '",
",",
"''",
")",
",",
"(",
"'.final'",
",",
"''",
")",
",",
"(",
"'final'",
",",
"''",
")",
")",
":",
"rs",
"=",
"rs",
".",
"replace",
"(",
"orig",
",",
"repl",
")",
"# if something ends with dev or pre, we add a 0",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"pre$\"",
",",
"r\"pre0\"",
",",
"rs",
")",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"dev$\"",
",",
"r\"dev0\"",
",",
"rs",
")",
"# if we have something like \"b-2\" or \"a.2\" at the end of the",
"# version, that is probably beta, alpha, etc",
"# let's remove the dash or dot",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"([abc]|rc)[\\-\\.](\\d+)$\"",
",",
"r\"\\1\\2\"",
",",
"rs",
")",
"# 1.0-dev-r371 -> 1.0.dev371",
"# 0.1-dev-r79 -> 0.1.dev79",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"[\\-\\.](dev)[\\-\\.]?r?(\\d+)$\"",
",",
"r\".\\1\\2\"",
",",
"rs",
")",
"# Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"[.~]?([abc])\\.?\"",
",",
"r\"\\1\"",
",",
"rs",
")",
"# Clean: v0.3, v1.0",
"if",
"rs",
".",
"startswith",
"(",
"'v'",
")",
":",
"rs",
"=",
"rs",
"[",
"1",
":",
"]",
"# Clean leading '0's on numbers.",
"#TODO: unintended side-effect on, e.g., \"2003.05.09\"",
"# PyPI stats: 77 (~2%) better",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"\\b0+(\\d+)(?!\\d)\"",
",",
"r\"\\1\"",
",",
"rs",
")",
"# Clean a/b/c with no version. E.g. \"1.0a\" -> \"1.0a0\". Setuptools infers",
"# zero.",
"# PyPI stats: 245 (7.56%) better",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"(\\d+[abc])$\"",
",",
"r\"\\g<1>0\"",
",",
"rs",
")",
"# the 'dev-rNNN' tag is a dev tag",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"\\.?(dev-r|dev\\.r)\\.?(\\d+)$\"",
",",
"r\".dev\\2\"",
",",
"rs",
")",
"# clean the - when used as a pre delimiter",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"-(a|b|c)(\\d+)$\"",
",",
"r\"\\1\\2\"",
",",
"rs",
")",
"# a terminal \"dev\" or \"devel\" can be changed into \".dev0\"",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"[\\.\\-](dev|devel)$\"",
",",
"r\".dev0\"",
",",
"rs",
")",
"# a terminal \"dev\" can be changed into \".dev0\"",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"(?![\\.\\-])dev$\"",
",",
"r\".dev0\"",
",",
"rs",
")",
"# a terminal \"final\" or \"stable\" can be removed",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"(final|stable)$\"",
",",
"\"\"",
",",
"rs",
")",
"# The 'r' and the '-' tags are post release tags",
"# 0.4a1.r10 -> 0.4a1.post10",
"# 0.9.33-17222 -> 0.9.33.post17222",
"# 0.9.33-r17222 -> 0.9.33.post17222",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"\\.?(r|-|-r)\\.?(\\d+)$\"",
",",
"r\".post\\2\"",
",",
"rs",
")",
"# Clean 'r' instead of 'dev' usage:",
"# 0.9.33+r17222 -> 0.9.33.dev17222",
"# 1.0dev123 -> 1.0.dev123",
"# 1.0.git123 -> 1.0.dev123",
"# 1.0.bzr123 -> 1.0.dev123",
"# 0.1a0dev.123 -> 0.1a0.dev123",
"# PyPI stats: ~150 (~4%) better",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"\\.?(dev|git|bzr)\\.?(\\d+)$\"",
",",
"r\".dev\\2\"",
",",
"rs",
")",
"# Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:",
"# 0.2.pre1 -> 0.2c1",
"# 0.2-c1 -> 0.2c1",
"# 1.0preview123 -> 1.0c123",
"# PyPI stats: ~21 (0.62%) better",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"\\.?(pre|preview|-c)(\\d+)$\"",
",",
"r\"c\\g<2>\"",
",",
"rs",
")",
"# Tcl/Tk uses \"px\" for their post release markers",
"rs",
"=",
"re",
".",
"sub",
"(",
"r\"p(\\d+)$\"",
",",
"r\".post\\1\"",
",",
"rs",
")",
"try",
":",
"_normalized_key",
"(",
"rs",
")",
"except",
"UnsupportedVersionError",
":",
"rs",
"=",
"None",
"return",
"rs"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/distlib/version.py#L903-L1119 |
|
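Illustrative inputs and outputs for the helper (it is private to distlib): the first case follows from the early return, the second from the inline `1.0-dev-r371` comment, and the last from the final `except` clause:

```python
_suggest_normalized_version('1.0')           # '1.0'  (already rational)
_suggest_normalized_version('1.0-dev-r371')  # '1.0.dev371'
_suggest_normalized_version('hot mess')      # None   (nothing salvageable)
```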
peterljq/OpenMMD | 795d4dd660cf7e537ceb599fdb038c5388b33390 | 3D Pose Baseline to VMD/src/predict_3dpose.py | python | evaluate_batches | ( sess, model,
data_mean_3d, data_std_3d, dim_to_use_3d, dim_to_ignore_3d,
data_mean_2d, data_std_2d, dim_to_use_2d, dim_to_ignore_2d,
current_step, encoder_inputs, decoder_outputs, current_epoch=0 ) | return total_err, joint_err, step_time, loss | Generic method that evaluates performance of a list of batches.
May be used to evaluate all actions or a single action.
Args
sess
model
data_mean_3d
data_std_3d
dim_to_use_3d
dim_to_ignore_3d
data_mean_2d
data_std_2d
dim_to_use_2d
dim_to_ignore_2d
current_step
encoder_inputs
decoder_outputs
current_epoch
Returns
total_err
joint_err
step_time
loss | Generic method that evaluates performance of a list of batches.
May be used to evaluate all actions or a single action. | [
"Generic",
"method",
"that",
"evaluates",
"performance",
"of",
"a",
"list",
"of",
"batches",
".",
"May",
"be",
"used",
"to",
"evaluate",
"all",
"actions",
"or",
"a",
"single",
"action",
"."
] | def evaluate_batches( sess, model,
data_mean_3d, data_std_3d, dim_to_use_3d, dim_to_ignore_3d,
data_mean_2d, data_std_2d, dim_to_use_2d, dim_to_ignore_2d,
current_step, encoder_inputs, decoder_outputs, current_epoch=0 ):
"""
Generic method that evaluates performance of a list of batches.
May be used to evaluate all actions or a single action.
Args
sess
model
data_mean_3d
data_std_3d
dim_to_use_3d
dim_to_ignore_3d
data_mean_2d
data_std_2d
dim_to_use_2d
dim_to_ignore_2d
current_step
encoder_inputs
decoder_outputs
current_epoch
Returns
total_err
joint_err
step_time
loss
"""
n_joints = 17 if not(FLAGS.predict_14) else 14
nbatches = len( encoder_inputs )
# Loop through test examples
all_dists, start_time, loss = [], time.time(), 0.
log_every_n_batches = 100
for i in range(nbatches):
if current_epoch > 0 and (i+1) % log_every_n_batches == 0:
print("Working on test epoch {0}, batch {1} / {2}".format( current_epoch, i+1, nbatches) )
enc_in, dec_out = encoder_inputs[i], decoder_outputs[i]
dp = 1.0 # dropout keep probability is always 1 at test time
step_loss, loss_summary, poses3d = model.step( sess, enc_in, dec_out, dp, isTraining=False )
loss += step_loss
# denormalize
enc_in = data_utils.unNormalizeData( enc_in, data_mean_2d, data_std_2d, dim_to_ignore_2d )
dec_out = data_utils.unNormalizeData( dec_out, data_mean_3d, data_std_3d, dim_to_ignore_3d )
poses3d = data_utils.unNormalizeData( poses3d, data_mean_3d, data_std_3d, dim_to_ignore_3d )
# Keep only the relevant dimensions
dtu3d = np.hstack( (np.arange(3), dim_to_use_3d) ) if not(FLAGS.predict_14) else dim_to_use_3d
dec_out = dec_out[:, dtu3d]
poses3d = poses3d[:, dtu3d]
assert dec_out.shape[0] == FLAGS.batch_size
assert poses3d.shape[0] == FLAGS.batch_size
if FLAGS.procrustes:
# Apply per-frame procrustes alignment if asked to do so
for j in range(FLAGS.batch_size):
gt = np.reshape(dec_out[j,:],[-1,3])
out = np.reshape(poses3d[j,:],[-1,3])
_, Z, T, b, c = procrustes.compute_similarity_transform(gt,out,compute_optimal_scale=True)
out = (b*out.dot(T))+c
poses3d[j,:] = np.reshape(out,[-1,17*3] ) if not(FLAGS.predict_14) else np.reshape(out,[-1,14*3] )
# Compute Euclidean distance error per joint
sqerr = (poses3d - dec_out)**2 # Squared error between prediction and expected output
dists = np.zeros( (sqerr.shape[0], n_joints) ) # Array with L2 error per joint in mm
dist_idx = 0
for k in np.arange(0, n_joints*3, 3):
# Sum across X, Y, and Z dimensions to obtain L2 distance
dists[:,dist_idx] = np.sqrt( np.sum( sqerr[:, k:k+3], axis=1 ))
dist_idx = dist_idx + 1
all_dists.append(dists)
assert sqerr.shape[0] == FLAGS.batch_size
step_time = (time.time() - start_time) / nbatches
loss = loss / nbatches
all_dists = np.vstack( all_dists )
# Error per joint and total for all passed batches
joint_err = np.mean( all_dists, axis=0 )
total_err = np.mean( all_dists )
return total_err, joint_err, step_time, loss | [
"def",
"evaluate_batches",
"(",
"sess",
",",
"model",
",",
"data_mean_3d",
",",
"data_std_3d",
",",
"dim_to_use_3d",
",",
"dim_to_ignore_3d",
",",
"data_mean_2d",
",",
"data_std_2d",
",",
"dim_to_use_2d",
",",
"dim_to_ignore_2d",
",",
"current_step",
",",
"encoder_inputs",
",",
"decoder_outputs",
",",
"current_epoch",
"=",
"0",
")",
":",
"n_joints",
"=",
"17",
"if",
"not",
"(",
"FLAGS",
".",
"predict_14",
")",
"else",
"14",
"nbatches",
"=",
"len",
"(",
"encoder_inputs",
")",
"# Loop through test examples",
"all_dists",
",",
"start_time",
",",
"loss",
"=",
"[",
"]",
",",
"time",
".",
"time",
"(",
")",
",",
"0.",
"log_every_n_batches",
"=",
"100",
"for",
"i",
"in",
"range",
"(",
"nbatches",
")",
":",
"if",
"current_epoch",
">",
"0",
"and",
"(",
"i",
"+",
"1",
")",
"%",
"log_every_n_batches",
"==",
"0",
":",
"print",
"(",
"\"Working on test epoch {0}, batch {1} / {2}\"",
".",
"format",
"(",
"current_epoch",
",",
"i",
"+",
"1",
",",
"nbatches",
")",
")",
"enc_in",
",",
"dec_out",
"=",
"encoder_inputs",
"[",
"i",
"]",
",",
"decoder_outputs",
"[",
"i",
"]",
"dp",
"=",
"1.0",
"# dropout keep probability is always 1 at test time",
"step_loss",
",",
"loss_summary",
",",
"poses3d",
"=",
"model",
".",
"step",
"(",
"sess",
",",
"enc_in",
",",
"dec_out",
",",
"dp",
",",
"isTraining",
"=",
"False",
")",
"loss",
"+=",
"step_loss",
"# denormalize",
"enc_in",
"=",
"data_utils",
".",
"unNormalizeData",
"(",
"enc_in",
",",
"data_mean_2d",
",",
"data_std_2d",
",",
"dim_to_ignore_2d",
")",
"dec_out",
"=",
"data_utils",
".",
"unNormalizeData",
"(",
"dec_out",
",",
"data_mean_3d",
",",
"data_std_3d",
",",
"dim_to_ignore_3d",
")",
"poses3d",
"=",
"data_utils",
".",
"unNormalizeData",
"(",
"poses3d",
",",
"data_mean_3d",
",",
"data_std_3d",
",",
"dim_to_ignore_3d",
")",
"# Keep only the relevant dimensions",
"dtu3d",
"=",
"np",
".",
"hstack",
"(",
"(",
"np",
".",
"arange",
"(",
"3",
")",
",",
"dim_to_use_3d",
")",
")",
"if",
"not",
"(",
"FLAGS",
".",
"predict_14",
")",
"else",
"dim_to_use_3d",
"dec_out",
"=",
"dec_out",
"[",
":",
",",
"dtu3d",
"]",
"poses3d",
"=",
"poses3d",
"[",
":",
",",
"dtu3d",
"]",
"assert",
"dec_out",
".",
"shape",
"[",
"0",
"]",
"==",
"FLAGS",
".",
"batch_size",
"assert",
"poses3d",
".",
"shape",
"[",
"0",
"]",
"==",
"FLAGS",
".",
"batch_size",
"if",
"FLAGS",
".",
"procrustes",
":",
"# Apply per-frame procrustes alignment if asked to do so",
"for",
"j",
"in",
"range",
"(",
"FLAGS",
".",
"batch_size",
")",
":",
"gt",
"=",
"np",
".",
"reshape",
"(",
"dec_out",
"[",
"j",
",",
":",
"]",
",",
"[",
"-",
"1",
",",
"3",
"]",
")",
"out",
"=",
"np",
".",
"reshape",
"(",
"poses3d",
"[",
"j",
",",
":",
"]",
",",
"[",
"-",
"1",
",",
"3",
"]",
")",
"_",
",",
"Z",
",",
"T",
",",
"b",
",",
"c",
"=",
"procrustes",
".",
"compute_similarity_transform",
"(",
"gt",
",",
"out",
",",
"compute_optimal_scale",
"=",
"True",
")",
"out",
"=",
"(",
"b",
"*",
"out",
".",
"dot",
"(",
"T",
")",
")",
"+",
"c",
"poses3d",
"[",
"j",
",",
":",
"]",
"=",
"np",
".",
"reshape",
"(",
"out",
",",
"[",
"-",
"1",
",",
"17",
"*",
"3",
"]",
")",
"if",
"not",
"(",
"FLAGS",
".",
"predict_14",
")",
"else",
"np",
".",
"reshape",
"(",
"out",
",",
"[",
"-",
"1",
",",
"14",
"*",
"3",
"]",
")",
"# Compute Euclidean distance error per joint",
"sqerr",
"=",
"(",
"poses3d",
"-",
"dec_out",
")",
"**",
"2",
"# Squared error between prediction and expected output",
"dists",
"=",
"np",
".",
"zeros",
"(",
"(",
"sqerr",
".",
"shape",
"[",
"0",
"]",
",",
"n_joints",
")",
")",
"# Array with L2 error per joint in mm",
"dist_idx",
"=",
"0",
"for",
"k",
"in",
"np",
".",
"arange",
"(",
"0",
",",
"n_joints",
"*",
"3",
",",
"3",
")",
":",
"# Sum across X,Y, and Z dimenstions to obtain L2 distance",
"dists",
"[",
":",
",",
"dist_idx",
"]",
"=",
"np",
".",
"sqrt",
"(",
"np",
".",
"sum",
"(",
"sqerr",
"[",
":",
",",
"k",
":",
"k",
"+",
"3",
"]",
",",
"axis",
"=",
"1",
")",
")",
"dist_idx",
"=",
"dist_idx",
"+",
"1",
"all_dists",
".",
"append",
"(",
"dists",
")",
"assert",
"sqerr",
".",
"shape",
"[",
"0",
"]",
"==",
"FLAGS",
".",
"batch_size",
"step_time",
"=",
"(",
"time",
".",
"time",
"(",
")",
"-",
"start_time",
")",
"/",
"nbatches",
"loss",
"=",
"loss",
"/",
"nbatches",
"all_dists",
"=",
"np",
".",
"vstack",
"(",
"all_dists",
")",
"# Error per joint and total for all passed batches",
"joint_err",
"=",
"np",
".",
"mean",
"(",
"all_dists",
",",
"axis",
"=",
"0",
")",
"total_err",
"=",
"np",
".",
"mean",
"(",
"all_dists",
")",
"return",
"total_err",
",",
"joint_err",
",",
"step_time",
",",
"loss"
] | https://github.com/peterljq/OpenMMD/blob/795d4dd660cf7e537ceb599fdb038c5388b33390/3D Pose Baseline to VMD/src/predict_3dpose.py#L322-L414 |
|
miyosuda/TensorFlowAndroidDemo | 35903e0221aa5f109ea2dbef27f20b52e317f42d | jni-build/jni/include/tensorflow/contrib/distributions/python/ops/multinomial.py | python | Multinomial.logits | (self) | return self._logits | Log-odds. | Log-odds. | [
"Log",
"-",
"odds",
"."
] | def logits(self):
"""Log-odds."""
return self._logits | [
"def",
"logits",
"(",
"self",
")",
":",
"return",
"self",
".",
"_logits"
] | https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/contrib/distributions/python/ops/multinomial.py#L177-L179 |
|
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/SimpleXMLRPCServer.py | python | CGIXMLRPCRequestHandler.handle_request | (self, request_text = None) | Handle a single XML-RPC request passed through a CGI post method.
If no XML data is given then it is read from stdin. The resulting
XML-RPC response is printed to stdout along with the correct HTTP
headers. | Handle a single XML-RPC request passed through a CGI post method. | [
"Handle",
"a",
"single",
"XML",
"-",
"RPC",
"request",
"passed",
"through",
"a",
"CGI",
"post",
"method",
"."
] | def handle_request(self, request_text = None):
"""Handle a single XML-RPC request passed through a CGI post method.
If no XML data is given then it is read from stdin. The resulting
XML-RPC response is printed to stdout along with the correct HTTP
headers.
"""
if request_text is None and \
os.environ.get('REQUEST_METHOD', None) == 'GET':
self.handle_get()
else:
# POST data is normally available through stdin
try:
length = int(os.environ.get('CONTENT_LENGTH', None))
except (TypeError, ValueError):
length = -1
if request_text is None:
request_text = sys.stdin.read(length)
self.handle_xmlrpc(request_text) | [
"def",
"handle_request",
"(",
"self",
",",
"request_text",
"=",
"None",
")",
":",
"if",
"request_text",
"is",
"None",
"and",
"os",
".",
"environ",
".",
"get",
"(",
"'REQUEST_METHOD'",
",",
"None",
")",
"==",
"'GET'",
":",
"self",
".",
"handle_get",
"(",
")",
"else",
":",
"# POST data is normally available through stdin",
"try",
":",
"length",
"=",
"int",
"(",
"os",
".",
"environ",
".",
"get",
"(",
"'CONTENT_LENGTH'",
",",
"None",
")",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"length",
"=",
"-",
"1",
"if",
"request_text",
"is",
"None",
":",
"request_text",
"=",
"sys",
".",
"stdin",
".",
"read",
"(",
"length",
")",
"self",
".",
"handle_xmlrpc",
"(",
"request_text",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/SimpleXMLRPCServer.py#L680-L700 |
||
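The classic CGI script shape for this handler, mirroring the stdlib documentation's example (Python 2 module name):

```python
#!/usr/bin/env python2
from SimpleXMLRPCServer import CGIXMLRPCRequestHandler

handler = CGIXMLRPCRequestHandler()
handler.register_function(pow)
handler.handle_request()  # GET -> HTML docs; POST -> dispatch the call
```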
wy1iu/LargeMargin_Softmax_Loss | c3e9f20e4f16e2b4daf7d358a614366b9b39a6ec | scripts/cpp_lint.py | python | ProcessFile | (filename, vlevel, extra_check_functions=[]) | Does google-lint on a single file.
Args:
filename: The name of the file to parse.
vlevel: The level of errors to report. Every error of confidence
>= verbose_level will be reported. 0 is a good default.
extra_check_functions: An array of additional check functions that will be
run on each source line. Each function takes 4
arguments: filename, clean_lines, line, error | Does google-lint on a single file. | [
"Does",
"google",
"-",
"lint",
"on",
"a",
"single",
"file",
"."
] | def ProcessFile(filename, vlevel, extra_check_functions=[]):
"""Does google-lint on a single file.
Args:
filename: The name of the file to parse.
vlevel: The level of errors to report. Every error of confidence
>= verbose_level will be reported. 0 is a good default.
extra_check_functions: An array of additional check functions that will be
run on each source line. Each function takes 4
arguments: filename, clean_lines, line, error
"""
_SetVerboseLevel(vlevel)
try:
# Support the UNIX convention of using "-" for stdin. Note that
# we are not opening the file with universal newline support
# (which codecs doesn't support anyway), so the resulting lines do
# contain trailing '\r' characters if we are reading a file that
# has CRLF endings.
# If after the split a trailing '\r' is present, it is removed
# below. If it is not expected to be present (i.e. os.linesep !=
# '\r\n' as in Windows), a warning is issued below if this file
# is processed.
if filename == '-':
lines = codecs.StreamReaderWriter(sys.stdin,
codecs.getreader('utf8'),
codecs.getwriter('utf8'),
'replace').read().split('\n')
else:
lines = codecs.open(filename, 'r', 'utf8', 'replace').read().split('\n')
carriage_return_found = False
# Remove trailing '\r'.
for linenum in range(len(lines)):
if lines[linenum].endswith('\r'):
lines[linenum] = lines[linenum].rstrip('\r')
carriage_return_found = True
except IOError:
sys.stderr.write(
"Skipping input '%s': Can't open for reading\n" % filename)
return
# Note, if no dot is found, this will give the entire filename as the ext.
file_extension = filename[filename.rfind('.') + 1:]
# When reading from stdin, the extension is unknown, so no cpplint tests
# should rely on the extension.
if filename != '-' and file_extension not in _valid_extensions:
sys.stderr.write('Ignoring %s; not a valid file name '
'(%s)\n' % (filename, ', '.join(_valid_extensions)))
else:
ProcessFileData(filename, file_extension, lines, Error,
extra_check_functions)
if carriage_return_found and os.linesep != '\r\n':
# Use 0 for linenum since outputting only one error for potentially
# several lines.
Error(filename, 0, 'whitespace/newline', 1,
'One or more unexpected \\r (^M) found; '
'better to use only a \\n')
sys.stderr.write('Done processing %s\n' % filename) | [
"def",
"ProcessFile",
"(",
"filename",
",",
"vlevel",
",",
"extra_check_functions",
"=",
"[",
"]",
")",
":",
"_SetVerboseLevel",
"(",
"vlevel",
")",
"try",
":",
"# Support the UNIX convention of using \"-\" for stdin. Note that",
"# we are not opening the file with universal newline support",
"# (which codecs doesn't support anyway), so the resulting lines do",
"# contain trailing '\\r' characters if we are reading a file that",
"# has CRLF endings.",
"# If after the split a trailing '\\r' is present, it is removed",
"# below. If it is not expected to be present (i.e. os.linesep !=",
"# '\\r\\n' as in Windows), a warning is issued below if this file",
"# is processed.",
"if",
"filename",
"==",
"'-'",
":",
"lines",
"=",
"codecs",
".",
"StreamReaderWriter",
"(",
"sys",
".",
"stdin",
",",
"codecs",
".",
"getreader",
"(",
"'utf8'",
")",
",",
"codecs",
".",
"getwriter",
"(",
"'utf8'",
")",
",",
"'replace'",
")",
".",
"read",
"(",
")",
".",
"split",
"(",
"'\\n'",
")",
"else",
":",
"lines",
"=",
"codecs",
".",
"open",
"(",
"filename",
",",
"'r'",
",",
"'utf8'",
",",
"'replace'",
")",
".",
"read",
"(",
")",
".",
"split",
"(",
"'\\n'",
")",
"carriage_return_found",
"=",
"False",
"# Remove trailing '\\r'.",
"for",
"linenum",
"in",
"range",
"(",
"len",
"(",
"lines",
")",
")",
":",
"if",
"lines",
"[",
"linenum",
"]",
".",
"endswith",
"(",
"'\\r'",
")",
":",
"lines",
"[",
"linenum",
"]",
"=",
"lines",
"[",
"linenum",
"]",
".",
"rstrip",
"(",
"'\\r'",
")",
"carriage_return_found",
"=",
"True",
"except",
"IOError",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"Skipping input '%s': Can't open for reading\\n\"",
"%",
"filename",
")",
"return",
"# Note, if no dot is found, this will give the entire filename as the ext.",
"file_extension",
"=",
"filename",
"[",
"filename",
".",
"rfind",
"(",
"'.'",
")",
"+",
"1",
":",
"]",
"# When reading from stdin, the extension is unknown, so no cpplint tests",
"# should rely on the extension.",
"if",
"filename",
"!=",
"'-'",
"and",
"file_extension",
"not",
"in",
"_valid_extensions",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'Ignoring %s; not a valid file name '",
"'(%s)\\n'",
"%",
"(",
"filename",
",",
"', '",
".",
"join",
"(",
"_valid_extensions",
")",
")",
")",
"else",
":",
"ProcessFileData",
"(",
"filename",
",",
"file_extension",
",",
"lines",
",",
"Error",
",",
"extra_check_functions",
")",
"if",
"carriage_return_found",
"and",
"os",
".",
"linesep",
"!=",
"'\\r\\n'",
":",
"# Use 0 for linenum since outputting only one error for potentially",
"# several lines.",
"Error",
"(",
"filename",
",",
"0",
",",
"'whitespace/newline'",
",",
"1",
",",
"'One or more unexpected \\\\r (^M) found;'",
"'better to use only a \\\\n'",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"'Done processing %s\\n'",
"%",
"filename",
")"
] | https://github.com/wy1iu/LargeMargin_Softmax_Loss/blob/c3e9f20e4f16e2b4daf7d358a614366b9b39a6ec/scripts/cpp_lint.py#L4689-L4754 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/lib/agw/aui/auibar.py | python | CommandToolBarEvent.SetToolId | (self, id) | Sets the :class:`AuiToolBarItem` identifier.
:param integer `id`: the toolbar item identifier. | Sets the :class:`AuiToolBarItem` identifier. | [
"Sets",
"the",
":",
"class",
":",
"AuiToolBarItem",
"identifier",
"."
] | def SetToolId(self, id):
"""
Sets the :class:`AuiToolBarItem` identifier.
:param integer `id`: the toolbar item identifier.
"""
self.tool_id = id | [
"def",
"SetToolId",
"(",
"self",
",",
"id",
")",
":",
"self",
".",
"tool_id",
"=",
"id"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/aui/auibar.py#L123-L130 |
||
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/decimal.py | python | Decimal.__rpow__ | (self, other, context=None) | return other.__pow__(self, context=context) | Swaps self/other and returns __pow__. | Swaps self/other and returns __pow__. | [
"Swaps",
"self",
"/",
"other",
"and",
"returns",
"__pow__",
"."
] | def __rpow__(self, other, context=None):
"""Swaps self/other and returns __pow__."""
other = _convert_other(other)
if other is NotImplemented:
return other
return other.__pow__(self, context=context) | [
"def",
"__rpow__",
"(",
"self",
",",
"other",
",",
"context",
"=",
"None",
")",
":",
"other",
"=",
"_convert_other",
"(",
"other",
")",
"if",
"other",
"is",
"NotImplemented",
":",
"return",
"other",
"return",
"other",
".",
"__pow__",
"(",
"self",
",",
"context",
"=",
"context",
")"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/decimal.py#L2390-L2395 |
|
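A worked example of the reflected dispatch:

```python
from decimal import Decimal

# int.__pow__(2, Decimal(3)) returns NotImplemented, so Python falls
# back to Decimal(3).__rpow__(2), which computes Decimal(2) ** Decimal(3).
assert 2 ** Decimal(3) == Decimal(8)
```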
devpack/android-python27 | d42dd67565e104cf7b0b50eb473f615db3e69901 | python-build-with-qt/sip-4.11.2/siputils.py | python | Configuration.__init__ | (self, sub_cfg=None) | Initialise an instance of the class.
sub_cfg is the list of sub-class configurations. It should be None
when called normally. | Initialise an instance of the class. | [
"Initialise",
"an",
"instance",
"of",
"the",
"class",
"."
] | def __init__(self, sub_cfg=None):
"""Initialise an instance of the class.
sub_cfg is the list of sub-class configurations. It should be None
when called normally.
"""
# Find the build macros in the closest imported module from where this
# was originally defined.
self._macros = None
for cls in self.__class__.__mro__:
if cls is object:
continue
mod = sys.modules[cls.__module__]
if hasattr(mod, "_default_macros"):
self._macros = mod._default_macros
break
if sub_cfg:
cfg = sub_cfg
else:
cfg = []
cfg.append(_pkg_config)
global _config_stack
_config_stack = cfg | [
"def",
"__init__",
"(",
"self",
",",
"sub_cfg",
"=",
"None",
")",
":",
"# Find the build macros in the closest imported module from where this",
"# was originally defined.",
"self",
".",
"_macros",
"=",
"None",
"for",
"cls",
"in",
"self",
".",
"__class__",
".",
"__mro__",
":",
"if",
"cls",
"is",
"object",
":",
"continue",
"mod",
"=",
"sys",
".",
"modules",
"[",
"cls",
".",
"__module__",
"]",
"if",
"hasattr",
"(",
"mod",
",",
"\"_default_macros\"",
")",
":",
"self",
".",
"_macros",
"=",
"mod",
".",
"_default_macros",
"break",
"if",
"sub_cfg",
":",
"cfg",
"=",
"sub_cfg",
"else",
":",
"cfg",
"=",
"[",
"]",
"cfg",
".",
"append",
"(",
"_pkg_config",
")",
"global",
"_config_stack",
"_config_stack",
"=",
"cfg"
] | https://github.com/devpack/android-python27/blob/d42dd67565e104cf7b0b50eb473f615db3e69901/python-build-with-qt/sip-4.11.2/siputils.py#L37-L65 |
||
RamadhanAmizudin/malware | 2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1 | Fuzzbunch/fuzzbunch/edfmeta.py | python | parse_forward | (xmlFile) | return arches | Forward-deployment (i.e., DARINGVETERAN/DARINGNEOPHYTE) DLL Configuration
INPUT
-----
Path to plugin's standard FB file.
OUTPUT
------
Dictionary mapping archOs tag (e.g., "x86-Windows") to a tuple containing
plugin proxy and core DLLs. Note that either element may be None! | Forward-deployment (i.e., DARINGVETERAN/DARINGNEOPHYTE) DLL Configuration
INPUT
-----
Path to plugin's standard FB file.
OUTPUT
------
Dictionary mapping archOs tag (e.g., "x86-Windows") to a tuple containing
plugin proxy and core DLLs. Note that either element may be None! | [
"Forward",
"-",
"deployment",
"(",
"i",
".",
"e",
".",
"DARINGVETERAN",
"/",
"DARINGNEOPHYTE",
")",
"DLL",
"Configuration",
"INPUT",
"-----",
"Path",
"to",
"plugin",
"s",
"standard",
"FB",
"file",
".",
"OUTPUT",
"------",
"Dictionary",
"mapping",
"archOs",
"tag",
"(",
"e",
".",
"g",
".",
"x86",
"-",
"Windows",
")",
"to",
"a",
"tuple",
"containing",
"plugin",
"proxy",
"and",
"core",
"DLLs",
".",
"Note",
"that",
"either",
"element",
"may",
"be",
"None!"
] | def parse_forward(xmlFile):
"""
Forward-deployment (i.e., DARINGVETERAN/DARINGNEOPHYTE) DLL Configuration
INPUT
-----
Path to plugin's standard FB file.
OUTPUT
------
Dictionary mapping archOs tag (e.g., "x86-Windows") to a tuple containing
plugin proxy and core DLLs. Note that either element may be None!
"""
xmldoc = ElementTree.parse(xmlFile)
arches = {}
for arch in xmldoc.findall("package/arch"):
proxy = getattr(arch.find('base'), 'text', None)
core = getattr(arch.find('core'), 'text', None)
arches[arch.get('name')] = (proxy, core)
return arches | [
"def",
"parse_forward",
"(",
"xmlFile",
")",
":",
"xmldoc",
"=",
"ElementTree",
".",
"parse",
"(",
"xmlFile",
")",
"arches",
"=",
"{",
"}",
"for",
"arch",
"in",
"xmldoc",
".",
"findall",
"(",
"\"package/arch\"",
")",
":",
"proxy",
"=",
"getattr",
"(",
"arch",
".",
"find",
"(",
"'base'",
")",
",",
"'text'",
",",
"None",
")",
"core",
"=",
"getattr",
"(",
"arch",
".",
"find",
"(",
"'core'",
")",
",",
"'text'",
",",
"None",
")",
"arches",
"[",
"arch",
".",
"get",
"(",
"'name'",
")",
"]",
"=",
"(",
"proxy",
",",
"core",
")",
"return",
"arches"
] | https://github.com/RamadhanAmizudin/malware/blob/2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1/Fuzzbunch/fuzzbunch/edfmeta.py#L257-L276 |
|
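A hedged sketch of the result shape for a hypothetical descriptor file; the file name and DLL names are invented, but the tag names follow the XPath expressions above:

```python
arches = parse_forward('plugin.fb')  # hypothetical path
# e.g. {'x86-Windows': ('proxy32.dll', 'core32.dll'),
#       'x64-Windows': (None, 'core64.dll')}   # either slot may be None
```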
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/setuptools/_vendor/packaging/markers.py | python | Marker.evaluate | (self, environment=None) | return _evaluate_markers(self._markers, current_environment) | Evaluate a marker.
Return the boolean from evaluating the given marker against the
environment. environment is an optional argument to override all or
part of the determined environment.
The environment is determined from the current Python process. | Evaluate a marker. | [
"Evaluate",
"a",
"marker",
"."
] | def evaluate(self, environment=None):
"""Evaluate a marker.
Return the boolean from evaluating the given marker against the
environment. environment is an optional argument to override all or
part of the determined environment.
The environment is determined from the current Python process.
"""
current_environment = default_environment()
if environment is not None:
current_environment.update(environment)
return _evaluate_markers(self._markers, current_environment) | [
"def",
"evaluate",
"(",
"self",
",",
"environment",
"=",
"None",
")",
":",
"current_environment",
"=",
"default_environment",
"(",
")",
"if",
"environment",
"is",
"not",
"None",
":",
"current_environment",
".",
"update",
"(",
"environment",
")",
"return",
"_evaluate_markers",
"(",
"self",
".",
"_markers",
",",
"current_environment",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/setuptools/_vendor/packaging/markers.py#L283-L296 |
|
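Usage sketch; the class is vendored under setuptools here, but the standalone `packaging` distribution ships the same API:

```python
from packaging.markers import Marker

m = Marker('python_version >= "2.7" and sys_platform == "linux"')
print(m.evaluate())                           # current interpreter's environment
print(m.evaluate({'sys_platform': 'win32'}))  # override part of it
```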
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/framework/indexed_slices.py | python | convert_to_tensor_or_indexed_slices | (value, dtype=None, name=None) | return internal_convert_to_tensor_or_indexed_slices(
value=value, dtype=dtype, name=name, as_ref=False) | Converts the given object to a `Tensor` or an `IndexedSlices`.
If `value` is an `IndexedSlices` or `SparseTensor` it is returned
unmodified. Otherwise, it is converted to a `Tensor` using
`convert_to_tensor()`.
Args:
value: An `IndexedSlices`, `SparseTensor`, or an object that can be consumed
by `convert_to_tensor()`.
dtype: (Optional.) The required `DType` of the returned `Tensor` or
`IndexedSlices`.
name: (Optional.) A name to use if a new `Tensor` is created.
Returns:
A `Tensor`, `IndexedSlices`, or `SparseTensor` based on `value`.
Raises:
ValueError: If `dtype` does not match the element type of `value`. | Converts the given object to a `Tensor` or an `IndexedSlices`. | [
"Converts",
"the",
"given",
"object",
"to",
"a",
"Tensor",
"or",
"an",
"IndexedSlices",
"."
] | def convert_to_tensor_or_indexed_slices(value, dtype=None, name=None):
"""Converts the given object to a `Tensor` or an `IndexedSlices`.
If `value` is an `IndexedSlices` or `SparseTensor` it is returned
unmodified. Otherwise, it is converted to a `Tensor` using
`convert_to_tensor()`.
Args:
value: An `IndexedSlices`, `SparseTensor`, or an object that can be consumed
by `convert_to_tensor()`.
dtype: (Optional.) The required `DType` of the returned `Tensor` or
`IndexedSlices`.
name: (Optional.) A name to use if a new `Tensor` is created.
Returns:
A `Tensor`, `IndexedSlices`, or `SparseTensor` based on `value`.
Raises:
ValueError: If `dtype` does not match the element type of `value`.
"""
return internal_convert_to_tensor_or_indexed_slices(
value=value, dtype=dtype, name=name, as_ref=False) | [
"def",
"convert_to_tensor_or_indexed_slices",
"(",
"value",
",",
"dtype",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"return",
"internal_convert_to_tensor_or_indexed_slices",
"(",
"value",
"=",
"value",
",",
"dtype",
"=",
"dtype",
",",
"name",
"=",
"name",
",",
"as_ref",
"=",
"False",
")"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/framework/indexed_slices.py#L268-L289 |
|
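A short sketch of both branches; the import path is the internal module shown in this record, so it may move between TensorFlow versions:

```python
import tensorflow as tf
from tensorflow.python.framework.indexed_slices import (
    convert_to_tensor_or_indexed_slices)

t = convert_to_tensor_or_indexed_slices([1, 2, 3])  # plain data -> Tensor
s = tf.IndexedSlices(values=tf.constant([[1.0]]),
                     indices=tf.constant([0]))
assert convert_to_tensor_or_indexed_slices(s) is s  # passed through unmodified
```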
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scikit-learn/py2/sklearn/cluster/bicluster.py | python | _log_normalize | (X) | return L - row_avg - col_avg + avg | Normalize ``X`` according to Kluger's log-interactions scheme. | Normalize ``X`` according to Kluger's log-interactions scheme. | [
"Normalize",
"X",
"according",
"to",
"Kluger",
"s",
"log",
"-",
"interactions",
"scheme",
"."
] | def _log_normalize(X):
"""Normalize ``X`` according to Kluger's log-interactions scheme."""
X = make_nonnegative(X, min_value=1)
if issparse(X):
raise ValueError("Cannot compute log of a sparse matrix,"
" because log(x) diverges to -infinity as x"
" goes to 0.")
L = np.log(X)
row_avg = L.mean(axis=1)[:, np.newaxis]
col_avg = L.mean(axis=0)
avg = L.mean()
return L - row_avg - col_avg + avg | [
"def",
"_log_normalize",
"(",
"X",
")",
":",
"X",
"=",
"make_nonnegative",
"(",
"X",
",",
"min_value",
"=",
"1",
")",
"if",
"issparse",
"(",
"X",
")",
":",
"raise",
"ValueError",
"(",
"\"Cannot compute log of a sparse matrix,\"",
"\" because log(x) diverges to -infinity as x\"",
"\" goes to 0.\"",
")",
"L",
"=",
"np",
".",
"log",
"(",
"X",
")",
"row_avg",
"=",
"L",
".",
"mean",
"(",
"axis",
"=",
"1",
")",
"[",
":",
",",
"np",
".",
"newaxis",
"]",
"col_avg",
"=",
"L",
".",
"mean",
"(",
"axis",
"=",
"0",
")",
"avg",
"=",
"L",
".",
"mean",
"(",
")",
"return",
"L",
"-",
"row_avg",
"-",
"col_avg",
"+",
"avg"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py2/sklearn/cluster/bicluster.py#L75-L86 |
|
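In matrix terms: with L = log(X), the result is L minus its row means, minus its column means, plus its grand mean. A quick NumPy check of that identity (X here already satisfies the min_value=1 constraint):

```python
import numpy as np

X = np.array([[1.0, 2.0],
              [4.0, 8.0]])
L = np.log(X)
expected = L - L.mean(axis=1, keepdims=True) - L.mean(axis=0) + L.mean()
# _log_normalize(X) should equal `expected` elementwise.
```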
papyrussolution/OpenPapyrus | bbfb5ec2ea2109b8e2f125edd838e12eaf7b8b91 | Src/OSF/protobuf-3.19.1/python/google/protobuf/internal/python_message.py | python | _AddEqualsMethod | (message_descriptor, cls) | Helper for _AddMessageMethods(). | Helper for _AddMessageMethods(). | [
"Helper",
"for",
"_AddMessageMethods",
"()",
"."
] | def _AddEqualsMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
def __eq__(self, other):
if (not isinstance(other, message_mod.Message) or
other.DESCRIPTOR != self.DESCRIPTOR):
return False
if self is other:
return True
if self.DESCRIPTOR.full_name == _AnyFullTypeName:
any_a = _InternalUnpackAny(self)
any_b = _InternalUnpackAny(other)
if any_a and any_b:
return any_a == any_b
if not self.ListFields() == other.ListFields():
return False
# TODO(jieluo): Fix UnknownFieldSet to consider MessageSet extensions,
# then use it for the comparison.
unknown_fields = list(self._unknown_fields)
unknown_fields.sort()
other_unknown_fields = list(other._unknown_fields)
other_unknown_fields.sort()
return unknown_fields == other_unknown_fields
cls.__eq__ = __eq__ | [
"def",
"_AddEqualsMethod",
"(",
"message_descriptor",
",",
"cls",
")",
":",
"def",
"__eq__",
"(",
"self",
",",
"other",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"other",
",",
"message_mod",
".",
"Message",
")",
"or",
"other",
".",
"DESCRIPTOR",
"!=",
"self",
".",
"DESCRIPTOR",
")",
":",
"return",
"False",
"if",
"self",
"is",
"other",
":",
"return",
"True",
"if",
"self",
".",
"DESCRIPTOR",
".",
"full_name",
"==",
"_AnyFullTypeName",
":",
"any_a",
"=",
"_InternalUnpackAny",
"(",
"self",
")",
"any_b",
"=",
"_InternalUnpackAny",
"(",
"other",
")",
"if",
"any_a",
"and",
"any_b",
":",
"return",
"any_a",
"==",
"any_b",
"if",
"not",
"self",
".",
"ListFields",
"(",
")",
"==",
"other",
".",
"ListFields",
"(",
")",
":",
"return",
"False",
"# TODO(jieluo): Fix UnknownFieldSet to consider MessageSet extensions,",
"# then use it for the comparison.",
"unknown_fields",
"=",
"list",
"(",
"self",
".",
"_unknown_fields",
")",
"unknown_fields",
".",
"sort",
"(",
")",
"other_unknown_fields",
"=",
"list",
"(",
"other",
".",
"_unknown_fields",
")",
"other_unknown_fields",
".",
"sort",
"(",
")",
"return",
"unknown_fields",
"==",
"other_unknown_fields",
"cls",
".",
"__eq__",
"=",
"__eq__"
] | https://github.com/papyrussolution/OpenPapyrus/blob/bbfb5ec2ea2109b8e2f125edd838e12eaf7b8b91/Src/OSF/protobuf-3.19.1/python/google/protobuf/internal/python_message.py#L978-L1005 |
||
may0324/DeepCompression-caffe | 0aff6c1287bda4cfc7f378ed8a16524e1afabd8c | scripts/cpp_lint.py | python | ReplaceAll | (pattern, rep, s) | return _regexp_compile_cache[pattern].sub(rep, s) | Replaces instances of pattern in a string with a replacement.
The compiled regex is kept in a cache shared by Match and Search.
Args:
pattern: regex pattern
rep: replacement text
s: search string
Returns:
string with replacements made (or original string if no replacements) | Replaces instances of pattern in a string with a replacement. | [
"Replaces",
"instances",
"of",
"pattern",
"in",
"a",
"string",
"with",
"a",
"replacement",
"."
] | def ReplaceAll(pattern, rep, s):
"""Replaces instances of pattern in a string with a replacement.
The compiled regex is kept in a cache shared by Match and Search.
Args:
pattern: regex pattern
rep: replacement text
s: search string
Returns:
string with replacements made (or original string if no replacements)
"""
if pattern not in _regexp_compile_cache:
_regexp_compile_cache[pattern] = sre_compile.compile(pattern)
return _regexp_compile_cache[pattern].sub(rep, s) | [
"def",
"ReplaceAll",
"(",
"pattern",
",",
"rep",
",",
"s",
")",
":",
"if",
"pattern",
"not",
"in",
"_regexp_compile_cache",
":",
"_regexp_compile_cache",
"[",
"pattern",
"]",
"=",
"sre_compile",
".",
"compile",
"(",
"pattern",
")",
"return",
"_regexp_compile_cache",
"[",
"pattern",
"]",
".",
"sub",
"(",
"rep",
",",
"s",
")"
] | https://github.com/may0324/DeepCompression-caffe/blob/0aff6c1287bda4cfc7f378ed8a16524e1afabd8c/scripts/cpp_lint.py#L525-L540 |
|
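Behaviourally this is `re.sub` with memoized compilation, which matters when the same patterns run over every line of every linted file. A tiny example:

```python
# The compiled pattern is cached in _regexp_compile_cache on first use.
print(ReplaceAll(r'\s+', ' ', 'int    x ;'))  # -> 'int x ;'
```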
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/aui.py | python | AuiTabContainer.GetIdxFromWindow | (*args, **kwargs) | return _aui.AuiTabContainer_GetIdxFromWindow(*args, **kwargs) | GetIdxFromWindow(self, Window page) -> int | GetIdxFromWindow(self, Window page) -> int | [
"GetIdxFromWindow",
"(",
"self",
"Window",
"page",
")",
"-",
">",
"int"
] | def GetIdxFromWindow(*args, **kwargs):
"""GetIdxFromWindow(self, Window page) -> int"""
return _aui.AuiTabContainer_GetIdxFromWindow(*args, **kwargs) | [
"def",
"GetIdxFromWindow",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_aui",
".",
"AuiTabContainer_GetIdxFromWindow",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/aui.py#L1188-L1190 |
|
hpi-xnor/BMXNet-v2 | af2b1859eafc5c721b1397cef02f946aaf2ce20d | python/mxnet/operator.py | python | CustomOpProp.list_auxiliary_states | (self) | return [] | list_auxiliary_states interface. Can override when creating new operators.
Returns
-------
auxs : list
list of auxiliary state blob names. | list_auxiliary_states interface. Can override when creating new operators. | [
"list_auxiliary_states",
"interface",
".",
"Can",
"override",
"when",
"creating",
"new",
"operators",
"."
] | def list_auxiliary_states(self):
"""list_auxiliary_states interface. Can override when creating new operators.
Returns
-------
auxs : list
list of auxiliary state blob names.
"""
return [] | [
"def",
"list_auxiliary_states",
"(",
"self",
")",
":",
"return",
"[",
"]"
] | https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/python/mxnet/operator.py#L634-L642 |
|
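A sketch of overriding the hook in a custom operator; the class and blob names are illustrative:

```python
import mxnet as mx

class BatchNormProp(mx.operator.CustomOpProp):
    def list_auxiliary_states(self):
        # auxiliary blobs are updated by the op itself, not by gradients
        return ['moving_mean', 'moving_var']
```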
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | deps/src/libxml2-2.9.1/python/libxml2.py | python | htmlReadFd | (fd, URL, encoding, options) | return xmlDoc(_obj=ret) | parse HTML from a file descriptor and build a tree. | parse HTML from a file descriptor and build a tree. | [
"parse",
"an",
"XML",
"from",
"a",
"file",
"descriptor",
"and",
"build",
"a",
"tree",
"."
] | def htmlReadFd(fd, URL, encoding, options):
"""parse an XML from a file descriptor and build a tree. """
ret = libxml2mod.htmlReadFd(fd, URL, encoding, options)
if ret is None: raise treeError('htmlReadFd() failed')
return xmlDoc(_obj=ret) | [
"def",
"htmlReadFd",
"(",
"fd",
",",
"URL",
",",
"encoding",
",",
"options",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"htmlReadFd",
"(",
"fd",
",",
"URL",
",",
"encoding",
",",
"options",
")",
"if",
"ret",
"is",
"None",
":",
"raise",
"treeError",
"(",
"'htmlReadFd() failed'",
")",
"return",
"xmlDoc",
"(",
"_obj",
"=",
"ret",
")"
] | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2.py#L834-L838 |
|
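A usage sketch (assumes libxml2's Python bindings are importable and that 'page.html' is a hypothetical local file):

    import os
    import libxml2

    fd = os.open('page.html', os.O_RDONLY)
    doc = libxml2.htmlReadFd(fd, 'page.html', 'utf-8', 0)  # options=0: default parsing
    print(doc.name)   # document name/URL
    doc.freeDoc()
    os.close(fd)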
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/dis.py | python | code_info | (x) | return _format_code_info(_get_code_object(x)) | Formatted details of methods, functions, or code. | Formatted details of methods, functions, or code. | [
"Formatted",
"details",
"of",
"methods",
"functions",
"or",
"code",
"."
] | def code_info(x):
"""Formatted details of methods, functions, or code."""
return _format_code_info(_get_code_object(x)) | [
"def",
"code_info",
"(",
"x",
")",
":",
"return",
"_format_code_info",
"(",
"_get_code_object",
"(",
"x",
")",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/dis.py#L142-L144 |
|
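For example, with a freshly compiled code object (standard-library dis, so nothing extra is assumed):

    import dis

    code = compile('x = a + 1', '<demo>', 'exec')
    # Prints the name, filename, argument counts, flags, constants,
    # and referenced names of the code object.
    print(dis.code_info(code))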
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/pandas/py2/pandas/core/indexes/numeric.py | python | NumericIndex._assert_safe_casting | (cls, data, subarr) | Subclasses need to override this only if the process of casting data
from some accepted dtype to the internal dtype(s) bears the risk of
truncation (e.g. float to int). | Subclasses need to override this only if the process of casting data
from some accepted dtype to the internal dtype(s) bears the risk of
truncation (e.g. float to int). | [
"Subclasses",
"need",
"to",
"override",
"this",
"only",
"if",
"the",
"process",
"of",
"casting",
"data",
"from",
"some",
"accepted",
"dtype",
"to",
"the",
"internal",
"dtype",
"(",
"s",
")",
"bears",
"the",
"risk",
"of",
"truncation",
"(",
"e",
".",
"g",
".",
"float",
"to",
"int",
")",
"."
] | def _assert_safe_casting(cls, data, subarr):
"""
Subclasses need to override this only if the process of casting data
from some accepted dtype to the internal dtype(s) bears the risk of
truncation (e.g. float to int).
"""
pass | [
"def",
"_assert_safe_casting",
"(",
"cls",
",",
"data",
",",
"subarr",
")",
":",
"pass"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/indexes/numeric.py#L102-L108 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_internal/network/session.py | python | user_agent | () | return "{data[installer][name]}/{data[installer][version]} {json}".format(
data=data,
json=json.dumps(data, separators=(",", ":"), sort_keys=True),
) | Return a string representing the user agent. | Return a string representing the user agent. | [
"Return",
"a",
"string",
"representing",
"the",
"user",
"agent",
"."
] | def user_agent():
"""
Return a string representing the user agent.
"""
data = {
"installer": {"name": "pip", "version": __version__},
"python": platform.python_version(),
"implementation": {
"name": platform.python_implementation(),
},
}
if data["implementation"]["name"] == 'CPython':
data["implementation"]["version"] = platform.python_version()
elif data["implementation"]["name"] == 'PyPy':
if sys.pypy_version_info.releaselevel == 'final':
pypy_version_info = sys.pypy_version_info[:3]
else:
pypy_version_info = sys.pypy_version_info
data["implementation"]["version"] = ".".join(
[str(x) for x in pypy_version_info]
)
elif data["implementation"]["name"] == 'Jython':
# Complete Guess
data["implementation"]["version"] = platform.python_version()
elif data["implementation"]["name"] == 'IronPython':
# Complete Guess
data["implementation"]["version"] = platform.python_version()
if sys.platform.startswith("linux"):
from pip._vendor import distro
distro_infos = dict(filter(
lambda x: x[1],
zip(["name", "version", "id"], distro.linux_distribution()),
))
libc = dict(filter(
lambda x: x[1],
zip(["lib", "version"], libc_ver()),
))
if libc:
distro_infos["libc"] = libc
if distro_infos:
data["distro"] = distro_infos
if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
if platform.system():
data.setdefault("system", {})["name"] = platform.system()
if platform.release():
data.setdefault("system", {})["release"] = platform.release()
if platform.machine():
data["cpu"] = platform.machine()
if has_tls():
import _ssl as ssl
data["openssl_version"] = ssl.OPENSSL_VERSION
setuptools_version = get_installed_version("setuptools")
if setuptools_version is not None:
data["setuptools_version"] = setuptools_version
# Use None rather than False so as not to give the impression that
# pip knows it is not being run under CI. Rather, it is a null or
# inconclusive result. Also, we include some value rather than no
# value to make it easier to know that the check has been run.
data["ci"] = True if looks_like_ci() else None
user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
if user_data is not None:
data["user_data"] = user_data
return "{data[installer][name]}/{data[installer][version]} {json}".format(
data=data,
json=json.dumps(data, separators=(",", ":"), sort_keys=True),
) | [
"def",
"user_agent",
"(",
")",
":",
"data",
"=",
"{",
"\"installer\"",
":",
"{",
"\"name\"",
":",
"\"pip\"",
",",
"\"version\"",
":",
"__version__",
"}",
",",
"\"python\"",
":",
"platform",
".",
"python_version",
"(",
")",
",",
"\"implementation\"",
":",
"{",
"\"name\"",
":",
"platform",
".",
"python_implementation",
"(",
")",
",",
"}",
",",
"}",
"if",
"data",
"[",
"\"implementation\"",
"]",
"[",
"\"name\"",
"]",
"==",
"'CPython'",
":",
"data",
"[",
"\"implementation\"",
"]",
"[",
"\"version\"",
"]",
"=",
"platform",
".",
"python_version",
"(",
")",
"elif",
"data",
"[",
"\"implementation\"",
"]",
"[",
"\"name\"",
"]",
"==",
"'PyPy'",
":",
"if",
"sys",
".",
"pypy_version_info",
".",
"releaselevel",
"==",
"'final'",
":",
"pypy_version_info",
"=",
"sys",
".",
"pypy_version_info",
"[",
":",
"3",
"]",
"else",
":",
"pypy_version_info",
"=",
"sys",
".",
"pypy_version_info",
"data",
"[",
"\"implementation\"",
"]",
"[",
"\"version\"",
"]",
"=",
"\".\"",
".",
"join",
"(",
"[",
"str",
"(",
"x",
")",
"for",
"x",
"in",
"pypy_version_info",
"]",
")",
"elif",
"data",
"[",
"\"implementation\"",
"]",
"[",
"\"name\"",
"]",
"==",
"'Jython'",
":",
"# Complete Guess",
"data",
"[",
"\"implementation\"",
"]",
"[",
"\"version\"",
"]",
"=",
"platform",
".",
"python_version",
"(",
")",
"elif",
"data",
"[",
"\"implementation\"",
"]",
"[",
"\"name\"",
"]",
"==",
"'IronPython'",
":",
"# Complete Guess",
"data",
"[",
"\"implementation\"",
"]",
"[",
"\"version\"",
"]",
"=",
"platform",
".",
"python_version",
"(",
")",
"if",
"sys",
".",
"platform",
".",
"startswith",
"(",
"\"linux\"",
")",
":",
"from",
"pip",
".",
"_vendor",
"import",
"distro",
"distro_infos",
"=",
"dict",
"(",
"filter",
"(",
"lambda",
"x",
":",
"x",
"[",
"1",
"]",
",",
"zip",
"(",
"[",
"\"name\"",
",",
"\"version\"",
",",
"\"id\"",
"]",
",",
"distro",
".",
"linux_distribution",
"(",
")",
")",
",",
")",
")",
"libc",
"=",
"dict",
"(",
"filter",
"(",
"lambda",
"x",
":",
"x",
"[",
"1",
"]",
",",
"zip",
"(",
"[",
"\"lib\"",
",",
"\"version\"",
"]",
",",
"libc_ver",
"(",
")",
")",
",",
")",
")",
"if",
"libc",
":",
"distro_infos",
"[",
"\"libc\"",
"]",
"=",
"libc",
"if",
"distro_infos",
":",
"data",
"[",
"\"distro\"",
"]",
"=",
"distro_infos",
"if",
"sys",
".",
"platform",
".",
"startswith",
"(",
"\"darwin\"",
")",
"and",
"platform",
".",
"mac_ver",
"(",
")",
"[",
"0",
"]",
":",
"data",
"[",
"\"distro\"",
"]",
"=",
"{",
"\"name\"",
":",
"\"macOS\"",
",",
"\"version\"",
":",
"platform",
".",
"mac_ver",
"(",
")",
"[",
"0",
"]",
"}",
"if",
"platform",
".",
"system",
"(",
")",
":",
"data",
".",
"setdefault",
"(",
"\"system\"",
",",
"{",
"}",
")",
"[",
"\"name\"",
"]",
"=",
"platform",
".",
"system",
"(",
")",
"if",
"platform",
".",
"release",
"(",
")",
":",
"data",
".",
"setdefault",
"(",
"\"system\"",
",",
"{",
"}",
")",
"[",
"\"release\"",
"]",
"=",
"platform",
".",
"release",
"(",
")",
"if",
"platform",
".",
"machine",
"(",
")",
":",
"data",
"[",
"\"cpu\"",
"]",
"=",
"platform",
".",
"machine",
"(",
")",
"if",
"has_tls",
"(",
")",
":",
"import",
"_ssl",
"as",
"ssl",
"data",
"[",
"\"openssl_version\"",
"]",
"=",
"ssl",
".",
"OPENSSL_VERSION",
"setuptools_version",
"=",
"get_installed_version",
"(",
"\"setuptools\"",
")",
"if",
"setuptools_version",
"is",
"not",
"None",
":",
"data",
"[",
"\"setuptools_version\"",
"]",
"=",
"setuptools_version",
"# Use None rather than False so as not to give the impression that",
"# pip knows it is not being run under CI. Rather, it is a null or",
"# inconclusive result. Also, we include some value rather than no",
"# value to make it easier to know that the check has been run.",
"data",
"[",
"\"ci\"",
"]",
"=",
"True",
"if",
"looks_like_ci",
"(",
")",
"else",
"None",
"user_data",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"\"PIP_USER_AGENT_USER_DATA\"",
")",
"if",
"user_data",
"is",
"not",
"None",
":",
"data",
"[",
"\"user_data\"",
"]",
"=",
"user_data",
"return",
"\"{data[installer][name]}/{data[installer][version]} {json}\"",
".",
"format",
"(",
"data",
"=",
"data",
",",
"json",
"=",
"json",
".",
"dumps",
"(",
"data",
",",
"separators",
"=",
"(",
"\",\"",
",",
"\":\"",
")",
",",
"sort_keys",
"=",
"True",
")",
",",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_internal/network/session.py#L99-L176 |
|
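The returned string is 'pip/<version>' followed by a single JSON object, so it can be split back apart; a sketch (assumes it runs where pip's internal imports resolve; the sample value is illustrative):

    import json

    ua = user_agent()                  # e.g. 'pip/20.0.2 {"ci":null,...}'
    name_version, _, payload = ua.partition(' ')
    meta = json.loads(payload)
    print(name_version, meta['python'], meta['implementation']['name'])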
miyosuda/TensorFlowAndroidMNIST | 7b5a4603d2780a8a2834575706e9001977524007 | jni-build/jni/include/tensorflow/python/framework/device.py | python | check_valid | (spec) | Check that a device spec is valid.
Args:
spec: a string.
Raises:
An exception if the spec is invalid. | Check that a device spec is valid. | [
"Check",
"that",
"a",
"device",
"spec",
"is",
"valid",
"."
] | def check_valid(spec):
"""Check that a device spec is valid.
Args:
spec: a string.
Raises:
An exception if the spec is invalid.
"""
# Construct a DeviceSpec. It will assert a failure if spec is invalid.
DeviceSpec.from_string(spec) | [
"def",
"check_valid",
"(",
"spec",
")",
":",
"# Construct a DeviceSpec. It will assert a failure if spec is invalid.",
"DeviceSpec",
".",
"from_string",
"(",
"spec",
")"
] | https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/python/framework/device.py#L231-L241 |
||
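A call sketch (device strings follow TensorFlow's '/job:.../task:.../device:...' form; a malformed spec is expected to raise):

    from tensorflow.python.framework import device

    device.check_valid('/job:worker/task:0/device:GPU:0')  # returns silently
    # device.check_valid('not-a-device-spec')  # would raise on a bad spec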
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/debug/cli/curses_ui.py | python | CursesUI._info_toast | (self, message) | Display a one-line informational message on screen.
Args:
message: The informational message. | Display a one-line informational message on screen. | [
"Display",
"a",
"one",
"-",
"line",
"informational",
"message",
"on",
"screen",
"."
] | def _info_toast(self, message):
"""Display a one-line informational message on screen.
Args:
message: The informational message.
"""
self._toast(
self.INFO_MESSAGE_PREFIX + message, color=self._INFO_TOAST_COLOR_PAIR) | [
"def",
"_info_toast",
"(",
"self",
",",
"message",
")",
":",
"self",
".",
"_toast",
"(",
"self",
".",
"INFO_MESSAGE_PREFIX",
"+",
"message",
",",
"color",
"=",
"self",
".",
"_INFO_TOAST_COLOR_PAIR",
")"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/debug/cli/curses_ui.py#L1644-L1652 |
||
bundy-dns/bundy | 3d41934996b82b0cd2fe22dd74d2abc1daba835d | src/lib/python/bundy/config/config_data.py | python | MultiConfigData.get_value_maps | (self, identifier = None, all = False) | return result | Returns a list of dicts, containing the following values:
name: name of the entry (string)
type: string containing the type of the value (or 'module')
value: value of the entry if it is a string, int, double or bool
modified: true if the value is a local change that has not
been committed
default: true if the value has not been changed (i.e. the
value is the default from the specification)
TODO: use the consts for those last ones
Throws DataNotFoundError if the identifier is bad | Returns a list of dicts, containing the following values:
name: name of the entry (string)
type: string containing the type of the value (or 'module')
value: value of the entry if it is a string, int, double or bool
modified: true if the value is a local change that has not
been committed
default: true if the value has not been changed (i.e. the
value is the default from the specification)
TODO: use the consts for those last ones
Throws DataNotFoundError if the identifier is bad | [
"Returns",
"a",
"list",
"of",
"dicts",
"containing",
"the",
"following",
"values",
":",
"name",
":",
"name",
"of",
"the",
"entry",
"(",
"string",
")",
"type",
":",
"string",
"containing",
"the",
"type",
"of",
"the",
"value",
"(",
"or",
"module",
")",
"value",
":",
"value",
"of",
"the",
"entry",
"if",
"it",
"is",
"a",
"string",
"int",
"double",
"or",
"bool",
"modified",
":",
"true",
"if",
"the",
"value",
"is",
"a",
"local",
"change",
"that",
"has",
"not",
"been",
"committed",
"default",
":",
"true",
"if",
"the",
"value",
"has",
"not",
"been",
"changed",
"(",
"i",
".",
"e",
".",
"the",
"value",
"is",
"the",
"default",
"from",
"the",
"specification",
")",
"TODO",
":",
"use",
"the",
"consts",
"for",
"those",
"last",
"ones",
"Throws",
"DataNotFoundError",
"if",
"the",
"identifier",
"is",
"bad"
] | def get_value_maps(self, identifier = None, all = False):
"""Returns a list of dicts, containing the following values:
name: name of the entry (string)
type: string containing the type of the value (or 'module')
value: value of the entry if it is a string, int, double or bool
modified: true if the value is a local change that has not
been committed
default: true if the value has not been changed (i.e. the
value is the default from the specification)
TODO: use the consts for those last ones
Throws DataNotFoundError if the identifier is bad
"""
result = []
if not identifier or identifier == "/":
# No identifier, so we need the list of current modules
for module in self._specifications.keys():
if all:
spec = self.get_module_spec(module)
if spec:
spec_part = spec.get_config_spec()
self._append_value_item(result, spec_part, module, all, True)
else:
entry = _create_value_map_entry(module, 'module', None)
result.append(entry)
else:
# Strip off start and end slashes, if they are there
if len(identifier) > 0 and identifier[0] == '/':
identifier = identifier[1:]
if len(identifier) > 0 and identifier[-1] == '/':
identifier = identifier[:-1]
module, sep, id = identifier.partition('/')
spec = self.get_module_spec(module)
if spec:
spec_part = find_spec_part(spec.get_config_spec(), id)
self._append_value_item(result, spec_part, identifier, all, True)
return result | [
"def",
"get_value_maps",
"(",
"self",
",",
"identifier",
"=",
"None",
",",
"all",
"=",
"False",
")",
":",
"result",
"=",
"[",
"]",
"if",
"not",
"identifier",
"or",
"identifier",
"==",
"\"/\"",
":",
"# No identifier, so we need the list of current modules",
"for",
"module",
"in",
"self",
".",
"_specifications",
".",
"keys",
"(",
")",
":",
"if",
"all",
":",
"spec",
"=",
"self",
".",
"get_module_spec",
"(",
"module",
")",
"if",
"spec",
":",
"spec_part",
"=",
"spec",
".",
"get_config_spec",
"(",
")",
"self",
".",
"_append_value_item",
"(",
"result",
",",
"spec_part",
",",
"module",
",",
"all",
",",
"True",
")",
"else",
":",
"entry",
"=",
"_create_value_map_entry",
"(",
"module",
",",
"'module'",
",",
"None",
")",
"result",
".",
"append",
"(",
"entry",
")",
"else",
":",
"# Strip off start and end slashes, if they are there",
"if",
"len",
"(",
"identifier",
")",
">",
"0",
"and",
"identifier",
"[",
"0",
"]",
"==",
"'/'",
":",
"identifier",
"=",
"identifier",
"[",
"1",
":",
"]",
"if",
"len",
"(",
"identifier",
")",
">",
"0",
"and",
"identifier",
"[",
"-",
"1",
"]",
"==",
"'/'",
":",
"identifier",
"=",
"identifier",
"[",
":",
"-",
"1",
"]",
"module",
",",
"sep",
",",
"id",
"=",
"identifier",
".",
"partition",
"(",
"'/'",
")",
"spec",
"=",
"self",
".",
"get_module_spec",
"(",
"module",
")",
"if",
"spec",
":",
"spec_part",
"=",
"find_spec_part",
"(",
"spec",
".",
"get_config_spec",
"(",
")",
",",
"id",
")",
"self",
".",
"_append_value_item",
"(",
"result",
",",
"spec_part",
",",
"identifier",
",",
"all",
",",
"True",
")",
"return",
"result"
] | https://github.com/bundy-dns/bundy/blob/3d41934996b82b0cd2fe22dd74d2abc1daba835d/src/lib/python/bundy/config/config_data.py#L734-L769 |
|
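Each element of the returned list is a flat dict; a sketch of one entry (the name and values are illustrative, not taken from a real module spec):

    {'name': 'Auth/database_file',
     'type': 'string',
     'value': '/var/bundy/zones.sqlite3',
     'modified': False,
     'default': True}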
ArduPilot/ardupilot | 6e684b3496122b8158ac412b609d00004b7ac306 | libraries/AP_HAL_ChibiOS/hwdef/scripts/chibios_hwdef.py | python | write_USB_config | (f) | write USB config defines | write USB config defines | [
"write",
"USB",
"config",
"defines"
] | def write_USB_config(f):
'''write USB config defines'''
if not have_type_prefix('OTG'):
return
f.write('// USB configuration\n')
(USB_VID, USB_PID) = get_USB_IDs()
f.write('#define HAL_USB_VENDOR_ID 0x%04x\n' % int(USB_VID))
f.write('#define HAL_USB_PRODUCT_ID 0x%04x\n' % int(USB_PID))
f.write('#define HAL_USB_STRING_MANUFACTURER %s\n' % get_config("USB_STRING_MANUFACTURER", default="\"ArduPilot\""))
default_product = "%BOARD%"
if args.bootloader:
default_product += "-BL"
f.write('#define HAL_USB_STRING_PRODUCT %s\n' % get_config("USB_STRING_PRODUCT", default="\"%s\""%default_product))
f.write('#define HAL_USB_STRING_SERIAL %s\n' % get_config("USB_STRING_SERIAL", default="\"%SERIAL%\""))
f.write('\n\n') | [
"def",
"write_USB_config",
"(",
"f",
")",
":",
"if",
"not",
"have_type_prefix",
"(",
"'OTG'",
")",
":",
"return",
"f",
".",
"write",
"(",
"'// USB configuration\\n'",
")",
"(",
"USB_VID",
",",
"USB_PID",
")",
"=",
"get_USB_IDs",
"(",
")",
"f",
".",
"write",
"(",
"'#define HAL_USB_VENDOR_ID 0x%04x\\n'",
"%",
"int",
"(",
"USB_VID",
")",
")",
"f",
".",
"write",
"(",
"'#define HAL_USB_PRODUCT_ID 0x%04x\\n'",
"%",
"int",
"(",
"USB_PID",
")",
")",
"f",
".",
"write",
"(",
"'#define HAL_USB_STRING_MANUFACTURER %s\\n'",
"%",
"get_config",
"(",
"\"USB_STRING_MANUFACTURER\"",
",",
"default",
"=",
"\"\\\"ArduPilot\\\"\"",
")",
")",
"default_product",
"=",
"\"%BOARD%\"",
"if",
"args",
".",
"bootloader",
":",
"default_product",
"+=",
"\"-BL\"",
"f",
".",
"write",
"(",
"'#define HAL_USB_STRING_PRODUCT %s\\n'",
"%",
"get_config",
"(",
"\"USB_STRING_PRODUCT\"",
",",
"default",
"=",
"\"\\\"%s\\\"\"",
"%",
"default_product",
")",
")",
"f",
".",
"write",
"(",
"'#define HAL_USB_STRING_SERIAL %s\\n'",
"%",
"get_config",
"(",
"\"USB_STRING_SERIAL\"",
",",
"default",
"=",
"\"\\\"%SERIAL%\\\"\"",
")",
")",
"f",
".",
"write",
"(",
"'\\n\\n'",
")"
] | https://github.com/ArduPilot/ardupilot/blob/6e684b3496122b8158ac412b609d00004b7ac306/libraries/AP_HAL_ChibiOS/hwdef/scripts/chibios_hwdef.py#L1194-L1209 |
||
google/certificate-transparency | 2588562fd306a447958471b6f06c1069619c1641 | python/ct/crypto/asn1/print_util.py | python | bits_to_hex | (bit_array, delimiter=":") | | return bytes_to_hex(byte_array, delimiter=delimiter) | Convert a bit array to a prettily formatted hex string. If the array
length is not a multiple of 8, it is padded with 0-bits from the left.
For example, [1,0,0,1,1,0,1,0,0,1,0] becomes 04:d2.
Args:
bit_array: the bit array to convert
Returns:
the formatted hex string. | Convert a bit array to a prettily formatted hex string. If the array
length is not a multiple of 8, it is padded with 0-bits from the left.
For example, [1,0,0,1,1,0,1,0,0,1,0] becomes 04:d2.
Args:
bit_array: the bit array to convert
Returns:
the formatted hex string. | [
"Convert",
"a",
"bit",
"array",
"to",
"a",
"prettily",
"formated",
"hex",
"string",
".",
"If",
"the",
"array",
"length",
"is",
"not",
"a",
"multiple",
"of",
"8",
"it",
"is",
"padded",
"with",
"0",
"-",
"bits",
"from",
"the",
"left",
".",
"For",
"example",
"[",
"1",
"0",
"0",
"1",
"1",
"0",
"1",
"0",
"0",
"1",
"0",
"]",
"becomes",
"04",
":",
"d2",
".",
"Args",
":",
"bit_array",
":",
"the",
"bit",
"array",
"to",
"convert",
"Returns",
":",
"the",
"formatted",
"hex",
"string",
"."
] | def bits_to_hex(bit_array, delimiter=":"):
"""Convert a bit array to a prettily formated hex string. If the array
length is not a multiple of 8, it is padded with 0-bits from the left.
For example, [1,0,0,1,1,0,1,0,0,1,0] becomes 04:d2.
Args:
bit_array: the bit array to convert
Returns:
the formatted hex string."""
# Pad the first partial byte.
partial_bits = len(bit_array) % 8
pad_length = 8 - partial_bits if partial_bits else 0
bitstring = "0"*pad_length + "".join(map(str, bit_array))
byte_array = [int(bitstring[i:i+8], 2) for i in range(0, len(bitstring), 8)]
return bytes_to_hex(byte_array, delimiter=delimiter) | [
"def",
"bits_to_hex",
"(",
"bit_array",
",",
"delimiter",
"=",
"\":\"",
")",
":",
"# Pad the first partial byte.",
"partial_bits",
"=",
"len",
"(",
"bit_array",
")",
"%",
"8",
"pad_length",
"=",
"8",
"-",
"partial_bits",
"if",
"partial_bits",
"else",
"0",
"bitstring",
"=",
"\"0\"",
"*",
"pad_length",
"+",
"\"\"",
".",
"join",
"(",
"map",
"(",
"str",
",",
"bit_array",
")",
")",
"byte_array",
"=",
"[",
"int",
"(",
"bitstring",
"[",
"i",
":",
"i",
"+",
"8",
"]",
",",
"2",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"bitstring",
")",
",",
"8",
")",
"]",
"return",
"delimiter",
".",
"join",
"(",
"map",
"(",
"lambda",
"x",
":",
"\"%02x\"",
"%",
"x",
",",
"byte_array",
")",
")",
"return",
"bytes_to_hex",
"(",
"byte_array",
",",
"delimiter",
"=",
"delimiter",
")"
] | https://github.com/google/certificate-transparency/blob/2588562fd306a447958471b6f06c1069619c1641/python/ct/crypto/asn1/print_util.py#L3-L19 |
|
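The docstring's example doubles as a quick doctest (assumes bytes_to_hex from the same print_util module is in scope, since the function delegates the final join to it):

    >>> bits_to_hex([1,0,0,1,1,0,1,0,0,1,0])
    '04:d2'
    >>> bits_to_hex([1,0,0,1,1,0,1,0,0,1,0], delimiter='')
    '04d2'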
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/tools/compatibility/ast_edits.py | python | _ASTCallVisitor.visit_Attribute | (self, node) | Handle bare Attributes i.e. [tf.foo, tf.bar].
Args:
node: Node that is of type ast.Attribute | Handle bare Attributes i.e. [tf.foo, tf.bar]. | [
"Handle",
"bare",
"Attributes",
"i",
".",
"e",
".",
"[",
"tf",
".",
"foo",
"tf",
".",
"bar",
"]",
"."
] | def visit_Attribute(self, node): # pylint: disable=invalid-name
"""Handle bare Attributes i.e. [tf.foo, tf.bar].
Args:
node: Node that is of type ast.Attribute
"""
full_name = self._get_attribute_full_path(node)
if full_name:
self._rename_functions(node, full_name)
if full_name in self._api_change_spec.change_to_function:
if not hasattr(node, "is_function_for_call"):
new_text = full_name + "()"
self._file_edit.add("Changed %r to %r"%(full_name, new_text),
node.lineno, node.col_offset, full_name, new_text)
ast.NodeVisitor.generic_visit(self, node) | [
"def",
"visit_Attribute",
"(",
"self",
",",
"node",
")",
":",
"# pylint: disable=invalid-name",
"full_name",
"=",
"self",
".",
"_get_attribute_full_path",
"(",
"node",
")",
"if",
"full_name",
":",
"self",
".",
"_rename_functions",
"(",
"node",
",",
"full_name",
")",
"if",
"full_name",
"in",
"self",
".",
"_api_change_spec",
".",
"change_to_function",
":",
"if",
"not",
"hasattr",
"(",
"node",
",",
"\"is_function_for_call\"",
")",
":",
"new_text",
"=",
"full_name",
"+",
"\"()\"",
"self",
".",
"_file_edit",
".",
"add",
"(",
"\"Changed %r to %r\"",
"%",
"(",
"full_name",
",",
"new_text",
")",
",",
"node",
".",
"lineno",
",",
"node",
".",
"col_offset",
",",
"full_name",
",",
"new_text",
")",
"ast",
".",
"NodeVisitor",
".",
"generic_visit",
"(",
"self",
",",
"node",
")"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/tools/compatibility/ast_edits.py#L342-L357 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/_windows.py | python | VarHVScrollHelper.ScrollLayout | (*args, **kwargs) | return _windows_.VarHVScrollHelper_ScrollLayout(*args, **kwargs) | ScrollLayout(self) -> bool | ScrollLayout(self) -> bool | [
"ScrollLayout",
"(",
"self",
")",
"-",
">",
"bool"
] | def ScrollLayout(*args, **kwargs):
"""ScrollLayout(self) -> bool"""
return _windows_.VarHVScrollHelper_ScrollLayout(*args, **kwargs) | [
"def",
"ScrollLayout",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"VarHVScrollHelper_ScrollLayout",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_windows.py#L2391-L2393 |
|
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/ntpath.py | python | normcase | (s) | return s.replace("/", "\\").lower() | Normalize case of pathname.
Makes all characters lowercase and all slashes into backslashes. | Normalize case of pathname. | [
"Normalize",
"case",
"of",
"pathname",
"."
] | def normcase(s):
"""Normalize case of pathname.
Makes all characters lowercase and all slashes into backslashes."""
return s.replace("/", "\\").lower() | [
"def",
"normcase",
"(",
"s",
")",
":",
"return",
"s",
".",
"replace",
"(",
"\"/\"",
",",
"\"\\\\\"",
")",
".",
"lower",
"(",
")"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/ntpath.py#L42-L46 |
|
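Quick doctest of the Windows-path normalization (standard-library ntpath):

    >>> import ntpath
    >>> ntpath.normcase('C:/Program Files/App')
    'c:\\program files\\app'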
panda3d/panda3d | 833ad89ebad58395d0af0b7ec08538e5e4308265 | direct/src/tkwidgets/Dial.py | python | Dial._setRollover | (self) | Menu command to turn Dial rollover on/off (i.e. does value accumulate
every time you complete a revolution of the dial?) | Menu command to turn Dial rollover on/off (i.e. does value accumulate
every time you complete a revolution of the dial?) | [
"Menu",
"command",
"to",
"turn",
"Dial",
"rollover",
"on",
"/",
"off",
"(",
"i",
".",
"e",
".",
"does",
"value",
"accumulate",
"every",
"time",
"you",
"complete",
"a",
"revolution",
"of",
"the",
"dial?",
")"
] | def _setRollover(self):
"""
Menu command to turn Dial rollover on/off (i.e. does value accumulate
every time you complete a revolution of the dial?)
"""
self._valuator['fRollover'] = self._fRollover.get() | [
"def",
"_setRollover",
"(",
"self",
")",
":",
"self",
".",
"_valuator",
"[",
"'fRollover'",
"]",
"=",
"self",
".",
"_fRollover",
".",
"get",
"(",
")"
] | https://github.com/panda3d/panda3d/blob/833ad89ebad58395d0af0b7ec08538e5e4308265/direct/src/tkwidgets/Dial.py#L118-L123 |
||
yuxng/DA-RNN | 77fbb50b4272514588a10a9f90b7d5f8d46974fb | lib/gt_single_data_layer/layer.py | python | GtSingleDataLayer.forward | (self) | return blobs | Get blobs and copy them into this layer's top blob vector. | Get blobs and copy them into this layer's top blob vector. | [
"Get",
"blobs",
"and",
"copy",
"them",
"into",
"this",
"layer",
"s",
"top",
"blob",
"vector",
"."
] | def forward(self):
"""Get blobs and copy them into this layer's top blob vector."""
blobs = self._get_next_minibatch()
return blobs | [
"def",
"forward",
"(",
"self",
")",
":",
"blobs",
"=",
"self",
".",
"_get_next_minibatch",
"(",
")",
"return",
"blobs"
] | https://github.com/yuxng/DA-RNN/blob/77fbb50b4272514588a10a9f90b7d5f8d46974fb/lib/gt_single_data_layer/layer.py#L47-L51 |
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/stc.py | python | StyledTextCtrl.AnnotationSetStyles | (*args, **kwargs) | return _stc.StyledTextCtrl_AnnotationSetStyles(*args, **kwargs) | AnnotationSetStyles(self, int line, String styles)
Set the annotation styles for a line | AnnotationSetStyles(self, int line, String styles) | [
"AnnotationSetStyles",
"(",
"self",
"int",
"line",
"String",
"styles",
")"
] | def AnnotationSetStyles(*args, **kwargs):
"""
AnnotationSetStyles(self, int line, String styles)
Set the annotation styles for a line
"""
return _stc.StyledTextCtrl_AnnotationSetStyles(*args, **kwargs) | [
"def",
"AnnotationSetStyles",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_stc",
".",
"StyledTextCtrl_AnnotationSetStyles",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/stc.py#L5951-L5957 |
|
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/x86/toolchain/lib/python2.7/re.py | python | purge | () | Clear the regular expression cache | Clear the regular expression cache | [
"Clear",
"the",
"regular",
"expression",
"cache"
] | def purge():
"Clear the regular expression cache"
_cache.clear()
_cache_repl.clear() | [
"def",
"purge",
"(",
")",
":",
"_cache",
".",
"clear",
"(",
")",
"_cache_repl",
".",
"clear",
"(",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/re.py#L192-L195 |
||
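A usage sketch; handy in long-running processes that compile many one-off patterns (standard-library re):

    import re

    re.match(r'\d+', '123')  # compiles the pattern and caches it
    re.purge()               # drops all cached patterns and replacement templates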
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/turtle.py | python | TNavigator.ycor | (self) | return self._position[1] | Return the turtle's y coordinate
---
No arguments.
Example (for a Turtle instance named turtle):
>>> reset()
>>> turtle.left(60)
>>> turtle.forward(100)
>>> print turtle.ycor()
86.6025403784 | Return the turtle's y coordinate
---
No arguments. | [
"Return",
"the",
"turtle",
"s",
"y",
"coordinate",
"---",
"No",
"arguments",
"."
] | def ycor(self):
""" Return the turtle's y coordinate
---
No arguments.
Example (for a Turtle instance named turtle):
>>> reset()
>>> turtle.left(60)
>>> turtle.forward(100)
>>> print turtle.ycor()
86.6025403784
"""
return self._position[1] | [
"def",
"ycor",
"(",
"self",
")",
":",
"return",
"self",
".",
"_position",
"[",
"1",
"]"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/turtle.py#L1643-L1655 |
|
krishauser/Klampt | 972cc83ea5befac3f653c1ba20f80155768ad519 | Python/python2_version/klampt/model/trajectory.py | python | Trajectory.split | (self,time) | return (front,back) | Returns a pair of trajectories obtained from splitting this
one at the given time | Returns a pair of trajectories obtained from splitting this
one at the given time | [
"Returns",
"a",
"pair",
"of",
"trajectories",
"obtained",
"from",
"splitting",
"this",
"one",
"at",
"the",
"given",
"time"
] | def split(self,time):
"""Returns a pair of trajectories obtained from splitting this
one at the given time"""
if time <= self.times[0]:
#split before start of trajectory
return self.constructor()([time],[self.milestones[0]]),self.constructor()([time]+self.times,[self.milestones[0]]+self.milestones)
elif time >= self.times[-1]:
#split after end of trajectory
return self.constructor()(self.times+[time],self.milestones+[self.milestones[-1]]),self.constructor()([time],[self.milestones[-1]])
i,u = self.getSegment(time)
assert i >= 0,"getSegment returned -1? something must be wrong with the times"
#split in middle of trajectory
splitpt = self.interpolate_state(self.milestones[i],self.milestones[i+1],u,self.times[i+1]-self.times[i])
front = self.constructor()(self.times[:i+1],self.milestones[:i+1])
back = self.constructor()(self.times[i+1:],self.milestones[i+1:])
if u > 0:
front.times.append(time)
front.milestones.append(splitpt)
if u < 1:
back.times = [time] + back.times
back.milestones = [splitpt] + back.milestones
return (front,back) | [
"def",
"split",
"(",
"self",
",",
"time",
")",
":",
"if",
"time",
"<=",
"self",
".",
"times",
"[",
"0",
"]",
":",
"#split before start of trajectory",
"return",
"self",
".",
"constructor",
"(",
")",
"(",
"[",
"time",
"]",
",",
"[",
"self",
".",
"milestones",
"[",
"0",
"]",
"]",
")",
",",
"self",
".",
"constructor",
"(",
")",
"(",
"[",
"time",
"]",
"+",
"self",
".",
"times",
",",
"[",
"self",
".",
"milestones",
"[",
"0",
"]",
"]",
"+",
"self",
".",
"milestones",
")",
"elif",
"time",
">=",
"self",
".",
"times",
"[",
"-",
"1",
"]",
":",
"#split after end of trajectory",
"return",
"self",
".",
"constructor",
"(",
")",
"(",
"self",
".",
"times",
"+",
"[",
"time",
"]",
",",
"self",
".",
"milestones",
"+",
"[",
"self",
".",
"milestones",
"[",
"-",
"1",
"]",
"]",
")",
",",
"self",
".",
"constructor",
"(",
")",
"(",
"[",
"time",
"]",
",",
"[",
"self",
".",
"milestones",
"[",
"-",
"1",
"]",
"]",
")",
"i",
",",
"u",
"=",
"self",
".",
"getSegment",
"(",
"time",
")",
"assert",
"i",
">=",
"0",
",",
"\"getSegment returned -1? something must be wrong with the times\"",
"#split in middle of trajectory",
"splitpt",
"=",
"self",
".",
"interpolate_state",
"(",
"self",
".",
"milestones",
"[",
"i",
"]",
",",
"self",
".",
"milestones",
"[",
"i",
"+",
"1",
"]",
",",
"u",
",",
"self",
".",
"times",
"[",
"i",
"+",
"1",
"]",
"-",
"self",
".",
"times",
"[",
"i",
"]",
")",
"front",
"=",
"self",
".",
"constructor",
"(",
")",
"(",
"self",
".",
"times",
"[",
":",
"i",
"+",
"1",
"]",
",",
"self",
".",
"milestones",
"[",
":",
"i",
"+",
"1",
"]",
")",
"back",
"=",
"self",
".",
"constructor",
"(",
")",
"(",
"self",
".",
"times",
"[",
"i",
"+",
"1",
":",
"]",
",",
"self",
".",
"milestones",
"[",
"i",
"+",
"1",
":",
"]",
")",
"if",
"u",
">",
"0",
":",
"front",
".",
"times",
".",
"append",
"(",
"time",
")",
"front",
".",
"milestones",
".",
"append",
"(",
"splitpt",
")",
"if",
"u",
"<",
"1",
":",
"back",
".",
"times",
"=",
"[",
"time",
"]",
"+",
"back",
".",
"times",
"back",
".",
"milestones",
"=",
"[",
"splitpt",
"]",
"+",
"back",
".",
"milestones",
"return",
"(",
"front",
",",
"back",
")"
] | https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/python2_version/klampt/model/trajectory.py#L299-L320 |
|
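A usage sketch with a hypothetical two-segment trajectory (assumes klampt's Trajectory constructor taking times and milestones, as in this module):

    from klampt.model.trajectory import Trajectory

    traj = Trajectory(times=[0.0, 1.0, 2.0],
                      milestones=[[0.0], [1.0], [4.0]])
    front, back = traj.split(1.5)
    # front spans t in [0.0, 1.5], back spans t in [1.5, 2.0];
    # both share the interpolated milestone [2.5] at t=1.5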
whai362/PSENet | 4d95395658662f2223805c36dcd573d9e190ce26 | eval/ic15/rrc_evaluation_funcs.py | python | main_evaluation | (p,default_evaluation_params_fn,validate_data_fn,evaluate_method_fn,show_result=True,per_sample=True) | | return resDict | This process validates a method, evaluates it and, if it succeeds, generates a ZIP file with a JSON entry for each sample.
Params:
p: Dictionary of parameters with the GT/submission locations. If None is passed, the parameters sent by the system are used.
default_evaluation_params_fn: points to a function that returns a dictionary with the default parameters used for the evaluation
validate_data_fn: points to a method that validates the correct format of the submission
evaluate_method_fn: points to a function that evaluates the submission and returns a Dictionary with the results | This process validates a method, evaluates it and, if it succeeds, generates a ZIP file with a JSON entry for each sample.
Params:
p: Dictionary of parameters with the GT/submission locations. If None is passed, the parameters sent by the system are used.
default_evaluation_params_fn: points to a function that returns a dictionary with the default parameters used for the evaluation
validate_data_fn: points to a method that validates the correct format of the submission
evaluate_method_fn: points to a function that evaluates the submission and returns a Dictionary with the results | [
"This",
"process",
"validates",
"a",
"method",
"evaluates",
"it",
"and",
"if",
"it",
"succed",
"generates",
"a",
"ZIP",
"file",
"with",
"a",
"JSON",
"entry",
"for",
"each",
"sample",
".",
"Params",
":",
"p",
":",
"Dictionary",
"of",
"parmeters",
"with",
"the",
"GT",
"/",
"submission",
"locations",
".",
"If",
"None",
"is",
"passed",
"the",
"parameters",
"send",
"by",
"the",
"system",
"are",
"used",
".",
"default_evaluation_params_fn",
":",
"points",
"to",
"a",
"function",
"that",
"returns",
"a",
"dictionary",
"with",
"the",
"default",
"parameters",
"used",
"for",
"the",
"evaluation",
"validate_data_fn",
":",
"points",
"to",
"a",
"method",
"that",
"validates",
"the",
"corrct",
"format",
"of",
"the",
"submission",
"evaluate_method_fn",
":",
"points",
"to",
"a",
"function",
"that",
"evaluated",
"the",
"submission",
"and",
"return",
"a",
"Dictionary",
"with",
"the",
"results"
] | def main_evaluation(p,default_evaluation_params_fn,validate_data_fn,evaluate_method_fn,show_result=True,per_sample=True):
"""
This process validates a method, evaluates it and, if it succeeds, generates a ZIP file with a JSON entry for each sample.
Params:
p: Dictionary of parameters with the GT/submission locations. If None is passed, the parameters sent by the system are used.
default_evaluation_params_fn: points to a function that returns a dictionary with the default parameters used for the evaluation
validate_data_fn: points to a method that validates the correct format of the submission
evaluate_method_fn: points to a function that evaluates the submission and returns a Dictionary with the results
"""
if (p == None):
p = dict([s[1:].split('=') for s in sys.argv[1:]])
if(len(sys.argv)<3):
print_help()
evalParams = default_evaluation_params_fn()
if 'p' in p.keys():
evalParams.update( p['p'] if isinstance(p['p'], dict) else json.loads(p['p'][1:-1]) )
resDict={'calculated':True,'Message':'','method':'{}','per_sample':'{}'}
try:
validate_data_fn(p['g'], p['s'], evalParams)
evalData = evaluate_method_fn(p['g'], p['s'], evalParams)
resDict.update(evalData)
except Exception, e:
resDict['Message']= str(e)
resDict['calculated']=False
if 'o' in p:
if not os.path.exists(p['o']):
os.makedirs(p['o'])
resultsOutputname = p['o'] + '/results.zip'
outZip = zipfile.ZipFile(resultsOutputname, mode='w', allowZip64=True)
del resDict['per_sample']
if 'output_items' in resDict.keys():
del resDict['output_items']
outZip.writestr('method.json',json.dumps(resDict))
if not resDict['calculated']:
if show_result:
sys.stderr.write('Error!\n'+ resDict['Message']+'\n\n')
if 'o' in p:
outZip.close()
return resDict
if 'o' in p:
if per_sample == True:
for k,v in evalData['per_sample'].iteritems():
outZip.writestr( k + '.json',json.dumps(v))
if 'output_items' in evalData.keys():
for k, v in evalData['output_items'].iteritems():
outZip.writestr( k,v)
outZip.close()
if show_result:
sys.stdout.write("Calculated!")
sys.stdout.write(json.dumps(resDict['method']))
return resDict | [
"def",
"main_evaluation",
"(",
"p",
",",
"default_evaluation_params_fn",
",",
"validate_data_fn",
",",
"evaluate_method_fn",
",",
"show_result",
"=",
"True",
",",
"per_sample",
"=",
"True",
")",
":",
"if",
"(",
"p",
"==",
"None",
")",
":",
"p",
"=",
"dict",
"(",
"[",
"s",
"[",
"1",
":",
"]",
".",
"split",
"(",
"'='",
")",
"for",
"s",
"in",
"sys",
".",
"argv",
"[",
"1",
":",
"]",
"]",
")",
"if",
"(",
"len",
"(",
"sys",
".",
"argv",
")",
"<",
"3",
")",
":",
"print_help",
"(",
")",
"evalParams",
"=",
"default_evaluation_params_fn",
"(",
")",
"if",
"'p'",
"in",
"p",
".",
"keys",
"(",
")",
":",
"evalParams",
".",
"update",
"(",
"p",
"[",
"'p'",
"]",
"if",
"isinstance",
"(",
"p",
"[",
"'p'",
"]",
",",
"dict",
")",
"else",
"json",
".",
"loads",
"(",
"p",
"[",
"'p'",
"]",
"[",
"1",
":",
"-",
"1",
"]",
")",
")",
"resDict",
"=",
"{",
"'calculated'",
":",
"True",
",",
"'Message'",
":",
"''",
",",
"'method'",
":",
"'{}'",
",",
"'per_sample'",
":",
"'{}'",
"}",
"try",
":",
"validate_data_fn",
"(",
"p",
"[",
"'g'",
"]",
",",
"p",
"[",
"'s'",
"]",
",",
"evalParams",
")",
"evalData",
"=",
"evaluate_method_fn",
"(",
"p",
"[",
"'g'",
"]",
",",
"p",
"[",
"'s'",
"]",
",",
"evalParams",
")",
"resDict",
".",
"update",
"(",
"evalData",
")",
"except",
"Exception",
",",
"e",
":",
"resDict",
"[",
"'Message'",
"]",
"=",
"str",
"(",
"e",
")",
"resDict",
"[",
"'calculated'",
"]",
"=",
"False",
"if",
"'o'",
"in",
"p",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"p",
"[",
"'o'",
"]",
")",
":",
"os",
".",
"makedirs",
"(",
"p",
"[",
"'o'",
"]",
")",
"resultsOutputname",
"=",
"p",
"[",
"'o'",
"]",
"+",
"'/results.zip'",
"outZip",
"=",
"zipfile",
".",
"ZipFile",
"(",
"resultsOutputname",
",",
"mode",
"=",
"'w'",
",",
"allowZip64",
"=",
"True",
")",
"del",
"resDict",
"[",
"'per_sample'",
"]",
"if",
"'output_items'",
"in",
"resDict",
".",
"keys",
"(",
")",
":",
"del",
"resDict",
"[",
"'output_items'",
"]",
"outZip",
".",
"writestr",
"(",
"'method.json'",
",",
"json",
".",
"dumps",
"(",
"resDict",
")",
")",
"if",
"not",
"resDict",
"[",
"'calculated'",
"]",
":",
"if",
"show_result",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'Error!\\n'",
"+",
"resDict",
"[",
"'Message'",
"]",
"+",
"'\\n\\n'",
")",
"if",
"'o'",
"in",
"p",
":",
"outZip",
".",
"close",
"(",
")",
"return",
"resDict",
"if",
"'o'",
"in",
"p",
":",
"if",
"per_sample",
"==",
"True",
":",
"for",
"k",
",",
"v",
"in",
"evalData",
"[",
"'per_sample'",
"]",
".",
"iteritems",
"(",
")",
":",
"outZip",
".",
"writestr",
"(",
"k",
"+",
"'.json'",
",",
"json",
".",
"dumps",
"(",
"v",
")",
")",
"if",
"'output_items'",
"in",
"evalData",
".",
"keys",
"(",
")",
":",
"for",
"k",
",",
"v",
"in",
"evalData",
"[",
"'output_items'",
"]",
".",
"iteritems",
"(",
")",
":",
"outZip",
".",
"writestr",
"(",
"k",
",",
"v",
")",
"outZip",
".",
"close",
"(",
")",
"if",
"show_result",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"\"Calculated!\"",
")",
"sys",
".",
"stdout",
".",
"write",
"(",
"json",
".",
"dumps",
"(",
"resDict",
"[",
"'method'",
"]",
")",
")",
"return",
"resDict"
] | https://github.com/whai362/PSENet/blob/4d95395658662f2223805c36dcd573d9e190ce26/eval/ic15/rrc_evaluation_funcs.py#L285-L349 |
|
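A call sketch with hypothetical paths and helper names (the 'g'/'s'/'o' keys mirror the -g/-s/-o command-line parameters this module otherwise reads from sys.argv):

    p = {'g': 'gt.zip',        # ground truth
         's': 'submit.zip',    # method submission
         'o': 'results_dir'}   # where results.zip gets written
    resDict = main_evaluation(p, default_evaluation_params,
                              validate_data, evaluate_method)
    print resDict['method']    # Python 2 print, matching this module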
SoarGroup/Soar | a1c5e249499137a27da60533c72969eef3b8ab6b | scons/scons-local-4.1.0/SCons/Node/FS.py | python | Dir.get_contents | (self) | return SCons.Node._get_contents_map[self._func_get_contents](self) | Return content signatures and names of all our children
separated by new-lines. Ensure that the nodes are sorted. | Return content signatures and names of all our children
separated by new-lines. Ensure that the nodes are sorted. | [
"Return",
"content",
"signatures",
"and",
"names",
"of",
"all",
"our",
"children",
"separated",
"by",
"new",
"-",
"lines",
".",
"Ensure",
"that",
"the",
"nodes",
"are",
"sorted",
"."
] | def get_contents(self):
"""Return content signatures and names of all our children
separated by new-lines. Ensure that the nodes are sorted."""
return SCons.Node._get_contents_map[self._func_get_contents](self) | [
"def",
"get_contents",
"(",
"self",
")",
":",
"return",
"SCons",
".",
"Node",
".",
"_get_contents_map",
"[",
"self",
".",
"_func_get_contents",
"]",
"(",
"self",
")"
] | https://github.com/SoarGroup/Soar/blob/a1c5e249499137a27da60533c72969eef3b8ab6b/scons/scons-local-4.1.0/SCons/Node/FS.py#L1856-L1859 |
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/mailbox.py | python | Maildir.__len__ | (self) | return len(self._toc) | Return a count of messages in the mailbox. | Return a count of messages in the mailbox. | [
"Return",
"a",
"count",
"of",
"messages",
"in",
"the",
"mailbox",
"."
] | def __len__(self):
"""Return a count of messages in the mailbox."""
self._refresh()
return len(self._toc) | [
"def",
"__len__",
"(",
"self",
")",
":",
"self",
".",
"_refresh",
"(",
")",
"return",
"len",
"(",
"self",
".",
"_toc",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/mailbox.py#L410-L413 |
|
neoml-lib/neoml | a0d370fba05269a1b2258cef126f77bbd2054a3e | NeoML/samples/python/linear.py | python | accuracy | (model, X, y) | return float(correct)/len(y) | Returns the accuracy of model on the given data | Returns the accuracy of model on the given data | [
"Returns",
"the",
"accuracy",
"of",
"model",
"on",
"the",
"given",
"data"
] | def accuracy(model, X, y):
"""Returns the accuracy of model on the given data"""
correct = sum(1 for label, probs in zip(y, model.classify(X))
if label == np.argmax(probs))
return float(correct)/len(y) | [
"def",
"accuracy",
"(",
"model",
",",
"X",
",",
"y",
")",
":",
"correct",
"=",
"sum",
"(",
"1",
"for",
"label",
",",
"probs",
"in",
"zip",
"(",
"y",
",",
"model",
".",
"classify",
"(",
"X",
")",
")",
"if",
"label",
"==",
"np",
".",
"argmax",
"(",
"probs",
")",
")",
"return",
"float",
"(",
"correct",
")",
"/",
"len",
"(",
"y",
")"
] | https://github.com/neoml-lib/neoml/blob/a0d370fba05269a1b2258cef126f77bbd2054a3e/NeoML/samples/python/linear.py#L36-L40 |
|
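A usage sketch (model, X_test and y_test are placeholders; assumes a trained NeoML classifier whose classify() yields per-class probabilities, as the loop above expects):

    acc = accuracy(model, X_test, y_test)
    print('test accuracy: {:.3f}'.format(acc))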
hpi-xnor/BMXNet-v2 | af2b1859eafc5c721b1397cef02f946aaf2ce20d | python/mxnet/gluon/rnn/rnn_cell.py | python | HybridSequentialRNNCell.add | (self, cell) | Appends a cell into the stack.
Parameters
----------
cell : RecurrentCell
The cell to add. | Appends a cell into the stack. | [
"Appends",
"a",
"cell",
"into",
"the",
"stack",
"."
] | def add(self, cell):
"""Appends a cell into the stack.
Parameters
----------
cell : RecurrentCell
The cell to add.
"""
self.register_child(cell) | [
"def",
"add",
"(",
"self",
",",
"cell",
")",
":",
"self",
".",
"register_child",
"(",
"cell",
")"
] | https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/python/mxnet/gluon/rnn/rnn_cell.py#L771-L779 |
||
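A stacking sketch (assumes mxnet.gluon.rnn; the cell type and sizes are illustrative):

    from mxnet.gluon import rnn

    stack = rnn.HybridSequentialRNNCell()
    stack.add(rnn.LSTMCell(hidden_size=100))
    stack.add(rnn.LSTMCell(hidden_size=100))  # two stacked LSTM layers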
CMA-ES/libcmaes | 1c39d7f931b267117d365370c6da5334a6948942 | python/lcmaes_interface.py | python | to_params | (x0, sigma0, str_algo=b'acmaes', fplot=None, lbounds=None, ubounds=None, scaling=False, vscaling=None, vshift=None, **kwargs) | return p | return parameter object instance for `lcmaes.pcmaes`.
Keys in `kwargs` must correspond to `name` in `set_name` attributes
of `lcmaes.CMAParameters`, e.g. ``ftarget=1e-7``.
Details: when `fplot is None` (default), the default output filename
is used. | return parameter object instance for `lcmaes.pcmaes`. | [
"return",
"parameter",
"object",
"instance",
"for",
"lcmaes",
".",
"pcmaes",
"."
] | def to_params(x0, sigma0, str_algo=b'acmaes', fplot=None, lbounds=None, ubounds=None, scaling=False, vscaling=None, vshift=None, **kwargs):
"""return parameter object instance for `lcmaes.pcmaes`.
Keys in `kwargs` must correspond to `name` in `set_name` attributes
of `lcmaes.CMAParameters`, e.g. ``ftarget=1e-7``.
Details: when `fplot is None` (default), the default output filename
is used.
"""
has_bounds = not lbounds==None and not ubounds == None
p = None
if has_bounds:
if scaling==False:
gp = lcmaes.make_genopheno_pwqb(lbounds,ubounds,len(ubounds))
p = lcmaes.make_parameters_pwqb(x0,sigma0,gp)
else:
gp = lcmaes.make_genopheno_pwqb_ls(lbounds,ubounds,len(ubounds))
p = lcmaes.make_parameters_pwqb_ls(x0,sigma0,gp,-1,0)
else:
if vscaling is None:
p = lcmaes.make_simple_parameters(x0, sigma0)
else:
gp = lcmaes.make_genopheno_ls(vscaling,vshift)
p = lcmaes.make_parameters_ls(x0,sigma0,gp)
p.set_str_algo(str_algo)
if fplot and fplot != True: # then fplot must be filename
global fplot_current
fplot_current = fplot
if fplot or fplot is None: # 0 or False or '' or "" prevents writing
p.set_fplot(fplot_current)
for key, val in kwargs.items():
setter = "set_" + key
if not hasattr(p, setter):
raise ValueError(setter + " is not known as method of CMAParameters")
getattr(p, setter)(val) # call setter with value
return p | [
"def",
"to_params",
"(",
"x0",
",",
"sigma0",
",",
"str_algo",
"=",
"b'acmaes'",
",",
"fplot",
"=",
"None",
",",
"lbounds",
"=",
"None",
",",
"ubounds",
"=",
"None",
",",
"scaling",
"=",
"False",
",",
"vscaling",
"=",
"None",
",",
"vshift",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"has_bounds",
"=",
"not",
"lbounds",
"==",
"None",
"and",
"not",
"ubounds",
"==",
"None",
"p",
"=",
"None",
"if",
"has_bounds",
":",
"if",
"scaling",
"==",
"False",
":",
"gp",
"=",
"lcmaes",
".",
"make_genopheno_pwqb",
"(",
"lbounds",
",",
"ubounds",
",",
"len",
"(",
"ubounds",
")",
")",
"p",
"=",
"lcmaes",
".",
"make_parameters_pwqb",
"(",
"x0",
",",
"sigma0",
",",
"gp",
")",
"else",
":",
"gp",
"=",
"lcmaes",
".",
"make_genopheno_pwqb_ls",
"(",
"lbounds",
",",
"ubounds",
",",
"len",
"(",
"ubounds",
")",
")",
"p",
"=",
"lcmaes",
".",
"make_parameters_pwqb_ls",
"(",
"x0",
",",
"sigma0",
",",
"gp",
",",
"-",
"1",
",",
"0",
")",
"else",
":",
"if",
"vscaling",
"is",
"None",
":",
"p",
"=",
"lcmaes",
".",
"make_simple_parameters",
"(",
"x0",
",",
"sigma0",
")",
"else",
":",
"gp",
"=",
"lcmaes",
".",
"make_genopheno_ls",
"(",
"vscaling",
",",
"vshift",
")",
"p",
"=",
"lcmaes",
".",
"make_parameters_ls",
"(",
"x0",
",",
"sigma0",
",",
"gp",
")",
"p",
".",
"set_str_algo",
"(",
"str_algo",
")",
"if",
"fplot",
"and",
"fplot",
"!=",
"True",
":",
"# then fplot must be filename",
"global",
"fplot_current",
"fplot_current",
"=",
"fplot",
"if",
"fplot",
"or",
"fplot",
"is",
"None",
":",
"# 0 or False or '' or \"\" prevents writing",
"p",
".",
"set_fplot",
"(",
"fplot_current",
")",
"for",
"key",
",",
"val",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"setter",
"=",
"\"set_\"",
"+",
"key",
"if",
"not",
"hasattr",
"(",
"p",
",",
"setter",
")",
":",
"raise",
"ValueError",
"(",
"setter",
"+",
"\" is not known as method of CMAParameters\"",
")",
"getattr",
"(",
"p",
",",
"setter",
")",
"(",
"val",
")",
"# call setter with value",
"return",
"p"
] | https://github.com/CMA-ES/libcmaes/blob/1c39d7f931b267117d365370c6da5334a6948942/python/lcmaes_interface.py#L28-L63 |
|
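A call sketch (the x0 and sigma0 values are illustrative; ftarget shows the kwargs-to-setter mapping from the docstring, ending up as p.set_ftarget(1e-7)):

    import lcmaes_interface as lci

    p = lci.to_params([0.5] * 10, 0.3, ftarget=1e-7)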
wujixiu/helmet-detection | 8eff5c59ddfba5a29e0b76aeb48babcb49246178 | hardhat-wearing-detection/SSD-RPA/scripts/cpp_lint.py | python | GetLineWidth | (line) | Determines the width of the line in column positions.
Args:
line: A string, which may be a Unicode string.
Returns:
The width of the line in column positions, accounting for Unicode
combining characters and wide characters. | Determines the width of the line in column positions. | [
"Determines",
"the",
"width",
"of",
"the",
"line",
"in",
"column",
"positions",
"."
] | def GetLineWidth(line):
"""Determines the width of the line in column positions.
Args:
line: A string, which may be a Unicode string.
Returns:
The width of the line in column positions, accounting for Unicode
combining characters and wide characters.
"""
if isinstance(line, unicode):
width = 0
for uc in unicodedata.normalize('NFC', line):
if unicodedata.east_asian_width(uc) in ('W', 'F'):
width += 2
elif not unicodedata.combining(uc):
width += 1
return width
else:
return len(line) | [
"def",
"GetLineWidth",
"(",
"line",
")",
":",
"if",
"isinstance",
"(",
"line",
",",
"unicode",
")",
":",
"width",
"=",
"0",
"for",
"uc",
"in",
"unicodedata",
".",
"normalize",
"(",
"'NFC'",
",",
"line",
")",
":",
"if",
"unicodedata",
".",
"east_asian_width",
"(",
"uc",
")",
"in",
"(",
"'W'",
",",
"'F'",
")",
":",
"width",
"+=",
"2",
"elif",
"not",
"unicodedata",
".",
"combining",
"(",
"uc",
")",
":",
"width",
"+=",
"1",
"return",
"width",
"else",
":",
"return",
"len",
"(",
"line",
")"
] | https://github.com/wujixiu/helmet-detection/blob/8eff5c59ddfba5a29e0b76aeb48babcb49246178/hardhat-wearing-detection/SSD-RPA/scripts/cpp_lint.py#L3441-L3460 |
||
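Quick checks of the column arithmetic (Python 2, matching the unicode type used above; u'\u4e2d\u6587' is two wide CJK characters):

    >>> GetLineWidth(u'abc')
    3
    >>> GetLineWidth(u'\u4e2d\u6587')
    4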
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/traceback.py | python | extract_stack | (f=None, limit = None) | return list | Extract the raw traceback from the current stack frame.
The return value has the same format as for extract_tb(). The
optional 'f' and 'limit' arguments have the same meaning as for
print_stack(). Each item in the list is a quadruple (filename,
line number, function name, text), and the entries are in order
from oldest to newest stack frame. | Extract the raw traceback from the current stack frame. | [
"Extract",
"the",
"raw",
"traceback",
"from",
"the",
"current",
"stack",
"frame",
"."
] | def extract_stack(f=None, limit = None):
"""Extract the raw traceback from the current stack frame.
The return value has the same format as for extract_tb(). The
optional 'f' and 'limit' arguments have the same meaning as for
print_stack(). Each item in the list is a quadruple (filename,
line number, function name, text), and the entries are in order
from oldest to newest stack frame.
"""
if f is None:
try:
raise ZeroDivisionError
except ZeroDivisionError:
f = sys.exc_info()[2].tb_frame.f_back
if limit is None:
if hasattr(sys, 'tracebacklimit'):
limit = sys.tracebacklimit
list = []
n = 0
while f is not None and (limit is None or n < limit):
lineno = f.f_lineno
co = f.f_code
filename = co.co_filename
name = co.co_name
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, f.f_globals)
if line: line = line.strip()
else: line = None
list.append((filename, lineno, name, line))
f = f.f_back
n = n+1
list.reverse()
return list | [
"def",
"extract_stack",
"(",
"f",
"=",
"None",
",",
"limit",
"=",
"None",
")",
":",
"if",
"f",
"is",
"None",
":",
"try",
":",
"raise",
"ZeroDivisionError",
"except",
"ZeroDivisionError",
":",
"f",
"=",
"sys",
".",
"exc_info",
"(",
")",
"[",
"2",
"]",
".",
"tb_frame",
".",
"f_back",
"if",
"limit",
"is",
"None",
":",
"if",
"hasattr",
"(",
"sys",
",",
"'tracebacklimit'",
")",
":",
"limit",
"=",
"sys",
".",
"tracebacklimit",
"list",
"=",
"[",
"]",
"n",
"=",
"0",
"while",
"f",
"is",
"not",
"None",
"and",
"(",
"limit",
"is",
"None",
"or",
"n",
"<",
"limit",
")",
":",
"lineno",
"=",
"f",
".",
"f_lineno",
"co",
"=",
"f",
".",
"f_code",
"filename",
"=",
"co",
".",
"co_filename",
"name",
"=",
"co",
".",
"co_name",
"linecache",
".",
"checkcache",
"(",
"filename",
")",
"line",
"=",
"linecache",
".",
"getline",
"(",
"filename",
",",
"lineno",
",",
"f",
".",
"f_globals",
")",
"if",
"line",
":",
"line",
"=",
"line",
".",
"strip",
"(",
")",
"else",
":",
"line",
"=",
"None",
"list",
".",
"append",
"(",
"(",
"filename",
",",
"lineno",
",",
"name",
",",
"line",
")",
")",
"f",
"=",
"f",
".",
"f_back",
"n",
"=",
"n",
"+",
"1",
"list",
".",
"reverse",
"(",
")",
"return",
"list"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/traceback.py#L280-L312 |
|
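A usage sketch iterating the documented quadruples (standard-library traceback; Python 2 print syntax to match this module):

    import traceback

    def where_am_i():
        for filename, lineno, name, line in traceback.extract_stack():
            print '%s:%d in %s: %r' % (filename, lineno, name, line)

    where_am_i()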
bigartm/bigartm | 47e37f982de87aa67bfd475ff1f39da696b181b3 | python/artm/scores.py | python | ClassPrecisionScore.__init__ | (self, name=None, config=None) | :param str name: the identifier of score, will be auto-generated if not specified
:param config: the low-level config of this score
:type config: protobuf object | :param str name: the identifier of score, will be auto-generated if not specified
:param config: the low-level config of this score
:type config: protobuf object | [
":",
"param",
"str",
"name",
":",
"the",
"identifier",
"of",
"score",
"will",
"be",
"auto",
"-",
"generated",
"if",
"not",
"specified",
":",
"param",
"config",
":",
"the",
"low",
"-",
"level",
"config",
"of",
"this",
"score",
":",
"type",
"config",
":",
"protobuf",
"object"
] | def __init__(self, name=None, config=None):
"""
:param str name: the identifier of score, will be auto-generated if not specified
:param config: the low-level config of this score
:type config: protobuf object
"""
BaseScore.__init__(self,
name=name,
class_id=None,
topic_names=None,
model_name=None,
config=config) | [
"def",
"__init__",
"(",
"self",
",",
"name",
"=",
"None",
",",
"config",
"=",
"None",
")",
":",
"BaseScore",
".",
"__init__",
"(",
"self",
",",
"name",
"=",
"name",
",",
"class_id",
"=",
"None",
",",
"topic_names",
"=",
"None",
",",
"model_name",
"=",
"None",
",",
"config",
"=",
"config",
")"
] | https://github.com/bigartm/bigartm/blob/47e37f982de87aa67bfd475ff1f39da696b181b3/python/artm/scores.py#L744-L755 |
||
NVIDIA/thrust | 627dccb359a635afdd69e95a6cc59698f23f70e2 | internal/benchmark/compare_benchmark_results.py | python | io_manager.write_header | (self) | Write the header for the output CSV file. | Write the header for the output CSV file. | [
"Write",
"the",
"header",
"for",
"the",
"output",
"CSV",
"file",
"."
] | def write_header(self):
"""Write the header for the output CSV file."""
# Write the first line of the header.
self.writer.writeheader()
# Write the second line of the header.
self.writer.writerow(self.variable_units) | [
"def",
"write_header",
"(",
"self",
")",
":",
"# Write the first line of the header.",
"self",
".",
"writer",
".",
"writeheader",
"(",
")",
"# Write the second line of the header.",
"self",
".",
"writer",
".",
"writerow",
"(",
"self",
".",
"variable_units",
")"
] | https://github.com/NVIDIA/thrust/blob/627dccb359a635afdd69e95a6cc59698f23f70e2/internal/benchmark/compare_benchmark_results.py#L786-L792 |
||
facebookarchive/LogDevice | ce7726050edc49a1e15d9160e81c890736b779e2 | logdevice/ops/ldops/admin_api.py | python | remove_nodes | (
client: AdminAPI, req: RemoveNodesRequest
) | return await client.removeNodes(req) | Wrapper for removeNodes() Thrift method | Wrapper for removeNodes() Thrift method | [
"Wrapper",
"for",
"removeNodes",
"()",
"Thrift",
"method"
] | async def remove_nodes(
client: AdminAPI, req: RemoveNodesRequest
) -> RemoveNodesResponse:
"""
Wrapper for removeNodes() Thrift method
"""
return await client.removeNodes(req) | [
"async",
"def",
"remove_nodes",
"(",
"client",
":",
"AdminAPI",
",",
"req",
":",
"RemoveNodesRequest",
")",
"->",
"RemoveNodesResponse",
":",
"return",
"await",
"client",
".",
"removeNodes",
"(",
"req",
")"
] | https://github.com/facebookarchive/LogDevice/blob/ce7726050edc49a1e15d9160e81c890736b779e2/logdevice/ops/ldops/admin_api.py#L81-L87 |
|
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/python/estimator/canned/head.py | python | _weights | (features, weight_column) | Fetches weights from features. | Fetches weights from features. | [
"Fetches",
"weights",
"from",
"features",
"."
] | def _weights(features, weight_column):
"""Fetches weights from features."""
with ops.name_scope(None, 'weights', values=features.values()):
if weight_column is None:
return 1.
if isinstance(weight_column, six.string_types):
weight_column = feature_column_lib.numeric_column(key=weight_column)
if not isinstance(weight_column, feature_column_lib._NumericColumn): # pylint: disable=protected-access
raise TypeError('Weight column must be either a string or _NumericColumn.'
' Given type: {}.'.format(type(weight_column)))
weights = weight_column._get_dense_tensor( # pylint: disable=protected-access
feature_column_lib._LazyBuilder(features)) # pylint: disable=protected-access
if not (weights.dtype.is_floating or weights.dtype.is_integer):
raise ValueError('Weight column should be castable to float. '
'Given dtype: {}'.format(weights.dtype))
weights = _maybe_expand_dim(math_ops.to_float(weights, name='weights'))
return weights | [
"def",
"_weights",
"(",
"features",
",",
"weight_column",
")",
":",
"with",
"ops",
".",
"name_scope",
"(",
"None",
",",
"'weights'",
",",
"values",
"=",
"features",
".",
"values",
"(",
")",
")",
":",
"if",
"weight_column",
"is",
"None",
":",
"return",
"1.",
"if",
"isinstance",
"(",
"weight_column",
",",
"six",
".",
"string_types",
")",
":",
"weight_column",
"=",
"feature_column_lib",
".",
"numeric_column",
"(",
"key",
"=",
"weight_column",
")",
"if",
"not",
"isinstance",
"(",
"weight_column",
",",
"feature_column_lib",
".",
"_NumericColumn",
")",
":",
"# pylint: disable=protected-access",
"raise",
"TypeError",
"(",
"'Weight column must be either a string or _NumericColumn.'",
"' Given type: {}.'",
".",
"format",
"(",
"type",
"(",
"weight_column",
")",
")",
")",
"weights",
"=",
"weight_column",
".",
"_get_dense_tensor",
"(",
"# pylint: disable=protected-access",
"feature_column_lib",
".",
"_LazyBuilder",
"(",
"features",
")",
")",
"# pylint: disable=protected-access",
"if",
"not",
"(",
"weights",
".",
"dtype",
".",
"is_floating",
"or",
"weights",
".",
"dtype",
".",
"is_integer",
")",
":",
"raise",
"ValueError",
"(",
"'Weight column should be castable to float. '",
"'Given dtype: {}'",
".",
"format",
"(",
"weights",
".",
"dtype",
")",
")",
"weights",
"=",
"_maybe_expand_dim",
"(",
"math_ops",
".",
"to_float",
"(",
"weights",
",",
"name",
"=",
"'weights'",
")",
")",
"return",
"weights"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/estimator/canned/head.py#L766-L782 |
||
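The `_weights` helper above resolves an optional weight column from the features dict, checks that its dtype is numeric, casts it to float, and expands a trailing dimension so it broadcasts against per-example losses. It leans on private TF 1.x internals (`_NumericColumn`, `_LazyBuilder`), so here is a rough framework-free sketch of the same validate-cast-expand logic using numpy (names are illustrative, not the estimator API):

```python
import numpy as np

def resolve_weights(features, weight_column):
    """Return per-example float weights, or 1.0 when no column is given."""
    if weight_column is None:
        return 1.0
    weights = np.asarray(features[weight_column])
    # Mirror the dtype check: only integer or floating weights are accepted.
    if weights.dtype.kind not in ("i", "u", "f"):
        raise ValueError("Weight column should be castable to float. "
                         "Given dtype: %s" % weights.dtype)
    weights = weights.astype(np.float64)
    # Mirror _maybe_expand_dim: add a trailing axis for broadcasting.
    if weights.ndim == 1:
        weights = weights[:, np.newaxis]
    return weights

print(resolve_weights({"w": [1, 2, 3]}, "w").shape)  # (3, 1)
```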
pmq20/node-packer | 12c46c6e44fbc14d9ee645ebd17d5296b324f7e0 | lts/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py | python | _TopologicallySortedEnvVarKeys | (env) | Takes a dict |env| whose values are strings that can refer to other keys,
for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
env such that key2 is after key1 in L if env[key2] refers to env[key1].
Throws an Exception in case of dependency cycles. | Takes a dict |env| whose values are strings that can refer to other keys,
for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
env such that key2 is after key1 in L if env[key2] refers to env[key1]. | [
"Takes",
"a",
"dict",
"|env|",
"whose",
"values",
"are",
"strings",
"that",
"can",
"refer",
"to",
"other",
"keys",
"for",
"example",
"env",
"[",
"foo",
"]",
"=",
"$",
"(",
"bar",
")",
"and",
"$",
"(",
"baz",
")",
".",
"Returns",
"a",
"list",
"L",
"of",
"all",
"keys",
"of",
"env",
"such",
"that",
"key2",
"is",
"after",
"key1",
"in",
"L",
"if",
"env",
"[",
"key2",
"]",
"refers",
"to",
"env",
"[",
"key1",
"]",
"."
] | def _TopologicallySortedEnvVarKeys(env):
"""Takes a dict |env| whose values are strings that can refer to other keys,
for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
env such that key2 is after key1 in L if env[key2] refers to env[key1].
Throws an Exception in case of dependency cycles.
"""
# Since environment variables can refer to other variables, the evaluation
# order is important. Below is the logic to compute the dependency graph
# and sort it.
regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')
def GetEdges(node):
# Use a definition of edges such that user_of_variable -> used_variable.
# This happens to be easier in this case, since a variable's
# definition contains all variables it references in a single string.
# We can then reverse the result of the topological sort at the end.
# Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
matches = set([v for v in regex.findall(env[node]) if v in env])
for dependee in matches:
assert '${' not in dependee, 'Nested variables not supported: ' + dependee
return matches
try:
# Topologically sort, and then reverse, because we used an edge definition
# that's inverted from the expected result of this function (see comment
# above).
order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
order.reverse()
return order
except gyp.common.CycleError as e:
raise GypError(
'Xcode environment variables are cyclically dependent: ' + str(e.nodes)) | [
"def",
"_TopologicallySortedEnvVarKeys",
"(",
"env",
")",
":",
"# Since environment variables can refer to other variables, the evaluation",
"# order is important. Below is the logic to compute the dependency graph",
"# and sort it.",
"regex",
"=",
"re",
".",
"compile",
"(",
"r'\\$\\{([a-zA-Z0-9\\-_]+)\\}'",
")",
"def",
"GetEdges",
"(",
"node",
")",
":",
"# Use a definition of edges such that user_of_variable -> used_varible.",
"# This happens to be easier in this case, since a variable's",
"# definition contains all variables it references in a single string.",
"# We can then reverse the result of the topological sort at the end.",
"# Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))",
"matches",
"=",
"set",
"(",
"[",
"v",
"for",
"v",
"in",
"regex",
".",
"findall",
"(",
"env",
"[",
"node",
"]",
")",
"if",
"v",
"in",
"env",
"]",
")",
"for",
"dependee",
"in",
"matches",
":",
"assert",
"'${'",
"not",
"in",
"dependee",
",",
"'Nested variables not supported: '",
"+",
"dependee",
"return",
"matches",
"try",
":",
"# Topologically sort, and then reverse, because we used an edge definition",
"# that's inverted from the expected result of this function (see comment",
"# above).",
"order",
"=",
"gyp",
".",
"common",
".",
"TopologicallySorted",
"(",
"env",
".",
"keys",
"(",
")",
",",
"GetEdges",
")",
"order",
".",
"reverse",
"(",
")",
"return",
"order",
"except",
"gyp",
".",
"common",
".",
"CycleError",
"as",
"e",
":",
"raise",
"GypError",
"(",
"'Xcode environment variables are cyclically dependent: '",
"+",
"str",
"(",
"e",
".",
"nodes",
")",
")"
] | https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py#L1578-L1609 |
||
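`_TopologicallySortedEnvVarKeys` extracts `${VAR}` references with a regex, builds a dependency graph, and topologically sorts it with gyp's helper (reversing at the end because its edges point from user to used variable). The same idea, sketched with the standard library's `graphlib.TopologicalSorter` (Python 3.9+) in place of `gyp.common.TopologicallySorted`:

```python
import re
from graphlib import TopologicalSorter  # raises graphlib.CycleError on cycles

def sorted_env_var_keys(env):
    """Order keys so every variable comes after the variables it references."""
    regex = re.compile(r"\$\{([a-zA-Z0-9\-_]+)\}")
    # Map each key to the set of keys its value refers to (its predecessors).
    deps = {key: {v for v in regex.findall(value) if v in env}
            for key, value in env.items()}
    # static_order() yields predecessors before dependents, which is already
    # the evaluation order we want -- no final reverse() needed here.
    return list(TopologicalSorter(deps).static_order())

env = {"foo": "${bar} and ${baz}", "bar": "x", "baz": "${bar}"}
print(sorted_env_var_keys(env))  # ['bar', 'baz', 'foo']
```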
ricardoquesada/Spidermonkey | 4a75ea2543408bd1b2c515aa95901523eeef7858 | modules/freetype2/src/tools/docmaker/tohtml.py | python | HtmlFormatter.make_html_para | ( self, words ) | return para_header + line + para_footer | convert words of a paragraph into tagged HTML text, handle xrefs | convert words of a paragraph into tagged HTML text, handle xrefs | [
"convert",
"words",
"of",
"a",
"paragraph",
"into",
"tagged",
"HTML",
"text",
"handle",
"xrefs"
] | def make_html_para( self, words ):
""" convert words of a paragraph into tagged HTML text, handle xrefs """
line = ""
if words:
line = self.make_html_word( words[0] )
for word in words[1:]:
line = line + " " + self.make_html_word( word )
# handle hyperlinks
line = re_url.sub( r'<a href="\1">\1</a>', line )
# convert `...' quotations into real left and right single quotes
line = re.sub( r"(^|\W)`(.*?)'(\W|$)", \
r'\1‘\2’\3', \
line )
# convert tilde into non-breakable space
line = string.replace( line, "~", " " )
return para_header + line + para_footer | [
"def",
"make_html_para",
"(",
"self",
",",
"words",
")",
":",
"line",
"=",
"\"\"",
"if",
"words",
":",
"line",
"=",
"self",
".",
"make_html_word",
"(",
"words",
"[",
"0",
"]",
")",
"for",
"word",
"in",
"words",
"[",
"1",
":",
"]",
":",
"line",
"=",
"line",
"+",
"\" \"",
"+",
"self",
".",
"make_html_word",
"(",
"word",
")",
"# handle hyperlinks",
"line",
"=",
"re_url",
".",
"sub",
"(",
"r'<a href=\"\\1\">\\1</a>'",
",",
"line",
")",
"# convert `...' quotations into real left and right single quotes",
"line",
"=",
"re",
".",
"sub",
"(",
"r\"(^|\\W)`(.*?)'(\\W|$)\"",
",",
"r'\\1‘\\2’\\3'",
",",
"line",
")",
"# convert tilde into non-breakable space",
"line",
"=",
"string",
".",
"replace",
"(",
"line",
",",
"\"~\"",
",",
"\" \"",
")",
"return",
"para_header",
"+",
"line",
"+",
"para_footer"
] | https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/modules/freetype2/src/tools/docmaker/tohtml.py#L258-L274 |
|
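`make_html_para` is driven by two `re.sub` calls: one wraps bare URLs in anchor tags, the other rewrites `` `...' `` quoting as typographic single quotes, and a final replace turns `~` into a non-breaking space. A reduced standalone sketch of just those substitutions (the `re_url` pattern below is a simplified stand-in for the one docmaker defines elsewhere):

```python
import re

# Simplified stand-in for docmaker's re_url pattern.
re_url = re.compile(r"(https?://[^\s<]+)")

def decorate(line):
    # Wrap bare URLs in anchor tags.
    line = re_url.sub(r'<a href="\1">\1</a>', line)
    # Convert `...' quoting into left/right single quotes.
    line = re.sub(r"(^|\W)`(.*?)'(\W|$)", "\\1\u2018\\2\u2019\\3", line)
    # Tilde becomes a non-breaking space.
    return line.replace("~", "\u00a0")

print(decorate("see `this' at http://example.com please"))
# see ‘this’ at <a href="http://example.com">http://example.com</a> please
```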
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/requests/cookies.py | python | RequestsCookieJar.copy | (self) | return new_cj | Return a copy of this RequestsCookieJar. | Return a copy of this RequestsCookieJar. | [
"Return",
"a",
"copy",
"of",
"this",
"RequestsCookieJar",
"."
] | def copy(self):
"""Return a copy of this RequestsCookieJar."""
new_cj = RequestsCookieJar()
new_cj.update(self)
return new_cj | [
"def",
"copy",
"(",
"self",
")",
":",
"new_cj",
"=",
"RequestsCookieJar",
"(",
")",
"new_cj",
".",
"update",
"(",
"self",
")",
"return",
"new_cj"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/requests/cookies.py#L415-L419 |
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/richtext.py | python | RichTextFileHandler.SetEncoding | (*args, **kwargs) | return _richtext.RichTextFileHandler_SetEncoding(*args, **kwargs) | SetEncoding(self, String encoding) | SetEncoding(self, String encoding) | [
"SetEncoding",
"(",
"self",
"String",
"encoding",
")"
] | def SetEncoding(*args, **kwargs):
"""SetEncoding(self, String encoding)"""
return _richtext.RichTextFileHandler_SetEncoding(*args, **kwargs) | [
"def",
"SetEncoding",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_richtext",
".",
"RichTextFileHandler_SetEncoding",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/richtext.py#L2824-L2826 |
|
miyosuda/TensorFlowAndroidMNIST | 7b5a4603d2780a8a2834575706e9001977524007 | jni-build/jni/include/tensorflow/contrib/rnn/python/ops/rnn_cell.py | python | AttentionCellWrapper.__call__ | (self, inputs, state, scope=None) | Long short-term memory cell with attention (LSTMA). | Long short-term memory cell with attention (LSTMA). | [
"Long",
"short",
"-",
"term",
"memory",
"cell",
"with",
"attention",
"(",
"LSTMA",
")",
"."
] | def __call__(self, inputs, state, scope=None):
"""Long short-term memory cell with attention (LSTMA)."""
with vs.variable_scope(scope or type(self).__name__):
if self._state_is_tuple:
state, attns, attn_states = state
else:
states = state
state = array_ops.slice(states, [0, 0], [-1, self._cell.state_size])
attns = array_ops.slice(
states, [0, self._cell.state_size], [-1, self._attn_size])
attn_states = array_ops.slice(
states, [0, self._cell.state_size + self._attn_size],
[-1, self._attn_size * self._attn_length])
attn_states = array_ops.reshape(attn_states,
[-1, self._attn_length, self._attn_size])
input_size = self._input_size
if input_size is None:
input_size = inputs.get_shape().as_list()[1]
inputs = _linear([inputs, attns], input_size, True)
lstm_output, new_state = self._cell(inputs, state)
if self._state_is_tuple:
new_state_cat = array_ops.concat(1, nest.flatten(new_state))
else:
new_state_cat = new_state
new_attns, new_attn_states = self._attention(new_state_cat, attn_states)
with vs.variable_scope("AttnOutputProjection"):
output = _linear([lstm_output, new_attns], self._attn_size, True)
new_attn_states = array_ops.concat(1, [new_attn_states,
array_ops.expand_dims(output, 1)])
new_attn_states = array_ops.reshape(
new_attn_states, [-1, self._attn_length * self._attn_size])
new_state = (new_state, new_attns, new_attn_states)
if not self._state_is_tuple:
new_state = array_ops.concat(1, list(new_state))
return output, new_state | [
"def",
"__call__",
"(",
"self",
",",
"inputs",
",",
"state",
",",
"scope",
"=",
"None",
")",
":",
"with",
"vs",
".",
"variable_scope",
"(",
"scope",
"or",
"type",
"(",
"self",
")",
".",
"__name__",
")",
":",
"if",
"self",
".",
"_state_is_tuple",
":",
"state",
",",
"attns",
",",
"attn_states",
"=",
"state",
"else",
":",
"states",
"=",
"state",
"state",
"=",
"array_ops",
".",
"slice",
"(",
"states",
",",
"[",
"0",
",",
"0",
"]",
",",
"[",
"-",
"1",
",",
"self",
".",
"_cell",
".",
"state_size",
"]",
")",
"attns",
"=",
"array_ops",
".",
"slice",
"(",
"states",
",",
"[",
"0",
",",
"self",
".",
"_cell",
".",
"state_size",
"]",
",",
"[",
"-",
"1",
",",
"self",
".",
"_attn_size",
"]",
")",
"attn_states",
"=",
"array_ops",
".",
"slice",
"(",
"states",
",",
"[",
"0",
",",
"self",
".",
"_cell",
".",
"state_size",
"+",
"self",
".",
"_attn_size",
"]",
",",
"[",
"-",
"1",
",",
"self",
".",
"_attn_size",
"*",
"self",
".",
"_attn_length",
"]",
")",
"attn_states",
"=",
"array_ops",
".",
"reshape",
"(",
"attn_states",
",",
"[",
"-",
"1",
",",
"self",
".",
"_attn_length",
",",
"self",
".",
"_attn_size",
"]",
")",
"input_size",
"=",
"self",
".",
"_input_size",
"if",
"input_size",
"is",
"None",
":",
"input_size",
"=",
"inputs",
".",
"get_shape",
"(",
")",
".",
"as_list",
"(",
")",
"[",
"1",
"]",
"inputs",
"=",
"_linear",
"(",
"[",
"inputs",
",",
"attns",
"]",
",",
"input_size",
",",
"True",
")",
"lstm_output",
",",
"new_state",
"=",
"self",
".",
"_cell",
"(",
"inputs",
",",
"state",
")",
"if",
"self",
".",
"_state_is_tuple",
":",
"new_state_cat",
"=",
"array_ops",
".",
"concat",
"(",
"1",
",",
"nest",
".",
"flatten",
"(",
"new_state",
")",
")",
"else",
":",
"new_state_cat",
"=",
"new_state",
"new_attns",
",",
"new_attn_states",
"=",
"self",
".",
"_attention",
"(",
"new_state_cat",
",",
"attn_states",
")",
"with",
"vs",
".",
"variable_scope",
"(",
"\"AttnOutputProjection\"",
")",
":",
"output",
"=",
"_linear",
"(",
"[",
"lstm_output",
",",
"new_attns",
"]",
",",
"self",
".",
"_attn_size",
",",
"True",
")",
"new_attn_states",
"=",
"array_ops",
".",
"concat",
"(",
"1",
",",
"[",
"new_attn_states",
",",
"array_ops",
".",
"expand_dims",
"(",
"output",
",",
"1",
")",
"]",
")",
"new_attn_states",
"=",
"array_ops",
".",
"reshape",
"(",
"new_attn_states",
",",
"[",
"-",
"1",
",",
"self",
".",
"_attn_length",
"*",
"self",
".",
"_attn_size",
"]",
")",
"new_state",
"=",
"(",
"new_state",
",",
"new_attns",
",",
"new_attn_states",
")",
"if",
"not",
"self",
".",
"_state_is_tuple",
":",
"new_state",
"=",
"array_ops",
".",
"concat",
"(",
"1",
",",
"list",
"(",
"new_state",
")",
")",
"return",
"output",
",",
"new_state"
] | https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/contrib/rnn/python/ops/rnn_cell.py#L737-L771 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/_controls.py | python | Treebook.GetTreeCtrl | (*args, **kwargs) | return _controls_.Treebook_GetTreeCtrl(*args, **kwargs) | GetTreeCtrl(self) -> TreeCtrl | GetTreeCtrl(self) -> TreeCtrl | [
"GetTreeCtrl",
"(",
"self",
")",
"-",
">",
"TreeCtrl"
] | def GetTreeCtrl(*args, **kwargs):
"""GetTreeCtrl(self) -> TreeCtrl"""
return _controls_.Treebook_GetTreeCtrl(*args, **kwargs) | [
"def",
"GetTreeCtrl",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_controls_",
".",
"Treebook_GetTreeCtrl",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_controls.py#L3339-L3341 |
|
NVIDIA/MDL-SDK | aa9642b2546ad7b6236b5627385d882c2ed83c5d | src/mdl/jit/generator_jit/gen_libbsdf_runtime_header.py | python | print_wrapped | (parser, fileobj, line, wrap_pos = 99) | print the given line (provided without newline at end) and wrap it at wrap_pos,
splitting the line at commas. Also handles commented out lines. | print the given line (provided without newline at end) and wrap it at wrap_pos,
splitting the line at commas. Also handles commented out lines. | [
"print",
"the",
"given",
"line",
"(",
"provided",
"without",
"newline",
"at",
"end",
")",
"and",
"wrap",
"it",
"at",
"wrap_pos",
"splitting",
"the",
"line",
"at",
"commas",
".",
"Also",
"handles",
"commented",
"out",
"lines",
"."
] | def print_wrapped(parser, fileobj, line, wrap_pos = 99):
"""print the given line (provided without newline at end) and wrap it at wrap_pos,
splitting the line at commas. Also handles commented out lines."""
orig_line = line
prefix = ""
next_prefix = "// " if line.startswith("//") else " "
while parser.indent * 4 + len(prefix) + len(line) >= wrap_pos:
splitpos = line.rfind(',', 0, wrap_pos - parser.indent * 4 - len(prefix))
if splitpos == -1:
raise Exception("Unable to split line: %s" % orig_line)
parser.write(fileobj, prefix + line[:splitpos + 1] + "\n")
line = line[splitpos + 1:].lstrip()
prefix = next_prefix
parser.write(fileobj, prefix + line + "\n") | [
"def",
"print_wrapped",
"(",
"parser",
",",
"fileobj",
",",
"line",
",",
"wrap_pos",
"=",
"99",
")",
":",
"orig_line",
"=",
"line",
"prefix",
"=",
"\"\"",
"next_prefix",
"=",
"\"// \"",
"if",
"line",
".",
"startswith",
"(",
"\"//\"",
")",
"else",
"\" \"",
"while",
"parser",
".",
"indent",
"*",
"4",
"+",
"len",
"(",
"prefix",
")",
"+",
"len",
"(",
"line",
")",
">=",
"wrap_pos",
":",
"splitpos",
"=",
"line",
".",
"rfind",
"(",
"','",
",",
"0",
",",
"wrap_pos",
"-",
"parser",
".",
"indent",
"*",
"4",
"-",
"len",
"(",
"prefix",
")",
")",
"if",
"splitpos",
"==",
"-",
"1",
":",
"raise",
"Exception",
"(",
"\"Unable to split line: %s\"",
"%",
"orig_line",
")",
"parser",
".",
"write",
"(",
"fileobj",
",",
"prefix",
"+",
"line",
"[",
":",
"splitpos",
"+",
"1",
"]",
"+",
"\"\\n\"",
")",
"line",
"=",
"line",
"[",
"splitpos",
"+",
"1",
":",
"]",
".",
"lstrip",
"(",
")",
"prefix",
"=",
"next_prefix",
"parser",
".",
"write",
"(",
"fileobj",
",",
"prefix",
"+",
"line",
"+",
"\"\\n\"",
")"
] | https://github.com/NVIDIA/MDL-SDK/blob/aa9642b2546ad7b6236b5627385d882c2ed83c5d/src/mdl/jit/generator_jit/gen_libbsdf_runtime_header.py#L165-L180 |
||
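`print_wrapped` wraps a long line by repeatedly splitting at the last comma that still fits within `wrap_pos`, prefixing continuation lines with `// ` when the line is a comment. A standalone sketch of the same loop that returns the pieces instead of writing them through the parser object:

```python
def wrap_at_commas(line, indent=0, wrap_pos=99):
    """Split line at commas so no piece exceeds wrap_pos columns."""
    out = []
    prefix = ""
    # Commented-out lines keep their comment marker on continuation lines.
    next_prefix = "// " if line.startswith("//") else "    "
    while indent * 4 + len(prefix) + len(line) >= wrap_pos:
        limit = wrap_pos - indent * 4 - len(prefix)
        splitpos = line.rfind(",", 0, limit)
        if splitpos == -1:
            raise ValueError("Unable to split line: %s" % line)
        out.append(prefix + line[:splitpos + 1])
        line = line[splitpos + 1:].lstrip()
        prefix = next_prefix
    out.append(prefix + line)
    return out

for piece in wrap_at_commas("int f(int a, int b, int c, int d)", wrap_pos=20):
    print(piece)
# int f(int a, int b,
#     int c, int d)
```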
rampageX/firmware-mod-kit | c94cd6aeee50d92ec5280a6dba6d74828fd3606b | src/binwalk-2.1.1/src/binwalk/core/module.py | python | Kwarg.__init__ | (self, name="", default=None, description="") | Class constructor.
@name - Kwarg name.
@default - Default kwarg value.
@description - Description string.
Return None. | Class constructor. | [
"Class",
"constructor",
"."
] | def __init__(self, name="", default=None, description=""):
'''
Class constructor.
@name - Kwarg name.
@default - Default kwarg value.
@description - Description string.
Return None.
'''
self.name = name
self.default = default
self.description = description | [
"def",
"__init__",
"(",
"self",
",",
"name",
"=",
"\"\"",
",",
"default",
"=",
"None",
",",
"description",
"=",
"\"\"",
")",
":",
"self",
".",
"name",
"=",
"name",
"self",
".",
"default",
"=",
"default",
"self",
".",
"description",
"=",
"description"
] | https://github.com/rampageX/firmware-mod-kit/blob/c94cd6aeee50d92ec5280a6dba6d74828fd3606b/src/binwalk-2.1.1/src/binwalk/core/module.py#L77-L89 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/random.py | python | Random.gauss | (self, mu, sigma) | return mu + z*sigma | Gaussian distribution.
mu is the mean, and sigma is the standard deviation. This is
slightly faster than the normalvariate() function.
Not thread-safe without a lock around calls. | Gaussian distribution. | [
"Gaussian",
"distribution",
"."
] | def gauss(self, mu, sigma):
"""Gaussian distribution.
mu is the mean, and sigma is the standard deviation. This is
slightly faster than the normalvariate() function.
Not thread-safe without a lock around calls.
"""
# When x and y are two variables from [0, 1), uniformly
# distributed, then
#
# cos(2*pi*x)*sqrt(-2*log(1-y))
# sin(2*pi*x)*sqrt(-2*log(1-y))
#
# are two *independent* variables with normal distribution
# (mu = 0, sigma = 1).
# (Lambert Meertens)
# (corrected version; bug discovered by Mike Miller, fixed by LM)
# Multithreading note: When two threads call this function
# simultaneously, it is possible that they will receive the
# same return value. The window is very small though. To
# avoid this, you have to use a lock around all calls. (I
# didn't want to slow this down in the serial case by using a
# lock here.)
random = self.random
z = self.gauss_next
self.gauss_next = None
if z is None:
x2pi = random() * TWOPI
g2rad = _sqrt(-2.0 * _log(1.0 - random()))
z = _cos(x2pi) * g2rad
self.gauss_next = _sin(x2pi) * g2rad
return mu + z*sigma | [
"def",
"gauss",
"(",
"self",
",",
"mu",
",",
"sigma",
")",
":",
"# When x and y are two variables from [0, 1), uniformly",
"# distributed, then",
"#",
"# cos(2*pi*x)*sqrt(-2*log(1-y))",
"# sin(2*pi*x)*sqrt(-2*log(1-y))",
"#",
"# are two *independent* variables with normal distribution",
"# (mu = 0, sigma = 1).",
"# (Lambert Meertens)",
"# (corrected version; bug discovered by Mike Miller, fixed by LM)",
"# Multithreading note: When two threads call this function",
"# simultaneously, it is possible that they will receive the",
"# same return value. The window is very small though. To",
"# avoid this, you have to use a lock around all calls. (I",
"# didn't want to slow this down in the serial case by using a",
"# lock here.)",
"random",
"=",
"self",
".",
"random",
"z",
"=",
"self",
".",
"gauss_next",
"self",
".",
"gauss_next",
"=",
"None",
"if",
"z",
"is",
"None",
":",
"x2pi",
"=",
"random",
"(",
")",
"*",
"TWOPI",
"g2rad",
"=",
"_sqrt",
"(",
"-",
"2.0",
"*",
"_log",
"(",
"1.0",
"-",
"random",
"(",
")",
")",
")",
"z",
"=",
"_cos",
"(",
"x2pi",
")",
"*",
"g2rad",
"self",
".",
"gauss_next",
"=",
"_sin",
"(",
"x2pi",
")",
"*",
"g2rad",
"return",
"mu",
"+",
"z",
"*",
"sigma"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/random.py#L576-L613 |
|
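The `gauss` entry above is the Box-Muller transform: two uniform variates yield two independent standard normals, `cos(2*pi*x) * sqrt(-2*log(1-y))` and `sin(2*pi*x) * sqrt(-2*log(1-y))`, and `Random.gauss` caches the second in `gauss_next` so every other call is nearly free. A minimal uncached version using only the standard library:

```python
import math
import random

def box_muller(mu=0.0, sigma=1.0):
    """Draw one N(mu, sigma) sample via the Box-Muller transform."""
    x, y = random.random(), random.random()
    g2rad = math.sqrt(-2.0 * math.log(1.0 - y))
    z = math.cos(2.0 * math.pi * x) * g2rad
    # The companion value sin(2*pi*x) * g2rad is an independent normal;
    # Random.gauss stores it in gauss_next to halve the work per pair.
    return mu + z * sigma

samples = [box_muller(5.0, 2.0) for _ in range(100_000)]
print(round(sum(samples) / len(samples), 1))  # close to 5.0
```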
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | tools/clang/scripts/run_tool.py | python | _CompilerDispatcher.__ProcessResult | (self, result) | Handles result processing.
Args:
result: The result dictionary returned by _ExecuteTool. | Handles result processing. | [
"Handles",
"result",
"processing",
"."
] | def __ProcessResult(self, result):
"""Handles result processing.
Args:
result: The result dictionary returned by _ExecuteTool.
"""
if result['status']:
self.__success_count += 1
for k, v in result['edits'].iteritems():
self.__edits[k].extend(v)
self.__edit_count += len(v)
else:
self.__failed_count += 1
sys.stdout.write('\nFailed to process %s\n' % result['filename'])
sys.stdout.write(result['stderr'])
sys.stdout.write('\n')
percentage = (float(self.__success_count + self.__failed_count) /
len(self.__filenames)) * 100
sys.stdout.write('Succeeded: %d, Failed: %d, Edits: %d [%.2f%%]\r' %
(self.__success_count, self.__failed_count,
self.__edit_count, percentage))
sys.stdout.flush() | [
"def",
"__ProcessResult",
"(",
"self",
",",
"result",
")",
":",
"if",
"result",
"[",
"'status'",
"]",
":",
"self",
".",
"__success_count",
"+=",
"1",
"for",
"k",
",",
"v",
"in",
"result",
"[",
"'edits'",
"]",
".",
"iteritems",
"(",
")",
":",
"self",
".",
"__edits",
"[",
"k",
"]",
".",
"extend",
"(",
"v",
")",
"self",
".",
"__edit_count",
"+=",
"len",
"(",
"v",
")",
"else",
":",
"self",
".",
"__failed_count",
"+=",
"1",
"sys",
".",
"stdout",
".",
"write",
"(",
"'\\nFailed to process %s\\n'",
"%",
"result",
"[",
"'filename'",
"]",
")",
"sys",
".",
"stdout",
".",
"write",
"(",
"result",
"[",
"'stderr'",
"]",
")",
"sys",
".",
"stdout",
".",
"write",
"(",
"'\\n'",
")",
"percentage",
"=",
"(",
"float",
"(",
"self",
".",
"__success_count",
"+",
"self",
".",
"__failed_count",
")",
"/",
"len",
"(",
"self",
".",
"__filenames",
")",
")",
"*",
"100",
"sys",
".",
"stdout",
".",
"write",
"(",
"'Succeeded: %d, Failed: %d, Edits: %d [%.2f%%]\\r'",
"%",
"(",
"self",
".",
"__success_count",
",",
"self",
".",
"__failed_count",
",",
"self",
".",
"__edit_count",
",",
"percentage",
")",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/clang/scripts/run_tool.py#L188-L209 |
||
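`__ProcessResult` keeps its status display on a single terminal line by ending each write with `\r` and flushing, so the next status overwrites the previous one in place. A tiny sketch of that carriage-return trick:

```python
import sys
import time

total = 20
for done in range(1, total + 1):
    time.sleep(0.05)  # stand-in for per-file work
    percentage = float(done) / total * 100
    # '\r' returns the cursor to column 0 without a newline,
    # so the same line is redrawn on every iteration.
    sys.stdout.write("Succeeded: %d, Failed: 0 [%.2f%%]\r" % (done, percentage))
    sys.stdout.flush()
sys.stdout.write("\n")  # move past the progress line when finished
```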
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/stc.py | python | StyledTextCtrl.GetMarginOptions | (*args, **kwargs) | return _stc.StyledTextCtrl_GetMarginOptions(*args, **kwargs) | GetMarginOptions(self) -> int | GetMarginOptions(self) -> int | [
"GetMarginOptions",
"(",
"self",
")",
"-",
">",
"int"
] | def GetMarginOptions(*args, **kwargs):
"""GetMarginOptions(self) -> int"""
return _stc.StyledTextCtrl_GetMarginOptions(*args, **kwargs) | [
"def",
"GetMarginOptions",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_stc",
".",
"StyledTextCtrl_GetMarginOptions",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/stc.py#L5915-L5917 |
|
papyrussolution/OpenPapyrus | bbfb5ec2ea2109b8e2f125edd838e12eaf7b8b91 | Src/OSF/abseil/absl/abseil.podspec.gen.py | python | generate | (args) | Generates a podspec file from all BUILD files under absl directory. | Generates a podspec file from all BUILD files under absl directory. | [
"Generates",
"a",
"podspec",
"file",
"from",
"all",
"BUILD",
"files",
"under",
"absl",
"directory",
"."
] | def generate(args):
"""Generates a podspec file from all BUILD files under absl directory."""
rules = filter(relevant_rule, collect_rules("absl"))
with open(args.output, "wt") as f:
write_podspec(f, rules, vars(args)) | [
"def",
"generate",
"(",
"args",
")",
":",
"rules",
"=",
"filter",
"(",
"relevant_rule",
",",
"collect_rules",
"(",
"\"absl\"",
")",
")",
"with",
"open",
"(",
"args",
".",
"output",
",",
"\"wt\"",
")",
"as",
"f",
":",
"write_podspec",
"(",
"f",
",",
"rules",
",",
"vars",
"(",
"args",
")",
")"
] | https://github.com/papyrussolution/OpenPapyrus/blob/bbfb5ec2ea2109b8e2f125edd838e12eaf7b8b91/Src/OSF/abseil/absl/abseil.podspec.gen.py#L200-L204 |
||
milvus-io/milvus | 3b1030de2b6c39e3512833e97f6044d63eb24237 | internal/core/build-support/cpplint.py | python | CheckInvalidIncrement | (filename, clean_lines, linenum, error) | Checks for invalid increment *count++.
For example, the following function:
void increment_counter(int* count) {
*count++;
}
is invalid, because it effectively does count++, moving the pointer, and should
be replaced with ++*count, (*count)++ or *count += 1.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found. | Checks for invalid increment *count++. | [
"Checks",
"for",
"invalid",
"increment",
"*",
"count",
"++",
"."
] | def CheckInvalidIncrement(filename, clean_lines, linenum, error):
"""Checks for invalid increment *count++.
For example, the following function:
void increment_counter(int* count) {
*count++;
}
is invalid, because it effectively does count++, moving the pointer, and should
be replaced with ++*count, (*count)++ or *count += 1.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum]
if _RE_PATTERN_INVALID_INCREMENT.match(line):
error(filename, linenum, 'runtime/invalid_increment', 5,
'Changing pointer instead of value (or unused value of operator*).') | [
"def",
"CheckInvalidIncrement",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"if",
"_RE_PATTERN_INVALID_INCREMENT",
".",
"match",
"(",
"line",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'runtime/invalid_increment'",
",",
"5",
",",
"'Changing pointer instead of value (or unused value of operator*).'",
")"
] | https://github.com/milvus-io/milvus/blob/3b1030de2b6c39e3512833e97f6044d63eb24237/internal/core/build-support/cpplint.py#L2660-L2679 |
||
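The check above exists because postfix `++` binds tighter than unary `*` in C++, so `*count++` parses as `*(count++)`: it advances the pointer and discards the dereferenced value. A small Python sketch of the line-level detection; the regex is an approximation of cpplint's `_RE_PATTERN_INVALID_INCREMENT`, which is defined elsewhere in the file:

```python
import re

# Approximation of cpplint's pattern: a statement that only dereferences
# and post-increments/decrements a name, e.g. "*count++;".
invalid_increment = re.compile(r"^\s*\*\w+(\+\+|--);")

for line in ["*count++;", "(*count)++;", "++*count;", "*count += 1;"]:
    verdict = "invalid" if invalid_increment.match(line) else "ok"
    print("%-14s -> %s" % (line, verdict))
# *count++;      -> invalid
# (*count)++;    -> ok
# ++*count;      -> ok
# *count += 1;   -> ok
```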
Kitware/VTK | 5b4df4d90a4f31194d97d3c639dd38ea8f81e8b8 | Wrapping/Python/vtkmodules/util/vtkAlgorithm.py | python | VTKAlgorithm.RequestData | (self, vtkself, request, inInfo, outInfo) | Overwritten by subclass to execute the algorithm. | Overwritten by subclass to execute the algorithm. | [
"Overwritten",
"by",
"subclass",
"to",
"execute",
"the",
"algorithm",
"."
] | def RequestData(self, vtkself, request, inInfo, outInfo):
"""Overwritten by subclass to execute the algorithm."""
raise NotImplementedError('RequestData must be implemented') | [
"def",
"RequestData",
"(",
"self",
",",
"vtkself",
",",
"request",
",",
"inInfo",
",",
"outInfo",
")",
":",
"raise",
"NotImplementedError",
"(",
"'RequestData must be implemented'",
")"
] | https://github.com/Kitware/VTK/blob/5b4df4d90a4f31194d97d3c639dd38ea8f81e8b8/Wrapping/Python/vtkmodules/util/vtkAlgorithm.py#L69-L71 |
||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/numpy/py3/numpy/distutils/ccompiler_opt.py | python | _Parse._parse_policy_werror | (self, has_baseline, final_targets, extra_flags) | return has_baseline, final_targets, extra_flags | force warnings to be treated as errors | force warnings to be treated as errors | [
"force",
"warnings",
"to",
"treated",
"as",
"errors"
] | def _parse_policy_werror(self, has_baseline, final_targets, extra_flags):
"""force warnings to treated as errors"""
flags = self.cc_flags["werror"]
if not flags:
self.dist_log(
"current compiler doesn't support werror flags, "
"warnings will 'not' treated as errors", stderr=True
)
else:
self.dist_log("compiler warnings are treated as errors")
extra_flags += flags
return has_baseline, final_targets, extra_flags | [
"def",
"_parse_policy_werror",
"(",
"self",
",",
"has_baseline",
",",
"final_targets",
",",
"extra_flags",
")",
":",
"flags",
"=",
"self",
".",
"cc_flags",
"[",
"\"werror\"",
"]",
"if",
"not",
"flags",
":",
"self",
".",
"dist_log",
"(",
"\"current compiler doesn't support werror flags, \"",
"\"warnings will 'not' treated as errors\"",
",",
"stderr",
"=",
"True",
")",
"else",
":",
"self",
".",
"dist_log",
"(",
"\"compiler warnings are treated as errors\"",
")",
"extra_flags",
"+=",
"flags",
"return",
"has_baseline",
",",
"final_targets",
",",
"extra_flags"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py3/numpy/distutils/ccompiler_opt.py#L2086-L2097 |
|
esa/pagmo | 80281d549c8f1b470e1489a5d37c8f06b2e429c0 | PyGMO/problem/__init__.py | python | _dejong_ctor | (self, dim=10) | Constructs a De Jong problem (Box-Constrained Continuous Single-Objective)
USAGE: problem.dejong(dim=10)
* dim: problem dimension | Constructs a De Jong problem (Box-Constrained Continuous Single-Objective) | [
"Constructs",
"a",
"De",
"Jong",
"problem",
"(",
"Box",
"-",
"Constrained",
"Continuous",
"Single",
"-",
"Objective",
")"
] | def _dejong_ctor(self, dim=10):
"""
Constructs a De Jong problem (Box-Constrained Continuous Single-Objective)
USAGE: problem.dejong(dim=10)
* dim: problem dimension
"""
# We construct the arg list for the original constructor exposed by
# boost_python
arg_list = []
arg_list.append(dim)
self._orig_init(*arg_list) | [
"def",
"_dejong_ctor",
"(",
"self",
",",
"dim",
"=",
"10",
")",
":",
"# We construct the arg list for the original constructor exposed by",
"# boost_python",
"arg_list",
"=",
"[",
"]",
"arg_list",
".",
"append",
"(",
"dim",
")",
"self",
".",
"_orig_init",
"(",
"*",
"arg_list",
")"
] | https://github.com/esa/pagmo/blob/80281d549c8f1b470e1489a5d37c8f06b2e429c0/PyGMO/problem/__init__.py#L166-L179 |
||
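The De Jong problem constructed above is the classic sphere function, f(x) = sum(x_i^2), a smooth unimodal benchmark whose global minimum is 0 at the origin. A plain-Python sketch of the objective for reference (PyGMO itself only needs the dimension, as the constructor shows):

```python
def dejong_sphere(x):
    """De Jong's first function: f(x) = sum(x_i^2); minimum 0 at x = 0."""
    return sum(xi * xi for xi in x)

print(dejong_sphere([0.0] * 10))        # 0.0 -- the global optimum
print(dejong_sphere([1.0, -2.0, 3.0]))  # 14.0
```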
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/gsutil/third_party/rsa/rsa/key.py | python | PublicKey.load_pkcs1_openssl_der | (cls, keyfile) | return cls._load_pkcs1_der(keyinfo['key'][1:]) | Loads a PKCS#1 DER-encoded public key file from OpenSSL.
@param keyfile: contents of a DER-encoded file that contains the public
key, from OpenSSL.
@return: a PublicKey object | Loads a PKCS#1 DER-encoded public key file from OpenSSL. | [
"Loads",
"a",
"PKCS#1",
"DER",
"-",
"encoded",
"public",
"key",
"file",
"from",
"OpenSSL",
"."
] | def load_pkcs1_openssl_der(cls, keyfile):
'''Loads a PKCS#1 DER-encoded public key file from OpenSSL.
@param keyfile: contents of a DER-encoded file that contains the public
key, from OpenSSL.
@return: a PublicKey object
'''
from rsa.asn1 import OpenSSLPubKey
from pyasn1.codec.der import decoder
from pyasn1.type import univ
(keyinfo, _) = decoder.decode(keyfile, asn1Spec=OpenSSLPubKey())
if keyinfo['header']['oid'] != univ.ObjectIdentifier('1.2.840.113549.1.1.1'):
raise TypeError("This is not a DER-encoded OpenSSL-compatible public key")
return cls._load_pkcs1_der(keyinfo['key'][1:]) | [
"def",
"load_pkcs1_openssl_der",
"(",
"cls",
",",
"keyfile",
")",
":",
"from",
"rsa",
".",
"asn1",
"import",
"OpenSSLPubKey",
"from",
"pyasn1",
".",
"codec",
".",
"der",
"import",
"decoder",
"from",
"pyasn1",
".",
"type",
"import",
"univ",
"(",
"keyinfo",
",",
"_",
")",
"=",
"decoder",
".",
"decode",
"(",
"keyfile",
",",
"asn1Spec",
"=",
"OpenSSLPubKey",
"(",
")",
")",
"if",
"keyinfo",
"[",
"'header'",
"]",
"[",
"'oid'",
"]",
"!=",
"univ",
".",
"ObjectIdentifier",
"(",
"'1.2.840.113549.1.1.1'",
")",
":",
"raise",
"TypeError",
"(",
"\"This is not a DER-encoded OpenSSL-compatible public key\"",
")",
"return",
"cls",
".",
"_load_pkcs1_der",
"(",
"keyinfo",
"[",
"'key'",
"]",
"[",
"1",
":",
"]",
")"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/rsa/rsa/key.py#L222-L239 |
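`load_pkcs1_openssl_der` unwraps OpenSSL's SubjectPublicKeyInfo header, verifies the rsaEncryption OID (1.2.840.113549.1.1.1), and feeds the inner bit string to the plain PKCS#1 parser. A hedged usage sketch with the `rsa` package; the file name is hypothetical, and the PEM-to-DER step uses the package's own `pem` helper with the same marker its `load_pkcs1_openssl_pem` counterpart uses:

```python
import rsa
from rsa import pem

# Assume 'pubkey.pem' holds an OpenSSL-format public key, e.g. the output of:
#   openssl rsa -in key.pem -pubout > pubkey.pem
with open("pubkey.pem", "rb") as f:
    der = pem.load_pem(f.read(), "PUBLIC KEY")

# Strip the SubjectPublicKeyInfo wrapper and parse the PKCS#1 payload.
pub = rsa.PublicKey.load_pkcs1_openssl_der(der)
print(pub.n.bit_length(), pub.e)
```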