nwo (string, 5-58 chars) | sha (string, 40 chars) | path (string, 5-172 chars) | language (string, 1 class) | identifier (string, 1-100 chars) | parameters (string, 2-3.5k chars) | argument_list (string, 1 class) | return_statement (string, 0-21.5k chars) | docstring (string, 2-17k chars) | docstring_summary (string, 0-6.58k chars) | docstring_tokens (sequence) | function (string, 35-55.6k chars) | function_tokens (sequence) | url (string, 89-269 chars) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
xl7dev/BurpSuite | d1d4bd4981a87f2f4c0c9744ad7c476336c813da | Extender/faraday/shell/controller/qt3/emulation.py | python | Emulation.onKeyPress | (self, ev) | char received from the gui | char received from the gui | [
"char",
"received",
"from",
"the",
"gui"
] | def onKeyPress(self, ev):
"""char received from the gui"""
raise NotImplementedError() | [
"def",
"onKeyPress",
"(",
"self",
",",
"ev",
")",
":",
"raise",
"NotImplementedError",
"(",
")"
] | https://github.com/xl7dev/BurpSuite/blob/d1d4bd4981a87f2f4c0c9744ad7c476336c813da/Extender/faraday/shell/controller/qt3/emulation.py#L223-L225 |
||
atom-community/ide-python | c046f9c2421713b34baa22648235541c5bb284fe | lib/debugger/VendorLib/vs-py-debugger/pythonFiles/parso/python/tree.py | python | Class.get_super_arglist | (self) | Returns the `arglist` node that defines the super classes. It returns
None if there are no arguments. | Returns the `arglist` node that defines the super classes. It returns
None if there are no arguments. | [
"Returns",
"the",
"arglist",
"node",
"that",
"defines",
"the",
"super",
"classes",
".",
"It",
"returns",
"None",
"if",
"there",
"are",
"no",
"arguments",
"."
] | def get_super_arglist(self):
"""
Returns the `arglist` node that defines the super classes. It returns
None if there are no arguments.
"""
if self.children[2] != '(': # Has no parentheses
return None
else:
if self.children[3] == ')': # Empty parentheses
return None
else:
return self.children[3] | [
"def",
"get_super_arglist",
"(",
"self",
")",
":",
"if",
"self",
".",
"children",
"[",
"2",
"]",
"!=",
"'('",
":",
"# Has no parentheses",
"return",
"None",
"else",
":",
"if",
"self",
".",
"children",
"[",
"3",
"]",
"==",
"')'",
":",
"# Empty parentheses",
"return",
"None",
"else",
":",
"return",
"self",
".",
"children",
"[",
"3",
"]"
] | https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/lib/debugger/VendorLib/vs-py-debugger/pythonFiles/parso/python/tree.py#L483-L494 |
||
ibuler/jumpserver | 0aa43c7cabc012cf02f39826fdce80f4b7b7654b | jperm/ansible_api.py | python | MyInventory.gen_inventory | (self) | add hosts to inventory. | add hosts to inventory. | [
"add",
"hosts",
"to",
"inventory",
"."
] | def gen_inventory(self):
"""
add hosts to inventory.
"""
if isinstance(self.resource, list):
self.my_add_group(self.resource, 'default_group')
elif isinstance(self.resource, dict):
for groupname, hosts_and_vars in self.resource.iteritems():
self.my_add_group(hosts_and_vars.get("hosts"), groupname, hosts_and_vars.get("vars")) | [
"def",
"gen_inventory",
"(",
"self",
")",
":",
"if",
"isinstance",
"(",
"self",
".",
"resource",
",",
"list",
")",
":",
"self",
".",
"my_add_group",
"(",
"self",
".",
"resource",
",",
"'default_group'",
")",
"elif",
"isinstance",
"(",
"self",
".",
"resource",
",",
"dict",
")",
":",
"for",
"groupname",
",",
"hosts_and_vars",
"in",
"self",
".",
"resource",
".",
"iteritems",
"(",
")",
":",
"self",
".",
"my_add_group",
"(",
"hosts_and_vars",
".",
"get",
"(",
"\"hosts\"",
")",
",",
"groupname",
",",
"hosts_and_vars",
".",
"get",
"(",
"\"vars\"",
")",
")"
] | https://github.com/ibuler/jumpserver/blob/0aa43c7cabc012cf02f39826fdce80f4b7b7654b/jperm/ansible_api.py#L108-L116 |
||
rapidpro/rapidpro | 8b6e58221fff967145f0b3411d85bcc15a0d3e72 | temba/channels/models.py | python | ChannelLog.get_url_display | (self, user, anon_mask) | return self._get_display_value(user, self.url, anon_mask) | Gets the URL as it should be displayed to the given user | Gets the URL as it should be displayed to the given user | [
"Gets",
"the",
"URL",
"as",
"it",
"should",
"be",
"displayed",
"to",
"the",
"given",
"user"
] | def get_url_display(self, user, anon_mask):
"""
Gets the URL as it should be displayed to the given user
"""
return self._get_display_value(user, self.url, anon_mask) | [
"def",
"get_url_display",
"(",
"self",
",",
"user",
",",
"anon_mask",
")",
":",
"return",
"self",
".",
"_get_display_value",
"(",
"user",
",",
"self",
".",
"url",
",",
"anon_mask",
")"
] | https://github.com/rapidpro/rapidpro/blob/8b6e58221fff967145f0b3411d85bcc15a0d3e72/temba/channels/models.py#L1224-L1228 |
|
nodejs/node-convergence-archive | e11fe0c2777561827cdb7207d46b0917ef3c42a7 | deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py | python | XcodeSettings.GetCflagsC | (self, configname) | return cflags_c | Returns flags that need to be added to .c, and .m compilations. | Returns flags that need to be added to .c, and .m compilations. | [
"Returns",
"flags",
"that",
"need",
"to",
"be",
"added",
"to",
".",
"c",
"and",
".",
"m",
"compilations",
"."
] | def GetCflagsC(self, configname):
"""Returns flags that need to be added to .c, and .m compilations."""
self.configname = configname
cflags_c = []
if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi':
cflags_c.append('-ansi')
else:
self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
cflags_c += self._Settings().get('OTHER_CFLAGS', [])
self.configname = None
return cflags_c | [
"def",
"GetCflagsC",
"(",
"self",
",",
"configname",
")",
":",
"self",
".",
"configname",
"=",
"configname",
"cflags_c",
"=",
"[",
"]",
"if",
"self",
".",
"_Settings",
"(",
")",
".",
"get",
"(",
"'GCC_C_LANGUAGE_STANDARD'",
",",
"''",
")",
"==",
"'ansi'",
":",
"cflags_c",
".",
"append",
"(",
"'-ansi'",
")",
"else",
":",
"self",
".",
"_Appendf",
"(",
"cflags_c",
",",
"'GCC_C_LANGUAGE_STANDARD'",
",",
"'-std=%s'",
")",
"cflags_c",
"+=",
"self",
".",
"_Settings",
"(",
")",
".",
"get",
"(",
"'OTHER_CFLAGS'",
",",
"[",
"]",
")",
"self",
".",
"configname",
"=",
"None",
"return",
"cflags_c"
] | https://github.com/nodejs/node-convergence-archive/blob/e11fe0c2777561827cdb7207d46b0917ef3c42a7/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py#L431-L441 |
|
facebookarchive/nuclide | 2a2a0a642d136768b7d2a6d35a652dc5fb77d70a | pkg/nuclide-python-rpc/VendorLib/jedi/evaluate/sys_path.py | python | _get_sys_path_with_egglinks | (sys_path) | return result | Find all paths including those referenced by egg-links.
Egg-link-referenced directories are inserted into path immediately before
the directory on which their links were found. Such directories are not
taken into consideration by normal import mechanism, but they are traversed
when doing pkg_resources.require. | Find all paths including those referenced by egg-links. | [
"Find",
"all",
"paths",
"including",
"those",
"referenced",
"by",
"egg",
"-",
"links",
"."
] | def _get_sys_path_with_egglinks(sys_path):
"""Find all paths including those referenced by egg-links.
Egg-link-referenced directories are inserted into path immediately before
the directory on which their links were found. Such directories are not
taken into consideration by normal import mechanism, but they are traversed
when doing pkg_resources.require.
"""
result = []
for p in sys_path:
# pkg_resources does not define a specific order for egg-link files
# using os.listdir to enumerate them, we're sorting them to have
# reproducible tests.
for egg_link in sorted(glob.glob(os.path.join(p, '*.egg-link'))):
with open(egg_link) as fd:
for line in fd:
line = line.strip()
if line:
result.append(os.path.join(p, line))
# pkg_resources package only interprets the first
# non-empty line in egg-link files.
break
result.append(p)
return result | [
"def",
"_get_sys_path_with_egglinks",
"(",
"sys_path",
")",
":",
"result",
"=",
"[",
"]",
"for",
"p",
"in",
"sys_path",
":",
"# pkg_resources does not define a specific order for egg-link files",
"# using os.listdir to enumerate them, we're sorting them to have",
"# reproducible tests.",
"for",
"egg_link",
"in",
"sorted",
"(",
"glob",
".",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"p",
",",
"'*.egg-link'",
")",
")",
")",
":",
"with",
"open",
"(",
"egg_link",
")",
"as",
"fd",
":",
"for",
"line",
"in",
"fd",
":",
"line",
"=",
"line",
".",
"strip",
"(",
")",
"if",
"line",
":",
"result",
".",
"append",
"(",
"os",
".",
"path",
".",
"join",
"(",
"p",
",",
"line",
")",
")",
"# pkg_resources package only interprets the first",
"# non-empty line in egg-link files.",
"break",
"result",
".",
"append",
"(",
"p",
")",
"return",
"result"
] | https://github.com/facebookarchive/nuclide/blob/2a2a0a642d136768b7d2a6d35a652dc5fb77d70a/pkg/nuclide-python-rpc/VendorLib/jedi/evaluate/sys_path.py#L28-L51 |
|
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | product/Formulator/Widget.py | python | Widget.render_dict | (self, field, value) | return None | This is yet another field rendering. It is designed to allow code to
understand field's value data by providing its type and format when
applicable. | This is yet another field rendering. It is designed to allow code to
understand field's value data by providing its type and format when
applicable. | [
"This",
"is",
"yet",
"another",
"field",
"rendering",
".",
"It",
"is",
"designed",
"to",
"allow",
"code",
"to",
"understand",
"field",
"s",
"value",
"data",
"by",
"providing",
"its",
"type",
"and",
"format",
"when",
"applicable",
"."
] | def render_dict(self, field, value):
"""
This is yet another field rendering. It is designed to allow code to
understand field's value data by providing its type and format when
applicable.
"""
return None | [
"def",
"render_dict",
"(",
"self",
",",
"field",
",",
"value",
")",
":",
"return",
"None"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/Formulator/Widget.py#L227-L233 |
|
mceSystems/node-jsc | 90634f3064fab8e89a85b3942f0cc5054acc86fa | deps/jscshim/webkit/Tools/Scripts/webkitpy/thirdparty/irc/irclib.py | python | ServerConnection.part | (self, channels, message="") | Send a PART command. | Send a PART command. | [
"Send",
"a",
"PART",
"command",
"."
] | def part(self, channels, message=""):
"""Send a PART command."""
if type(channels) == types.StringType:
self.send_raw("PART " + channels + (message and (" " + message)))
else:
self.send_raw("PART " + ",".join(channels) + (message and (" " + message))) | [
"def",
"part",
"(",
"self",
",",
"channels",
",",
"message",
"=",
"\"\"",
")",
":",
"if",
"type",
"(",
"channels",
")",
"==",
"types",
".",
"StringType",
":",
"self",
".",
"send_raw",
"(",
"\"PART \"",
"+",
"channels",
"+",
"(",
"message",
"and",
"(",
"\" \"",
"+",
"message",
")",
")",
")",
"else",
":",
"self",
".",
"send_raw",
"(",
"\"PART \"",
"+",
"\",\"",
".",
"join",
"(",
"channels",
")",
"+",
"(",
"message",
"and",
"(",
"\" \"",
"+",
"message",
")",
")",
")"
] | https://github.com/mceSystems/node-jsc/blob/90634f3064fab8e89a85b3942f0cc5054acc86fa/deps/jscshim/webkit/Tools/Scripts/webkitpy/thirdparty/irc/irclib.py#L744-L749 |
||
nodejs/node-chakracore | 770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43 | deps/v8/PRESUBMIT.py | python | _CheckMacroUndefs | (input_api, output_api) | return [] | Checks that each #define in a .cc file is eventually followed by an #undef.
TODO(clemensh): This check should eventually be enabled for all cc files via
tools/presubmit.py (https://crbug.com/v8/6811). | Checks that each #define in a .cc file is eventually followed by an #undef. | [
"Checks",
"that",
"each",
"#define",
"in",
"a",
".",
"cc",
"file",
"is",
"eventually",
"followed",
"by",
"an",
"#undef",
"."
] | def _CheckMacroUndefs(input_api, output_api):
"""
Checks that each #define in a .cc file is eventually followed by an #undef.
TODO(clemensh): This check should eventually be enabled for all cc files via
tools/presubmit.py (https://crbug.com/v8/6811).
"""
def FilterFile(affected_file):
# Skip header files, as they often define type lists which are used in
# other files.
white_list = (r'.+\.cc',r'.+\.cpp',r'.+\.c')
return input_api.FilterSourceFile(affected_file, white_list=white_list)
def TouchesMacros(f):
for line in f.GenerateScmDiff().splitlines():
if not line.startswith('+') and not line.startswith('-'):
continue
if define_pattern.match(line[1:]) or undef_pattern.match(line[1:]):
return True
return False
define_pattern = input_api.re.compile(r'#define (\w+)')
undef_pattern = input_api.re.compile(r'#undef (\w+)')
errors = []
for f in input_api.AffectedFiles(
file_filter=FilterFile, include_deletes=False):
if not TouchesMacros(f):
continue
defined_macros = dict()
with open(f.LocalPath()) as fh:
line_nr = 0
for line in fh:
line_nr += 1
define_match = define_pattern.match(line)
if define_match:
name = define_match.group(1)
defined_macros[name] = line_nr
undef_match = undef_pattern.match(line)
if undef_match:
name = undef_match.group(1)
if not name in defined_macros:
errors.append('{}:{}: Macro named \'{}\' was not defined before.'
.format(f.LocalPath(), line_nr, name))
else:
del defined_macros[name]
for name, line_nr in sorted(defined_macros.items(), key=lambda e: e[1]):
errors.append('{}:{}: Macro missing #undef: {}'
.format(f.LocalPath(), line_nr, name))
if errors:
return [output_api.PresubmitPromptOrNotify(
'Detected mismatches in #define / #undef in the file(s) where you '
'modified preprocessor macros.',
errors)]
return [] | [
"def",
"_CheckMacroUndefs",
"(",
"input_api",
",",
"output_api",
")",
":",
"def",
"FilterFile",
"(",
"affected_file",
")",
":",
"# Skip header files, as they often define type lists which are used in",
"# other files.",
"white_list",
"=",
"(",
"r'.+\\.cc'",
",",
"r'.+\\.cpp'",
",",
"r'.+\\.c'",
")",
"return",
"input_api",
".",
"FilterSourceFile",
"(",
"affected_file",
",",
"white_list",
"=",
"white_list",
")",
"def",
"TouchesMacros",
"(",
"f",
")",
":",
"for",
"line",
"in",
"f",
".",
"GenerateScmDiff",
"(",
")",
".",
"splitlines",
"(",
")",
":",
"if",
"not",
"line",
".",
"startswith",
"(",
"'+'",
")",
"and",
"not",
"line",
".",
"startswith",
"(",
"'-'",
")",
":",
"continue",
"if",
"define_pattern",
".",
"match",
"(",
"line",
"[",
"1",
":",
"]",
")",
"or",
"undef_pattern",
".",
"match",
"(",
"line",
"[",
"1",
":",
"]",
")",
":",
"return",
"True",
"return",
"False",
"define_pattern",
"=",
"input_api",
".",
"re",
".",
"compile",
"(",
"r'#define (\\w+)'",
")",
"undef_pattern",
"=",
"input_api",
".",
"re",
".",
"compile",
"(",
"r'#undef (\\w+)'",
")",
"errors",
"=",
"[",
"]",
"for",
"f",
"in",
"input_api",
".",
"AffectedFiles",
"(",
"file_filter",
"=",
"FilterFile",
",",
"include_deletes",
"=",
"False",
")",
":",
"if",
"not",
"TouchesMacros",
"(",
"f",
")",
":",
"continue",
"defined_macros",
"=",
"dict",
"(",
")",
"with",
"open",
"(",
"f",
".",
"LocalPath",
"(",
")",
")",
"as",
"fh",
":",
"line_nr",
"=",
"0",
"for",
"line",
"in",
"fh",
":",
"line_nr",
"+=",
"1",
"define_match",
"=",
"define_pattern",
".",
"match",
"(",
"line",
")",
"if",
"define_match",
":",
"name",
"=",
"define_match",
".",
"group",
"(",
"1",
")",
"defined_macros",
"[",
"name",
"]",
"=",
"line_nr",
"undef_match",
"=",
"undef_pattern",
".",
"match",
"(",
"line",
")",
"if",
"undef_match",
":",
"name",
"=",
"undef_match",
".",
"group",
"(",
"1",
")",
"if",
"not",
"name",
"in",
"defined_macros",
":",
"errors",
".",
"append",
"(",
"'{}:{}: Macro named \\'{}\\' was not defined before.'",
".",
"format",
"(",
"f",
".",
"LocalPath",
"(",
")",
",",
"line_nr",
",",
"name",
")",
")",
"else",
":",
"del",
"defined_macros",
"[",
"name",
"]",
"for",
"name",
",",
"line_nr",
"in",
"sorted",
"(",
"defined_macros",
".",
"items",
"(",
")",
",",
"key",
"=",
"lambda",
"e",
":",
"e",
"[",
"1",
"]",
")",
":",
"errors",
".",
"append",
"(",
"'{}:{}: Macro missing #undef: {}'",
".",
"format",
"(",
"f",
".",
"LocalPath",
"(",
")",
",",
"line_nr",
",",
"name",
")",
")",
"if",
"errors",
":",
"return",
"[",
"output_api",
".",
"PresubmitPromptOrNotify",
"(",
"'Detected mismatches in #define / #undef in the file(s) where you '",
"'modified preprocessor macros.'",
",",
"errors",
")",
"]",
"return",
"[",
"]"
] | https://github.com/nodejs/node-chakracore/blob/770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43/deps/v8/PRESUBMIT.py#L375-L432 |
|
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | product/ERP5Type/ERP5Type.py | python | ERP5TypeInformation.constructTempInstance | (self, container, id, *args, **kw ) | return self.constructInstance(container, id, temp_object=1, *args, **kw) | All ERP5Type.Document.newTempXXXX are constructTempInstance methods | All ERP5Type.Document.newTempXXXX are constructTempInstance methods | [
"All",
"ERP5Type",
".",
"Document",
".",
"newTempXXXX",
"are",
"constructTempInstance",
"methods"
] | def constructTempInstance(self, container, id, *args, **kw ):
"""
All ERP5Type.Document.newTempXXXX are constructTempInstance methods
"""
return self.constructInstance(container, id, temp_object=1, *args, **kw) | [
"def",
"constructTempInstance",
"(",
"self",
",",
"container",
",",
"id",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"return",
"self",
".",
"constructInstance",
"(",
"container",
",",
"id",
",",
"temp_object",
"=",
"1",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ERP5Type/ERP5Type.py#L362-L366 |
|
redapple0204/my-boring-python | 1ab378e9d4f39ad920ff542ef3b2db68f0575a98 | pythonenv3.8/lib/python3.8/site-packages/pip/_internal/operations/prepare.py | python | RequirementPreparer.prepare_linked_requirement | (
self,
req, # type: InstallRequirement
session, # type: PipSession
finder, # type: PackageFinder
upgrade_allowed, # type: bool
require_hashes # type: bool
) | return abstract_dist | Prepare a requirement that would be obtained from req.link | Prepare a requirement that would be obtained from req.link | [
"Prepare",
"a",
"requirement",
"that",
"would",
"be",
"obtained",
"from",
"req",
".",
"link"
] | def prepare_linked_requirement(
self,
req, # type: InstallRequirement
session, # type: PipSession
finder, # type: PackageFinder
upgrade_allowed, # type: bool
require_hashes # type: bool
):
# type: (...) -> AbstractDistribution
"""Prepare a requirement that would be obtained from req.link
"""
# TODO: Breakup into smaller functions
if req.link and req.link.scheme == 'file':
path = url_to_path(req.link.url)
logger.info('Processing %s', display_path(path))
else:
logger.info('Collecting %s', req)
with indent_log():
# @@ if filesystem packages are not marked
# editable in a req, a non deterministic error
# occurs when the script attempts to unpack the
# build directory
req.ensure_has_source_dir(self.build_dir)
# If a checkout exists, it's unwise to keep going. version
# inconsistencies are logged later, but do not fail the
# installation.
# FIXME: this won't upgrade when there's an existing
# package unpacked in `req.source_dir`
# package unpacked in `req.source_dir`
if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
raise PreviousBuildDirError(
"pip can't proceed with requirements '%s' due to a"
" pre-existing build directory (%s). This is "
"likely due to a previous installation that failed"
". pip is being responsible and not assuming it "
"can delete this. Please delete it and try again."
% (req, req.source_dir)
)
req.populate_link(finder, upgrade_allowed, require_hashes)
# We can't hit this spot and have populate_link return None.
# req.satisfied_by is None here (because we're
# guarded) and upgrade has no impact except when satisfied_by
# is not None.
# Then inside find_requirement existing_applicable -> False
# If no new versions are found, DistributionNotFound is raised,
# otherwise a result is guaranteed.
assert req.link
link = req.link
# Now that we have the real link, we can tell what kind of
# requirements we have and raise some more informative errors
# than otherwise. (For example, we can raise VcsHashUnsupported
# for a VCS URL rather than HashMissing.)
if require_hashes:
# We could check these first 2 conditions inside
# unpack_url and save repetition of conditions, but then
# we would report less-useful error messages for
# unhashable requirements, complaining that there's no
# hash provided.
if is_vcs_url(link):
raise VcsHashUnsupported()
elif is_file_url(link) and is_dir_url(link):
raise DirectoryUrlHashUnsupported()
if not req.original_link and not req.is_pinned:
# Unpinned packages are asking for trouble when a new
# version is uploaded. This isn't a security check, but
# it saves users a surprising hash mismatch in the
# future.
#
# file:/// URLs aren't pinnable, so don't complain
# about them not being pinned.
raise HashUnpinned()
hashes = req.hashes(trust_internet=not require_hashes)
if require_hashes and not hashes:
# Known-good hashes are missing for this requirement, so
# shim it with a facade object that will provoke hash
# computation and then raise a HashMissing exception
# showing the user what the hash should be.
hashes = MissingHashes()
try:
download_dir = self.download_dir
# We always delete unpacked sdists after pip ran.
autodelete_unpacked = True
if req.link.is_wheel and self.wheel_download_dir:
# when doing 'pip wheel` we download wheels to a
# dedicated dir.
download_dir = self.wheel_download_dir
if req.link.is_wheel:
if download_dir:
# When downloading, we only unpack wheels to get
# metadata.
autodelete_unpacked = True
else:
# When installing a wheel, we use the unpacked
# wheel.
autodelete_unpacked = False
unpack_url(
req.link, req.source_dir,
download_dir, autodelete_unpacked,
session=session, hashes=hashes,
progress_bar=self.progress_bar
)
except requests.HTTPError as exc:
logger.critical(
'Could not install requirement %s because of error %s',
req,
exc,
)
raise InstallationError(
'Could not install requirement %s because of HTTP '
'error %s for URL %s' %
(req, exc, req.link)
)
abstract_dist = make_distribution_for_install_requirement(req)
with self.req_tracker.track(req):
abstract_dist.prepare_distribution_metadata(
finder, self.build_isolation,
)
if self._download_should_save:
# Make a .zip of the source_dir we already created.
if not req.link.is_artifact:
req.archive(self.download_dir)
return abstract_dist | [
"def",
"prepare_linked_requirement",
"(",
"self",
",",
"req",
",",
"# type: InstallRequirement",
"session",
",",
"# type: PipSession",
"finder",
",",
"# type: PackageFinder",
"upgrade_allowed",
",",
"# type: bool",
"require_hashes",
"# type: bool",
")",
":",
"# type: (...) -> AbstractDistribution",
"# TODO: Breakup into smaller functions",
"if",
"req",
".",
"link",
"and",
"req",
".",
"link",
".",
"scheme",
"==",
"'file'",
":",
"path",
"=",
"url_to_path",
"(",
"req",
".",
"link",
".",
"url",
")",
"logger",
".",
"info",
"(",
"'Processing %s'",
",",
"display_path",
"(",
"path",
")",
")",
"else",
":",
"logger",
".",
"info",
"(",
"'Collecting %s'",
",",
"req",
")",
"with",
"indent_log",
"(",
")",
":",
"# @@ if filesystem packages are not marked",
"# editable in a req, a non deterministic error",
"# occurs when the script attempts to unpack the",
"# build directory",
"req",
".",
"ensure_has_source_dir",
"(",
"self",
".",
"build_dir",
")",
"# If a checkout exists, it's unwise to keep going. version",
"# inconsistencies are logged later, but do not fail the",
"# installation.",
"# FIXME: this won't upgrade when there's an existing",
"# package unpacked in `req.source_dir`",
"# package unpacked in `req.source_dir`",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"join",
"(",
"req",
".",
"source_dir",
",",
"'setup.py'",
")",
")",
":",
"raise",
"PreviousBuildDirError",
"(",
"\"pip can't proceed with requirements '%s' due to a\"",
"\" pre-existing build directory (%s). This is \"",
"\"likely due to a previous installation that failed\"",
"\". pip is being responsible and not assuming it \"",
"\"can delete this. Please delete it and try again.\"",
"%",
"(",
"req",
",",
"req",
".",
"source_dir",
")",
")",
"req",
".",
"populate_link",
"(",
"finder",
",",
"upgrade_allowed",
",",
"require_hashes",
")",
"# We can't hit this spot and have populate_link return None.",
"# req.satisfied_by is None here (because we're",
"# guarded) and upgrade has no impact except when satisfied_by",
"# is not None.",
"# Then inside find_requirement existing_applicable -> False",
"# If no new versions are found, DistributionNotFound is raised,",
"# otherwise a result is guaranteed.",
"assert",
"req",
".",
"link",
"link",
"=",
"req",
".",
"link",
"# Now that we have the real link, we can tell what kind of",
"# requirements we have and raise some more informative errors",
"# than otherwise. (For example, we can raise VcsHashUnsupported",
"# for a VCS URL rather than HashMissing.)",
"if",
"require_hashes",
":",
"# We could check these first 2 conditions inside",
"# unpack_url and save repetition of conditions, but then",
"# we would report less-useful error messages for",
"# unhashable requirements, complaining that there's no",
"# hash provided.",
"if",
"is_vcs_url",
"(",
"link",
")",
":",
"raise",
"VcsHashUnsupported",
"(",
")",
"elif",
"is_file_url",
"(",
"link",
")",
"and",
"is_dir_url",
"(",
"link",
")",
":",
"raise",
"DirectoryUrlHashUnsupported",
"(",
")",
"if",
"not",
"req",
".",
"original_link",
"and",
"not",
"req",
".",
"is_pinned",
":",
"# Unpinned packages are asking for trouble when a new",
"# version is uploaded. This isn't a security check, but",
"# it saves users a surprising hash mismatch in the",
"# future.",
"#",
"# file:/// URLs aren't pinnable, so don't complain",
"# about them not being pinned.",
"raise",
"HashUnpinned",
"(",
")",
"hashes",
"=",
"req",
".",
"hashes",
"(",
"trust_internet",
"=",
"not",
"require_hashes",
")",
"if",
"require_hashes",
"and",
"not",
"hashes",
":",
"# Known-good hashes are missing for this requirement, so",
"# shim it with a facade object that will provoke hash",
"# computation and then raise a HashMissing exception",
"# showing the user what the hash should be.",
"hashes",
"=",
"MissingHashes",
"(",
")",
"try",
":",
"download_dir",
"=",
"self",
".",
"download_dir",
"# We always delete unpacked sdists after pip ran.",
"autodelete_unpacked",
"=",
"True",
"if",
"req",
".",
"link",
".",
"is_wheel",
"and",
"self",
".",
"wheel_download_dir",
":",
"# when doing 'pip wheel` we download wheels to a",
"# dedicated dir.",
"download_dir",
"=",
"self",
".",
"wheel_download_dir",
"if",
"req",
".",
"link",
".",
"is_wheel",
":",
"if",
"download_dir",
":",
"# When downloading, we only unpack wheels to get",
"# metadata.",
"autodelete_unpacked",
"=",
"True",
"else",
":",
"# When installing a wheel, we use the unpacked",
"# wheel.",
"autodelete_unpacked",
"=",
"False",
"unpack_url",
"(",
"req",
".",
"link",
",",
"req",
".",
"source_dir",
",",
"download_dir",
",",
"autodelete_unpacked",
",",
"session",
"=",
"session",
",",
"hashes",
"=",
"hashes",
",",
"progress_bar",
"=",
"self",
".",
"progress_bar",
")",
"except",
"requests",
".",
"HTTPError",
"as",
"exc",
":",
"logger",
".",
"critical",
"(",
"'Could not install requirement %s because of error %s'",
",",
"req",
",",
"exc",
",",
")",
"raise",
"InstallationError",
"(",
"'Could not install requirement %s because of HTTP '",
"'error %s for URL %s'",
"%",
"(",
"req",
",",
"exc",
",",
"req",
".",
"link",
")",
")",
"abstract_dist",
"=",
"make_distribution_for_install_requirement",
"(",
"req",
")",
"with",
"self",
".",
"req_tracker",
".",
"track",
"(",
"req",
")",
":",
"abstract_dist",
".",
"prepare_distribution_metadata",
"(",
"finder",
",",
"self",
".",
"build_isolation",
",",
")",
"if",
"self",
".",
"_download_should_save",
":",
"# Make a .zip of the source_dir we already created.",
"if",
"not",
"req",
".",
"link",
".",
"is_artifact",
":",
"req",
".",
"archive",
"(",
"self",
".",
"download_dir",
")",
"return",
"abstract_dist"
] | https://github.com/redapple0204/my-boring-python/blob/1ab378e9d4f39ad920ff542ef3b2db68f0575a98/pythonenv3.8/lib/python3.8/site-packages/pip/_internal/operations/prepare.py#L95-L221 |
|
catmaid/CATMAID | 9f3312f2eacfc6fab48e4c6f1bd24672cc9c9ecf | django/applications/catmaid/control/landmarks.py | python | LandmarkGroupList.put | (self, request:Request, project_id) | return Response(serializer.data) | Add a new landmarkgroup. Expect at least the name as parameter.
---
parameters:
- name: project_id
description: Project of landmark group
type: integer
paramType: path
required: true
- name: name
description: Name of new landmark group
type: string
required: true | Add a new landmarkgroup. Expect at least the name as parameter.
---
parameters:
- name: project_id
description: Project of landmark group
type: integer
paramType: path
required: true
- name: name
description: Name of new landmark group
type: string
required: true | [
"Add",
"a",
"new",
"landmarkgroup",
".",
"Expect",
"at",
"least",
"the",
"name",
"as",
"parameter",
".",
"---",
"parameters",
":",
"-",
"name",
":",
"project_id",
"description",
":",
"Project",
"of",
"landmark",
"group",
"type",
":",
"integer",
"paramType",
":",
"path",
"required",
":",
"true",
"-",
"name",
":",
"name",
"description",
":",
"Name",
"of",
"new",
"landmark",
"group",
"type",
":",
"string",
"required",
":",
"true"
] | def put(self, request:Request, project_id) -> Response:
"""Add a new landmarkgroup. Expect at least the name as parameter.
---
parameters:
- name: project_id
description: Project of landmark group
type: integer
paramType: path
required: true
- name: name
description: Name of new landmark group
type: string
required: true
"""
name = request.data.get('name')
landmarkgroup_class = Class.objects.get(project_id=project_id, class_name='landmarkgroup')
# Prevent creation of duplicate landmark group classes
existing_landmarkgroups = ClassInstance.objects.filter(project_id=project_id,
name=name, class_column=landmarkgroup_class)
if existing_landmarkgroups:
raise ValueError(f"There is already a landmark group with name {name}")
landmarkgroup = ClassInstance.objects.create(project_id=project_id,
class_column=landmarkgroup_class, user=request.user,
name=name)
landmarkgroup.save()
serializer = BasicClassInstanceSerializer(landmarkgroup)
return Response(serializer.data) | [
"def",
"put",
"(",
"self",
",",
"request",
":",
"Request",
",",
"project_id",
")",
"->",
"Response",
":",
"name",
"=",
"request",
".",
"data",
".",
"get",
"(",
"'name'",
")",
"landmarkgroup_class",
"=",
"Class",
".",
"objects",
".",
"get",
"(",
"project_id",
"=",
"project_id",
",",
"class_name",
"=",
"'landmarkgroup'",
")",
"# Prevent creation of duplicate landmark group classes",
"existing_landmarkgroups",
"=",
"ClassInstance",
".",
"objects",
".",
"filter",
"(",
"project_id",
"=",
"project_id",
",",
"name",
"=",
"name",
",",
"class_column",
"=",
"landmarkgroup_class",
")",
"if",
"existing_landmarkgroups",
":",
"raise",
"ValueError",
"(",
"f\"There is already a landmark group with name {name}\"",
")",
"landmarkgroup",
"=",
"ClassInstance",
".",
"objects",
".",
"create",
"(",
"project_id",
"=",
"project_id",
",",
"class_column",
"=",
"landmarkgroup_class",
",",
"user",
"=",
"request",
".",
"user",
",",
"name",
"=",
"name",
")",
"landmarkgroup",
".",
"save",
"(",
")",
"serializer",
"=",
"BasicClassInstanceSerializer",
"(",
"landmarkgroup",
")",
"return",
"Response",
"(",
"serializer",
".",
"data",
")"
] | https://github.com/catmaid/CATMAID/blob/9f3312f2eacfc6fab48e4c6f1bd24672cc9c9ecf/django/applications/catmaid/control/landmarks.py#L440-L469 |
|
atom-community/ide-python | c046f9c2421713b34baa22648235541c5bb284fe | dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/window.py | python | Window.post | (self, uMsg, wParam = None, lParam = None) | Post a low-level window message asyncronically.
@type uMsg: int
@param uMsg: Message code.
@param wParam:
The type and meaning of this parameter depends on the message.
@param lParam:
The type and meaning of this parameter depends on the message.
@raise WindowsError: An error occured while sending the message. | Post a low-level window message asyncronically. | [
"Post",
"a",
"low",
"-",
"level",
"window",
"message",
"asyncronically",
"."
] | def post(self, uMsg, wParam = None, lParam = None):
"""
Post a low-level window message asyncronically.
@type uMsg: int
@param uMsg: Message code.
@param wParam:
The type and meaning of this parameter depends on the message.
@param lParam:
The type and meaning of this parameter depends on the message.
@raise WindowsError: An error occured while sending the message.
"""
win32.PostMessage(self.get_handle(), uMsg, wParam, lParam) | [
"def",
"post",
"(",
"self",
",",
"uMsg",
",",
"wParam",
"=",
"None",
",",
"lParam",
"=",
"None",
")",
":",
"win32",
".",
"PostMessage",
"(",
"self",
".",
"get_handle",
"(",
")",
",",
"uMsg",
",",
"wParam",
",",
"lParam",
")"
] | https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/window.py#L744-L759 |
||
nodejs/http2 | 734ad72e3939e62bcff0f686b8ec426b8aaa22e3 | tools/gyp/pylib/gyp/generator/cmake.py | python | WriteActions | (target_name, actions, extra_sources, extra_deps,
path_to_gyp, output) | Write CMake for the 'actions' in the target.
Args:
target_name: the name of the CMake target being generated.
actions: the Gyp 'actions' dict for this target.
extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
extra_deps: [<cmake_taget>] to append with generated targets.
path_to_gyp: relative path from CMakeLists.txt being generated to
the Gyp file in which the target being generated is defined. | Write CMake for the 'actions' in the target. | [
"Write",
"CMake",
"for",
"the",
"actions",
"in",
"the",
"target",
"."
] | def WriteActions(target_name, actions, extra_sources, extra_deps,
path_to_gyp, output):
"""Write CMake for the 'actions' in the target.
Args:
target_name: the name of the CMake target being generated.
actions: the Gyp 'actions' dict for this target.
extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
extra_deps: [<cmake_taget>] to append with generated targets.
path_to_gyp: relative path from CMakeLists.txt being generated to
the Gyp file in which the target being generated is defined.
"""
for action in actions:
action_name = StringToCMakeTargetName(action['action_name'])
action_target_name = '%s__%s' % (target_name, action_name)
inputs = action['inputs']
inputs_name = action_target_name + '__input'
SetVariableList(output, inputs_name,
[NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
outputs = action['outputs']
cmake_outputs = [NormjoinPathForceCMakeSource(path_to_gyp, out)
for out in outputs]
outputs_name = action_target_name + '__output'
SetVariableList(output, outputs_name, cmake_outputs)
# Build up a list of outputs.
# Collect the output dirs we'll need.
dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
if int(action.get('process_outputs_as_sources', False)):
extra_sources.extend(zip(cmake_outputs, outputs))
# add_custom_command
output.write('add_custom_command(OUTPUT ')
WriteVariable(output, outputs_name)
output.write('\n')
if len(dirs) > 0:
for directory in dirs:
output.write(' COMMAND ${CMAKE_COMMAND} -E make_directory ')
output.write(directory)
output.write('\n')
output.write(' COMMAND ')
output.write(gyp.common.EncodePOSIXShellList(action['action']))
output.write('\n')
output.write(' DEPENDS ')
WriteVariable(output, inputs_name)
output.write('\n')
output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
output.write(path_to_gyp)
output.write('\n')
output.write(' COMMENT ')
if 'message' in action:
output.write(action['message'])
else:
output.write(action_target_name)
output.write('\n')
output.write(' VERBATIM\n')
output.write(')\n')
# add_custom_target
output.write('add_custom_target(')
output.write(action_target_name)
output.write('\n DEPENDS ')
WriteVariable(output, outputs_name)
output.write('\n SOURCES ')
WriteVariable(output, inputs_name)
output.write('\n)\n')
extra_deps.append(action_target_name) | [
"def",
"WriteActions",
"(",
"target_name",
",",
"actions",
",",
"extra_sources",
",",
"extra_deps",
",",
"path_to_gyp",
",",
"output",
")",
":",
"for",
"action",
"in",
"actions",
":",
"action_name",
"=",
"StringToCMakeTargetName",
"(",
"action",
"[",
"'action_name'",
"]",
")",
"action_target_name",
"=",
"'%s__%s'",
"%",
"(",
"target_name",
",",
"action_name",
")",
"inputs",
"=",
"action",
"[",
"'inputs'",
"]",
"inputs_name",
"=",
"action_target_name",
"+",
"'__input'",
"SetVariableList",
"(",
"output",
",",
"inputs_name",
",",
"[",
"NormjoinPathForceCMakeSource",
"(",
"path_to_gyp",
",",
"dep",
")",
"for",
"dep",
"in",
"inputs",
"]",
")",
"outputs",
"=",
"action",
"[",
"'outputs'",
"]",
"cmake_outputs",
"=",
"[",
"NormjoinPathForceCMakeSource",
"(",
"path_to_gyp",
",",
"out",
")",
"for",
"out",
"in",
"outputs",
"]",
"outputs_name",
"=",
"action_target_name",
"+",
"'__output'",
"SetVariableList",
"(",
"output",
",",
"outputs_name",
",",
"cmake_outputs",
")",
"# Build up a list of outputs.",
"# Collect the output dirs we'll need.",
"dirs",
"=",
"set",
"(",
"dir",
"for",
"dir",
"in",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"o",
")",
"for",
"o",
"in",
"outputs",
")",
"if",
"dir",
")",
"if",
"int",
"(",
"action",
".",
"get",
"(",
"'process_outputs_as_sources'",
",",
"False",
")",
")",
":",
"extra_sources",
".",
"extend",
"(",
"zip",
"(",
"cmake_outputs",
",",
"outputs",
")",
")",
"# add_custom_command",
"output",
".",
"write",
"(",
"'add_custom_command(OUTPUT '",
")",
"WriteVariable",
"(",
"output",
",",
"outputs_name",
")",
"output",
".",
"write",
"(",
"'\\n'",
")",
"if",
"len",
"(",
"dirs",
")",
">",
"0",
":",
"for",
"directory",
"in",
"dirs",
":",
"output",
".",
"write",
"(",
"' COMMAND ${CMAKE_COMMAND} -E make_directory '",
")",
"output",
".",
"write",
"(",
"directory",
")",
"output",
".",
"write",
"(",
"'\\n'",
")",
"output",
".",
"write",
"(",
"' COMMAND '",
")",
"output",
".",
"write",
"(",
"gyp",
".",
"common",
".",
"EncodePOSIXShellList",
"(",
"action",
"[",
"'action'",
"]",
")",
")",
"output",
".",
"write",
"(",
"'\\n'",
")",
"output",
".",
"write",
"(",
"' DEPENDS '",
")",
"WriteVariable",
"(",
"output",
",",
"inputs_name",
")",
"output",
".",
"write",
"(",
"'\\n'",
")",
"output",
".",
"write",
"(",
"' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/'",
")",
"output",
".",
"write",
"(",
"path_to_gyp",
")",
"output",
".",
"write",
"(",
"'\\n'",
")",
"output",
".",
"write",
"(",
"' COMMENT '",
")",
"if",
"'message'",
"in",
"action",
":",
"output",
".",
"write",
"(",
"action",
"[",
"'message'",
"]",
")",
"else",
":",
"output",
".",
"write",
"(",
"action_target_name",
")",
"output",
".",
"write",
"(",
"'\\n'",
")",
"output",
".",
"write",
"(",
"' VERBATIM\\n'",
")",
"output",
".",
"write",
"(",
"')\\n'",
")",
"# add_custom_target",
"output",
".",
"write",
"(",
"'add_custom_target('",
")",
"output",
".",
"write",
"(",
"action_target_name",
")",
"output",
".",
"write",
"(",
"'\\n DEPENDS '",
")",
"WriteVariable",
"(",
"output",
",",
"outputs_name",
")",
"output",
".",
"write",
"(",
"'\\n SOURCES '",
")",
"WriteVariable",
"(",
"output",
",",
"inputs_name",
")",
"output",
".",
"write",
"(",
"'\\n)\\n'",
")",
"extra_deps",
".",
"append",
"(",
"action_target_name",
")"
] | https://github.com/nodejs/http2/blob/734ad72e3939e62bcff0f686b8ec426b8aaa22e3/tools/gyp/pylib/gyp/generator/cmake.py#L244-L320 |
||
redapple0204/my-boring-python | 1ab378e9d4f39ad920ff542ef3b2db68f0575a98 | pythonenv3.8/lib/python3.8/site-packages/pip/_vendor/distlib/database.py | python | InstalledDistribution.get_distinfo_file | (self, path) | return os.path.join(self.path, path) | Returns a path located under the ``.dist-info`` directory. Returns a
string representing the path.
:parameter path: a ``'/'``-separated path relative to the
``.dist-info`` directory or an absolute path;
If *path* is an absolute path and doesn't start
with the ``.dist-info`` directory path,
a :class:`DistlibException` is raised
:type path: str
:rtype: str | Returns a path located under the ``.dist-info`` directory. Returns a
string representing the path. | [
"Returns",
"a",
"path",
"located",
"under",
"the",
".",
"dist",
"-",
"info",
"directory",
".",
"Returns",
"a",
"string",
"representing",
"the",
"path",
"."
] | def get_distinfo_file(self, path):
"""
Returns a path located under the ``.dist-info`` directory. Returns a
string representing the path.
:parameter path: a ``'/'``-separated path relative to the
``.dist-info`` directory or an absolute path;
If *path* is an absolute path and doesn't start
with the ``.dist-info`` directory path,
a :class:`DistlibException` is raised
:type path: str
:rtype: str
"""
# Check if it is an absolute path # XXX use relpath, add tests
if path.find(os.sep) >= 0:
# it's an absolute path?
distinfo_dirname, path = path.split(os.sep)[-2:]
if distinfo_dirname != self.path.split(os.sep)[-1]:
raise DistlibException(
'dist-info file %r does not belong to the %r %s '
'distribution' % (path, self.name, self.version))
# The file must be relative
if path not in DIST_FILES:
raise DistlibException('invalid path for a dist-info file: '
'%r at %r' % (path, self.path))
return os.path.join(self.path, path) | [
"def",
"get_distinfo_file",
"(",
"self",
",",
"path",
")",
":",
"# Check if it is an absolute path # XXX use relpath, add tests",
"if",
"path",
".",
"find",
"(",
"os",
".",
"sep",
")",
">=",
"0",
":",
"# it's an absolute path?",
"distinfo_dirname",
",",
"path",
"=",
"path",
".",
"split",
"(",
"os",
".",
"sep",
")",
"[",
"-",
"2",
":",
"]",
"if",
"distinfo_dirname",
"!=",
"self",
".",
"path",
".",
"split",
"(",
"os",
".",
"sep",
")",
"[",
"-",
"1",
"]",
":",
"raise",
"DistlibException",
"(",
"'dist-info file %r does not belong to the %r %s '",
"'distribution'",
"%",
"(",
"path",
",",
"self",
".",
"name",
",",
"self",
".",
"version",
")",
")",
"# The file must be relative",
"if",
"path",
"not",
"in",
"DIST_FILES",
":",
"raise",
"DistlibException",
"(",
"'invalid path for a dist-info file: '",
"'%r at %r'",
"%",
"(",
"path",
",",
"self",
".",
"path",
")",
")",
"return",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"path",
",",
"path",
")"
] | https://github.com/redapple0204/my-boring-python/blob/1ab378e9d4f39ad920ff542ef3b2db68f0575a98/pythonenv3.8/lib/python3.8/site-packages/pip/_vendor/distlib/database.py#L804-L831 |
|
KhronosGroup/Vulkan-Docs | ee155139142a2a71b56238419bf0a6859f7b0a93 | scripts/reflow.py | python | ReflowState.endSentence | (self, word) | return True | Return True if word ends with a sentence-period, False otherwise.
Allows for contraction cases which will not end a line:
- A single letter (if breakInitial is True)
- Abbreviations: 'c.f.', 'e.g.', 'i.e.' (or mixed-case versions) | Return True if word ends with a sentence-period, False otherwise. | [
"Return",
"True",
"if",
"word",
"ends",
"with",
"a",
"sentence",
"-",
"period",
"False",
"otherwise",
"."
] | def endSentence(self, word):
"""Return True if word ends with a sentence-period, False otherwise.
Allows for contraction cases which will not end a line:
- A single letter (if breakInitial is True)
- Abbreviations: 'c.f.', 'e.g.', 'i.e.' (or mixed-case versions)"""
if (word[-1:] != '.' or
endAbbrev.search(word) or
(self.breakInitial and endInitial.match(word))):
return False
return True | [
"def",
"endSentence",
"(",
"self",
",",
"word",
")",
":",
"if",
"(",
"word",
"[",
"-",
"1",
":",
"]",
"!=",
"'.'",
"or",
"endAbbrev",
".",
"search",
"(",
"word",
")",
"or",
"(",
"self",
".",
"breakInitial",
"and",
"endInitial",
".",
"match",
"(",
"word",
")",
")",
")",
":",
"return",
"False",
"return",
"True"
] | https://github.com/KhronosGroup/Vulkan-Docs/blob/ee155139142a2a71b56238419bf0a6859f7b0a93/scripts/reflow.py#L212-L224 |
|
jumpserver/jumpserver | acdde5a236db1bfdb84268a1871b572ef4849974 | apps/assets/tasks/push_system_user.py | python | push_system_user_a_asset_manual | (system_user, asset, username=None) | return push_system_user_util(system_user, [asset], task_name=task_name, username=username) | 将系统用户推送到一个资产上 | 将系统用户推送到一个资产上 | [
"将系统用户推送到一个资产上"
] | def push_system_user_a_asset_manual(system_user, asset, username=None):
"""
将系统用户推送到一个资产上
"""
# if username is None:
# username = system_user.username
task_name = gettext_noop("Push system users to asset: ") + "{}({}) => {}".format(
system_user.name, username, asset
)
return push_system_user_util(system_user, [asset], task_name=task_name, username=username) | [
"def",
"push_system_user_a_asset_manual",
"(",
"system_user",
",",
"asset",
",",
"username",
"=",
"None",
")",
":",
"# if username is None:",
"# username = system_user.username",
"task_name",
"=",
"gettext_noop",
"(",
"\"Push system users to asset: \"",
")",
"+",
"\"{}({}) => {}\"",
".",
"format",
"(",
"system_user",
".",
"name",
",",
"username",
",",
"asset",
")",
"return",
"push_system_user_util",
"(",
"system_user",
",",
"[",
"asset",
"]",
",",
"task_name",
"=",
"task_name",
",",
"username",
"=",
"username",
")"
] | https://github.com/jumpserver/jumpserver/blob/acdde5a236db1bfdb84268a1871b572ef4849974/apps/assets/tasks/push_system_user.py#L288-L297 |
|
wotermelon/toJump | 3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f | lib/mac/systrace/catapult/devil/devil/android/device_utils.py | python | DeviceUtils.GetPids | (self, process_name=None, timeout=None, retries=None) | return procs_pids | Returns the PIDs of processes containing the given name as substring.
Note that the |process_name| is often the package name.
Args:
process_name: A string containing the process name to get the PIDs for.
If missing returns PIDs for all processes.
timeout: timeout in seconds
retries: number of retries
Returns:
A dict mapping process name to a list of PIDs for each process that
contained the provided |process_name|.
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device. | Returns the PIDs of processes containing the given name as substring. | [
"Returns",
"the",
"PIDs",
"of",
"processes",
"containing",
"the",
"given",
"name",
"as",
"substring",
"."
] | def GetPids(self, process_name=None, timeout=None, retries=None):
"""Returns the PIDs of processes containing the given name as substring.
Note that the |process_name| is often the package name.
Args:
process_name: A string containing the process name to get the PIDs for.
If missing returns PIDs for all processes.
timeout: timeout in seconds
retries: number of retries
Returns:
A dict mapping process name to a list of PIDs for each process that
contained the provided |process_name|.
Raises:
CommandTimeoutError on timeout.
DeviceUnreachableError on missing device.
"""
procs_pids = collections.defaultdict(list)
try:
ps_cmd = 'ps'
# ps behavior was changed in Android above N, http://crbug.com/686716
if (self.build_version_sdk >= version_codes.NOUGAT_MR1
and self.build_id[0] > 'N'):
ps_cmd = 'ps -e'
if process_name:
ps_output = self._RunPipedShellCommand(
'%s | grep -F %s' % (ps_cmd, cmd_helper.SingleQuote(process_name)))
else:
ps_output = self.RunShellCommand(
ps_cmd.split(), check_return=True, large_output=True)
except device_errors.AdbShellCommandFailedError as e:
if e.status and isinstance(e.status, list) and not e.status[0]:
# If ps succeeded but grep failed, there were no processes with the
# given name.
return procs_pids
else:
raise
process_name = process_name or ''
for line in ps_output:
try:
ps_data = line.split()
pid, process = ps_data[1], ps_data[-1]
if process_name in process and pid != 'PID':
procs_pids[process].append(pid)
except IndexError:
pass
return procs_pids | [
"def",
"GetPids",
"(",
"self",
",",
"process_name",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"retries",
"=",
"None",
")",
":",
"procs_pids",
"=",
"collections",
".",
"defaultdict",
"(",
"list",
")",
"try",
":",
"ps_cmd",
"=",
"'ps'",
"# ps behavior was changed in Android above N, http://crbug.com/686716",
"if",
"(",
"self",
".",
"build_version_sdk",
">=",
"version_codes",
".",
"NOUGAT_MR1",
"and",
"self",
".",
"build_id",
"[",
"0",
"]",
">",
"'N'",
")",
":",
"ps_cmd",
"=",
"'ps -e'",
"if",
"process_name",
":",
"ps_output",
"=",
"self",
".",
"_RunPipedShellCommand",
"(",
"'%s | grep -F %s'",
"%",
"(",
"ps_cmd",
",",
"cmd_helper",
".",
"SingleQuote",
"(",
"process_name",
")",
")",
")",
"else",
":",
"ps_output",
"=",
"self",
".",
"RunShellCommand",
"(",
"ps_cmd",
".",
"split",
"(",
")",
",",
"check_return",
"=",
"True",
",",
"large_output",
"=",
"True",
")",
"except",
"device_errors",
".",
"AdbShellCommandFailedError",
"as",
"e",
":",
"if",
"e",
".",
"status",
"and",
"isinstance",
"(",
"e",
".",
"status",
",",
"list",
")",
"and",
"not",
"e",
".",
"status",
"[",
"0",
"]",
":",
"# If ps succeeded but grep failed, there were no processes with the",
"# given name.",
"return",
"procs_pids",
"else",
":",
"raise",
"process_name",
"=",
"process_name",
"or",
"''",
"for",
"line",
"in",
"ps_output",
":",
"try",
":",
"ps_data",
"=",
"line",
".",
"split",
"(",
")",
"pid",
",",
"process",
"=",
"ps_data",
"[",
"1",
"]",
",",
"ps_data",
"[",
"-",
"1",
"]",
"if",
"process_name",
"in",
"process",
"and",
"pid",
"!=",
"'PID'",
":",
"procs_pids",
"[",
"process",
"]",
".",
"append",
"(",
"pid",
")",
"except",
"IndexError",
":",
"pass",
"return",
"procs_pids"
] | https://github.com/wotermelon/toJump/blob/3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f/lib/mac/systrace/catapult/devil/devil/android/device_utils.py#L2149-L2198 |
|
Opentrons/opentrons | 466e0567065d8773a81c25cd1b5c7998e00adf2c | api/src/opentrons/protocols/geometry/module_geometry.py | python | _load_from_v1 | (
definition: "ModuleDefinitionV1", parent: Location, api_level: APIVersion
) | return mod | Load a module geometry from a v1 definition.
The definition should be schema checked before being passed to this
function; all definitions passed here are assumed to be valid. | Load a module geometry from a v1 definition. | [
"Load",
"a",
"module",
"geometry",
"from",
"a",
"v1",
"definition",
"."
] | def _load_from_v1(
definition: "ModuleDefinitionV1", parent: Location, api_level: APIVersion
) -> ModuleGeometry:
"""Load a module geometry from a v1 definition.
The definition should be schema checked before being passed to this
function; all definitions passed here are assumed to be valid.
"""
mod_name = definition["loadName"]
model_lookup: Mapping[str, ModuleModel] = {
"thermocycler": ThermocyclerModuleModel.THERMOCYCLER_V1,
"magdeck": MagneticModuleModel.MAGNETIC_V1,
"tempdeck": TemperatureModuleModel.TEMPERATURE_V1,
}
type_lookup = {
"thermocycler": ModuleType.THERMOCYCLER,
"tempdeck": ModuleType.TEMPERATURE,
"magdeck": ModuleType.MAGNETIC,
}
model = model_lookup[mod_name]
offset = Point(
definition["labwareOffset"]["x"],
definition["labwareOffset"]["y"],
definition["labwareOffset"]["z"],
)
overall_height = definition["dimensions"]["bareOverallHeight"]
height_over_labware = definition["dimensions"]["overLabwareHeight"]
if model in ThermocyclerModuleModel:
lid_height = definition["dimensions"]["lidHeight"]
mod: ModuleGeometry = ThermocyclerGeometry(
definition["displayName"],
model,
type_lookup[mod_name],
offset,
overall_height,
height_over_labware,
lid_height,
parent,
api_level,
)
else:
mod = ModuleGeometry(
definition["displayName"],
model,
type_lookup[mod_name],
offset,
overall_height,
height_over_labware,
parent,
api_level,
)
return mod | [
"def",
"_load_from_v1",
"(",
"definition",
":",
"\"ModuleDefinitionV1\"",
",",
"parent",
":",
"Location",
",",
"api_level",
":",
"APIVersion",
")",
"->",
"ModuleGeometry",
":",
"mod_name",
"=",
"definition",
"[",
"\"loadName\"",
"]",
"model_lookup",
":",
"Mapping",
"[",
"str",
",",
"ModuleModel",
"]",
"=",
"{",
"\"thermocycler\"",
":",
"ThermocyclerModuleModel",
".",
"THERMOCYCLER_V1",
",",
"\"magdeck\"",
":",
"MagneticModuleModel",
".",
"MAGNETIC_V1",
",",
"\"tempdeck\"",
":",
"TemperatureModuleModel",
".",
"TEMPERATURE_V1",
",",
"}",
"type_lookup",
"=",
"{",
"\"thermocycler\"",
":",
"ModuleType",
".",
"THERMOCYCLER",
",",
"\"tempdeck\"",
":",
"ModuleType",
".",
"TEMPERATURE",
",",
"\"magdeck\"",
":",
"ModuleType",
".",
"MAGNETIC",
",",
"}",
"model",
"=",
"model_lookup",
"[",
"mod_name",
"]",
"offset",
"=",
"Point",
"(",
"definition",
"[",
"\"labwareOffset\"",
"]",
"[",
"\"x\"",
"]",
",",
"definition",
"[",
"\"labwareOffset\"",
"]",
"[",
"\"y\"",
"]",
",",
"definition",
"[",
"\"labwareOffset\"",
"]",
"[",
"\"z\"",
"]",
",",
")",
"overall_height",
"=",
"definition",
"[",
"\"dimensions\"",
"]",
"[",
"\"bareOverallHeight\"",
"]",
"height_over_labware",
"=",
"definition",
"[",
"\"dimensions\"",
"]",
"[",
"\"overLabwareHeight\"",
"]",
"if",
"model",
"in",
"ThermocyclerModuleModel",
":",
"lid_height",
"=",
"definition",
"[",
"\"dimensions\"",
"]",
"[",
"\"lidHeight\"",
"]",
"mod",
":",
"ModuleGeometry",
"=",
"ThermocyclerGeometry",
"(",
"definition",
"[",
"\"displayName\"",
"]",
",",
"model",
",",
"type_lookup",
"[",
"mod_name",
"]",
",",
"offset",
",",
"overall_height",
",",
"height_over_labware",
",",
"lid_height",
",",
"parent",
",",
"api_level",
",",
")",
"else",
":",
"mod",
"=",
"ModuleGeometry",
"(",
"definition",
"[",
"\"displayName\"",
"]",
",",
"model",
",",
"type_lookup",
"[",
"mod_name",
"]",
",",
"offset",
",",
"overall_height",
",",
"height_over_labware",
",",
"parent",
",",
"api_level",
",",
")",
"return",
"mod"
] | https://github.com/Opentrons/opentrons/blob/466e0567065d8773a81c25cd1b5c7998e00adf2c/api/src/opentrons/protocols/geometry/module_geometry.py#L328-L380 |
|
nodejs/node | ac3c33c1646bf46104c15ae035982c06364da9b8 | tools/cpplint.py | python | ExpectingFunctionArgs | (clean_lines, linenum) | return (Match(r'^\s*MOCK_(CONST_)?METHOD\d+(_T)?\(', line) or
(linenum >= 2 and
(Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\((?:\S+,)?\s*$',
clean_lines.elided[linenum - 1]) or
Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\(\s*$',
clean_lines.elided[linenum - 2]) or
Search(r'\bstd::m?function\s*\<\s*$',
clean_lines.elided[linenum - 1])))) | Checks whether where function type arguments are expected.
Args:
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
Returns:
True if the line at 'linenum' is inside something that expects arguments
of function types. | Checks whether where function type arguments are expected. | [
"Checks",
"whether",
"where",
"function",
"type",
"arguments",
"are",
"expected",
"."
] | def ExpectingFunctionArgs(clean_lines, linenum):
"""Checks whether where function type arguments are expected.
Args:
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
Returns:
True if the line at 'linenum' is inside something that expects arguments
of function types.
"""
line = clean_lines.elided[linenum]
return (Match(r'^\s*MOCK_(CONST_)?METHOD\d+(_T)?\(', line) or
(linenum >= 2 and
(Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\((?:\S+,)?\s*$',
clean_lines.elided[linenum - 1]) or
Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\(\s*$',
clean_lines.elided[linenum - 2]) or
Search(r'\bstd::m?function\s*\<\s*$',
clean_lines.elided[linenum - 1])))) | [
"def",
"ExpectingFunctionArgs",
"(",
"clean_lines",
",",
"linenum",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"return",
"(",
"Match",
"(",
"r'^\\s*MOCK_(CONST_)?METHOD\\d+(_T)?\\('",
",",
"line",
")",
"or",
"(",
"linenum",
">=",
"2",
"and",
"(",
"Match",
"(",
"r'^\\s*MOCK_(?:CONST_)?METHOD\\d+(?:_T)?\\((?:\\S+,)?\\s*$'",
",",
"clean_lines",
".",
"elided",
"[",
"linenum",
"-",
"1",
"]",
")",
"or",
"Match",
"(",
"r'^\\s*MOCK_(?:CONST_)?METHOD\\d+(?:_T)?\\(\\s*$'",
",",
"clean_lines",
".",
"elided",
"[",
"linenum",
"-",
"2",
"]",
")",
"or",
"Search",
"(",
"r'\\bstd::m?function\\s*\\<\\s*$'",
",",
"clean_lines",
".",
"elided",
"[",
"linenum",
"-",
"1",
"]",
")",
")",
")",
")"
] | https://github.com/nodejs/node/blob/ac3c33c1646bf46104c15ae035982c06364da9b8/tools/cpplint.py#L5960-L5979 |
|
alex-cory/fasthacks | 72b099f11df2e5640d61e55c80706c3b234eacbe | notes/JavaScript/nodejs/nodeJS_Lynda/chap07/05/nodeJS Example/node_modules/connect-mongo/node_modules/mongodb/upload.py | python | VersionControlSystem.GetBaseFile | (self, filename) | Get the content of the upstream version of a file.
Returns:
A tuple (base_content, new_content, is_binary, status)
base_content: The contents of the base file.
new_content: For text files, this is empty. For binary files, this is
the contents of the new file, since the diff output won't contain
information to reconstruct the current file.
is_binary: True iff the file is binary.
status: The status of the file. | Get the content of the upstream version of a file. | [
"Get",
"the",
"content",
"of",
"the",
"upstream",
"version",
"of",
"a",
"file",
"."
] | def GetBaseFile(self, filename):
"""Get the content of the upstream version of a file.
Returns:
A tuple (base_content, new_content, is_binary, status)
base_content: The contents of the base file.
new_content: For text files, this is empty. For binary files, this is
the contents of the new file, since the diff output won't contain
information to reconstruct the current file.
is_binary: True iff the file is binary.
status: The status of the file.
"""
raise NotImplementedError(
"abstract method -- subclass %s must override" % self.__class__) | [
"def",
"GetBaseFile",
"(",
"self",
",",
"filename",
")",
":",
"raise",
"NotImplementedError",
"(",
"\"abstract method -- subclass %s must override\"",
"%",
"self",
".",
"__class__",
")"
] | https://github.com/alex-cory/fasthacks/blob/72b099f11df2e5640d61e55c80706c3b234eacbe/notes/JavaScript/nodejs/nodeJS_Lynda/chap07/05/nodeJS Example/node_modules/connect-mongo/node_modules/mongodb/upload.py#L821-L835 |
||
jeeliz/jeelizFaceFilter | be3ffa5a76c930a98b2b7895c1dfa1faa4a1fa82 | libs/three/blenderExporter/io_three/exporter/texture.py | python | Texture.image | (self) | return self.scene.image(self[constants.IMAGE]) | :return: the image object of the current texture
:rtype: image.Image | [] | def image(self):
"""
:return: the image object of the current texture
:rtype: image.Image
"""
return self.scene.image(self[constants.IMAGE]) | [
"def",
"image",
"(",
"self",
")",
":",
"return",
"self",
".",
"scene",
".",
"image",
"(",
"self",
"[",
"constants",
".",
"IMAGE",
"]",
")"
] | https://github.com/jeeliz/jeelizFaceFilter/blob/be3ffa5a76c930a98b2b7895c1dfa1faa4a1fa82/libs/three/blenderExporter/io_three/exporter/texture.py#L35-L42 |
||
replit-archive/jsrepl | 36d79b6288ca5d26208e8bade2a168c6ebcb2376 | extern/python/unclosured/lib/python2.7/mailbox.py | python | Mailbox.__init__ | (self, path, factory=None, create=True) | Initialize a Mailbox instance. | Initialize a Mailbox instance. | [
"Initialize",
"a",
"Mailbox",
"instance",
"."
] | def __init__(self, path, factory=None, create=True):
"""Initialize a Mailbox instance."""
self._path = os.path.abspath(os.path.expanduser(path))
self._factory = factory | [
"def",
"__init__",
"(",
"self",
",",
"path",
",",
"factory",
"=",
"None",
",",
"create",
"=",
"True",
")",
":",
"self",
".",
"_path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"expanduser",
"(",
"path",
")",
")",
"self",
".",
"_factory",
"=",
"factory"
] | https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/unclosured/lib/python2.7/mailbox.py#L45-L48 |
||
frenck/home-assistant-config | 91fb77e527bc470b557b6156fd1d60515e0b0be9 | custom_components/samsungtv_smart/media_player.py | python | SamsungTVDevice._power_off_in_progress | (self) | return (
self._end_of_power_off is not None
and self._end_of_power_off > dt_util.utcnow()
) | Check if a power off request is in progress. | Check if a power off request is in progress. | [
"Check",
"if",
"a",
"power",
"off",
"request",
"is",
"in",
"progress",
"."
] | def _power_off_in_progress(self):
"""Check if a power off request is in progress."""
return (
self._end_of_power_off is not None
and self._end_of_power_off > dt_util.utcnow()
) | [
"def",
"_power_off_in_progress",
"(",
"self",
")",
":",
"return",
"(",
"self",
".",
"_end_of_power_off",
"is",
"not",
"None",
"and",
"self",
".",
"_end_of_power_off",
">",
"dt_util",
".",
"utcnow",
"(",
")",
")"
] | https://github.com/frenck/home-assistant-config/blob/91fb77e527bc470b557b6156fd1d60515e0b0be9/custom_components/samsungtv_smart/media_player.py#L392-L397 |
|
jxcore/jxcore | b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410 | deps/v8_3_28/v8/tools/grokdump.py | python | InspectionPadawan.FindObjectOrSmi | (self, tagged_address) | When used as a mixin in place of V8Heap. | When used as a mixin in place of V8Heap. | [
"When",
"used",
"as",
"a",
"mixin",
"in",
"place",
"of",
"V8Heap",
"."
] | def FindObjectOrSmi(self, tagged_address):
"""When used as a mixin in place of V8Heap."""
found_obj = self.SenseObject(tagged_address)
if found_obj: return found_obj
if (tagged_address & 1) == 0:
return "Smi(%d)" % (tagged_address / 2)
else:
return "Unknown(%s)" % self.reader.FormatIntPtr(tagged_address) | [
"def",
"FindObjectOrSmi",
"(",
"self",
",",
"tagged_address",
")",
":",
"found_obj",
"=",
"self",
".",
"SenseObject",
"(",
"tagged_address",
")",
"if",
"found_obj",
":",
"return",
"found_obj",
"if",
"(",
"tagged_address",
"&",
"1",
")",
"==",
"0",
":",
"return",
"\"Smi(%d)\"",
"%",
"(",
"tagged_address",
"/",
"2",
")",
"else",
":",
"return",
"\"Unknown(%s)\"",
"%",
"self",
".",
"reader",
".",
"FormatIntPtr",
"(",
"tagged_address",
")"
] | https://github.com/jxcore/jxcore/blob/b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410/deps/v8_3_28/v8/tools/grokdump.py#L1731-L1738 |
||
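A self-contained illustration of the Smi tagging convention the method above decodes: on 32-bit V8 a cleared low bit marks a small integer whose value sits in the upper bits. The HeapObject label below is an invented stand-in for the real object-sensing logic:
def describe_tagged(tagged_address):
    if (tagged_address & 1) == 0:
        # Even tagged word: a small integer, value = word >> 1.
        return "Smi(%d)" % (tagged_address // 2)
    # Odd tagged word: a tagged heap-object pointer (address = word - 1).
    return "HeapObject(0x%x)" % (tagged_address - 1)

print(describe_tagged(84))     # Smi(42)
print(describe_tagged(0x2a1))  # HeapObject(0x2a0)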
HaliteChallenge/Halite-II | 5cf95b4aef38621a44a503f90399af598fb51214 | bot-bosses/tscommander/hlt/entity.py | python | Planet._parse | (tokens) | return planets, remainder | Parse planet data given a tokenized input.
:param list[str] tokens: The tokenized input
:return: the populated planet dict and the unused tokens.
:rtype: (dict, list[str]) | Parse planet data given a tokenized input. | [
"Parse",
"planet",
"data",
"given",
"a",
"tokenized",
"input",
"."
] | def _parse(tokens):
"""
Parse planet data given a tokenized input.
:param list[str] tokens: The tokenized input
:return: the populated planet dict and the unused tokens.
:rtype: (dict, list[str])
"""
num_planets, *remainder = tokens
num_planets = int(num_planets)
planets = {}
for _ in range(num_planets):
plid, planet, remainder = Planet._parse_single(remainder)
planets[plid] = planet
return planets, remainder | [
"def",
"_parse",
"(",
"tokens",
")",
":",
"num_planets",
",",
"",
"*",
"remainder",
"=",
"tokens",
"num_planets",
"=",
"int",
"(",
"num_planets",
")",
"planets",
"=",
"{",
"}",
"for",
"_",
"in",
"range",
"(",
"num_planets",
")",
":",
"plid",
",",
"planet",
",",
"remainder",
"=",
"Planet",
".",
"_parse_single",
"(",
"remainder",
")",
"planets",
"[",
"plid",
"]",
"=",
"planet",
"return",
"planets",
",",
"remainder"
] | https://github.com/HaliteChallenge/Halite-II/blob/5cf95b4aef38621a44a503f90399af598fb51214/bot-bosses/tscommander/hlt/entity.py#L187-L203 |
|
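The count-then-items parsing pattern above, reduced to a self-contained sketch; the three-field item format here is invented, since real Halite planet records carry many more fields:
def parse_single(tokens):
    # Hypothetical item format: "<id> <x> <y>".
    item_id, x, y, *remainder = tokens
    return item_id, {"x": float(x), "y": float(y)}, remainder

def parse(tokens):
    num_items, *remainder = tokens
    items = {}
    for _ in range(int(num_items)):
        item_id, item, remainder = parse_single(remainder)
        items[item_id] = item
    return items, remainder

items, rest = parse(["2", "7", "1.0", "2.0", "9", "3.5", "4.5", "tail"])
print(items)  # {'7': {'x': 1.0, 'y': 2.0}, '9': {'x': 3.5, 'y': 4.5}}
print(rest)   # ['tail']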
replit-archive/jsrepl | 36d79b6288ca5d26208e8bade2a168c6ebcb2376 | extern/python/reloop-closured/lib/python2.7/pydoc.py | python | doc | (thing, title='Python Library Documentation: %s', forceload=0) | Display text documentation, given an object or a path to an object. | Display text documentation, given an object or a path to an object. | [
"Display",
"text",
"documentation",
"given",
"an",
"object",
"or",
"a",
"path",
"to",
"an",
"object",
"."
] | def doc(thing, title='Python Library Documentation: %s', forceload=0):
"""Display text documentation, given an object or a path to an object."""
try:
pager(render_doc(thing, title, forceload))
except (ImportError, ErrorDuringImport), value:
print value | [
"def",
"doc",
"(",
"thing",
",",
"title",
"=",
"'Python Library Documentation: %s'",
",",
"forceload",
"=",
"0",
")",
":",
"try",
":",
"pager",
"(",
"render_doc",
"(",
"thing",
",",
"title",
",",
"forceload",
")",
")",
"except",
"(",
"ImportError",
",",
"ErrorDuringImport",
")",
",",
"value",
":",
"print",
"value"
] | https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/reloop-closured/lib/python2.7/pydoc.py#L1508-L1513 |
||
saimn/sigal | 246aed53ff2d29d680cc81929e59f19023e463bb | sigal/gallery.py | python | Media._get_markdown_metadata | (self) | return meta | Get metadata from filename.md. | Get metadata from filename.md. | [
"Get",
"metadata",
"from",
"filename",
".",
"md",
"."
] | def _get_markdown_metadata(self):
"""Get metadata from filename.md."""
meta = {'title': '', 'description': '', 'meta': {}}
if isfile(self.markdown_metadata_filepath):
meta.update(read_markdown(self.markdown_metadata_filepath))
return meta | [
"def",
"_get_markdown_metadata",
"(",
"self",
")",
":",
"meta",
"=",
"{",
"'title'",
":",
"''",
",",
"'description'",
":",
"''",
",",
"'meta'",
":",
"{",
"}",
"}",
"if",
"isfile",
"(",
"self",
".",
"markdown_metadata_filepath",
")",
":",
"meta",
".",
"update",
"(",
"read_markdown",
"(",
"self",
".",
"markdown_metadata_filepath",
")",
")",
"return",
"meta"
] | https://github.com/saimn/sigal/blob/246aed53ff2d29d680cc81929e59f19023e463bb/sigal/gallery.py#L213-L218 |
|
Southpaw-TACTIC/TACTIC | ba9b87aef0ee3b3ea51446f25b285ebbca06f62c | src/pyasm/application/common/interpreter/tactic_client_lib/tactic_server_stub.py | python | TacticServerStub.get_all_children | (self, search_key, child_type, filters=[], columns=[]) | return self.server.get_all_children(self.ticket, search_key, child_type, filters, columns) | API Function: get_all_children(search_key, child_type, columns=[])
Get all children of a particular child type of an sobject
@param:
ticket - authentication ticket
search_key - a unique identifier key representing an sobject
child_type - the search_type of the children to search for
@keyparam:
filters - extra filters on the query : see query method for examples
columns - list of column names to be included in the returned dictionary
@return:
list of dictionaries - a list of sobject dictionaries | API Function: get_all_children(search_key, child_type, columns=[])
Get all children of a particular child type of an sobject | [
"API",
"Function",
":",
"get_all_children",
"(",
"search_key",
"child_type",
"columns",
"=",
"[]",
")",
"Get",
"all",
"children",
"of",
"a",
"particular",
"child",
"type",
"of",
"an",
"sobject"
] | def get_all_children(self, search_key, child_type, filters=[], columns=[]):
'''API Function: get_all_children(search_key, child_type, columns=[])
Get all children of a particular child type of an sobject
@param:
ticket - authentication ticket
search_key - a unique identifier key representing an sobject
child_type - the search_type of the children to search for
@keyparam:
filters - extra filters on the query : see query method for examples
columns - list of column names to be included in the returned dictionary
@return:
list of dictionaries - a list of sobject dictionaries
'''
#filters = []
return self.server.get_all_children(self.ticket, search_key, child_type, filters, columns) | [
"def",
"get_all_children",
"(",
"self",
",",
"search_key",
",",
"child_type",
",",
"filters",
"=",
"[",
"]",
",",
"columns",
"=",
"[",
"]",
")",
":",
"#filters = []",
"return",
"self",
".",
"server",
".",
"get_all_children",
"(",
"self",
".",
"ticket",
",",
"search_key",
",",
"child_type",
",",
"filters",
",",
"columns",
")"
] | https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/src/pyasm/application/common/interpreter/tactic_client_lib/tactic_server_stub.py#L1269-L1286 |
|
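A hedged usage sketch for the call documented above; it assumes an already authenticated TacticServerStub instance named server, and the search key, child type, and filter below are placeholders, not values from this record:
# `server` is assumed to be a connected, authenticated TacticServerStub.
tasks = server.get_all_children(
    "prod/shot?project=sample&code=SH001",  # placeholder search key
    "sthpw/task",                           # placeholder child search type
    filters=[("status", "Pending")],        # same filter shape as query()
    columns=["code", "status"],
)
for task in tasks:
    print(task.get("code"), task.get("status"))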
sbrshk/whatever | f7ba72effd6f836ca701ed889c747db804d5ea8f | node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py | python | _CreateProjectObjects | (target_list, target_dicts, options, msvs_version) | return projects | Create a MSVSProject object for the targets found in target list.
Arguments:
target_list: the list of targets to generate project objects for.
target_dicts: the dictionary of specifications.
options: global generator options.
msvs_version: the MSVSVersion object.
Returns:
A set of created projects, keyed by target. | Create a MSVSProject object for the targets found in target list. | [
"Create",
"a",
"MSVSProject",
"object",
"for",
"the",
"targets",
"found",
"in",
"target",
"list",
"."
] | def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
"""Create a MSVSProject object for the targets found in target list.
Arguments:
target_list: the list of targets to generate project objects for.
target_dicts: the dictionary of specifications.
options: global generator options.
msvs_version: the MSVSVersion object.
Returns:
A set of created projects, keyed by target.
"""
global fixpath_prefix
# Generate each project.
projects = {}
for qualified_target in target_list:
spec = target_dicts[qualified_target]
if spec['toolset'] != 'target':
raise GypError(
'Multiple toolsets not supported in msvs build (target %s)' %
qualified_target)
proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec,
options, msvs_version)
guid = _GetGuidOfProject(proj_path, spec)
overrides = _GetPlatformOverridesOfProject(spec)
build_file = gyp.common.BuildFile(qualified_target)
# Create object for this project.
obj = MSVSNew.MSVSProject(
proj_path,
name=spec['target_name'],
guid=guid,
spec=spec,
build_file=build_file,
config_platform_overrides=overrides,
fixpath_prefix=fixpath_prefix)
# Set project toolset if any (MS build only)
if msvs_version.UsesVcxproj():
obj.set_msbuild_toolset(
_GetMsbuildToolsetOfProject(proj_path, spec, msvs_version))
projects[qualified_target] = obj
# Set all the dependencies, but not if we are using an external builder like
# ninja
for project in projects.values():
if not project.spec.get('msvs_external_builder'):
deps = project.spec.get('dependencies', [])
deps = [projects[d] for d in deps]
project.set_dependencies(deps)
return projects | [
"def",
"_CreateProjectObjects",
"(",
"target_list",
",",
"target_dicts",
",",
"options",
",",
"msvs_version",
")",
":",
"global",
"fixpath_prefix",
"# Generate each project.",
"projects",
"=",
"{",
"}",
"for",
"qualified_target",
"in",
"target_list",
":",
"spec",
"=",
"target_dicts",
"[",
"qualified_target",
"]",
"if",
"spec",
"[",
"'toolset'",
"]",
"!=",
"'target'",
":",
"raise",
"GypError",
"(",
"'Multiple toolsets not supported in msvs build (target %s)'",
"%",
"qualified_target",
")",
"proj_path",
",",
"fixpath_prefix",
"=",
"_GetPathOfProject",
"(",
"qualified_target",
",",
"spec",
",",
"options",
",",
"msvs_version",
")",
"guid",
"=",
"_GetGuidOfProject",
"(",
"proj_path",
",",
"spec",
")",
"overrides",
"=",
"_GetPlatformOverridesOfProject",
"(",
"spec",
")",
"build_file",
"=",
"gyp",
".",
"common",
".",
"BuildFile",
"(",
"qualified_target",
")",
"# Create object for this project.",
"obj",
"=",
"MSVSNew",
".",
"MSVSProject",
"(",
"proj_path",
",",
"name",
"=",
"spec",
"[",
"'target_name'",
"]",
",",
"guid",
"=",
"guid",
",",
"spec",
"=",
"spec",
",",
"build_file",
"=",
"build_file",
",",
"config_platform_overrides",
"=",
"overrides",
",",
"fixpath_prefix",
"=",
"fixpath_prefix",
")",
"# Set project toolset if any (MS build only)",
"if",
"msvs_version",
".",
"UsesVcxproj",
"(",
")",
":",
"obj",
".",
"set_msbuild_toolset",
"(",
"_GetMsbuildToolsetOfProject",
"(",
"proj_path",
",",
"spec",
",",
"msvs_version",
")",
")",
"projects",
"[",
"qualified_target",
"]",
"=",
"obj",
"# Set all the dependencies, but not if we are using an external builder like",
"# ninja",
"for",
"project",
"in",
"projects",
".",
"values",
"(",
")",
":",
"if",
"not",
"project",
".",
"spec",
".",
"get",
"(",
"'msvs_external_builder'",
")",
":",
"deps",
"=",
"project",
".",
"spec",
".",
"get",
"(",
"'dependencies'",
",",
"[",
"]",
")",
"deps",
"=",
"[",
"projects",
"[",
"d",
"]",
"for",
"d",
"in",
"deps",
"]",
"project",
".",
"set_dependencies",
"(",
"deps",
")",
"return",
"projects"
] | https://github.com/sbrshk/whatever/blob/f7ba72effd6f836ca701ed889c747db804d5ea8f/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py#L1796-L1842 |
|
mozilla/spidernode | aafa9e5273f954f272bb4382fc007af14674b4c2 | tools/gyp/pylib/gyp/MSVSVersion.py | python | VisualStudioVersion.ProjectVersion | (self) | return self.project_version | Get the version number of the vcproj or vcxproj files. | Get the version number of the vcproj or vcxproj files. | [
"Get",
"the",
"version",
"number",
"of",
"the",
"vcproj",
"or",
"vcxproj",
"files",
"."
] | def ProjectVersion(self):
"""Get the version number of the vcproj or vcxproj files."""
return self.project_version | [
"def",
"ProjectVersion",
"(",
"self",
")",
":",
"return",
"self",
".",
"project_version"
] | https://github.com/mozilla/spidernode/blob/aafa9e5273f954f272bb4382fc007af14674b4c2/tools/gyp/pylib/gyp/MSVSVersion.py#L43-L45 |
|
logandk/serverless-wsgi | 1662bd9cd3849d73325ba728ed5162b45c89b1ab | serverless_wsgi.py | python | all_casings | (input_string) | Permute all casings of a given string.
A pretty algorithm, via @Amber
http://stackoverflow.com/questions/6792803/finding-all-possible-case-permutations-in-python | Permute all casings of a given string.
A pretty algorithm, via | [
"Permute",
"all",
"casings",
"of",
"a",
"given",
"string",
".",
"A",
"pretty",
"algoritm",
"via"
] | def all_casings(input_string):
"""
Permute all casings of a given string.
A pretty algorithm, via @Amber
http://stackoverflow.com/questions/6792803/finding-all-possible-case-permutations-in-python
"""
if not input_string:
yield ""
else:
first = input_string[:1]
if first.lower() == first.upper():
for sub_casing in all_casings(input_string[1:]):
yield first + sub_casing
else:
for sub_casing in all_casings(input_string[1:]):
yield first.lower() + sub_casing
yield first.upper() + sub_casing | [
"def",
"all_casings",
"(",
"input_string",
")",
":",
"if",
"not",
"input_string",
":",
"yield",
"\"\"",
"else",
":",
"first",
"=",
"input_string",
"[",
":",
"1",
"]",
"if",
"first",
".",
"lower",
"(",
")",
"==",
"first",
".",
"upper",
"(",
")",
":",
"for",
"sub_casing",
"in",
"all_casings",
"(",
"input_string",
"[",
"1",
":",
"]",
")",
":",
"yield",
"first",
"+",
"sub_casing",
"else",
":",
"for",
"sub_casing",
"in",
"all_casings",
"(",
"input_string",
"[",
"1",
":",
"]",
")",
":",
"yield",
"first",
".",
"lower",
"(",
")",
"+",
"sub_casing",
"yield",
"first",
".",
"upper",
"(",
")",
"+",
"sub_casing"
] | https://github.com/logandk/serverless-wsgi/blob/1662bd9cd3849d73325ba728ed5162b45c89b1ab/serverless_wsgi.py#L32-L48 |
||
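A quick usage check for the generator above (assuming the all_casings definition from this record is in scope); characters without case, such as digits, pass through unchanged:
print(list(all_casings("a1b")))
# ['a1b', 'A1b', 'a1B', 'A1B'] -- 2**2 variants: only the two letters fan out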
nodejs/node-chakracore | 770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43 | deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py | python | VisualStudioVersion.DefaultToolset | (self) | return self.default_toolset | Returns the msbuild toolset version that will be used in the absence
of a user override. | Returns the msbuild toolset version that will be used in the absence
of a user override. | [
"Returns",
"the",
"msbuild",
"toolset",
"version",
"that",
"will",
"be",
"used",
"in",
"the",
"absence",
"of",
"a",
"user",
"override",
"."
] | def DefaultToolset(self):
"""Returns the msbuild toolset version that will be used in the absence
of a user override."""
return self.default_toolset | [
"def",
"DefaultToolset",
"(",
"self",
")",
":",
"return",
"self",
".",
"default_toolset"
] | https://github.com/nodejs/node-chakracore/blob/770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py#L66-L69 |
|
frenck/home-assistant-config | 91fb77e527bc470b557b6156fd1d60515e0b0be9 | custom_components/hacs/base.py | python | HacsBase.async_github_api_method | (
self,
method: Callable[[], Awaitable[TV]],
*args,
raise_exception: bool = True,
**kwargs,
) | return None | Call a GitHub API method | Call a GitHub API method | [
"Call",
"a",
"GitHub",
"API",
"method"
] | async def async_github_api_method(
self,
method: Callable[[], Awaitable[TV]],
*args,
raise_exception: bool = True,
**kwargs,
) -> TV | None:
"""Call a GitHub API method"""
_exception = None
try:
return await method(*args, **kwargs)
except GitHubAuthenticationException as exception:
self.disable_hacs(HacsDisabledReason.INVALID_TOKEN)
_exception = exception
except GitHubRatelimitException as exception:
self.disable_hacs(HacsDisabledReason.RATE_LIMIT)
_exception = exception
except GitHubNotModifiedException as exception:
raise exception
except GitHubException as exception:
_exception = exception
except BaseException as exception: # lgtm [py/catch-base-exception] pylint: disable=broad-except
self.log.exception(exception)
_exception = exception
if raise_exception and _exception is not None:
raise HacsException(_exception)
return None | [
"async",
"def",
"async_github_api_method",
"(",
"self",
",",
"method",
":",
"Callable",
"[",
"[",
"]",
",",
"Awaitable",
"[",
"TV",
"]",
"]",
",",
"*",
"args",
",",
"raise_exception",
":",
"bool",
"=",
"True",
",",
"*",
"*",
"kwargs",
",",
")",
"->",
"TV",
"|",
"None",
":",
"_exception",
"=",
"None",
"try",
":",
"return",
"await",
"method",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"GitHubAuthenticationException",
"as",
"exception",
":",
"self",
".",
"disable_hacs",
"(",
"HacsDisabledReason",
".",
"INVALID_TOKEN",
")",
"_exception",
"=",
"exception",
"except",
"GitHubRatelimitException",
"as",
"exception",
":",
"self",
".",
"disable_hacs",
"(",
"HacsDisabledReason",
".",
"RATE_LIMIT",
")",
"_exception",
"=",
"exception",
"except",
"GitHubNotModifiedException",
"as",
"exception",
":",
"raise",
"exception",
"except",
"GitHubException",
"as",
"exception",
":",
"_exception",
"=",
"exception",
"except",
"BaseException",
"as",
"exception",
":",
"# lgtm [py/catch-base-exception] pylint: disable=broad-except",
"self",
".",
"log",
".",
"exception",
"(",
"exception",
")",
"_exception",
"=",
"exception",
"if",
"raise_exception",
"and",
"_exception",
"is",
"not",
"None",
":",
"raise",
"HacsException",
"(",
"_exception",
")",
"return",
"None"
] | https://github.com/frenck/home-assistant-config/blob/91fb77e527bc470b557b6156fd1d60515e0b0be9/custom_components/hacs/base.py#L452-L480 |
|
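The method above follows a catch-translate-rethrow pattern: upstream library errors are collected and re-raised under the application's own exception type. A self-contained sketch of the same shape, with invented exception names:
class UpstreamError(Exception):
    """Stands in for a third-party client's exceptions."""

class AppError(Exception):
    """Stands in for the application's own exception type."""

async def call_api(method, *args, raise_exception=True, **kwargs):
    caught = None
    try:
        return await method(*args, **kwargs)
    except UpstreamError as exc:   # known library failure
        caught = exc
    except Exception as exc:       # anything unexpected
        caught = exc
    if raise_exception and caught is not None:
        raise AppError(caught)     # surface one uniform error type
    return None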
Southpaw-TACTIC/TACTIC | ba9b87aef0ee3b3ea51446f25b285ebbca06f62c | src/tactic/ui/app/page_nav_container_wdg.py | python | PageNavContainerWdg.get_display | (self) | return container_div | # get the global drag_ghost_div
drag_ghost_div = self.get_drag_ghost_div()
drag_ghost_div.set_id( "drag_ghost_copy" )
drag_ghost_div.add_class( "SPT_PUW" ) # make it a Page Utility Widget (now processed client side)
drag_ghost_div.set_z_start( 400 ) | # get the global drag_ghost_div
drag_ghost_div = self.get_drag_ghost_div()
drag_ghost_div.set_id( "drag_ghost_copy" )
drag_ghost_div.add_class( "SPT_PUW" ) # make it a Page Utility Widget (now processed client side) | [
"#",
"get",
"the",
"global",
"drag_ghost_div",
"drag_ghost_div",
"=",
"self",
".",
"get_drag_ghost_div",
"()",
"drag_ghost_div",
".",
"set_id",
"(",
"drag_ghost_copy",
")",
"drag_ghost_div",
".",
"add_class",
"(",
"SPT_PUW",
")",
"#",
"make",
"it",
"a",
"Page",
"Utility",
"Widget",
"(",
"now",
"processed",
"client",
"side",
")"
] | def get_display(self):
hash = self.kwargs.get("hash")
is_admin_project = Project.get().is_admin()
security = Environment.get_security()
if is_admin_project and not security.check_access("builtin", "view_site_admin", "allow"):
return Error403Wdg()
# create the elements
config = WidgetConfig.get(xml=self.config_xml, view="application")
left_nav_handler = config.get_display_handler("left_nav")
left_nav_options = config.get_display_options("left_nav")
view_side_bar = None
if left_nav_handler:
try:
left_nav_wdg = Common.create_from_class_path(left_nav_handler, [], left_nav_options)
except Exception as e:
print("WARNING: ", e)
# try default if it doesn't work for whatever
self.config_xml = self.get_default_config()
# create the elements
config = WidgetConfig.get(xml=self.config_xml, view="application")
left_nav_handler = config.get_display_handler("left_nav")
left_nav_options = config.get_display_options("left_nav")
left_nav_wdg = Common.create_from_class_path(left_nav_handler, [], left_nav_options)
# caching
side_bar_cache = self.get_side_bar_cache(left_nav_wdg)
else:
view_side_bar = False
# create the main table
core_table = Table()
#core_table.add_tbody()
core_table.add_row()
core_table.add_style("border: 0px")
core_table.add_style("border-collapse: collapse")
core_table.add_style("width: 100%")
# determine if the side bar is visible
if view_side_bar == None:
view_side_bar = security.check_access("builtin", "view_side_bar", "allow", default='allow')
# add the main cells
tr, td = core_table.add_row_cell()
td.add_style("padding: 0px")
td.add_style("margin: 0px")
# add the main resizable table
from tactic.ui.container import ResizableTableWdg
main_table = ResizableTableWdg()
main_table.set_keep_table_size()
main_table.add_style("width: 100%")
td.add(main_table)
left_nav_td = main_table.add_cell()
if view_side_bar:
left_nav_td.add_class("spt_panel")
left_nav_td.add_style("padding: 0px")
main_body_td = main_table.add_cell(resize=False)
main_body_td.add_style("padding: 10px")
main_body_td.set_style( "width: 100%; vertical-align: top; text-align: center; padding-top: 3px" )
if view_side_bar:
left_nav_td.set_style( "vertical-align: top" )
# create the left navigation panel
left_nav_div = DivWdg()
left_nav_td.add(left_nav_div)
left_nav_div.set_id("side_bar" )
# add the detail to the panel
left_nav_div.add_attr("spt_class_name", left_nav_handler)
for name, value in left_nav_options.items():
left_nav_div.add_attr("spt_%s" % name, value)
left_nav_div.add_style("max_width: 185px")
left_nav_div.add_style("width: 185px")
left_nav_div.add_style("text-align: right")
left_nav_div.add_style("vertical-align: top")
left_nav_div.add_style("overflow: hidden")
left_nav_div.add_class("spt_resizable")
side_bar_inner = DivWdg()
left_nav_div.add(side_bar_inner)
#side_bar_inner.add_style("padding-left: 1px")
side_bar_inner.add_style("width: 100%")
# add side bar to nav
side_bar_inner.add(side_bar_cache)
left_nav_div.add_style("border-style: solid")
left_nav_div.add_style("border-width: 0px 1px 0px 0px")
#left_nav_div.add_color("border-color", "border")
left_nav_div.add_color("border-color", "border", -10)
web = WebContainer.get_web()
browser = web.get_browser()
if browser in ['Qt','Webkit']:
min_width = "1px"
else:
min_width = "0px"
left_nav_div.add_behavior( {
'type': 'listen',
'event_name': 'side_bar|hide_now',
'min_width': min_width,
'cbjs_action': '''
var size = bvr.src_el.getSize();
bvr.src_el.setAttribute("spt_size", size.x);
bvr.src_el.setStyle("width", bvr.min_width);
'''
} )
left_nav_div.add_behavior( {
'type': 'listen',
'event_name': 'side_bar|hide',
'min_width': min_width,
'cbjs_action': '''
var size = bvr.src_el.getSize();
bvr.src_el.setAttribute("spt_size", size.x);
new Fx.Tween(bvr.src_el, {duration:'short'}).start('width', bvr.min_width);
'''
} )
left_nav_div.add_behavior( {
'type': 'listen',
'event_name': 'side_bar|show',
'min_width': min_width,
'cbjs_action': '''
var width = bvr.src_el.getAttribute("spt_size");
if (!width) {
width = 185;
}
if (parseInt(width) < 5) {
width = 185;
}
//bvr.src_el.setStyle("width", width + "px");
new Fx.Tween(bvr.src_el, {duration:'short'}).start('width', bvr.min_width, width+"px");
'''
} )
left_nav_div.add_behavior( {
'type': 'listen',
'event_name': 'side_bar|toggle',
'cbjs_action': '''
var size = bvr.src_el.getSize();
if (size.x < 5) {
spt.named_events.fire_event("side_bar|show", {} );
}
else {
spt.named_events.fire_event("side_bar|hide", {} );
}
'''
} )
# create the main body panel
palette = WebContainer.get_palette()
color = palette.color("background2")
main_body_rounded = DivWdg()
main_body_inner = main_body_rounded
main_body_inner.add_style("min-height: 500px")
main_body_panel = DivWdg()
main_body_panel.set_id("main_body")
main_body_panel.add_class("spt_main_panel")
main_body_inner.add(main_body_panel)
tab = MainBodyTabWdg()
main_body_panel.add(tab)
# TEST: NEW LAYOUT
if Config.get_value("install", "layout") == "fixed":
main_body_panel.add_style("margin-top: 31px")
main_body_rounded.add_color("background", "background")
main_body_rounded.add_style("padding: 3px 0px 0px 0px")
# add the content to the main body panel
try:
if self.widget:
tab.add(self.widget)
element_name = self.widget.get_name()
else:
main_body_handler = config.get_display_handler("main_body")
main_body_options = config.get_display_options("main_body")
element_name = main_body_options.get("element_name")
title = main_body_options.get("title")
main_body_content = Common.create_from_class_path(main_body_handler, [], main_body_options)
# get the web values from top_layout
main_body_values = config.get_web_options("main_body")
web = WebContainer.get_web()
if isinstance(main_body_values, dict):
for name, value in main_body_values.items():
web.set_form_value(name, value)
main_body_content.set_name(element_name)
tab.add(main_body_content, element_name, title)
self.set_as_panel(main_body_panel, class_name=main_body_handler, kwargs=main_body_options)
main_body_panel.add_behavior( {
'type': 'load',
'element_name': element_name,
'cbjs_action': '''
if (spt.help)
spt.help.set_view(bvr.element_name);
'''
} )
except Exception as e:
# handle an error in the drawing
buffer = self.get_buffer_on_exception()
error_wdg = self.handle_exception(e)
main_body_content = DivWdg()
main_body_content.add(error_wdg)
main_body_content = main_body_content.get_buffer_display()
tab.add(main_body_content, element_name, title)
# add the main container
container_div = DivWdg()
container_div.set_style("width: 100%;")
container_div.add_behavior( {
"type": "load",
"cbjs_action": '''
// add a resize class to the tab widget
var tab_header = document.id(document.body).getElement(".spt_tab_header_top");
tab_header.addClass("spt_window_resize_anchor");
var onresize = function() {
var body = document.id(document.body);
var size = body.getSize();
//var content = body.getElement(".content");
//content.setStyle("height", size.y);
var offset_els = document.id(document.body).getElements(".spt_window_resize_anchor");
var offset = 0;
for (var i = 0; i < offset_els.length; i++) {
var offset_el = offset_els[i];
var offset_size = offset_el.getSize();
offset += offset_size.y;
}
//console.log("offset: " + offset);
var els = body.getElements(".spt_window_resize");
for (var i = 0; i < els.length; i++) {
var el = els[i];
var offset = el.getAttribute("spt_window_resize_offset");
if (offset) {
offset = parseInt(offset);
el.setStyle("height", size.y - offset);
el.setAttribute("height", size.y - offset);
}
var offset = el.getAttribute("spt_window_resize_xoffset");
if (offset != null) {
offset = parseInt(offset);
el.setStyle("width", size.x - offset);
el.setAttribute("width", size.x - offset);
}
}
}
window.onresize = onresize;
window.onresize();
'''
} )
# NOTE: the td should not be the sliding element! So we create a div inside the TD to be the sliding element
main_body_div = DivWdg()
main_body_div.set_id("horizontal_main_body_slider")
main_body_div.add(main_body_inner)
"""
# get the global drag_ghost_div
drag_ghost_div = self.get_drag_ghost_div()
drag_ghost_div.set_id( "drag_ghost_copy" )
drag_ghost_div.add_class( "SPT_PUW" ) # make it a Page Utility Widget (now processed client side)
drag_ghost_div.set_z_start( 400 )
"""
from .page_header_wdg import PageHeaderWdg
header_panel = DivWdg()
header_panel.set_id("main_header")
header_panel.add_attr("spt_class_name", "tactic.ui.app.PageHeaderWdg")
header_wdg = PageHeaderWdg()
header_panel.add(header_wdg)
container_div.add( header_panel.get_buffer_display() )
main_body_dis = main_body_div.get_buffer_display()
main_body_td.add(main_body_dis)
container_div.add( core_table )
#container_div.add( drag_ghost_div )
is_admin = False
security = Environment.get_security()
if security.check_access("builtin", "view_site_admin", "allow"):
is_admin = True
if is_admin:
from .quick_box_wdg import QuickBoxWdg
quick_box = QuickBoxWdg()
container_div.add(quick_box)
container_div.add_behavior( {
'type': 'load',
'cbjs_action': '''
spt.named_events.fire_event("close_admin_bar");
'''
} )
return container_div | [
"def",
"get_display",
"(",
"self",
")",
":",
"hash",
"=",
"self",
".",
"kwargs",
".",
"get",
"(",
"\"hash\"",
")",
"is_admin_project",
"=",
"Project",
".",
"get",
"(",
")",
".",
"is_admin",
"(",
")",
"security",
"=",
"Environment",
".",
"get_security",
"(",
")",
"if",
"is_admin_project",
"and",
"not",
"security",
".",
"check_access",
"(",
"\"builtin\"",
",",
"\"view_site_admin\"",
",",
"\"allow\"",
")",
":",
"return",
"Error403Wdg",
"(",
")",
"# create the elements",
"config",
"=",
"WidgetConfig",
".",
"get",
"(",
"xml",
"=",
"self",
".",
"config_xml",
",",
"view",
"=",
"\"application\"",
")",
"left_nav_handler",
"=",
"config",
".",
"get_display_handler",
"(",
"\"left_nav\"",
")",
"left_nav_options",
"=",
"config",
".",
"get_display_options",
"(",
"\"left_nav\"",
")",
"view_side_bar",
"=",
"None",
"if",
"left_nav_handler",
":",
"try",
":",
"left_nav_wdg",
"=",
"Common",
".",
"create_from_class_path",
"(",
"left_nav_handler",
",",
"[",
"]",
",",
"left_nav_options",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"\"WARNING: \"",
",",
"e",
")",
"# try default if it doesn't work for whatever",
"self",
".",
"config_xml",
"=",
"self",
".",
"get_default_config",
"(",
")",
"# create the elements",
"config",
"=",
"WidgetConfig",
".",
"get",
"(",
"xml",
"=",
"self",
".",
"config_xml",
",",
"view",
"=",
"\"application\"",
")",
"left_nav_handler",
"=",
"config",
".",
"get_display_handler",
"(",
"\"left_nav\"",
")",
"left_nav_options",
"=",
"config",
".",
"get_display_options",
"(",
"\"left_nav\"",
")",
"left_nav_wdg",
"=",
"Common",
".",
"create_from_class_path",
"(",
"left_nav_handler",
",",
"[",
"]",
",",
"left_nav_options",
")",
"# caching",
"side_bar_cache",
"=",
"self",
".",
"get_side_bar_cache",
"(",
"left_nav_wdg",
")",
"else",
":",
"view_side_bar",
"=",
"False",
"# create the main table",
"core_table",
"=",
"Table",
"(",
")",
"#core_table.add_tbody()",
"core_table",
".",
"add_row",
"(",
")",
"core_table",
".",
"add_style",
"(",
"\"border: 0px\"",
")",
"core_table",
".",
"add_style",
"(",
"\"border-collapse: collapse\"",
")",
"core_table",
".",
"add_style",
"(",
"\"width: 100%\"",
")",
"# determine if the side bar is visible",
"if",
"view_side_bar",
"==",
"None",
":",
"view_side_bar",
"=",
"security",
".",
"check_access",
"(",
"\"builtin\"",
",",
"\"view_side_bar\"",
",",
"\"allow\"",
",",
"default",
"=",
"'allow'",
")",
"# add the main cells",
"tr",
",",
"td",
"=",
"core_table",
".",
"add_row_cell",
"(",
")",
"td",
".",
"add_style",
"(",
"\"padding: 0px\"",
")",
"td",
".",
"add_style",
"(",
"\"margin: 0px\"",
")",
"# add the main resizable table",
"from",
"tactic",
".",
"ui",
".",
"container",
"import",
"ResizableTableWdg",
"main_table",
"=",
"ResizableTableWdg",
"(",
")",
"main_table",
".",
"set_keep_table_size",
"(",
")",
"main_table",
".",
"add_style",
"(",
"\"width: 100%\"",
")",
"td",
".",
"add",
"(",
"main_table",
")",
"left_nav_td",
"=",
"main_table",
".",
"add_cell",
"(",
")",
"if",
"view_side_bar",
":",
"left_nav_td",
".",
"add_class",
"(",
"\"spt_panel\"",
")",
"left_nav_td",
".",
"add_style",
"(",
"\"padding: 0px\"",
")",
"main_body_td",
"=",
"main_table",
".",
"add_cell",
"(",
"resize",
"=",
"False",
")",
"main_body_td",
".",
"add_style",
"(",
"\"padding: 10px\"",
")",
"main_body_td",
".",
"set_style",
"(",
"\"width: 100%; vertical-align: top; text-align: center; padding-top: 3px\"",
")",
"if",
"view_side_bar",
":",
"left_nav_td",
".",
"set_style",
"(",
"\"vertical-align: top\"",
")",
"# create the left navigation panel",
"left_nav_div",
"=",
"DivWdg",
"(",
")",
"left_nav_td",
".",
"add",
"(",
"left_nav_div",
")",
"left_nav_div",
".",
"set_id",
"(",
"\"side_bar\"",
")",
"# add the detail to the panel",
"left_nav_div",
".",
"add_attr",
"(",
"\"spt_class_name\"",
",",
"left_nav_handler",
")",
"for",
"name",
",",
"value",
"in",
"left_nav_options",
".",
"items",
"(",
")",
":",
"left_nav_div",
".",
"add_attr",
"(",
"\"spt_%s\"",
"%",
"name",
",",
"value",
")",
"left_nav_div",
".",
"add_style",
"(",
"\"max_width: 185px\"",
")",
"left_nav_div",
".",
"add_style",
"(",
"\"width: 185px\"",
")",
"left_nav_div",
".",
"add_style",
"(",
"\"text-align: right\"",
")",
"left_nav_div",
".",
"add_style",
"(",
"\"vertical-align: top\"",
")",
"left_nav_div",
".",
"add_style",
"(",
"\"overflow: hidden\"",
")",
"left_nav_div",
".",
"add_class",
"(",
"\"spt_resizable\"",
")",
"side_bar_inner",
"=",
"DivWdg",
"(",
")",
"left_nav_div",
".",
"add",
"(",
"side_bar_inner",
")",
"#side_bar_inner.add_style(\"padding-left: 1px\")",
"side_bar_inner",
".",
"add_style",
"(",
"\"width: 100%\"",
")",
"# add side bar to nav",
"side_bar_inner",
".",
"add",
"(",
"side_bar_cache",
")",
"left_nav_div",
".",
"add_style",
"(",
"\"border-style: solid\"",
")",
"left_nav_div",
".",
"add_style",
"(",
"\"border-width: 0px 1px 0px 0px\"",
")",
"#left_nav_div.add_color(\"border-color\", \"border\")",
"left_nav_div",
".",
"add_color",
"(",
"\"border-color\"",
",",
"\"border\"",
",",
"-",
"10",
")",
"web",
"=",
"WebContainer",
".",
"get_web",
"(",
")",
"browser",
"=",
"web",
".",
"get_browser",
"(",
")",
"if",
"browser",
"in",
"[",
"'Qt'",
",",
"'Webkit'",
"]",
":",
"min_width",
"=",
"\"1px\"",
"else",
":",
"min_width",
"=",
"\"0px\"",
"left_nav_div",
".",
"add_behavior",
"(",
"{",
"'type'",
":",
"'listen'",
",",
"'event_name'",
":",
"'side_bar|hide_now'",
",",
"'min_width'",
":",
"min_width",
",",
"'cbjs_action'",
":",
"'''\n var size = bvr.src_el.getSize();\n bvr.src_el.setAttribute(\"spt_size\", size.x);\n bvr.src_el.setStyle(\"width\", bvr.min_width);\n\n '''",
"}",
")",
"left_nav_div",
".",
"add_behavior",
"(",
"{",
"'type'",
":",
"'listen'",
",",
"'event_name'",
":",
"'side_bar|hide'",
",",
"'min_width'",
":",
"min_width",
",",
"'cbjs_action'",
":",
"'''\n var size = bvr.src_el.getSize();\n bvr.src_el.setAttribute(\"spt_size\", size.x);\n new Fx.Tween(bvr.src_el, {duration:'short'}).start('width', bvr.min_width);\n\n '''",
"}",
")",
"left_nav_div",
".",
"add_behavior",
"(",
"{",
"'type'",
":",
"'listen'",
",",
"'event_name'",
":",
"'side_bar|show'",
",",
"'min_width'",
":",
"min_width",
",",
"'cbjs_action'",
":",
"'''\n var width = bvr.src_el.getAttribute(\"spt_size\");\n if (!width) {\n width = 185;\n }\n if (parseInt(width) < 5) {\n width = 185;\n }\n //bvr.src_el.setStyle(\"width\", width + \"px\");\n new Fx.Tween(bvr.src_el, {duration:'short'}).start('width', bvr.min_width, width+\"px\");\n '''",
"}",
")",
"left_nav_div",
".",
"add_behavior",
"(",
"{",
"'type'",
":",
"'listen'",
",",
"'event_name'",
":",
"'side_bar|toggle'",
",",
"'cbjs_action'",
":",
"'''\n var size = bvr.src_el.getSize();\n if (size.x < 5) {\n spt.named_events.fire_event(\"side_bar|show\", {} );\n }\n else {\n spt.named_events.fire_event(\"side_bar|hide\", {} );\n }\n '''",
"}",
")",
"# create the main body panel",
"palette",
"=",
"WebContainer",
".",
"get_palette",
"(",
")",
"color",
"=",
"palette",
".",
"color",
"(",
"\"background2\"",
")",
"main_body_rounded",
"=",
"DivWdg",
"(",
")",
"main_body_inner",
"=",
"main_body_rounded",
"main_body_inner",
".",
"add_style",
"(",
"\"min-height: 500px\"",
")",
"main_body_panel",
"=",
"DivWdg",
"(",
")",
"main_body_panel",
".",
"set_id",
"(",
"\"main_body\"",
")",
"main_body_panel",
".",
"add_class",
"(",
"\"spt_main_panel\"",
")",
"main_body_inner",
".",
"add",
"(",
"main_body_panel",
")",
"tab",
"=",
"MainBodyTabWdg",
"(",
")",
"main_body_panel",
".",
"add",
"(",
"tab",
")",
"# TEST: NEW LAYOUT",
"if",
"Config",
".",
"get_value",
"(",
"\"install\"",
",",
"\"layout\"",
")",
"==",
"\"fixed\"",
":",
"main_body_panel",
".",
"add_style",
"(",
"\"margin-top: 31px\"",
")",
"main_body_rounded",
".",
"add_color",
"(",
"\"background\"",
",",
"\"background\"",
")",
"main_body_rounded",
".",
"add_style",
"(",
"\"padding: 3px 0px 0px 0px\"",
")",
"# add the content to the main body panel",
"try",
":",
"if",
"self",
".",
"widget",
":",
"tab",
".",
"add",
"(",
"self",
".",
"widget",
")",
"element_name",
"=",
"self",
".",
"widget",
".",
"get_name",
"(",
")",
"else",
":",
"main_body_handler",
"=",
"config",
".",
"get_display_handler",
"(",
"\"main_body\"",
")",
"main_body_options",
"=",
"config",
".",
"get_display_options",
"(",
"\"main_body\"",
")",
"element_name",
"=",
"main_body_options",
".",
"get",
"(",
"\"element_name\"",
")",
"title",
"=",
"main_body_options",
".",
"get",
"(",
"\"title\"",
")",
"main_body_content",
"=",
"Common",
".",
"create_from_class_path",
"(",
"main_body_handler",
",",
"[",
"]",
",",
"main_body_options",
")",
"# get the web values from top_layout",
"main_body_values",
"=",
"config",
".",
"get_web_options",
"(",
"\"main_body\"",
")",
"web",
"=",
"WebContainer",
".",
"get_web",
"(",
")",
"if",
"isinstance",
"(",
"main_body_values",
",",
"dict",
")",
":",
"for",
"name",
",",
"value",
"in",
"main_body_values",
".",
"items",
"(",
")",
":",
"web",
".",
"set_form_value",
"(",
"name",
",",
"value",
")",
"main_body_content",
".",
"set_name",
"(",
"element_name",
")",
"tab",
".",
"add",
"(",
"main_body_content",
",",
"element_name",
",",
"title",
")",
"self",
".",
"set_as_panel",
"(",
"main_body_panel",
",",
"class_name",
"=",
"main_body_handler",
",",
"kwargs",
"=",
"main_body_options",
")",
"main_body_panel",
".",
"add_behavior",
"(",
"{",
"'type'",
":",
"'load'",
",",
"'element_name'",
":",
"element_name",
",",
"'cbjs_action'",
":",
"'''\n if (spt.help)\n spt.help.set_view(bvr.element_name);\n '''",
"}",
")",
"except",
"Exception",
"as",
"e",
":",
"# handle an error in the drawing",
"buffer",
"=",
"self",
".",
"get_buffer_on_exception",
"(",
")",
"error_wdg",
"=",
"self",
".",
"handle_exception",
"(",
"e",
")",
"main_body_content",
"=",
"DivWdg",
"(",
")",
"main_body_content",
".",
"add",
"(",
"error_wdg",
")",
"main_body_content",
"=",
"main_body_content",
".",
"get_buffer_display",
"(",
")",
"tab",
".",
"add",
"(",
"main_body_content",
",",
"element_name",
",",
"title",
")",
"# add the main container",
"container_div",
"=",
"DivWdg",
"(",
")",
"container_div",
".",
"set_style",
"(",
"\"width: 100%;\"",
")",
"container_div",
".",
"add_behavior",
"(",
"{",
"\"type\"",
":",
"\"load\"",
",",
"\"cbjs_action\"",
":",
"'''\n\n // add a resize class to the tab widget\n var tab_header = document.id(document.body).getElement(\".spt_tab_header_top\");\n tab_header.addClass(\"spt_window_resize_anchor\");\n\n var onresize = function() {\n var body = document.id(document.body);\n var size = body.getSize();\n\n //var content = body.getElement(\".content\");\n //content.setStyle(\"height\", size.y);\n\n var offset_els = document.id(document.body).getElements(\".spt_window_resize_anchor\");\n var offset = 0;\n for (var i = 0; i < offset_els.length; i++) {\n var offset_el = offset_els[i];\n var offset_size = offset_el.getSize();\n offset += offset_size.y;\n }\n\n //console.log(\"offset: \" + offset);\n\n var els = body.getElements(\".spt_window_resize\");\n for (var i = 0; i < els.length; i++) {\n var el = els[i];\n var offset = el.getAttribute(\"spt_window_resize_offset\");\n \n if (offset) {\n offset = parseInt(offset);\n el.setStyle(\"height\", size.y - offset);\n el.setAttribute(\"height\", size.y - offset);\n }\n\n\n var offset = el.getAttribute(\"spt_window_resize_xoffset\");\n if (offset != null) {\n offset = parseInt(offset);\n el.setStyle(\"width\", size.x - offset);\n el.setAttribute(\"width\", size.x - offset);\n }\n\n\n }\n }\n\n window.onresize = onresize;\n window.onresize();\n\n\n '''",
"}",
")",
"# NOTE: the td should not be the sliding element! So we create a div inside the TD to be the sliding element",
"main_body_div",
"=",
"DivWdg",
"(",
")",
"main_body_div",
".",
"set_id",
"(",
"\"horizontal_main_body_slider\"",
")",
"main_body_div",
".",
"add",
"(",
"main_body_inner",
")",
"from",
".",
"page_header_wdg",
"import",
"PageHeaderWdg",
"header_panel",
"=",
"DivWdg",
"(",
")",
"header_panel",
".",
"set_id",
"(",
"\"main_header\"",
")",
"header_panel",
".",
"add_attr",
"(",
"\"spt_class_name\"",
",",
"\"tactic.ui.app.PageHeaderWdg\"",
")",
"header_wdg",
"=",
"PageHeaderWdg",
"(",
")",
"header_panel",
".",
"add",
"(",
"header_wdg",
")",
"container_div",
".",
"add",
"(",
"header_panel",
".",
"get_buffer_display",
"(",
")",
")",
"main_body_dis",
"=",
"main_body_div",
".",
"get_buffer_display",
"(",
")",
"main_body_td",
".",
"add",
"(",
"main_body_dis",
")",
"container_div",
".",
"add",
"(",
"core_table",
")",
"#container_div.add( drag_ghost_div )",
"is_admin",
"=",
"False",
"security",
"=",
"Environment",
".",
"get_security",
"(",
")",
"if",
"security",
".",
"check_access",
"(",
"\"builtin\"",
",",
"\"view_site_admin\"",
",",
"\"allow\"",
")",
":",
"is_admin",
"=",
"True",
"if",
"is_admin",
":",
"from",
".",
"quick_box_wdg",
"import",
"QuickBoxWdg",
"quick_box",
"=",
"QuickBoxWdg",
"(",
")",
"container_div",
".",
"add",
"(",
"quick_box",
")",
"container_div",
".",
"add_behavior",
"(",
"{",
"'type'",
":",
"'load'",
",",
"'cbjs_action'",
":",
"'''\n spt.named_events.fire_event(\"close_admin_bar\");\n '''",
"}",
")",
"return",
"container_div"
] | https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/src/tactic/ui/app/page_nav_container_wdg.py#L262-L626 |
|
replit-archive/jsrepl | 36d79b6288ca5d26208e8bade2a168c6ebcb2376 | extern/python/reloop-closured/lib/python2.7/cookielib.py | python | domain_match | (A, B) | return True | Return True if domain A domain-matches domain B, according to RFC 2965.
A and B may be host domain names or IP addresses.
RFC 2965, section 1:
Host names can be specified either as an IP address or a HDN string.
Sometimes we compare one host name with another. (Such comparisons SHALL
be case-insensitive.) Host A's name domain-matches host B's if
* their host name strings string-compare equal; or
* A is a HDN string and has the form NB, where N is a non-empty
name string, B has the form .B', and B' is a HDN string. (So,
x.y.com domain-matches .Y.com but not Y.com.)
Note that domain-match is not a commutative operation: a.b.c.com
domain-matches .c.com, but not the reverse. | Return True if domain A domain-matches domain B, according to RFC 2965. | [
"Return",
"True",
"if",
"domain",
"A",
"domain",
"-",
"matches",
"domain",
"B",
"according",
"to",
"RFC",
"2965",
"."
] | def domain_match(A, B):
"""Return True if domain A domain-matches domain B, according to RFC 2965.
A and B may be host domain names or IP addresses.
RFC 2965, section 1:
Host names can be specified either as an IP address or a HDN string.
Sometimes we compare one host name with another. (Such comparisons SHALL
be case-insensitive.) Host A's name domain-matches host B's if
* their host name strings string-compare equal; or
* A is a HDN string and has the form NB, where N is a non-empty
name string, B has the form .B', and B' is a HDN string. (So,
x.y.com domain-matches .Y.com but not Y.com.)
Note that domain-match is not a commutative operation: a.b.c.com
domain-matches .c.com, but not the reverse.
"""
# Note that, if A or B are IP addresses, the only relevant part of the
# definition of the domain-match algorithm is the direct string-compare.
A = A.lower()
B = B.lower()
if A == B:
return True
if not is_HDN(A):
return False
i = A.rfind(B)
if i == -1 or i == 0:
# A does not have form NB, or N is the empty string
return False
if not B.startswith("."):
return False
if not is_HDN(B[1:]):
return False
return True | [
"def",
"domain_match",
"(",
"A",
",",
"B",
")",
":",
"# Note that, if A or B are IP addresses, the only relevant part of the",
"# definition of the domain-match algorithm is the direct string-compare.",
"A",
"=",
"A",
".",
"lower",
"(",
")",
"B",
"=",
"B",
".",
"lower",
"(",
")",
"if",
"A",
"==",
"B",
":",
"return",
"True",
"if",
"not",
"is_HDN",
"(",
"A",
")",
":",
"return",
"False",
"i",
"=",
"A",
".",
"rfind",
"(",
"B",
")",
"if",
"i",
"==",
"-",
"1",
"or",
"i",
"==",
"0",
":",
"# A does not have form NB, or N is the empty string",
"return",
"False",
"if",
"not",
"B",
".",
"startswith",
"(",
"\".\"",
")",
":",
"return",
"False",
"if",
"not",
"is_HDN",
"(",
"B",
"[",
"1",
":",
"]",
")",
":",
"return",
"False",
"return",
"True"
] | https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/reloop-closured/lib/python2.7/cookielib.py#L512-L549 |
|
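The non-commutativity the docstring above calls out can be checked directly. A trimmed restatement for illustration; is_hdn here is a deliberately simplified stand-in for cookielib's is_HDN helper:
def is_hdn(text):
    # Simplified: treat anything that is not all digits and dots as a HDN.
    return not text.replace(".", "").isdigit()

def domain_match(a, b):
    a, b = a.lower(), b.lower()
    if a == b:
        return True
    if not is_hdn(a):
        return False
    i = a.rfind(b)
    if i <= 0:                       # no ".B" suffix, or an empty prefix
        return False
    return b.startswith(".") and is_hdn(b[1:])

print(domain_match("a.b.c.com", ".c.com"))  # True
print(domain_match(".c.com", "a.b.c.com"))  # False: not commutative
print(domain_match("x.y.com", "y.com"))     # False: B must start with "."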
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | product/ERP5/mixin/variated.py | python | VariatedMixin.getVariationCategoryItemList | (self, base_category_list=(), base=1,
display_id='logical_path', display_base_category=1,
current_category=None, omit_optional_variation=0,
omit_option_base_category=None, **kw) | return variation_category_item_list | Returns the list of possible variations | Returns the list of possible variations | [
"Returns",
"the",
"list",
"of",
"possible",
"variations"
] | def getVariationCategoryItemList(self, base_category_list=(), base=1,
display_id='logical_path', display_base_category=1,
current_category=None, omit_optional_variation=0,
omit_option_base_category=None, **kw):
"""
Returns the list of possible variations
"""
#XXX backwards compatibility
if omit_option_base_category is not None:
warn("Please use omit_optional_variation instead of"\
" omit_option_base_category.", DeprecationWarning)
omit_optional_variation = omit_option_base_category
variation_category_item_list = []
if current_category is not None:
variation_category_item_list.append((current_category,current_category))
if base_category_list is ():
base_category_list = self.getVariationBaseCategoryList()
if omit_optional_variation == 1:
base_category_list = [x for x in base_category_list if x not in
self.getPortalOptionBaseCategoryList()]
# Prepare 2 rendering
portal_categories = self.portal_categories
for base_category in base_category_list:
variation_category_list = self._getVariationCategoryList(
base_category_list=[base_category])
category_list = []
object_list = []
for variation_category_path in variation_category_list:
try:
variation_category = portal_categories.resolveCategory(
variation_category_path)
var_cat_portal_type = variation_category.getPortalType()
except AttributeError:
variation_category_item_list.append((variation_category_path,
variation_category_path))
else:
if var_cat_portal_type != 'Category':
object_list.append(variation_category)
else:
category_list.append(variation_category)
# Render categories
variation_category_item_list.extend(Renderer(
display_base_category=display_base_category,
display_none_category=0, base=base,
current_category=current_category,
display_id=display_id, **kw).\
render(category_list))
# Render the others
variation_category_item_list.extend(Renderer(
base_category=base_category,
display_base_category=display_base_category,
display_none_category=0, base=base,
current_category=current_category,
display_id='title', **kw).\
render(object_list))
return variation_category_item_list | [
"def",
"getVariationCategoryItemList",
"(",
"self",
",",
"base_category_list",
"=",
"(",
")",
",",
"base",
"=",
"1",
",",
"display_id",
"=",
"'logical_path'",
",",
"display_base_category",
"=",
"1",
",",
"current_category",
"=",
"None",
",",
"omit_optional_variation",
"=",
"0",
",",
"omit_option_base_category",
"=",
"None",
",",
"*",
"*",
"kw",
")",
":",
"#XXX backwards compatibility",
"if",
"omit_option_base_category",
"is",
"not",
"None",
":",
"warn",
"(",
"\"Please use omit_optional_variation instead of\"",
"\" omit_option_base_category.\"",
",",
"DeprecationWarning",
")",
"omit_optional_variation",
"=",
"omit_option_base_category",
"variation_category_item_list",
"=",
"[",
"]",
"if",
"current_category",
"is",
"not",
"None",
":",
"variation_category_item_list",
".",
"append",
"(",
"(",
"current_category",
",",
"current_category",
")",
")",
"if",
"base_category_list",
"is",
"(",
")",
":",
"base_category_list",
"=",
"self",
".",
"getVariationBaseCategoryList",
"(",
")",
"if",
"omit_optional_variation",
"==",
"1",
":",
"base_category_list",
"=",
"[",
"x",
"for",
"x",
"in",
"base_category_list",
"if",
"x",
"not",
"in",
"self",
".",
"getPortalOptionBaseCategoryList",
"(",
")",
"]",
"# Prepare 2 rendering",
"portal_categories",
"=",
"self",
".",
"portal_categories",
"for",
"base_category",
"in",
"base_category_list",
":",
"variation_category_list",
"=",
"self",
".",
"_getVariationCategoryList",
"(",
"base_category_list",
"=",
"[",
"base_category",
"]",
")",
"category_list",
"=",
"[",
"]",
"object_list",
"=",
"[",
"]",
"for",
"variation_category_path",
"in",
"variation_category_list",
":",
"try",
":",
"variation_category",
"=",
"portal_categories",
".",
"resolveCategory",
"(",
"variation_category_path",
")",
"var_cat_portal_type",
"=",
"variation_category",
".",
"getPortalType",
"(",
")",
"except",
"AttributeError",
":",
"variation_category_item_list",
".",
"append",
"(",
"(",
"variation_category_path",
",",
"variation_category_path",
")",
")",
"else",
":",
"if",
"var_cat_portal_type",
"!=",
"'Category'",
":",
"object_list",
".",
"append",
"(",
"variation_category",
")",
"else",
":",
"category_list",
".",
"append",
"(",
"variation_category",
")",
"# Render categories",
"variation_category_item_list",
".",
"extend",
"(",
"Renderer",
"(",
"display_base_category",
"=",
"display_base_category",
",",
"display_none_category",
"=",
"0",
",",
"base",
"=",
"base",
",",
"current_category",
"=",
"current_category",
",",
"display_id",
"=",
"display_id",
",",
"*",
"*",
"kw",
")",
".",
"render",
"(",
"category_list",
")",
")",
"# Render the others",
"variation_category_item_list",
".",
"extend",
"(",
"Renderer",
"(",
"base_category",
"=",
"base_category",
",",
"display_base_category",
"=",
"display_base_category",
",",
"display_none_category",
"=",
"0",
",",
"base",
"=",
"base",
",",
"current_category",
"=",
"current_category",
",",
"display_id",
"=",
"'title'",
",",
"*",
"*",
"kw",
")",
".",
"render",
"(",
"object_list",
")",
")",
"return",
"variation_category_item_list"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ERP5/mixin/variated.py#L120-L178 |
|
pyscada/PyScada | 16c7c01be39cdd55f5c0360e0991712397a14cb5 | pyscada/utils/scheduler.py | python | Scheduler.signal | (self, signum=None, frame=None) | handle signals | handle signals | [
"handle",
"signals"
] | def signal(self, signum=None, frame=None):
"""
handle signals
"""
logger.debug('PID %d, received signal: %d' % (self.pid, signum))
self.SIG_QUEUE.append(signum) | [
"def",
"signal",
"(",
"self",
",",
"signum",
"=",
"None",
",",
"frame",
"=",
"None",
")",
":",
"logger",
".",
"debug",
"(",
"'PID %d, received signal: %d'",
"%",
"(",
"self",
".",
"pid",
",",
"signum",
")",
")",
"self",
".",
"SIG_QUEUE",
".",
"append",
"(",
"signum",
")"
] | https://github.com/pyscada/PyScada/blob/16c7c01be39cdd55f5c0360e0991712397a14cb5/pyscada/utils/scheduler.py#L584-L589 |
||
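The handler above only records the signal for later processing; the same pattern in isolation (POSIX-only demo, since os.kill with SIGTERM behaves differently on Windows):
import os
import signal

SIG_QUEUE = []

def handler(signum, frame):
    # Do no real work here: just queue the signal for the main loop.
    SIG_QUEUE.append(signum)

signal.signal(signal.SIGTERM, handler)
os.kill(os.getpid(), signal.SIGTERM)   # deliver a signal to ourselves
print(SIG_QUEUE)                       # [<Signals.SIGTERM: 15>] on POSIX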
redapple0204/my-boring-python | 1ab378e9d4f39ad920ff542ef3b2db68f0575a98 | pythonenv3.8/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/tarfile.py | python | TarFile.makefile | (self, tarinfo, targetpath) | Make a file called targetpath. | Make a file called targetpath. | [
"Make",
"a",
"file",
"called",
"targetpath",
"."
] | def makefile(self, tarinfo, targetpath):
"""Make a file called targetpath.
"""
source = self.fileobj
source.seek(tarinfo.offset_data)
target = bltn_open(targetpath, "wb")
if tarinfo.sparse is not None:
for offset, size in tarinfo.sparse:
target.seek(offset)
copyfileobj(source, target, size)
else:
copyfileobj(source, target, tarinfo.size)
target.seek(tarinfo.size)
target.truncate()
target.close() | [
"def",
"makefile",
"(",
"self",
",",
"tarinfo",
",",
"targetpath",
")",
":",
"source",
"=",
"self",
".",
"fileobj",
"source",
".",
"seek",
"(",
"tarinfo",
".",
"offset_data",
")",
"target",
"=",
"bltn_open",
"(",
"targetpath",
",",
"\"wb\"",
")",
"if",
"tarinfo",
".",
"sparse",
"is",
"not",
"None",
":",
"for",
"offset",
",",
"size",
"in",
"tarinfo",
".",
"sparse",
":",
"target",
".",
"seek",
"(",
"offset",
")",
"copyfileobj",
"(",
"source",
",",
"target",
",",
"size",
")",
"else",
":",
"copyfileobj",
"(",
"source",
",",
"target",
",",
"tarinfo",
".",
"size",
")",
"target",
".",
"seek",
"(",
"tarinfo",
".",
"size",
")",
"target",
".",
"truncate",
"(",
")",
"target",
".",
"close",
"(",
")"
] | https://github.com/redapple0204/my-boring-python/blob/1ab378e9d4f39ad920ff542ef3b2db68f0575a98/pythonenv3.8/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/tarfile.py#L2296-L2310 |
||
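The sparse branch above seeks to each recorded offset before copying; the core of that step in a self-contained form (BytesIO zero-fills the hole between segments, just as a sparse file reads back zeros):
import io

# Archived payload: the file's data segments stored back to back.
source = io.BytesIO(b"AAABBB")
target = io.BytesIO()
sparse = [(0, 3), (10, 3)]          # (offset, size) pairs, like TarInfo.sparse

for offset, size in sparse:
    target.seek(offset)             # jump to where the segment belongs
    target.write(source.read(size)) # copy exactly `size` bytes

print(target.getvalue())            # b'AAA\x00\x00\x00\x00\x00\x00\x00BBB'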
jam-py/jam-py | 0821492cdff8665928e0f093a4435aa64285a45c | jam/third_party/sqlalchemy/sql/operators.py | python | ColumnOperators.__rmul__ | (self, other) | return self.reverse_operate(mul, other) | Implement the ``*`` operator in reverse.
See :meth:`.ColumnOperators.__mul__`. | Implement the ``*`` operator in reverse. | [
"Implement",
"the",
"*",
"operator",
"in",
"reverse",
"."
] | def __rmul__(self, other):
"""Implement the ``*`` operator in reverse.
See :meth:`.ColumnOperators.__mul__`.
"""
return self.reverse_operate(mul, other) | [
"def",
"__rmul__",
"(",
"self",
",",
"other",
")",
":",
"return",
"self",
".",
"reverse_operate",
"(",
"mul",
",",
"other",
")"
] | https://github.com/jam-py/jam-py/blob/0821492cdff8665928e0f093a4435aa64285a45c/jam/third_party/sqlalchemy/sql/operators.py#L1010-L1016 |
|
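How Python reaches a reflected method like the one above, shown with a minimal class rather than SQLAlchemy's operator machinery:
class Scaled:
    def __init__(self, factor):
        self.factor = factor

    def __mul__(self, other):
        return Scaled(self.factor * other)

    def __rmul__(self, other):
        # Called when the left operand (here an int) returns NotImplemented.
        return self.__mul__(other)

print((3 * Scaled(2)).factor)  # 6: int.__mul__ defers, Scaled.__rmul__ runs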
uccser/cs-field-guide | ea6e08e3cf170209d5bacdcef4ce6934fe1ecdb4 | csfieldguide/search/forms.py | python | all_items | (searchqueryset) | return searchqueryset.all() | Return all items of SearchQuerySet.
Args:
searchqueryset (SearchQuerySet): QuerySet of search items.
Returns:
All items in index. | Return all items of SearchQuerySet. | [
"Return",
"all",
"items",
"of",
"SearchQuerySet",
"."
] | def all_items(searchqueryset):
"""Return all items of SearchQuerySet.
Args:
searchqueryset (SearchQuerySet): QuerySet of search items.
Returns:
All items in index.
"""
return searchqueryset.all() | [
"def",
"all_items",
"(",
"searchqueryset",
")",
":",
"return",
"searchqueryset",
".",
"all",
"(",
")"
] | https://github.com/uccser/cs-field-guide/blob/ea6e08e3cf170209d5bacdcef4ce6934fe1ecdb4/csfieldguide/search/forms.py#L31-L40 |
|
atom-community/ide-python | c046f9c2421713b34baa22648235541c5bb284fe | lib/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/window.py | python | Window.get_process | (self) | return self.__process | @rtype: L{Process}
@return: Parent Process object. | [] | def get_process(self):
"""
@rtype: L{Process}
@return: Parent Process object.
"""
if self.__process is not None:
return self.__process
self.__load_Process_class()
self.__process = Process(self.get_pid())
return self.__process | [
"def",
"get_process",
"(",
"self",
")",
":",
"if",
"self",
".",
"__process",
"is",
"not",
"None",
":",
"return",
"self",
".",
"__process",
"self",
".",
"__load_Process_class",
"(",
")",
"self",
".",
"__process",
"=",
"Process",
"(",
"self",
".",
"get_pid",
"(",
")",
")",
"return",
"self",
".",
"__process"
] | https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/lib/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/window.py#L193-L202 |
||
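`get_process` above is the lazy-initialization idiom: construct the expensive parent `Process` on first access, cache it on the instance, and return the cached object thereafter. The same pattern in isolation, with a dict standing in for the real `Process(pid)` construction:

```python
class Window:
    def __init__(self, pid):
        self._pid = pid
        self._process = None  # filled on first access

    def get_process(self):
        if self._process is None:
            # stand-in for the real Process(pid) construction
            self._process = {"pid": self._pid}
        return self._process

w = Window(42)
assert w.get_process() is w.get_process()  # cached: same object both times
```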
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | product/ERP5Type/Core/CacheFactory.py | python | CacheFactory.get | (self, cache_id, default=None) | return default | Get value or return default from all contained Cache Bag
or Cache Plugin. | Get value or return default from all contained Cache Bag
or Cache Plugin. | [
"Get",
"value",
"or",
"return",
"default",
"from",
"all",
"contained",
"Cache",
"Bag",
"or",
"Cache",
"Plugin",
"."
] | def get(self, cache_id, default=None):
"""
Get value or return default from all contained Cache Bag
or Cache Plugin.
"""
cache_plugin_list = self.getCachePluginList(list(self.allowed_types) + ['ERP5 Cache Bag'])
for cache_plugin in cache_plugin_list:
value = cache_plugin.get(cache_id, default)
if value is not None:
return value
return default | [
"def",
"get",
"(",
"self",
",",
"cache_id",
",",
"default",
"=",
"None",
")",
":",
"cache_plugin_list",
"=",
"self",
".",
"getCachePluginList",
"(",
"list",
"(",
"self",
".",
"allowed_types",
")",
"+",
"[",
"'ERP5 Cache Bag'",
"]",
")",
"for",
"cache_plugin",
"in",
"cache_plugin_list",
":",
"value",
"=",
"cache_plugin",
".",
"get",
"(",
"cache_id",
",",
"default",
")",
"if",
"value",
"is",
"not",
"None",
":",
"return",
"value",
"return",
"default"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ERP5Type/Core/CacheFactory.py#L77-L87 |
|
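The `CacheFactory.get` above is a first-hit lookup over an ordered list of backends: probe each in turn, return the first non-None value, fall back to the default. A self-contained sketch of that chained-cache idea, with plain dicts standing in for the ERP5 cache plugins:

```python
def chained_get(caches, key, default=None):
    # probe each backend in order; the first non-None hit wins
    for cache in caches:
        value = cache.get(key, default)
        if value is not None:
            return value
    return default

l1, l2 = {}, {"answer": 42}
print(chained_get([l1, l2], "answer"))   # 42, found in the second backend
print(chained_get([l1, l2], "missing"))  # None
```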
xl7dev/BurpSuite | d1d4bd4981a87f2f4c0c9744ad7c476336c813da | Extender/Sqlmap/lib/core/common.py | python | getConsoleWidth | (default=80) | return width or default | Returns console width | Returns console width | [
"Returns",
"console",
"width"
] | def getConsoleWidth(default=80):
"""
Returns console width
"""
width = None
if os.getenv("COLUMNS", "").isdigit():
width = int(os.getenv("COLUMNS"))
else:
try:
try:
FNULL = open(os.devnull, 'w')
except IOError:
FNULL = None
process = execute("stty size", shell=True, stdout=PIPE, stderr=FNULL or PIPE)
stdout, _ = process.communicate()
items = stdout.split()
if len(items) == 2 and items[1].isdigit():
width = int(items[1])
except OSError:
pass
if width is None:
try:
import curses
stdscr = curses.initscr()
_, width = stdscr.getmaxyx()
curses.endwin()
except:
pass
return width or default | [
"def",
"getConsoleWidth",
"(",
"default",
"=",
"80",
")",
":",
"width",
"=",
"None",
"if",
"os",
".",
"getenv",
"(",
"\"COLUMNS\"",
",",
"\"\"",
")",
".",
"isdigit",
"(",
")",
":",
"width",
"=",
"int",
"(",
"os",
".",
"getenv",
"(",
"\"COLUMNS\"",
")",
")",
"else",
":",
"try",
":",
"try",
":",
"FNULL",
"=",
"open",
"(",
"os",
".",
"devnull",
",",
"'w'",
")",
"except",
"IOError",
":",
"FNULL",
"=",
"None",
"process",
"=",
"execute",
"(",
"\"stty size\"",
",",
"shell",
"=",
"True",
",",
"stdout",
"=",
"PIPE",
",",
"stderr",
"=",
"FNULL",
"or",
"PIPE",
")",
"stdout",
",",
"_",
"=",
"process",
".",
"communicate",
"(",
")",
"items",
"=",
"stdout",
".",
"split",
"(",
")",
"if",
"len",
"(",
"items",
")",
"==",
"2",
"and",
"items",
"[",
"1",
"]",
".",
"isdigit",
"(",
")",
":",
"width",
"=",
"int",
"(",
"items",
"[",
"1",
"]",
")",
"except",
"OSError",
":",
"pass",
"if",
"width",
"is",
"None",
":",
"try",
":",
"import",
"curses",
"stdscr",
"=",
"curses",
".",
"initscr",
"(",
")",
"_",
",",
"width",
"=",
"stdscr",
".",
"getmaxyx",
"(",
")",
"curses",
".",
"endwin",
"(",
")",
"except",
":",
"pass",
"return",
"width",
"or",
"default"
] | https://github.com/xl7dev/BurpSuite/blob/d1d4bd4981a87f2f4c0c9744ad7c476336c813da/Extender/Sqlmap/lib/core/common.py#L1711-L1745 |
|
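The fallback chain above (the COLUMNS variable, then `stty size`, then curses) predates Python 3.3; the standard library now covers the same ground. A hedged modern equivalent, not a drop-in for the sqlmap helper:

```python
import shutil

def get_console_width(default=80):
    # shutil consults COLUMNS first, then the terminal, then the fallback tuple
    return shutil.get_terminal_size((default, 24)).columns

print(get_console_width())
```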
Netflix-Skunkworks/sleepy-puppy | 67d114bcb75ee1a6e60b56177a54f985bef4fee7 | sleepypuppy/admin/assessment/views.py | python | AssessmentView.delete_captures | (self, assessment) | Remove captures and local captures upon assessment deletion | Remove captures and local captures upon assessment deletion | [
"Remove",
"captures",
"and",
"local",
"captures",
"upon",
"assessment",
"deletion"
] | def delete_captures(self, assessment):
"""
Remove captures and local captures upon assessment deletion
"""
cascaded_captures = Capture.query.filter_by(
assessment=assessment.name).all()
for capture in cascaded_captures:
try:
os.remove("uploads/{}.png".format(capture.screenshot))
os.remove(
"uploads/small_{}.png".format(capture.screenshot))
except:
pass
try:
# Cascade delete for Assessment
Capture.query.filter_by(assessment=assessment.name).delete()
AccessLog.query.filter_by(assessment=assessment.name).delete()
GenericCollector.query.filter_by(assessment=assessment.name).delete()
except Exception as err:
app.logger.warn(err)
try:
db.session.commit()
except Exception as err:
app.logger.warn(err) | [
"def",
"delete_captures",
"(",
"self",
",",
"assessment",
")",
":",
"cascaded_captures",
"=",
"Capture",
".",
"query",
".",
"filter_by",
"(",
"assessment",
"=",
"assessment",
".",
"name",
")",
".",
"all",
"(",
")",
"for",
"capture",
"in",
"cascaded_captures",
":",
"try",
":",
"os",
".",
"remove",
"(",
"\"uploads/{}.png\"",
".",
"format",
"(",
"capture",
".",
"screenshot",
")",
")",
"os",
".",
"remove",
"(",
"\"uploads/small_{}.png\"",
".",
"format",
"(",
"capture",
".",
"screenshot",
")",
")",
"except",
":",
"pass",
"try",
":",
"# Cascade delete for Assessment",
"Capture",
".",
"query",
".",
"filter_by",
"(",
"assessment",
"=",
"assessment",
".",
"name",
")",
".",
"delete",
"(",
")",
"AccessLog",
".",
"query",
".",
"filter_by",
"(",
"assessment",
"=",
"assessment",
".",
"name",
")",
".",
"delete",
"(",
")",
"GenericCollector",
".",
"query",
".",
"filter_by",
"(",
"assessment",
"=",
"assessment",
".",
"name",
")",
".",
"delete",
"(",
")",
"except",
"Exception",
"as",
"err",
":",
"app",
".",
"logger",
".",
"warn",
"(",
"err",
")",
"try",
":",
"db",
".",
"session",
".",
"commit",
"(",
")",
"except",
"Exception",
"as",
"err",
":",
"app",
".",
"logger",
".",
"warn",
"(",
"err",
")"
] | https://github.com/Netflix-Skunkworks/sleepy-puppy/blob/67d114bcb75ee1a6e60b56177a54f985bef4fee7/sleepypuppy/admin/assessment/views.py#L94-L119 |
||
hotosm/tasking-manager | 1a7b02c6ccd431029a96d709d4d786c83cb37f5e | backend/models/dtos/campaign_dto.py | python | CampaignListDTO.__init__ | (self) | | | DTO constructor initialises all arrays to empty | DTO constructor initialises all arrays to empty | [
"DTO",
"constructor",
"initialise",
"all",
"arrays",
"to",
"empty"
] | def __init__(self):
""" DTO constructor initialise all arrays to empty"""
super().__init__()
self.campaigns = [] | [
"def",
"__init__",
"(",
"self",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
")",
"self",
".",
"campaigns",
"=",
"[",
"]"
] | https://github.com/hotosm/tasking-manager/blob/1a7b02c6ccd431029a96d709d4d786c83cb37f5e/backend/models/dtos/campaign_dto.py#L51-L54 |
||
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | product/ERP5Catalog/CatalogTool.py | python | CatalogTool.getPredicatePropertyDict | (self, object) | | return property_dict | Construct a dictionary with a list of properties
to catalog into the table predicate | Construct a dictionary with a list of properties
to catalog into the table predicate | [
"Construct",
"a",
"dictionnary",
"with",
"a",
"list",
"of",
"properties",
"to",
"catalog",
"into",
"the",
"table",
"predicate"
] | def getPredicatePropertyDict(self, object):
"""
Construct a dictionary with a list of properties
to catalog into the table predicate
"""
if not object.providesIPredicate():
return None
object = object.asPredicate()
if object is None:
return None
property_dict = {}
identity_criterion = getattr(object,'_identity_criterion',None)
range_criterion = getattr(object,'_range_criterion',None)
if identity_criterion is not None:
for property, value in identity_criterion.items():
if value is not None:
property_dict[property] = value
if range_criterion is not None:
for property, (min, max) in range_criterion.items():
if min is not None:
property_dict['%s_range_min' % property] = min
if max is not None:
property_dict['%s_range_max' % property] = max
property_dict['membership_criterion_category_list'] = object.getMembershipCriterionCategoryList()
return property_dict | [
"def",
"getPredicatePropertyDict",
"(",
"self",
",",
"object",
")",
":",
"if",
"not",
"object",
".",
"providesIPredicate",
"(",
")",
":",
"return",
"None",
"object",
"=",
"object",
".",
"asPredicate",
"(",
")",
"if",
"object",
"is",
"None",
":",
"return",
"None",
"property_dict",
"=",
"{",
"}",
"identity_criterion",
"=",
"getattr",
"(",
"object",
",",
"'_identity_criterion'",
",",
"None",
")",
"range_criterion",
"=",
"getattr",
"(",
"object",
",",
"'_range_criterion'",
",",
"None",
")",
"if",
"identity_criterion",
"is",
"not",
"None",
":",
"for",
"property",
",",
"value",
"in",
"identity_criterion",
".",
"items",
"(",
")",
":",
"if",
"value",
"is",
"not",
"None",
":",
"property_dict",
"[",
"property",
"]",
"=",
"value",
"if",
"range_criterion",
"is",
"not",
"None",
":",
"for",
"property",
",",
"(",
"min",
",",
"max",
")",
"in",
"range_criterion",
".",
"items",
"(",
")",
":",
"if",
"min",
"is",
"not",
"None",
":",
"property_dict",
"[",
"'%s_range_min'",
"%",
"property",
"]",
"=",
"min",
"if",
"max",
"is",
"not",
"None",
":",
"property_dict",
"[",
"'%s_range_max'",
"%",
"property",
"]",
"=",
"max",
"property_dict",
"[",
"'membership_criterion_category_list'",
"]",
"=",
"object",
".",
"getMembershipCriterionCategoryList",
"(",
")",
"return",
"property_dict"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ERP5Catalog/CatalogTool.py#L1038-L1062 |
|
arschles/go-in-5-minutes | c02918d1def999b2d59c060818e8adb735e24719 | episode24/node_modules/node-gyp/gyp/pylib/gyp/input.py | python | PruneUnwantedTargets | (targets, flat_list, dependency_nodes, root_targets,
data) | return wanted_targets, wanted_flat_list | Return only the targets that are deep dependencies of |root_targets|. | Return only the targets that are deep dependencies of |root_targets|. | [
"Return",
"only",
"the",
"targets",
"that",
"are",
"deep",
"dependencies",
"of",
"|root_targets|",
"."
] | def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
data):
"""Return only the targets that are deep dependencies of |root_targets|."""
qualified_root_targets = []
for target in root_targets:
target = target.strip()
qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
if not qualified_targets:
raise GypError("Could not find target %s" % target)
qualified_root_targets.extend(qualified_targets)
wanted_targets = {}
for target in qualified_root_targets:
wanted_targets[target] = targets[target]
for dependency in dependency_nodes[target].DeepDependencies():
wanted_targets[dependency] = targets[dependency]
wanted_flat_list = [t for t in flat_list if t in wanted_targets]
# Prune unwanted targets from each build_file's data dict.
for build_file in data['target_build_files']:
if not 'targets' in data[build_file]:
continue
new_targets = []
for target in data[build_file]['targets']:
qualified_name = gyp.common.QualifiedTarget(build_file,
target['target_name'],
target['toolset'])
if qualified_name in wanted_targets:
new_targets.append(target)
data[build_file]['targets'] = new_targets
return wanted_targets, wanted_flat_list | [
"def",
"PruneUnwantedTargets",
"(",
"targets",
",",
"flat_list",
",",
"dependency_nodes",
",",
"root_targets",
",",
"data",
")",
":",
"qualified_root_targets",
"=",
"[",
"]",
"for",
"target",
"in",
"root_targets",
":",
"target",
"=",
"target",
".",
"strip",
"(",
")",
"qualified_targets",
"=",
"gyp",
".",
"common",
".",
"FindQualifiedTargets",
"(",
"target",
",",
"flat_list",
")",
"if",
"not",
"qualified_targets",
":",
"raise",
"GypError",
"(",
"\"Could not find target %s\"",
"%",
"target",
")",
"qualified_root_targets",
".",
"extend",
"(",
"qualified_targets",
")",
"wanted_targets",
"=",
"{",
"}",
"for",
"target",
"in",
"qualified_root_targets",
":",
"wanted_targets",
"[",
"target",
"]",
"=",
"targets",
"[",
"target",
"]",
"for",
"dependency",
"in",
"dependency_nodes",
"[",
"target",
"]",
".",
"DeepDependencies",
"(",
")",
":",
"wanted_targets",
"[",
"dependency",
"]",
"=",
"targets",
"[",
"dependency",
"]",
"wanted_flat_list",
"=",
"[",
"t",
"for",
"t",
"in",
"flat_list",
"if",
"t",
"in",
"wanted_targets",
"]",
"# Prune unwanted targets from each build_file's data dict.",
"for",
"build_file",
"in",
"data",
"[",
"'target_build_files'",
"]",
":",
"if",
"not",
"'targets'",
"in",
"data",
"[",
"build_file",
"]",
":",
"continue",
"new_targets",
"=",
"[",
"]",
"for",
"target",
"in",
"data",
"[",
"build_file",
"]",
"[",
"'targets'",
"]",
":",
"qualified_name",
"=",
"gyp",
".",
"common",
".",
"QualifiedTarget",
"(",
"build_file",
",",
"target",
"[",
"'target_name'",
"]",
",",
"target",
"[",
"'toolset'",
"]",
")",
"if",
"qualified_name",
"in",
"wanted_targets",
":",
"new_targets",
".",
"append",
"(",
"target",
")",
"data",
"[",
"build_file",
"]",
"[",
"'targets'",
"]",
"=",
"new_targets",
"return",
"wanted_targets",
",",
"wanted_flat_list"
] | https://github.com/arschles/go-in-5-minutes/blob/c02918d1def999b2d59c060818e8adb735e24719/episode24/node_modules/node-gyp/gyp/pylib/gyp/input.py#L2671-L2703 |
|
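`PruneUnwantedTargets` above keeps the roots plus everything reachable through `DeepDependencies()`. The same reachability computation over a plain adjacency dict, as a sketch (the gyp node objects are replaced by lists):

```python
def prune(targets, deps, roots):
    # walk the dependency graph from the roots; keep only what is reached
    wanted = set()
    stack = list(roots)
    while stack:
        t = stack.pop()
        if t in wanted:
            continue
        wanted.add(t)
        stack.extend(deps.get(t, []))
    return {t: targets[t] for t in wanted}

targets = {"app": {}, "lib": {}, "test": {}, "unused": {}}
deps = {"app": ["lib"], "test": ["lib"]}
print(sorted(prune(targets, deps, ["app"])))  # ['app', 'lib']
```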
ayojs/ayo | 45a1c8cf6384f5bcc81d834343c3ed9d78b97df3 | deps/v8/third_party/jinja2/filters.py | python | do_slice | (value, slices, fill_with=None) | Slice an iterator and return a list of lists containing
those items. Useful if you want to create a div containing
three ul tags that represent columns:
.. sourcecode:: html+jinja
<div class="columwrapper">
{%- for column in items|slice(3) %}
<ul class="column-{{ loop.index }}">
{%- for item in column %}
<li>{{ item }}</li>
{%- endfor %}
</ul>
{%- endfor %}
</div>
If you pass it a second argument it's used to fill missing
values on the last iteration. | Slice an iterator and return a list of lists containing
those items. Useful if you want to create a div containing
three ul tags that represent columns: | [
"Slice",
"an",
"iterator",
"and",
"return",
"a",
"list",
"of",
"lists",
"containing",
"those",
"items",
".",
"Useful",
"if",
"you",
"want",
"to",
"create",
"a",
"div",
"containing",
"three",
"ul",
"tags",
"that",
"represent",
"columns",
":"
] | def do_slice(value, slices, fill_with=None):
"""Slice an iterator and return a list of lists containing
those items. Useful if you want to create a div containing
three ul tags that represent columns:
.. sourcecode:: html+jinja
<div class="columwrapper">
{%- for column in items|slice(3) %}
<ul class="column-{{ loop.index }}">
{%- for item in column %}
<li>{{ item }}</li>
{%- endfor %}
</ul>
{%- endfor %}
</div>
If you pass it a second argument it's used to fill missing
values on the last iteration.
"""
seq = list(value)
length = len(seq)
items_per_slice = length // slices
slices_with_extra = length % slices
offset = 0
for slice_number in range(slices):
start = offset + slice_number * items_per_slice
if slice_number < slices_with_extra:
offset += 1
end = offset + (slice_number + 1) * items_per_slice
tmp = seq[start:end]
if fill_with is not None and slice_number >= slices_with_extra:
tmp.append(fill_with)
yield tmp | [
"def",
"do_slice",
"(",
"value",
",",
"slices",
",",
"fill_with",
"=",
"None",
")",
":",
"seq",
"=",
"list",
"(",
"value",
")",
"length",
"=",
"len",
"(",
"seq",
")",
"items_per_slice",
"=",
"length",
"//",
"slices",
"slices_with_extra",
"=",
"length",
"%",
"slices",
"offset",
"=",
"0",
"for",
"slice_number",
"in",
"range",
"(",
"slices",
")",
":",
"start",
"=",
"offset",
"+",
"slice_number",
"*",
"items_per_slice",
"if",
"slice_number",
"<",
"slices_with_extra",
":",
"offset",
"+=",
"1",
"end",
"=",
"offset",
"+",
"(",
"slice_number",
"+",
"1",
")",
"*",
"items_per_slice",
"tmp",
"=",
"seq",
"[",
"start",
":",
"end",
"]",
"if",
"fill_with",
"is",
"not",
"None",
"and",
"slice_number",
">=",
"slices_with_extra",
":",
"tmp",
".",
"append",
"(",
"fill_with",
")",
"yield",
"tmp"
] | https://github.com/ayojs/ayo/blob/45a1c8cf6384f5bcc81d834343c3ed9d78b97df3/deps/v8/third_party/jinja2/filters.py#L571-L604 |
||
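Calling the generator above directly shows the column math: ten items over three slices yields column sizes 4/3/3 (the remainder goes to the leading columns), and `fill_with` pads the short ones. Assuming the `do_slice` function from the record is in scope:

```python
# assuming do_slice from the record above is in scope
print([col for col in do_slice(range(10), 3)])
# [[0, 1, 2, 3], [4, 5, 6], [7, 8, 9]]
print([col for col in do_slice(range(10), 3, fill_with=0)])
# [[0, 1, 2, 3], [4, 5, 6, 0], [7, 8, 9, 0]]
```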
trigger-corp/browser-extensions | 895c14ddb5713613c58c4af60b5dcf0d66fea552 | generate/lib/run-firefox/simplejson/__init__.py | python | load | (fp, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, **kw) | return loads(fp.read(),
encoding=encoding, cls=cls, object_hook=object_hook,
parse_float=parse_float, parse_int=parse_int,
parse_constant=parse_constant, **kw) | Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
a JSON document) to a Python object.
If the contents of ``fp`` is encoded with an ASCII based encoding other
than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
be specified. Encodings that are not ASCII based (such as UCS-2) are
not allowed, and should be wrapped with
``codecs.getreader(fp)(encoding)``, or simply decoded to a ``unicode``
object and passed to ``loads()``
``object_hook`` is an optional function that will be called with the
result of any object literal decode (a ``dict``). The return value of
``object_hook`` will be used instead of the ``dict``. This feature
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg. | Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
a JSON document) to a Python object. | [
"Deserialize",
"fp",
"(",
"a",
".",
"read",
"()",
"-",
"supporting",
"file",
"-",
"like",
"object",
"containing",
"a",
"JSON",
"document",
")",
"to",
"a",
"Python",
"object",
"."
] | def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, **kw):
"""
Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
a JSON document) to a Python object.
If the contents of ``fp`` is encoded with an ASCII based encoding other
than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
be specified. Encodings that are not ASCII based (such as UCS-2) are
not allowed, and should be wrapped with
``codecs.getreader(fp)(encoding)``, or simply decoded to a ``unicode``
object and passed to ``loads()``
``object_hook`` is an optional function that will be called with the
result of any object literal decode (a ``dict``). The return value of
``object_hook`` will be used instead of the ``dict``. This feature
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg.
"""
return loads(fp.read(),
encoding=encoding, cls=cls, object_hook=object_hook,
parse_float=parse_float, parse_int=parse_int,
parse_constant=parse_constant, **kw) | [
"def",
"load",
"(",
"fp",
",",
"encoding",
"=",
"None",
",",
"cls",
"=",
"None",
",",
"object_hook",
"=",
"None",
",",
"parse_float",
"=",
"None",
",",
"parse_int",
"=",
"None",
",",
"parse_constant",
"=",
"None",
",",
"*",
"*",
"kw",
")",
":",
"return",
"loads",
"(",
"fp",
".",
"read",
"(",
")",
",",
"encoding",
"=",
"encoding",
",",
"cls",
"=",
"cls",
",",
"object_hook",
"=",
"object_hook",
",",
"parse_float",
"=",
"parse_float",
",",
"parse_int",
"=",
"parse_int",
",",
"parse_constant",
"=",
"parse_constant",
",",
"*",
"*",
"kw",
")"
] | https://github.com/trigger-corp/browser-extensions/blob/895c14ddb5713613c58c4af60b5dcf0d66fea552/generate/lib/run-firefox/simplejson/__init__.py#L249-L273 |
|
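Typical use of `load` above is just wrapping a file-like object; the stdlib `json` module kept the same `load`/`loads` split, so a small sketch with it illustrates the API:

```python
import io
import json

fp = io.StringIO('{"name": "example", "tags": ["a", "b"]}')
data = json.load(fp)  # read and parse in one call, like simplejson.load
assert data["tags"] == ["a", "b"]
```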
stdlib-js/stdlib | e3c14dd9a7985ed1cd1cc80e83b6659aeabeb7df | lib/node_modules/@stdlib/math/base/special/tan/benchmark/python/benchmark.py | python | main | () | Run the benchmark. | Run the benchmark. | [
"Run",
"the",
"benchmark",
"."
] | def main():
"""Run the benchmark."""
benchmark() | [
"def",
"main",
"(",
")",
":",
"benchmark",
"(",
")"
] | https://github.com/stdlib-js/stdlib/blob/e3c14dd9a7985ed1cd1cc80e83b6659aeabeb7df/lib/node_modules/@stdlib/math/base/special/tan/benchmark/python/benchmark.py#L91-L93 |
||
GoogleCloudPlatform/PerfKitExplorer | 9efa61015d50c25f6d753f0212ad3bf16876d496 | third_party/py/oauth2client/appengine.py | python | OAuth2Decorator.has_credentials | (self) | return self.credentials is not None and not self.credentials.invalid | True if for the logged in user there are valid access Credentials.
Must only be called from within a webapp.RequestHandler subclassed method
that had been decorated with either @oauth_required or @oauth_aware. | True if for the logged in user there are valid access Credentials. | [
"True",
"if",
"for",
"the",
"logged",
"in",
"user",
"there",
"are",
"valid",
"access",
"Credentials",
"."
] | def has_credentials(self):
"""True if for the logged in user there are valid access Credentials.
Must only be called from within a webapp.RequestHandler subclassed method
that had been decorated with either @oauth_required or @oauth_aware.
"""
return self.credentials is not None and not self.credentials.invalid | [
"def",
"has_credentials",
"(",
"self",
")",
":",
"return",
"self",
".",
"credentials",
"is",
"not",
"None",
"and",
"not",
"self",
".",
"credentials",
".",
"invalid"
] | https://github.com/GoogleCloudPlatform/PerfKitExplorer/blob/9efa61015d50c25f6d753f0212ad3bf16876d496/third_party/py/oauth2client/appengine.py#L784-L790 |
|
carlosperate/ardublockly | 04fa48273b5651386d0ef1ce6dd446795ffc2594 | ardublocklyserver/local-packages/serial/urlhandler/protocol_spy.py | python | sixteen | (data) | \
yield tuples of hex and ASCII display in multiples of 16. Includes a
space after 8 bytes and (None, None) after 16 bytes and at the end. | \
yield tuples of hex and ASCII display in multiples of 16. Includes a
space after 8 bytes and (None, None) after 16 bytes and at the end. | [
"\\",
"yield",
"tuples",
"of",
"hex",
"and",
"ASCII",
"display",
"in",
"multiples",
"of",
"16",
".",
"Includes",
"a",
"space",
"after",
"8",
"bytes",
"and",
"(",
"None",
"None",
")",
"after",
"16",
"bytes",
"and",
"at",
"the",
"end",
"."
] | def sixteen(data):
"""\
yield tuples of hex and ASCII display in multiples of 16. Includes a
space after 8 bytes and (None, None) after 16 bytes and at the end.
"""
n = 0
for b in serial.iterbytes(data):
yield ('{:02X} '.format(ord(b)), b.decode('ascii') if b' ' <= b < b'\x7f' else '.')
n += 1
if n == 8:
yield (' ', '')
elif n >= 16:
yield (None, None)
n = 0
if n > 0:
while n < 16:
n += 1
if n == 8:
yield (' ', '')
yield (' ', ' ')
yield (None, None) | [
"def",
"sixteen",
"(",
"data",
")",
":",
"n",
"=",
"0",
"for",
"b",
"in",
"serial",
".",
"iterbytes",
"(",
"data",
")",
":",
"yield",
"(",
"'{:02X} '",
".",
"format",
"(",
"ord",
"(",
"b",
")",
")",
",",
"b",
".",
"decode",
"(",
"'ascii'",
")",
"if",
"b' '",
"<=",
"b",
"<",
"b'\\x7f'",
"else",
"'.'",
")",
"n",
"+=",
"1",
"if",
"n",
"==",
"8",
":",
"yield",
"(",
"' '",
",",
"''",
")",
"elif",
"n",
">=",
"16",
":",
"yield",
"(",
"None",
",",
"None",
")",
"n",
"=",
"0",
"if",
"n",
">",
"0",
":",
"while",
"n",
"<",
"16",
":",
"n",
"+=",
"1",
"if",
"n",
"==",
"8",
":",
"yield",
"(",
"' '",
",",
"''",
")",
"yield",
"(",
"' '",
",",
"' '",
")",
"yield",
"(",
"None",
",",
"None",
")"
] | https://github.com/carlosperate/ardublockly/blob/04fa48273b5651386d0ef1ce6dd446795ffc2594/ardublocklyserver/local-packages/serial/urlhandler/protocol_spy.py#L34-L54 |
||
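A consumer of a hex dump usually wants rows of sixteen bytes with an ASCII gutter, which is what the `(None, None)` sentinels above delimit. An independent Python 3 sketch of the same row layout, without the pyserial plumbing:

```python
def hexdump(data, width=16):
    # same row-of-16 layout the sixteen() generator feeds, minus the sentinels
    for i in range(0, len(data), width):
        chunk = data[i:i + width]
        hexpart = " ".join("{:02X}".format(b) for b in chunk)
        text = "".join(chr(b) if 0x20 <= b < 0x7F else "." for b in chunk)
        print("{:<47}  {}".format(hexpart, text))

hexdump(b"hello, serial port\x00\x01")
```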
nodejs/http2 | 734ad72e3939e62bcff0f686b8ec426b8aaa22e3 | deps/v8/tools/stats-viewer.py | python | StatsViewer.MountSharedData | (self) | Mount the binary counters file as a memory-mapped file. If
something goes wrong print an informative message and exit the
program. | Mount the binary counters file as a memory-mapped file. If
something goes wrong print an informative message and exit the
program. | [
"Mount",
"the",
"binary",
"counters",
"file",
"as",
"a",
"memory",
"-",
"mapped",
"file",
".",
"If",
"something",
"goes",
"wrong",
"print",
"an",
"informative",
"message",
"and",
"exit",
"the",
"program",
"."
] | def MountSharedData(self):
"""Mount the binary counters file as a memory-mapped file. If
something goes wrong print an informative message and exit the
program."""
if not os.path.exists(self.data_name):
maps_name = "/proc/%s/maps" % self.data_name
if not os.path.exists(maps_name):
print "\"%s\" is neither a counter file nor a PID." % self.data_name
sys.exit(1)
maps_file = open(maps_name, "r")
try:
self.data_name = None
for m in re.finditer(r"/dev/shm/\S*", maps_file.read()):
if os.path.exists(m.group(0)):
self.data_name = m.group(0)
break
if self.data_name is None:
print "Can't find counter file in maps for PID %s." % self.data_name
sys.exit(1)
finally:
maps_file.close()
data_file = open(self.data_name, "r")
size = os.fstat(data_file.fileno()).st_size
fileno = data_file.fileno()
self.shared_mmap = mmap.mmap(fileno, size, access=mmap.ACCESS_READ)
data_access = SharedDataAccess(self.shared_mmap)
if data_access.IntAt(0) == COUNTERS_FILE_MAGIC_NUMBER:
return CounterCollection(data_access)
elif data_access.IntAt(0) == CHROME_COUNTERS_FILE_MAGIC_NUMBER:
return ChromeCounterCollection(data_access)
print "File %s is not stats data." % self.data_name
sys.exit(1) | [
"def",
"MountSharedData",
"(",
"self",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"data_name",
")",
":",
"maps_name",
"=",
"\"/proc/%s/maps\"",
"%",
"self",
".",
"data_name",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"maps_name",
")",
":",
"print",
"\"\\\"%s\\\" is neither a counter file nor a PID.\"",
"%",
"self",
".",
"data_name",
"sys",
".",
"exit",
"(",
"1",
")",
"maps_file",
"=",
"open",
"(",
"maps_name",
",",
"\"r\"",
")",
"try",
":",
"self",
".",
"data_name",
"=",
"None",
"for",
"m",
"in",
"re",
".",
"finditer",
"(",
"r\"/dev/shm/\\S*\"",
",",
"maps_file",
".",
"read",
"(",
")",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"m",
".",
"group",
"(",
"0",
")",
")",
":",
"self",
".",
"data_name",
"=",
"m",
".",
"group",
"(",
"0",
")",
"break",
"if",
"self",
".",
"data_name",
"is",
"None",
":",
"print",
"\"Can't find counter file in maps for PID %s.\"",
"%",
"self",
".",
"data_name",
"sys",
".",
"exit",
"(",
"1",
")",
"finally",
":",
"maps_file",
".",
"close",
"(",
")",
"data_file",
"=",
"open",
"(",
"self",
".",
"data_name",
",",
"\"r\"",
")",
"size",
"=",
"os",
".",
"fstat",
"(",
"data_file",
".",
"fileno",
"(",
")",
")",
".",
"st_size",
"fileno",
"=",
"data_file",
".",
"fileno",
"(",
")",
"self",
".",
"shared_mmap",
"=",
"mmap",
".",
"mmap",
"(",
"fileno",
",",
"size",
",",
"access",
"=",
"mmap",
".",
"ACCESS_READ",
")",
"data_access",
"=",
"SharedDataAccess",
"(",
"self",
".",
"shared_mmap",
")",
"if",
"data_access",
".",
"IntAt",
"(",
"0",
")",
"==",
"COUNTERS_FILE_MAGIC_NUMBER",
":",
"return",
"CounterCollection",
"(",
"data_access",
")",
"elif",
"data_access",
".",
"IntAt",
"(",
"0",
")",
"==",
"CHROME_COUNTERS_FILE_MAGIC_NUMBER",
":",
"return",
"ChromeCounterCollection",
"(",
"data_access",
")",
"print",
"\"File %s is not stats data.\"",
"%",
"self",
".",
"data_name",
"sys",
".",
"exit",
"(",
"1",
")"
] | https://github.com/nodejs/http2/blob/734ad72e3939e62bcff0f686b8ec426b8aaa22e3/deps/v8/tools/stats-viewer.py#L96-L127 |
||
redapple0204/my-boring-python | 1ab378e9d4f39ad920ff542ef3b2db68f0575a98 | pythonenv3.8/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/tarfile.py | python | TarInfo._create_pax_generic_header | (cls, pax_headers, type, encoding) | return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
cls._create_payload(records) | Return a POSIX.1-2008 extended or global header sequence
that contains a list of keyword, value pairs. The values
must be strings. | Return a POSIX.1-2008 extended or global header sequence
that contains a list of keyword, value pairs. The values
must be strings. | [
"Return",
"a",
"POSIX",
".",
"1",
"-",
"2008",
"extended",
"or",
"global",
"header",
"sequence",
"that",
"contains",
"a",
"list",
"of",
"keyword",
"value",
"pairs",
".",
"The",
"values",
"must",
"be",
"strings",
"."
] | def _create_pax_generic_header(cls, pax_headers, type, encoding):
"""Return a POSIX.1-2008 extended or global header sequence
that contains a list of keyword, value pairs. The values
must be strings.
"""
# Check if one of the fields contains surrogate characters and thereby
# forces hdrcharset=BINARY, see _proc_pax() for more information.
binary = False
for keyword, value in pax_headers.items():
try:
value.encode("utf8", "strict")
except UnicodeEncodeError:
binary = True
break
records = b""
if binary:
# Put the hdrcharset field at the beginning of the header.
records += b"21 hdrcharset=BINARY\n"
for keyword, value in pax_headers.items():
keyword = keyword.encode("utf8")
if binary:
# Try to restore the original byte representation of `value'.
# Needless to say, that the encoding must match the string.
value = value.encode(encoding, "surrogateescape")
else:
value = value.encode("utf8")
l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
n = p = 0
while True:
n = l + len(str(p))
if n == p:
break
p = n
records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
# We use a hardcoded "././@PaxHeader" name like star does
# instead of the one that POSIX recommends.
info = {}
info["name"] = "././@PaxHeader"
info["type"] = type
info["size"] = len(records)
info["magic"] = POSIX_MAGIC
# Create pax header + record blocks.
return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
cls._create_payload(records) | [
"def",
"_create_pax_generic_header",
"(",
"cls",
",",
"pax_headers",
",",
"type",
",",
"encoding",
")",
":",
"# Check if one of the fields contains surrogate characters and thereby",
"# forces hdrcharset=BINARY, see _proc_pax() for more information.",
"binary",
"=",
"False",
"for",
"keyword",
",",
"value",
"in",
"pax_headers",
".",
"items",
"(",
")",
":",
"try",
":",
"value",
".",
"encode",
"(",
"\"utf8\"",
",",
"\"strict\"",
")",
"except",
"UnicodeEncodeError",
":",
"binary",
"=",
"True",
"break",
"records",
"=",
"b\"\"",
"if",
"binary",
":",
"# Put the hdrcharset field at the beginning of the header.",
"records",
"+=",
"b\"21 hdrcharset=BINARY\\n\"",
"for",
"keyword",
",",
"value",
"in",
"pax_headers",
".",
"items",
"(",
")",
":",
"keyword",
"=",
"keyword",
".",
"encode",
"(",
"\"utf8\"",
")",
"if",
"binary",
":",
"# Try to restore the original byte representation of `value'.",
"# Needless to say, that the encoding must match the string.",
"value",
"=",
"value",
".",
"encode",
"(",
"encoding",
",",
"\"surrogateescape\"",
")",
"else",
":",
"value",
"=",
"value",
".",
"encode",
"(",
"\"utf8\"",
")",
"l",
"=",
"len",
"(",
"keyword",
")",
"+",
"len",
"(",
"value",
")",
"+",
"3",
"# ' ' + '=' + '\\n'",
"n",
"=",
"p",
"=",
"0",
"while",
"True",
":",
"n",
"=",
"l",
"+",
"len",
"(",
"str",
"(",
"p",
")",
")",
"if",
"n",
"==",
"p",
":",
"break",
"p",
"=",
"n",
"records",
"+=",
"bytes",
"(",
"str",
"(",
"p",
")",
",",
"\"ascii\"",
")",
"+",
"b\" \"",
"+",
"keyword",
"+",
"b\"=\"",
"+",
"value",
"+",
"b\"\\n\"",
"# We use a hardcoded \"././@PaxHeader\" name like star does",
"# instead of the one that POSIX recommends.",
"info",
"=",
"{",
"}",
"info",
"[",
"\"name\"",
"]",
"=",
"\"././@PaxHeader\"",
"info",
"[",
"\"type\"",
"]",
"=",
"type",
"info",
"[",
"\"size\"",
"]",
"=",
"len",
"(",
"records",
")",
"info",
"[",
"\"magic\"",
"]",
"=",
"POSIX_MAGIC",
"# Create pax header + record blocks.",
"return",
"cls",
".",
"_create_header",
"(",
"info",
",",
"USTAR_FORMAT",
",",
"\"ascii\"",
",",
"\"replace\"",
")",
"+",
"cls",
".",
"_create_payload",
"(",
"records",
")"
] | https://github.com/redapple0204/my-boring-python/blob/1ab378e9d4f39ad920ff542ef3b2db68f0575a98/pythonenv3.8/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/tarfile.py#L1169-L1217 |
|
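The `while True` loop above solves a small fixed-point problem: every pax record begins with its own total length in decimal, and appending those digits can change the length. Just that computation, as a standalone sketch over text instead of bytes:

```python
def pax_record(keyword, value):
    # body is " keyword=value\n"; the total length includes its own decimal prefix
    body_len = len(keyword) + len(value) + 3  # ' ' + '=' + '\n'
    n = p = 0
    while True:
        n = body_len + len(str(p))
        if n == p:  # length stopped changing: fixed point reached
            break
        p = n
    return "{} {}={}\n".format(p, keyword, value)

print(repr(pax_record("path", "some/long/file/name")))
# '28 path=some/long/file/name\n'
```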
cuckoosandbox/cuckoo | 50452a39ff7c3e0c4c94d114bc6317101633b958 | cuckoo/auxiliary/sniffer.py | python | Sniffer.stop | (self) | Stop sniffing.
@return: operation status. | Stop sniffing. | [
"Stop",
"sniffing",
"."
] | def stop(self):
"""Stop sniffing.
@return: operation status.
"""
# The tcpdump process was never started in the first place.
if not self.proc:
return
# The tcpdump process has already quit, generally speaking this
# indicates an error such as "permission denied".
if self.proc.poll():
out, err = self.proc.communicate()
raise CuckooOperationalError(
"Error running tcpdump to sniff the network traffic during "
"the analysis; stdout = %r and stderr = %r. Did you enable "
"the extra capabilities to allow running tcpdump as non-root "
"user and disable AppArmor properly (the latter only applies "
"to Ubuntu-based distributions with AppArmor, see also %s)?" %
(out, err, faq("permission-denied-for-tcpdump"))
)
try:
self.proc.terminate()
except:
try:
if not self.proc.poll():
log.debug("Killing sniffer")
self.proc.kill()
except OSError as e:
log.debug("Error killing sniffer: %s. Continue", e)
except Exception as e:
log.exception("Unable to stop the sniffer with pid %d: %s",
self.proc.pid, e)
# Ensure expected output was received from tcpdump.
out, err = self.proc.communicate()
self._check_output(out, err) | [
"def",
"stop",
"(",
"self",
")",
":",
"# The tcpdump process was never started in the first place.",
"if",
"not",
"self",
".",
"proc",
":",
"return",
"# The tcpdump process has already quit, generally speaking this",
"# indicates an error such as \"permission denied\".",
"if",
"self",
".",
"proc",
".",
"poll",
"(",
")",
":",
"out",
",",
"err",
"=",
"self",
".",
"proc",
".",
"communicate",
"(",
")",
"raise",
"CuckooOperationalError",
"(",
"\"Error running tcpdump to sniff the network traffic during \"",
"\"the analysis; stdout = %r and stderr = %r. Did you enable \"",
"\"the extra capabilities to allow running tcpdump as non-root \"",
"\"user and disable AppArmor properly (the latter only applies \"",
"\"to Ubuntu-based distributions with AppArmor, see also %s)?\"",
"%",
"(",
"out",
",",
"err",
",",
"faq",
"(",
"\"permission-denied-for-tcpdump\"",
")",
")",
")",
"try",
":",
"self",
".",
"proc",
".",
"terminate",
"(",
")",
"except",
":",
"try",
":",
"if",
"not",
"self",
".",
"proc",
".",
"poll",
"(",
")",
":",
"log",
".",
"debug",
"(",
"\"Killing sniffer\"",
")",
"self",
".",
"proc",
".",
"kill",
"(",
")",
"except",
"OSError",
"as",
"e",
":",
"log",
".",
"debug",
"(",
"\"Error killing sniffer: %s. Continue\"",
",",
"e",
")",
"except",
"Exception",
"as",
"e",
":",
"log",
".",
"exception",
"(",
"\"Unable to stop the sniffer with pid %d: %s\"",
",",
"self",
".",
"proc",
".",
"pid",
",",
"e",
")",
"# Ensure expected output was received from tcpdump.",
"out",
",",
"err",
"=",
"self",
".",
"proc",
".",
"communicate",
"(",
")",
"self",
".",
"_check_output",
"(",
"out",
",",
"err",
")"
] | https://github.com/cuckoosandbox/cuckoo/blob/50452a39ff7c3e0c4c94d114bc6317101633b958/cuckoo/auxiliary/sniffer.py#L138-L174 |
||
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | product/ZSQLCatalog/ZSQLCatalog.py | python | ZCatalog.getHotReindexingState | (self) | return value | Return the current hot reindexing state. | Return the current hot reindexing state. | [
"Return",
"the",
"current",
"hot",
"reindexing",
"state",
"."
] | def getHotReindexingState(self):
"""
Return the current hot reindexing state.
"""
value = getattr(self, 'hot_reindexing_state', None)
if value is None:
return HOT_REINDEXING_FINISHED_STATE
return value | [
"def",
"getHotReindexingState",
"(",
"self",
")",
":",
"value",
"=",
"getattr",
"(",
"self",
",",
"'hot_reindexing_state'",
",",
"None",
")",
"if",
"value",
"is",
"None",
":",
"return",
"HOT_REINDEXING_FINISHED_STATE",
"return",
"value"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ZSQLCatalog/ZSQLCatalog.py#L231-L238 |
|
Nexedi/erp5 | 44df1959c0e21576cf5e9803d602d95efb4b695b | product/ERP5/ERP5Site.py | python | ERP5Site.getPortalResourceTypeList | (self) | return self._getPortalGroupedTypeList('resource') or \
self._getPortalConfiguration('portal_resource_type_list') | Return resource types. | Return resource types. | [
"Return",
"resource",
"types",
"."
] | def getPortalResourceTypeList(self):
"""
Return resource types.
"""
return self._getPortalGroupedTypeList('resource') or \
self._getPortalConfiguration('portal_resource_type_list') | [
"def",
"getPortalResourceTypeList",
"(",
"self",
")",
":",
"return",
"self",
".",
"_getPortalGroupedTypeList",
"(",
"'resource'",
")",
"or",
"self",
".",
"_getPortalConfiguration",
"(",
"'portal_resource_type_list'",
")"
] | https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ERP5/ERP5Site.py#L903-L908 |
|
korolr/dotfiles | 8e46933503ecb8d8651739ffeb1d2d4f0f5c6524 | .config/sublime-text-3/Packages/python-jinja2/all/jinja2/environment.py | python | Template.from_code | (cls, environment, code, globals, uptodate=None) | return rv | Creates a template object from compiled code and the globals. This
is used by the loaders and environment to create a template object. | Creates a template object from compiled code and the globals. This
is used by the loaders and environment to create a template object. | [
"Creates",
"a",
"template",
"object",
"from",
"compiled",
"code",
"and",
"the",
"globals",
".",
"This",
"is",
"used",
"by",
"the",
"loaders",
"and",
"environment",
"to",
"create",
"a",
"template",
"object",
"."
] | def from_code(cls, environment, code, globals, uptodate=None):
"""Creates a template object from compiled code and the globals. This
is used by the loaders and environment to create a template object.
"""
namespace = {
'environment': environment,
'__file__': code.co_filename
}
exec(code, namespace)
rv = cls._from_namespace(environment, namespace, globals)
rv._uptodate = uptodate
return rv | [
"def",
"from_code",
"(",
"cls",
",",
"environment",
",",
"code",
",",
"globals",
",",
"uptodate",
"=",
"None",
")",
":",
"namespace",
"=",
"{",
"'environment'",
":",
"environment",
",",
"'__file__'",
":",
"code",
".",
"co_filename",
"}",
"exec",
"(",
"code",
",",
"namespace",
")",
"rv",
"=",
"cls",
".",
"_from_namespace",
"(",
"environment",
",",
"namespace",
",",
"globals",
")",
"rv",
".",
"_uptodate",
"=",
"uptodate",
"return",
"rv"
] | https://github.com/korolr/dotfiles/blob/8e46933503ecb8d8651739ffeb1d2d4f0f5c6524/.config/sublime-text-3/Packages/python-jinja2/all/jinja2/environment.py#L929-L940 |
|
KhronosGroup/OpenCL-Docs | 2f8b8140b71cfbc9698678f74fb35b6ab6d46f66 | scripts/conventions.py | python | ProseListFormats.quantifier | (self, n) | return '' | Return the desired quantifier for a list of a given length. | Return the desired quantifier for a list of a given length. | [
"Return",
"the",
"desired",
"quantifier",
"for",
"a",
"list",
"of",
"a",
"given",
"length",
"."
] | def quantifier(self, n):
"""Return the desired quantifier for a list of a given length."""
if self == ProseListFormats.ANY_OR:
if n > 1:
return 'any of '
elif self == ProseListFormats.EACH_AND:
if n > 2:
return 'each of '
if n == 2:
return 'both of '
return '' | [
"def",
"quantifier",
"(",
"self",
",",
"n",
")",
":",
"if",
"self",
"==",
"ProseListFormats",
".",
"ANY_OR",
":",
"if",
"n",
">",
"1",
":",
"return",
"'any of '",
"elif",
"self",
"==",
"ProseListFormats",
".",
"EACH_AND",
":",
"if",
"n",
">",
"2",
":",
"return",
"'each of '",
"if",
"n",
"==",
"2",
":",
"return",
"'both of '",
"return",
"''"
] | https://github.com/KhronosGroup/OpenCL-Docs/blob/2f8b8140b71cfbc9698678f74fb35b6ab6d46f66/scripts/conventions.py#L53-L63 |
|
ayojs/ayo | 45a1c8cf6384f5bcc81d834343c3ed9d78b97df3 | tools/gyp/pylib/gyp/MSVSUtil.py | python | ShardTargets | (target_list, target_dicts) | return (new_target_list, new_target_dicts) | Shard some targets apart to work around the linkers limits.
Arguments:
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
Returns:
Tuple of the new sharded versions of the inputs. | Shard some targets apart to work around the linkers limits. | [
"Shard",
"some",
"targets",
"apart",
"to",
"work",
"around",
"the",
"linkers",
"limits",
"."
] | def ShardTargets(target_list, target_dicts):
"""Shard some targets apart to work around the linkers limits.
Arguments:
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
Returns:
Tuple of the new sharded versions of the inputs.
"""
# Gather the targets to shard, and how many pieces.
targets_to_shard = {}
for t in target_dicts:
shards = int(target_dicts[t].get('msvs_shard', 0))
if shards:
targets_to_shard[t] = shards
# Shard target_list.
new_target_list = []
for t in target_list:
if t in targets_to_shard:
for i in range(targets_to_shard[t]):
new_target_list.append(_ShardName(t, i))
else:
new_target_list.append(t)
# Shard target_dict.
new_target_dicts = {}
for t in target_dicts:
if t in targets_to_shard:
for i in range(targets_to_shard[t]):
name = _ShardName(t, i)
new_target_dicts[name] = copy.copy(target_dicts[t])
new_target_dicts[name]['target_name'] = _ShardName(
new_target_dicts[name]['target_name'], i)
sources = new_target_dicts[name].get('sources', [])
new_sources = []
for pos in range(i, len(sources), targets_to_shard[t]):
new_sources.append(sources[pos])
new_target_dicts[name]['sources'] = new_sources
else:
new_target_dicts[t] = target_dicts[t]
# Shard dependencies.
for t in sorted(new_target_dicts):
for deptype in ('dependencies', 'dependencies_original'):
dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
new_dependencies = []
for d in dependencies:
if d in targets_to_shard:
for i in range(targets_to_shard[d]):
new_dependencies.append(_ShardName(d, i))
else:
new_dependencies.append(d)
new_target_dicts[t][deptype] = new_dependencies
return (new_target_list, new_target_dicts) | [
"def",
"ShardTargets",
"(",
"target_list",
",",
"target_dicts",
")",
":",
"# Gather the targets to shard, and how many pieces.",
"targets_to_shard",
"=",
"{",
"}",
"for",
"t",
"in",
"target_dicts",
":",
"shards",
"=",
"int",
"(",
"target_dicts",
"[",
"t",
"]",
".",
"get",
"(",
"'msvs_shard'",
",",
"0",
")",
")",
"if",
"shards",
":",
"targets_to_shard",
"[",
"t",
"]",
"=",
"shards",
"# Shard target_list.",
"new_target_list",
"=",
"[",
"]",
"for",
"t",
"in",
"target_list",
":",
"if",
"t",
"in",
"targets_to_shard",
":",
"for",
"i",
"in",
"range",
"(",
"targets_to_shard",
"[",
"t",
"]",
")",
":",
"new_target_list",
".",
"append",
"(",
"_ShardName",
"(",
"t",
",",
"i",
")",
")",
"else",
":",
"new_target_list",
".",
"append",
"(",
"t",
")",
"# Shard target_dict.",
"new_target_dicts",
"=",
"{",
"}",
"for",
"t",
"in",
"target_dicts",
":",
"if",
"t",
"in",
"targets_to_shard",
":",
"for",
"i",
"in",
"range",
"(",
"targets_to_shard",
"[",
"t",
"]",
")",
":",
"name",
"=",
"_ShardName",
"(",
"t",
",",
"i",
")",
"new_target_dicts",
"[",
"name",
"]",
"=",
"copy",
".",
"copy",
"(",
"target_dicts",
"[",
"t",
"]",
")",
"new_target_dicts",
"[",
"name",
"]",
"[",
"'target_name'",
"]",
"=",
"_ShardName",
"(",
"new_target_dicts",
"[",
"name",
"]",
"[",
"'target_name'",
"]",
",",
"i",
")",
"sources",
"=",
"new_target_dicts",
"[",
"name",
"]",
".",
"get",
"(",
"'sources'",
",",
"[",
"]",
")",
"new_sources",
"=",
"[",
"]",
"for",
"pos",
"in",
"range",
"(",
"i",
",",
"len",
"(",
"sources",
")",
",",
"targets_to_shard",
"[",
"t",
"]",
")",
":",
"new_sources",
".",
"append",
"(",
"sources",
"[",
"pos",
"]",
")",
"new_target_dicts",
"[",
"name",
"]",
"[",
"'sources'",
"]",
"=",
"new_sources",
"else",
":",
"new_target_dicts",
"[",
"t",
"]",
"=",
"target_dicts",
"[",
"t",
"]",
"# Shard dependencies.",
"for",
"t",
"in",
"sorted",
"(",
"new_target_dicts",
")",
":",
"for",
"deptype",
"in",
"(",
"'dependencies'",
",",
"'dependencies_original'",
")",
":",
"dependencies",
"=",
"copy",
".",
"copy",
"(",
"new_target_dicts",
"[",
"t",
"]",
".",
"get",
"(",
"deptype",
",",
"[",
"]",
")",
")",
"new_dependencies",
"=",
"[",
"]",
"for",
"d",
"in",
"dependencies",
":",
"if",
"d",
"in",
"targets_to_shard",
":",
"for",
"i",
"in",
"range",
"(",
"targets_to_shard",
"[",
"d",
"]",
")",
":",
"new_dependencies",
".",
"append",
"(",
"_ShardName",
"(",
"d",
",",
"i",
")",
")",
"else",
":",
"new_dependencies",
".",
"append",
"(",
"d",
")",
"new_target_dicts",
"[",
"t",
"]",
"[",
"deptype",
"]",
"=",
"new_dependencies",
"return",
"(",
"new_target_list",
",",
"new_target_dicts",
")"
] | https://github.com/ayojs/ayo/blob/45a1c8cf6384f5bcc81d834343c3ed9d78b97df3/tools/gyp/pylib/gyp/MSVSUtil.py#L74-L126 |
|
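The source distribution above is a round-robin stride: shard `i` of `n` takes every `n`-th source starting at offset `i`, which is exactly what Python's extended slicing expresses. In isolation:

```python
def shard_sources(sources, shards):
    # shard i takes sources[i], sources[i + shards], sources[i + 2*shards], ...
    return [sources[i::shards] for i in range(shards)]

print(shard_sources(["a.cc", "b.cc", "c.cc", "d.cc", "e.cc"], 2))
# [['a.cc', 'c.cc', 'e.cc'], ['b.cc', 'd.cc']]
```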
RASSec/A_Scan_Framework | 4a46cf14b8c717dc0196071bbfd27e2d9c85bb17 | pocscan/plugins/tangscan/tangscan/thirdparty/requests/packages/urllib3/__init__.py | python | add_stderr_logger | (level=logging.DEBUG) | return handler | Helper for quickly adding a StreamHandler to the logger. Useful for
debugging.
Returns the handler after adding it. | Helper for quickly adding a StreamHandler to the logger. Useful for
debugging. | [
"Helper",
"for",
"quickly",
"adding",
"a",
"StreamHandler",
"to",
"the",
"logger",
".",
"Useful",
"for",
"debugging",
"."
] | def add_stderr_logger(level=logging.DEBUG):
"""
Helper for quickly adding a StreamHandler to the logger. Useful for
debugging.
Returns the handler after adding it.
"""
# This method needs to be in this __init__.py to get the __name__ correct
# even if urllib3 is vendored within another package.
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
logger.addHandler(handler)
logger.setLevel(level)
logger.debug('Added a stderr logging handler to logger: %s' % __name__)
return handler | [
"def",
"add_stderr_logger",
"(",
"level",
"=",
"logging",
".",
"DEBUG",
")",
":",
"# This method needs to be in this __init__.py to get the __name__ correct",
"# even if urllib3 is vendored within another package.",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"__name__",
")",
"handler",
"=",
"logging",
".",
"StreamHandler",
"(",
")",
"handler",
".",
"setFormatter",
"(",
"logging",
".",
"Formatter",
"(",
"'%(asctime)s %(levelname)s %(message)s'",
")",
")",
"logger",
".",
"addHandler",
"(",
"handler",
")",
"logger",
".",
"setLevel",
"(",
"level",
")",
"logger",
".",
"debug",
"(",
"'Added a stderr logging handler to logger: %s'",
"%",
"__name__",
")",
"return",
"handler"
] | https://github.com/RASSec/A_Scan_Framework/blob/4a46cf14b8c717dc0196071bbfd27e2d9c85bb17/pocscan/plugins/tangscan/tangscan/thirdparty/requests/packages/urllib3/__init__.py#L37-L52 |
|
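Usage of the helper above is a single call before exercising the library, after which urllib3's debug output lands on stderr. A generic sketch of the same pattern for any library logger:

```python
import logging

def add_stderr_logger(name, level=logging.DEBUG):
    # same pattern as the record above: attach a StreamHandler to one logger
    logger = logging.getLogger(name)
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
    logger.addHandler(handler)
    logger.setLevel(level)
    return handler

add_stderr_logger("urllib3")
logging.getLogger("urllib3").debug("handler attached")
```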
ayojs/ayo | 45a1c8cf6384f5bcc81d834343c3ed9d78b97df3 | deps/v8/third_party/jinja2/filters.py | python | do_wordcount | (s) | return len(_word_re.findall(s)) | Count the words in that string. | Count the words in that string. | [
"Count",
"the",
"words",
"in",
"that",
"string",
"."
] | def do_wordcount(s):
"""Count the words in that string."""
return len(_word_re.findall(s)) | [
"def",
"do_wordcount",
"(",
"s",
")",
":",
"return",
"len",
"(",
"_word_re",
".",
"findall",
"(",
"s",
")",
")"
] | https://github.com/ayojs/ayo/blob/45a1c8cf6384f5bcc81d834343c3ed9d78b97df3/deps/v8/third_party/jinja2/filters.py#L509-L511 |
|
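`_word_re` in the record above is defined elsewhere in the jinja2 module; a self-contained equivalent of the filter, with the word pattern spelled out as an assumption:

```python
import re

_word_re = re.compile(r"\w+")  # assumed pattern; jinja2 defines this at module level

def do_wordcount(s):
    return len(_word_re.findall(s))

print(do_wordcount("Count the words in that string."))  # 6
```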
xl7dev/BurpSuite | d1d4bd4981a87f2f4c0c9744ad7c476336c813da | Extender/faraday/model/controller.py | python | ModelController.delApplicationASYNC | (self, host, app_name) | ASYNC API
Adds an action to the ModelController actions queue indicating a
particular application must be removed from the model | ASYNC API
Adds an action to the ModelController actions queue indicating a
particular application must be removed from the model | [
"ASYNC",
"API",
"Adds",
"an",
"action",
"to",
"the",
"ModelController",
"actions",
"queue",
"indicating",
"a",
"particular",
"host",
"must",
"be",
"removed",
"from",
"the",
"model"
] | def delApplicationASYNC(self, host, app_name):
"""
ASYNC API
Adds an action to the ModelController actions queue indicating a
particular application must be removed from the model
"""
self.__addPendingAction(modelactions.DELAPPLICATION, host, app_name) | [
"def",
"delApplicationASYNC",
"(",
"self",
",",
"host",
",",
"app_name",
")",
":",
"self",
".",
"__addPendingAction",
"(",
"modelactions",
".",
"DELAPPLICATION",
",",
"host",
",",
"app_name",
")"
] | https://github.com/xl7dev/BurpSuite/blob/d1d4bd4981a87f2f4c0c9744ad7c476336c813da/Extender/faraday/model/controller.py#L877-L883 |
||
atom-community/ide-python | c046f9c2421713b34baa22648235541c5bb284fe | dist/debugger/VendorLib/vs-py-debugger/pythonFiles/PythonTools/visualstudio_py_repl.py | python | ReplBackend._cmd_excx | (self) | handles executing a single file, module or process | handles executing a single file, module or process | [
"handles",
"executing",
"a",
"single",
"file",
"module",
"or",
"process"
] | def _cmd_excx(self):
"""handles executing a single file, module or process"""
filetype = read_string(self.conn)
filename = read_string(self.conn)
args = read_string(self.conn)
self.execute_file_ex(filetype, filename, args) | [
"def",
"_cmd_excx",
"(",
"self",
")",
":",
"filetype",
"=",
"read_string",
"(",
"self",
".",
"conn",
")",
"filename",
"=",
"read_string",
"(",
"self",
".",
"conn",
")",
"args",
"=",
"read_string",
"(",
"self",
".",
"conn",
")",
"self",
".",
"execute_file_ex",
"(",
"filetype",
",",
"filename",
",",
"args",
")"
] | https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/dist/debugger/VendorLib/vs-py-debugger/pythonFiles/PythonTools/visualstudio_py_repl.py#L346-L351 |
||
replit-archive/jsrepl | 36d79b6288ca5d26208e8bade2a168c6ebcb2376 | extern/python/closured/lib/python2.7/cookielib.py | python | user_domain_match | (A, B) | return False | For blocking/accepting domains.
A and B may be host domain names or IP addresses. | For blocking/accepting domains. | [
"For",
"blocking",
"/",
"accepting",
"domains",
"."
] | def user_domain_match(A, B):
"""For blocking/accepting domains.
A and B may be host domain names or IP addresses.
"""
A = A.lower()
B = B.lower()
if not (liberal_is_HDN(A) and liberal_is_HDN(B)):
if A == B:
# equal IP addresses
return True
return False
initial_dot = B.startswith(".")
if initial_dot and A.endswith(B):
return True
if not initial_dot and A == B:
return True
return False | [
"def",
"user_domain_match",
"(",
"A",
",",
"B",
")",
":",
"A",
"=",
"A",
".",
"lower",
"(",
")",
"B",
"=",
"B",
".",
"lower",
"(",
")",
"if",
"not",
"(",
"liberal_is_HDN",
"(",
"A",
")",
"and",
"liberal_is_HDN",
"(",
"B",
")",
")",
":",
"if",
"A",
"==",
"B",
":",
"# equal IP addresses",
"return",
"True",
"return",
"False",
"initial_dot",
"=",
"B",
".",
"startswith",
"(",
"\".\"",
")",
"if",
"initial_dot",
"and",
"A",
".",
"endswith",
"(",
"B",
")",
":",
"return",
"True",
"if",
"not",
"initial_dot",
"and",
"A",
"==",
"B",
":",
"return",
"True",
"return",
"False"
] | https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/closured/lib/python2.7/cookielib.py#L561-L579 |
|
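The leading-dot convention above is what makes cookie domain rules work: a pattern like `.example.com` matches any subdomain, a bare host matches only itself, and IP addresses must be equal. Assuming `user_domain_match` and its helper `liberal_is_HDN` from the same cookielib module are in scope:

```python
# assuming user_domain_match and liberal_is_HDN are importable from the module
print(user_domain_match("www.example.com", ".example.com"))  # True: dotted suffix
print(user_domain_match("www.example.com", "example.com"))   # False: exact match only
print(user_domain_match("192.168.0.1", "192.168.0.1"))       # True: equal IP addresses
```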
54xingzhe/weixin_crawler | 7e255e49717b7555424103398e16e2da40af4d38 | project/Application/gzh_category/__init__.py | python | GZHCategory.get_all_cat_data | (self) | | return category | :return: all current category data, in a format matching the front end | :return: all current category data, in a format matching the front end | [
":",
"return",
":",
"返回当前的所有分类数据",
"温和前端的数据格式"
] | def get_all_cat_data(self):
"""
:return: all current category data, in a format matching the front end
"""
category = []
# Get the names of all official accounts in the database to prepare the add options
gzh_list = list(get_article_metadata().keys())
# Get each category name and all official accounts under that category
queue = DBQ.get_queue_by_kv(queue_type=self.queue_type)
for cat in queue:
data = {}
data['cat_name'] = cat['name']
data['cat_members'] = []
for mem in cat['queue']:
data['cat_members'].append(mem['id'])
# Find official accounts not yet in the category as candidates to add
# data['cat_available'] = [x for x in gzh_list if x not in data['cat_members']]
data['cat_available'] = sub_list(gzh_list, data['cat_members'])
category.append(data)
return category | [
"def",
"get_all_cat_data",
"(",
"self",
")",
":",
"category",
"=",
"[",
"]",
"# 获得数据库中所有公众号的名称为添加选项做准备",
"gzh_list",
"=",
"list",
"(",
"get_article_metadata",
"(",
")",
".",
"keys",
"(",
")",
")",
"# 获得分类名称和该分类下的所有公众号",
"queue",
"=",
"DBQ",
".",
"get_queue_by_kv",
"(",
"queue_type",
"=",
"self",
".",
"queue_type",
")",
"for",
"cat",
"in",
"queue",
":",
"data",
"=",
"{",
"}",
"data",
"[",
"'cat_name'",
"]",
"=",
"cat",
"[",
"'name'",
"]",
"data",
"[",
"'cat_members'",
"]",
"=",
"[",
"]",
"for",
"mem",
"in",
"cat",
"[",
"'queue'",
"]",
":",
"data",
"[",
"'cat_members'",
"]",
".",
"append",
"(",
"mem",
"[",
"'id'",
"]",
")",
"# 找出不在分类中的公众号作为待加入选项",
"# data['cat_available'] = [x for x in gzh_list if x not in data['cat_members']]",
"data",
"[",
"'cat_available'",
"]",
"=",
"sub_list",
"(",
"gzh_list",
",",
"data",
"[",
"'cat_members'",
"]",
")",
"category",
".",
"append",
"(",
"data",
")",
"return",
"category"
] | https://github.com/54xingzhe/weixin_crawler/blob/7e255e49717b7555424103398e16e2da40af4d38/project/Application/gzh_category/__init__.py#L13-L32 |
|
jxcore/jxcore | b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410 | tools/closure_linter/gflags.py | python | _MakeXMLSafe | (s) | return s | Escapes <, >, and & from s, and removes XML 1.0-illegal chars. | Escapes <, >, and & from s, and removes XML 1.0-illegal chars. | [
"Escapes",
"<",
">",
"and",
"&",
"from",
"s",
"and",
"removes",
"XML",
"1",
".",
"0",
"-",
"illegal",
"chars",
"."
] | def _MakeXMLSafe(s):
"""Escapes <, >, and & from s, and removes XML 1.0-illegal chars."""
s = cgi.escape(s) # Escape <, >, and &
# Remove characters that cannot appear in an XML 1.0 document
# (http://www.w3.org/TR/REC-xml/#charsets).
#
# NOTE: if there are problems with current solution, one may move to
# XML 1.1, which allows such chars, if they're entity-escaped (&#xHH;).
s = re.sub(r'[\x00-\x08\x0b\x0c\x0e-\x1f]', '', s)
return s | [
"def",
"_MakeXMLSafe",
"(",
"s",
")",
":",
"s",
"=",
"cgi",
".",
"escape",
"(",
"s",
")",
"# Escape <, >, and &",
"# Remove characters that cannot appear in an XML 1.0 document",
"# (http://www.w3.org/TR/REC-xml/#charsets).",
"#",
"# NOTE: if there are problems with current solution, one may move to",
"# XML 1.1, which allows such chars, if they're entity-escaped (&#xHH;).",
"s",
"=",
"re",
".",
"sub",
"(",
"r'[\\x00-\\x08\\x0b\\x0c\\x0e-\\x1f]'",
",",
"''",
",",
"s",
")",
"return",
"s"
] | https://github.com/jxcore/jxcore/blob/b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410/tools/closure_linter/gflags.py#L1553-L1562 |
|
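The two steps above compose: entity-escape markup characters, then strip characters that XML 1.0 forbids outright. A Python 3 sketch (`cgi.escape` was removed; `html.escape` is the usual stand-in, though it also escapes quotes):

```python
import html
import re

def make_xml_safe(s):
    s = html.escape(s)  # escapes <, >, and & (and quotes, unlike cgi.escape)
    # remove characters illegal in XML 1.0: C0 controls except tab, LF, CR
    return re.sub(r"[\x00-\x08\x0b\x0c\x0e-\x1f]", "", s)

print(make_xml_safe("a < b\x00 & c"))  # a &lt; b &amp; c
```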
crits/crits | 6b357daa5c3060cf622d3a3b0c7b41a9ca69c049 | crits/objects/handlers.py | python | create_indicator_from_object | (rel_type, rel_id, ind_type, value,
source_name, method, reference, tlp, analyst, request) | return result | Create an indicator out of this object.
:param rel_type: The top-level object type this object is for.
:type rel_type: str
:param rel_id: The ObjectId of the top-level object.
:param ind_type: The indicator type to use.
:type ind_type: str
:param value: The indicator value.
:type value: str
:param source_name: The source name for the indicator.
:type source_name: str
:param method: The source method for the indicator.
:type method: str
:param reference: The source reference for the indicator.
:type reference: str
:param analyst: The user creating this indicator.
:type analyst: CRITsUser
:param request: The Django request.
:type request: :class:`django.http.HttpRequest`
:returns: dict with keys "success" (bool) and "message" (str) | Create an indicator out of this object. | [
"Create",
"an",
"indicator",
"out",
"of",
"this",
"object",
"."
] | def create_indicator_from_object(rel_type, rel_id, ind_type, value,
source_name, method, reference, tlp, analyst, request):
"""
Create an indicator out of this object.
:param rel_type: The top-level object type this object is for.
:type rel_type: str
:param rel_id: The ObjectId of the top-level object.
:param ind_type: The indicator type to use.
:type ind_type: str
:param value: The indicator value.
:type value: str
:param source_name: The source name for the indicator.
:type source_name: str
:param method: The source method for the indicator.
:type method: str
:param reference: The source reference for the indicator.
:type reference: str
:param tlp: The TLP of the source.
:type tlp: str
:param analyst: The user creating this indicator.
:type analyst: CRITsUser
:param request: The Django request.
:type request: :class:`django.http.HttpRequest`
:returns: dict with keys "success" (bool) and "message" (str)
"""
result = None
me = class_from_id(rel_type, rel_id)
if not me:
result = {'success': False,
'message': "Could not find %s" % rel_type}
elif value == None or value.strip() == "":
result = {'success': False,
'message': "Can't create indicator with an empty value field"}
elif ind_type == None or ind_type.strip() == "":
result = {'success': False,
'message': "Can't create indicator with an empty type field"}
elif source_name == None or source_name.strip() == "":
result = {'success': False,
'message': "Can't create indicator with an empty source field"}
else:
value = value.lower().strip()
ind_type = ind_type.strip()
source_name = source_name.strip()
create_indicator_result = {}
from crits.indicators.handlers import handle_indicator_ind
campaign = me.campaign if hasattr(me, 'campaign') else None
create_indicator_result = handle_indicator_ind(value,
source_name,
ind_type,
IndicatorThreatTypes.UNKNOWN,
IndicatorAttackTypes.UNKNOWN,
analyst,
source_method=method,
source_reference=reference,
source_tlp=tlp,
add_domain=True,
campaign=campaign)
# Check if an error occurred, if it did then return the error result
if create_indicator_result.get('success', True) == False:
return create_indicator_result
indicator = Indicator.objects(ind_type=ind_type,
value=value).first()
if not indicator:
result = {'success': False,
'message': "Could not create indicator"}
else:
results = me.add_relationship(indicator,
RelationshipTypes.RELATED_TO,
analyst=analyst.username,
get_rels=True)
if results['success']:
me.save(username=analyst)
relationship= {'type': rel_type, 'value': rel_id}
message = render_to_string('relationships_listing_widget.html',
{'relationship': relationship,
'nohide': True,
'relationships': results['message']},
request=request)
result = {'success': True, 'message': message}
else:
message = "Indicator created. Could not create relationship"
result = {'success': False,
'message': message}
return result | [
"def",
"create_indicator_from_object",
"(",
"rel_type",
",",
"rel_id",
",",
"ind_type",
",",
"value",
",",
"source_name",
",",
"method",
",",
"reference",
",",
"tlp",
",",
"analyst",
",",
"request",
")",
":",
"result",
"=",
"None",
"me",
"=",
"class_from_id",
"(",
"rel_type",
",",
"rel_id",
")",
"if",
"not",
"me",
":",
"result",
"=",
"{",
"'success'",
":",
"False",
",",
"'message'",
":",
"\"Could not find %s\"",
"%",
"rel_type",
"}",
"elif",
"value",
"==",
"None",
"or",
"value",
".",
"strip",
"(",
")",
"==",
"\"\"",
":",
"result",
"=",
"{",
"'success'",
":",
"False",
",",
"'message'",
":",
"\"Can't create indicator with an empty value field\"",
"}",
"elif",
"ind_type",
"==",
"None",
"or",
"ind_type",
".",
"strip",
"(",
")",
"==",
"\"\"",
":",
"result",
"=",
"{",
"'success'",
":",
"False",
",",
"'message'",
":",
"\"Can't create indicator with an empty type field\"",
"}",
"elif",
"source_name",
"==",
"None",
"or",
"source_name",
".",
"strip",
"(",
")",
"==",
"\"\"",
":",
"result",
"=",
"{",
"'success'",
":",
"False",
",",
"'message'",
":",
"\"Can't create indicator with an empty source field\"",
"}",
"else",
":",
"value",
"=",
"value",
".",
"lower",
"(",
")",
".",
"strip",
"(",
")",
"ind_type",
"=",
"ind_type",
".",
"strip",
"(",
")",
"source_name",
"=",
"source_name",
".",
"strip",
"(",
")",
"create_indicator_result",
"=",
"{",
"}",
"from",
"crits",
".",
"indicators",
".",
"handlers",
"import",
"handle_indicator_ind",
"campaign",
"=",
"me",
".",
"campaign",
"if",
"hasattr",
"(",
"me",
",",
"'campaign'",
")",
"else",
"None",
"create_indicator_result",
"=",
"handle_indicator_ind",
"(",
"value",
",",
"source_name",
",",
"ind_type",
",",
"IndicatorThreatTypes",
".",
"UNKNOWN",
",",
"IndicatorAttackTypes",
".",
"UNKNOWN",
",",
"analyst",
",",
"source_method",
"=",
"method",
",",
"source_reference",
"=",
"reference",
",",
"source_tlp",
"=",
"tlp",
",",
"add_domain",
"=",
"True",
",",
"campaign",
"=",
"campaign",
")",
"# Check if an error occurred, if it did then return the error result",
"if",
"create_indicator_result",
".",
"get",
"(",
"'success'",
",",
"True",
")",
"==",
"False",
":",
"return",
"create_indicator_result",
"indicator",
"=",
"Indicator",
".",
"objects",
"(",
"ind_type",
"=",
"ind_type",
",",
"value",
"=",
"value",
")",
".",
"first",
"(",
")",
"if",
"not",
"indicator",
":",
"result",
"=",
"{",
"'success'",
":",
"False",
",",
"'message'",
":",
"\"Could not create indicator\"",
"}",
"else",
":",
"results",
"=",
"me",
".",
"add_relationship",
"(",
"indicator",
",",
"RelationshipTypes",
".",
"RELATED_TO",
",",
"analyst",
"=",
"analyst",
".",
"username",
",",
"get_rels",
"=",
"True",
")",
"if",
"results",
"[",
"'success'",
"]",
":",
"me",
".",
"save",
"(",
"username",
"=",
"analyst",
")",
"relationship",
"=",
"{",
"'type'",
":",
"rel_type",
",",
"'value'",
":",
"rel_id",
"}",
"message",
"=",
"render_to_string",
"(",
"'relationships_listing_widget.html'",
",",
"{",
"'relationship'",
":",
"relationship",
",",
"'nohide'",
":",
"True",
",",
"'relationships'",
":",
"results",
"[",
"'message'",
"]",
"}",
",",
"request",
"=",
"request",
")",
"result",
"=",
"{",
"'success'",
":",
"True",
",",
"'message'",
":",
"message",
"}",
"else",
":",
"message",
"=",
"\"Indicator created. Could not create relationship\"",
"result",
"=",
"{",
"'success'",
":",
"False",
",",
"'message'",
":",
"message",
"}",
"return",
"result"
] | https://github.com/crits/crits/blob/6b357daa5c3060cf622d3a3b0c7b41a9ca69c049/crits/objects/handlers.py#L501-L590 |
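
The handler above follows a guard-clause pattern common in CRITs views: each missing field short-circuits with a `{'success': False, 'message': ...}` dict before any work happens. A framework-free sketch of just that validation step (no CRITs imports; field names mirror the record):

def validate_indicator_fields(value, ind_type, source_name):
    # Return early with the same dict shape the caller expects,
    # mirroring the elif chain in the record above.
    for name, field in (('value', value), ('type', ind_type),
                        ('source', source_name)):
        if field is None or field.strip() == "":
            return {'success': False,
                    'message': "Can't create indicator with an empty %s field" % name}
    return {'success': True, 'message': ''}

print(validate_indicator_fields('example.com', 'Domain', ''))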
|
domogik/domogik | fefd584d354875bcb15f351cbc455abffaa6501f | src/domogik/common/database.py | python | DbHelper.update_user_account | (self, a_id, a_new_login=None, a_person_id=None, a_is_admin=None, a_skin_used=None) | return user_acc | Update a user account
@param a_id : Account id to be updated
@param a_new_login : The new login (optional)
@param a_person_id : id of the person associated with the account
@param a_is_admin : True if it is an admin account, False otherwise (optional)
@param a_skin_used : name of the skin chosen by the user (optional, default='skins/default')
@return a UserAccount object | Update a user account | [
"Update",
"a",
"user",
"account"
] | def update_user_account(self, a_id, a_new_login=None, a_person_id=None, a_is_admin=None, a_skin_used=None):
"""Update a user account
@param a_id : Account id to be updated
@param a_new_login : The new login (optional)
@param a_person_id : id of the person associated with the account
@param a_is_admin : True if it is an admin account, False otherwise (optional)
@param a_skin_used : name of the skin chosen by the user (optional, default='skins/default')
@return a UserAccount object
"""
# Make sure previously modified objects outside of this method won't be committed
self.__session.expire_all()
user_acc = self.__session.query(UserAccount).filter_by(id=a_id).first()
if user_acc is None:
self.__raise_dbhelper_exception(u"UserAccount with id {0} couldn't be found".format(a_id))
if a_new_login is not None:
user_acc.login = ucode(a_new_login)
if a_person_id is not None:
person = self.__session.query(Person).filter_by(id=a_person_id).first()
if person is None:
self.__raise_dbhelper_exception(u"Person id '{0}' does not exist".format(a_person_id))
user_acc.person_id = a_person_id
if a_is_admin is not None:
user_acc.is_admin = a_is_admin
if a_skin_used is not None:
user_acc.skin_used = ucode(a_skin_used)
self.__session.add(user_acc)
self._do_commit()
return user_acc | [
"def",
"update_user_account",
"(",
"self",
",",
"a_id",
",",
"a_new_login",
"=",
"None",
",",
"a_person_id",
"=",
"None",
",",
"a_is_admin",
"=",
"None",
",",
"a_skin_used",
"=",
"None",
")",
":",
"# Make sure previously modified objects outer of this method won't be commited",
"self",
".",
"__session",
".",
"expire_all",
"(",
")",
"user_acc",
"=",
"self",
".",
"__session",
".",
"query",
"(",
"UserAccount",
")",
".",
"filter_by",
"(",
"id",
"=",
"a_id",
")",
".",
"first",
"(",
")",
"if",
"user_acc",
"is",
"None",
":",
"self",
".",
"__raise_dbhelper_exception",
"(",
"u\"UserAccount with id {0} couldn't be found\"",
".",
"format",
"(",
"a_id",
")",
")",
"if",
"a_new_login",
"is",
"not",
"None",
":",
"user_acc",
".",
"login",
"=",
"ucode",
"(",
"a_new_login",
")",
"if",
"a_person_id",
"is",
"not",
"None",
":",
"person",
"=",
"self",
".",
"__session",
".",
"query",
"(",
"Person",
")",
".",
"filter_by",
"(",
"id",
"=",
"a_person_id",
")",
".",
"first",
"(",
")",
"if",
"person",
"is",
"None",
":",
"self",
".",
"__raise_dbhelper_exception",
"(",
"u\"Person id '{0}' does not exist\"",
".",
"format",
"(",
"a_person_id",
")",
")",
"user_acc",
".",
"person_id",
"=",
"a_person_id",
"if",
"a_is_admin",
"is",
"not",
"None",
":",
"user_acc",
".",
"is_admin",
"=",
"a_is_admin",
"if",
"a_skin_used",
"is",
"not",
"None",
":",
"user_acc",
".",
"skin_used",
"=",
"ucode",
"(",
"a_skin_used",
")",
"self",
".",
"__session",
".",
"add",
"(",
"user_acc",
")",
"self",
".",
"_do_commit",
"(",
")",
"return",
"user_acc"
] | https://github.com/domogik/domogik/blob/fefd584d354875bcb15f351cbc455abffaa6501f/src/domogik/common/database.py#L1666-L1695 |
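
The updater above applies each field only when its argument is not None, so callers can change a single attribute without resending the rest. A condensed sketch of that partial-update idiom (plain objects, no SQLAlchemy session):

def apply_partial_update(obj, **changes):
    # Overwrite only the attributes that were actually supplied;
    # None means "leave this field alone", as in the record above.
    for name, value in changes.items():
        if value is not None:
            setattr(obj, name, value)
    return obj

class Account(object):
    login = 'old'
    skin_used = 'skins/default'

acc = apply_partial_update(Account(), login='new', skin_used=None)
print(acc.login, acc.skin_used)  # -> new skins/default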
|
defunctzombie/libuv.js | 04a76a470dfdcad14ea8f19b6f215f205a9214f8 | tools/gyp/pylib/gyp/input.py | python | DependencyGraphNode.DependenciesToLinkAgainst | (self, targets) | return self._LinkDependenciesInternal(targets, True) | Returns a list of dependency targets that are linked into this target. | Returns a list of dependency targets that are linked into this target. | [
"Returns",
"a",
"list",
"of",
"dependency",
"targets",
"that",
"are",
"linked",
"into",
"this",
"target",
"."
] | def DependenciesToLinkAgainst(self, targets):
"""
Returns a list of dependency targets that are linked into this target.
"""
return self._LinkDependenciesInternal(targets, True) | [
"def",
"DependenciesToLinkAgainst",
"(",
"self",
",",
"targets",
")",
":",
"return",
"self",
".",
"_LinkDependenciesInternal",
"(",
"targets",
",",
"True",
")"
] | https://github.com/defunctzombie/libuv.js/blob/04a76a470dfdcad14ea8f19b6f215f205a9214f8/tools/gyp/pylib/gyp/input.py#L1702-L1706 |
|
odoo/odoo | 8de8c196a137f4ebbf67d7c7c83fee36f873f5c8 | odoo/addons/base/models/res_config.py | python | ResConfigConfigurable.cancel | (self) | | | Method called when the user clicks on the ``Skip`` button.
``cancel`` should be overloaded instead of ``action_skip``. As with
``execute``, if it returns an action dictionary that action is
executed instead of the default (going to the next configuration item).
The default implementation is a NOOP.
``cancel`` is also called by the default implementation of
``action_cancel``. | Method called when the user clicks on the ``Skip`` button. | [
"Method",
"called",
"when",
"the",
"user",
"click",
"on",
"the",
"Skip",
"button",
"."
] | def cancel(self):
""" Method called when the user click on the ``Skip`` button.
``cancel`` should be overloaded instead of ``action_skip``. As with
``execute``, if it returns an action dictionary that action is
executed instead of the default (going to the next configuration item).
The default implementation is a NOOP.
``cancel`` is also called by the default implementation of
``action_cancel``.
"""
pass | [
"def",
"cancel",
"(",
"self",
")",
":",
"pass"
] | https://github.com/odoo/odoo/blob/8de8c196a137f4ebbf67d7c7c83fee36f873f5c8/odoo/addons/base/models/res_config.py#L83-L95 |
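
The docstring above defines a hook contract rather than behavior: an override of `cancel` may return an action dictionary, and the caller falls back to the default next step when it returns nothing. A minimal sketch of a caller honoring that contract (illustrative names, not the Odoo API):

def run_skip(wizard, default_action):
    # Prefer whatever action the hook returned; otherwise move on
    # to the default next configuration item.
    action = wizard.cancel()
    return action if action is not None else default_action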
||
marcotcr/lime | fd7eb2e6f760619c29fca0187c07b82157601b32 | lime/lime_tabular.py | python | LimeTabularExplainer.__data_inverse | (self,
data_row,
num_samples,
sampling_method) | return data, inverse | Generates a neighborhood around a prediction.
For numerical features, perturb them by sampling from a Normal(0,1) and
doing the inverse operation of mean-centering and scaling, according to
the means and stds in the training data. For categorical features,
perturb by sampling according to the training distribution, and making
a binary feature that is 1 when the value is the same as the instance
being explained.
Args:
data_row: 1d numpy array, corresponding to a row
num_samples: size of the neighborhood to learn the linear model
sampling_method: 'gaussian' or 'lhs'
Returns:
A tuple (data, inverse), where:
data: dense num_samples * K matrix, where categorical features
are encoded with either 0 (not equal to the corresponding value
in data_row) or 1. The first row is the original instance.
inverse: same as data, except the categorical features are not
binary, but categorical (as the original data) | Generates a neighborhood around a prediction. | [
"Generates",
"a",
"neighborhood",
"around",
"a",
"prediction",
"."
] | def __data_inverse(self,
data_row,
num_samples,
sampling_method):
"""Generates a neighborhood around a prediction.
For numerical features, perturb them by sampling from a Normal(0,1) and
doing the inverse operation of mean-centering and scaling, according to
the means and stds in the training data. For categorical features,
perturb by sampling according to the training distribution, and making
a binary feature that is 1 when the value is the same as the instance
being explained.
Args:
data_row: 1d numpy array, corresponding to a row
num_samples: size of the neighborhood to learn the linear model
sampling_method: 'gaussian' or 'lhs'
Returns:
A tuple (data, inverse), where:
data: dense num_samples * K matrix, where categorical features
are encoded with either 0 (not equal to the corresponding value
in data_row) or 1. The first row is the original instance.
inverse: same as data, except the categorical features are not
binary, but categorical (as the original data)
"""
is_sparse = sp.sparse.issparse(data_row)
if is_sparse:
num_cols = data_row.shape[1]
data = sp.sparse.csr_matrix((num_samples, num_cols), dtype=data_row.dtype)
else:
num_cols = data_row.shape[0]
data = np.zeros((num_samples, num_cols))
categorical_features = range(num_cols)
if self.discretizer is None:
instance_sample = data_row
scale = self.scaler.scale_
mean = self.scaler.mean_
if is_sparse:
# Perturb only the non-zero values
non_zero_indexes = data_row.nonzero()[1]
num_cols = len(non_zero_indexes)
instance_sample = data_row[:, non_zero_indexes]
scale = scale[non_zero_indexes]
mean = mean[non_zero_indexes]
if sampling_method == 'gaussian':
data = self.random_state.normal(0, 1, num_samples * num_cols
).reshape(num_samples, num_cols)
data = np.array(data)
elif sampling_method == 'lhs':
data = lhs(num_cols, samples=num_samples
).reshape(num_samples, num_cols)
means = np.zeros(num_cols)
stdvs = np.array([1]*num_cols)
for i in range(num_cols):
data[:, i] = norm(loc=means[i], scale=stdvs[i]).ppf(data[:, i])
data = np.array(data)
else:
warnings.warn('''Invalid input for sampling_method.
Defaulting to Gaussian sampling.''', UserWarning)
data = self.random_state.normal(0, 1, num_samples * num_cols
).reshape(num_samples, num_cols)
data = np.array(data)
if self.sample_around_instance:
data = data * scale + instance_sample
else:
data = data * scale + mean
if is_sparse:
if num_cols == 0:
data = sp.sparse.csr_matrix((num_samples,
data_row.shape[1]),
dtype=data_row.dtype)
else:
indexes = np.tile(non_zero_indexes, num_samples)
indptr = np.array(
range(0, len(non_zero_indexes) * (num_samples + 1),
len(non_zero_indexes)))
data_1d_shape = data.shape[0] * data.shape[1]
data_1d = data.reshape(data_1d_shape)
data = sp.sparse.csr_matrix(
(data_1d, indexes, indptr),
shape=(num_samples, data_row.shape[1]))
categorical_features = self.categorical_features
first_row = data_row
else:
first_row = self.discretizer.discretize(data_row)
data[0] = data_row.copy()
inverse = data.copy()
for column in categorical_features:
values = self.feature_values[column]
freqs = self.feature_frequencies[column]
inverse_column = self.random_state.choice(values, size=num_samples,
replace=True, p=freqs)
binary_column = (inverse_column == first_row[column]).astype(int)
binary_column[0] = 1
inverse_column[0] = data[0, column]
data[:, column] = binary_column
inverse[:, column] = inverse_column
if self.discretizer is not None:
inverse[1:] = self.discretizer.undiscretize(inverse[1:])
inverse[0] = data_row
return data, inverse | [
"def",
"__data_inverse",
"(",
"self",
",",
"data_row",
",",
"num_samples",
",",
"sampling_method",
")",
":",
"is_sparse",
"=",
"sp",
".",
"sparse",
".",
"issparse",
"(",
"data_row",
")",
"if",
"is_sparse",
":",
"num_cols",
"=",
"data_row",
".",
"shape",
"[",
"1",
"]",
"data",
"=",
"sp",
".",
"sparse",
".",
"csr_matrix",
"(",
"(",
"num_samples",
",",
"num_cols",
")",
",",
"dtype",
"=",
"data_row",
".",
"dtype",
")",
"else",
":",
"num_cols",
"=",
"data_row",
".",
"shape",
"[",
"0",
"]",
"data",
"=",
"np",
".",
"zeros",
"(",
"(",
"num_samples",
",",
"num_cols",
")",
")",
"categorical_features",
"=",
"range",
"(",
"num_cols",
")",
"if",
"self",
".",
"discretizer",
"is",
"None",
":",
"instance_sample",
"=",
"data_row",
"scale",
"=",
"self",
".",
"scaler",
".",
"scale_",
"mean",
"=",
"self",
".",
"scaler",
".",
"mean_",
"if",
"is_sparse",
":",
"# Perturb only the non-zero values",
"non_zero_indexes",
"=",
"data_row",
".",
"nonzero",
"(",
")",
"[",
"1",
"]",
"num_cols",
"=",
"len",
"(",
"non_zero_indexes",
")",
"instance_sample",
"=",
"data_row",
"[",
":",
",",
"non_zero_indexes",
"]",
"scale",
"=",
"scale",
"[",
"non_zero_indexes",
"]",
"mean",
"=",
"mean",
"[",
"non_zero_indexes",
"]",
"if",
"sampling_method",
"==",
"'gaussian'",
":",
"data",
"=",
"self",
".",
"random_state",
".",
"normal",
"(",
"0",
",",
"1",
",",
"num_samples",
"*",
"num_cols",
")",
".",
"reshape",
"(",
"num_samples",
",",
"num_cols",
")",
"data",
"=",
"np",
".",
"array",
"(",
"data",
")",
"elif",
"sampling_method",
"==",
"'lhs'",
":",
"data",
"=",
"lhs",
"(",
"num_cols",
",",
"samples",
"=",
"num_samples",
")",
".",
"reshape",
"(",
"num_samples",
",",
"num_cols",
")",
"means",
"=",
"np",
".",
"zeros",
"(",
"num_cols",
")",
"stdvs",
"=",
"np",
".",
"array",
"(",
"[",
"1",
"]",
"*",
"num_cols",
")",
"for",
"i",
"in",
"range",
"(",
"num_cols",
")",
":",
"data",
"[",
":",
",",
"i",
"]",
"=",
"norm",
"(",
"loc",
"=",
"means",
"[",
"i",
"]",
",",
"scale",
"=",
"stdvs",
"[",
"i",
"]",
")",
".",
"ppf",
"(",
"data",
"[",
":",
",",
"i",
"]",
")",
"data",
"=",
"np",
".",
"array",
"(",
"data",
")",
"else",
":",
"warnings",
".",
"warn",
"(",
"'''Invalid input for sampling_method.\n Defaulting to Gaussian sampling.'''",
",",
"UserWarning",
")",
"data",
"=",
"self",
".",
"random_state",
".",
"normal",
"(",
"0",
",",
"1",
",",
"num_samples",
"*",
"num_cols",
")",
".",
"reshape",
"(",
"num_samples",
",",
"num_cols",
")",
"data",
"=",
"np",
".",
"array",
"(",
"data",
")",
"if",
"self",
".",
"sample_around_instance",
":",
"data",
"=",
"data",
"*",
"scale",
"+",
"instance_sample",
"else",
":",
"data",
"=",
"data",
"*",
"scale",
"+",
"mean",
"if",
"is_sparse",
":",
"if",
"num_cols",
"==",
"0",
":",
"data",
"=",
"sp",
".",
"sparse",
".",
"csr_matrix",
"(",
"(",
"num_samples",
",",
"data_row",
".",
"shape",
"[",
"1",
"]",
")",
",",
"dtype",
"=",
"data_row",
".",
"dtype",
")",
"else",
":",
"indexes",
"=",
"np",
".",
"tile",
"(",
"non_zero_indexes",
",",
"num_samples",
")",
"indptr",
"=",
"np",
".",
"array",
"(",
"range",
"(",
"0",
",",
"len",
"(",
"non_zero_indexes",
")",
"*",
"(",
"num_samples",
"+",
"1",
")",
",",
"len",
"(",
"non_zero_indexes",
")",
")",
")",
"data_1d_shape",
"=",
"data",
".",
"shape",
"[",
"0",
"]",
"*",
"data",
".",
"shape",
"[",
"1",
"]",
"data_1d",
"=",
"data",
".",
"reshape",
"(",
"data_1d_shape",
")",
"data",
"=",
"sp",
".",
"sparse",
".",
"csr_matrix",
"(",
"(",
"data_1d",
",",
"indexes",
",",
"indptr",
")",
",",
"shape",
"=",
"(",
"num_samples",
",",
"data_row",
".",
"shape",
"[",
"1",
"]",
")",
")",
"categorical_features",
"=",
"self",
".",
"categorical_features",
"first_row",
"=",
"data_row",
"else",
":",
"first_row",
"=",
"self",
".",
"discretizer",
".",
"discretize",
"(",
"data_row",
")",
"data",
"[",
"0",
"]",
"=",
"data_row",
".",
"copy",
"(",
")",
"inverse",
"=",
"data",
".",
"copy",
"(",
")",
"for",
"column",
"in",
"categorical_features",
":",
"values",
"=",
"self",
".",
"feature_values",
"[",
"column",
"]",
"freqs",
"=",
"self",
".",
"feature_frequencies",
"[",
"column",
"]",
"inverse_column",
"=",
"self",
".",
"random_state",
".",
"choice",
"(",
"values",
",",
"size",
"=",
"num_samples",
",",
"replace",
"=",
"True",
",",
"p",
"=",
"freqs",
")",
"binary_column",
"=",
"(",
"inverse_column",
"==",
"first_row",
"[",
"column",
"]",
")",
".",
"astype",
"(",
"int",
")",
"binary_column",
"[",
"0",
"]",
"=",
"1",
"inverse_column",
"[",
"0",
"]",
"=",
"data",
"[",
"0",
",",
"column",
"]",
"data",
"[",
":",
",",
"column",
"]",
"=",
"binary_column",
"inverse",
"[",
":",
",",
"column",
"]",
"=",
"inverse_column",
"if",
"self",
".",
"discretizer",
"is",
"not",
"None",
":",
"inverse",
"[",
"1",
":",
"]",
"=",
"self",
".",
"discretizer",
".",
"undiscretize",
"(",
"inverse",
"[",
"1",
":",
"]",
")",
"inverse",
"[",
"0",
"]",
"=",
"data_row",
"return",
"data",
",",
"inverse"
] | https://github.com/marcotcr/lime/blob/fd7eb2e6f760619c29fca0187c07b82157601b32/lime/lime_tabular.py#L475-L578 |
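
A worked sketch of the numeric branch above: draw standard-normal noise, then undo the scaler's normalization with `noise * scale_ + mean_` so the neighborhood lands in the original feature units. Only that one step is reproduced here, with made-up training statistics:

import numpy as np

rng = np.random.RandomState(0)
mean_ = np.array([10.0, 0.5])   # illustrative per-feature means
scale_ = np.array([2.0, 0.1])   # illustrative per-feature stds

noise = rng.normal(0, 1, size=(5, 2))  # N(0, 1) neighborhood samples
samples = noise * scale_ + mean_       # back to original units
print(samples.round(2))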
|
mozilla/spidernode | aafa9e5273f954f272bb4382fc007af14674b4c2 | deps/spidershim/spidermonkey/python/mozbuild/mozbuild/util.py | python | expand_variables | (s, variables) | return result | Given a string with $(var) variable references, replace those references
with the corresponding entries from the given `variables` dict.
If a variable value is not a string, it is iterated and its items are
joined with a whitespace. | Given a string with $(var) variable references, replace those references
with the corresponding entries from the given `variables` dict. | [
"Given",
"a",
"string",
"with",
"$",
"(",
"var",
")",
"variable",
"references",
"replace",
"those",
"references",
"with",
"the",
"corresponding",
"entries",
"from",
"the",
"given",
"variables",
"dict",
"."
] | def expand_variables(s, variables):
'''Given a string with $(var) variable references, replace those references
with the corresponding entries from the given `variables` dict.
If a variable value is not a string, it is iterated and its items are
joined with a whitespace.'''
result = ''
for s, name in pair(VARIABLES_RE.split(s)):
result += s
value = variables.get(name)
if not value:
continue
if not isinstance(value, types.StringTypes):
value = ' '.join(value)
result += value
return result | [
"def",
"expand_variables",
"(",
"s",
",",
"variables",
")",
":",
"result",
"=",
"''",
"for",
"s",
",",
"name",
"in",
"pair",
"(",
"VARIABLES_RE",
".",
"split",
"(",
"s",
")",
")",
":",
"result",
"+=",
"s",
"value",
"=",
"variables",
".",
"get",
"(",
"name",
")",
"if",
"not",
"value",
":",
"continue",
"if",
"not",
"isinstance",
"(",
"value",
",",
"types",
".",
"StringTypes",
")",
":",
"value",
"=",
"' '",
".",
"join",
"(",
"value",
")",
"result",
"+=",
"value",
"return",
"result"
] | https://github.com/mozilla/spidernode/blob/aafa9e5273f954f272bb4382fc007af14674b4c2/deps/spidershim/spidermonkey/python/mozbuild/mozbuild/util.py#L1113-L1128 |
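
A runnable sketch of the expansion above. The real helper splits on the module-level `VARIABLES_RE` and walks the pieces with `pair`; this version gets the same result with `re.sub` so it is self-contained (the regex is an assumption matching `$(name)` references):

import re

VARIABLES_RE = re.compile(r'\$\((\w+)\)')

def expand_variables(s, variables):
    def replace(match):
        value = variables.get(match.group(1))
        if not value:
            return ''
        if not isinstance(value, str):
            value = ' '.join(value)   # join iterables with a whitespace
        return value
    return VARIABLES_RE.sub(replace, s)

print(expand_variables('-I$(includes) $(missing)', {'includes': ['a', 'b']}))
# -> '-Ia b '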
|
xl7dev/BurpSuite | d1d4bd4981a87f2f4c0c9744ad7c476336c813da | Extender/burp-protobuf-decoder/Lib/google/protobuf/internal/encoder.py | python | _ModifiedEncoder | (wire_type, encode_value, compute_value_size, modify_value) | return SpecificEncoder | Like SimpleEncoder but additionally invokes modify_value on every value
before passing it to encode_value. Usually modify_value is ZigZagEncode. | Like SimpleEncoder but additionally invokes modify_value on every value
before passing it to encode_value. Usually modify_value is ZigZagEncode. | [
"Like",
"SimpleEncoder",
"but",
"additionally",
"invokes",
"modify_value",
"on",
"every",
"value",
"before",
"passing",
"it",
"to",
"encode_value",
".",
"Usually",
"modify_value",
"is",
"ZigZagEncode",
"."
] | def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
"""Like SimpleEncoder but additionally invokes modify_value on every value
before passing it to encode_value. Usually modify_value is ZigZagEncode."""
def SpecificEncoder(field_number, is_repeated, is_packed):
if is_packed:
tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
local_EncodeVarint = _EncodeVarint
def EncodePackedField(write, value):
write(tag_bytes)
size = 0
for element in value:
size += compute_value_size(modify_value(element))
local_EncodeVarint(write, size)
for element in value:
encode_value(write, modify_value(element))
return EncodePackedField
elif is_repeated:
tag_bytes = TagBytes(field_number, wire_type)
def EncodeRepeatedField(write, value):
for element in value:
write(tag_bytes)
encode_value(write, modify_value(element))
return EncodeRepeatedField
else:
tag_bytes = TagBytes(field_number, wire_type)
def EncodeField(write, value):
write(tag_bytes)
return encode_value(write, modify_value(value))
return EncodeField
return SpecificEncoder | [
"def",
"_ModifiedEncoder",
"(",
"wire_type",
",",
"encode_value",
",",
"compute_value_size",
",",
"modify_value",
")",
":",
"def",
"SpecificEncoder",
"(",
"field_number",
",",
"is_repeated",
",",
"is_packed",
")",
":",
"if",
"is_packed",
":",
"tag_bytes",
"=",
"TagBytes",
"(",
"field_number",
",",
"wire_format",
".",
"WIRETYPE_LENGTH_DELIMITED",
")",
"local_EncodeVarint",
"=",
"_EncodeVarint",
"def",
"EncodePackedField",
"(",
"write",
",",
"value",
")",
":",
"write",
"(",
"tag_bytes",
")",
"size",
"=",
"0",
"for",
"element",
"in",
"value",
":",
"size",
"+=",
"compute_value_size",
"(",
"modify_value",
"(",
"element",
")",
")",
"local_EncodeVarint",
"(",
"write",
",",
"size",
")",
"for",
"element",
"in",
"value",
":",
"encode_value",
"(",
"write",
",",
"modify_value",
"(",
"element",
")",
")",
"return",
"EncodePackedField",
"elif",
"is_repeated",
":",
"tag_bytes",
"=",
"TagBytes",
"(",
"field_number",
",",
"wire_type",
")",
"def",
"EncodeRepeatedField",
"(",
"write",
",",
"value",
")",
":",
"for",
"element",
"in",
"value",
":",
"write",
"(",
"tag_bytes",
")",
"encode_value",
"(",
"write",
",",
"modify_value",
"(",
"element",
")",
")",
"return",
"EncodeRepeatedField",
"else",
":",
"tag_bytes",
"=",
"TagBytes",
"(",
"field_number",
",",
"wire_type",
")",
"def",
"EncodeField",
"(",
"write",
",",
"value",
")",
":",
"write",
"(",
"tag_bytes",
")",
"return",
"encode_value",
"(",
"write",
",",
"modify_value",
"(",
"value",
")",
")",
"return",
"EncodeField",
"return",
"SpecificEncoder"
] | https://github.com/xl7dev/BurpSuite/blob/d1d4bd4981a87f2f4c0c9744ad7c476336c813da/Extender/burp-protobuf-decoder/Lib/google/protobuf/internal/encoder.py#L439-L470 |
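
The factory above is typically instantiated with `modify_value=wire_format.ZigZagEncode` for sint32/sint64 fields, which maps signed integers to small unsigned values before varint encoding. A standalone sketch of ZigZag itself (64-bit form; works as-is on Python ints):

def zigzag_encode(n):
    # 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ... small magnitudes stay
    # small, so the varint that follows stays short.
    return (n << 1) ^ (n >> 63)

print([zigzag_encode(v) for v in (0, -1, 1, -2, 2)])  # [0, 1, 2, 3, 4]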
|
lambdamusic/Ontospy | 534e408372edd392590e12839c32a403430aac23 | ontospy/ontodocs/viz/viz_html_multi.py | python | KompleteViz._buildTemplates | (self) | return main_url | OVERRIDING THIS METHOD from Factory | OVERRIDING THIS METHOD from Factory | [
"OVERRIDING",
"THIS",
"METHOD",
"from",
"Factory"
] | def _buildTemplates(self):
"""
OVERRIDING THIS METHOD from Factory
"""
# INDEX - MAIN PAGE
contents = self._renderTemplate("html-multi/index.html", extraContext={"theme": self.theme, "index_page_flag" : True})
FILE_NAME = "index.html"
main_url = self._save2File(contents, FILE_NAME, self.output_path)
# DASHBOARD
contents = self._renderTemplate("html-multi/statistics.html", extraContext={"theme": self.theme})
FILE_NAME = "statistics.html"
self._save2File(contents, FILE_NAME, self.output_path)
browser_output_path = self.output_path
# ENTITIES A-Z
extra_context = {"ontograph": self.ontospy_graph, "theme": self.theme}
contents = self._renderTemplate("html-multi/browser/browser_entities_az.html", extraContext=extra_context)
FILE_NAME = "entities-az.html"
self._save2File(contents, FILE_NAME, browser_output_path)
if self.ontospy_graph.all_classes:
# CLASSES = ENTITIES TREE
extra_context = {"ontograph": self.ontospy_graph, "theme": self.theme,
"treetype" : "classes",
'treeTable' : formatHTML_EntityTreeTable(self.ontospy_graph.ontologyClassTree())}
contents = self._renderTemplate("html-multi/browser/browser_entities_tree.html", extraContext=extra_context)
FILE_NAME = "entities-tree-classes.html"
self._save2File(contents, FILE_NAME, browser_output_path)
# BROWSER PAGES - CLASSES ======
for entity in self.ontospy_graph.all_classes:
extra_context = {"main_entity": entity,
"main_entity_type": "class",
"theme": self.theme,
"ontograph": self.ontospy_graph
}
extra_context.update(self.highlight_code(entity))
contents = self._renderTemplate("html-multi/browser/browser_classinfo.html", extraContext=extra_context)
FILE_NAME = entity.slug + ".html"
self._save2File(contents, FILE_NAME, browser_output_path)
if self.ontospy_graph.all_properties:
# PROPERTIES = ENTITIES TREE
extra_context = {"ontograph": self.ontospy_graph, "theme": self.theme,
"treetype" : "properties",
'treeTable' : formatHTML_EntityTreeTable(self.ontospy_graph.ontologyPropTree())}
contents = self._renderTemplate("html-multi/browser/browser_entities_tree.html", extraContext=extra_context)
FILE_NAME = "entities-tree-properties.html"
self._save2File(contents, FILE_NAME, browser_output_path)
# BROWSER PAGES - PROPERTIES ======
for entity in self.ontospy_graph.all_properties:
extra_context = {"main_entity": entity,
"main_entity_type": "property",
"theme": self.theme,
"ontograph": self.ontospy_graph
}
extra_context.update(self.highlight_code(entity))
contents = self._renderTemplate("html-multi/browser/browser_propinfo.html", extraContext=extra_context)
FILE_NAME = entity.slug + ".html"
self._save2File(contents, FILE_NAME, browser_output_path)
if self.ontospy_graph.all_skos_concepts:
# CONCEPTS = ENTITIES TREE
extra_context = {"ontograph": self.ontospy_graph, "theme": self.theme,
"treetype" : "concepts",
'treeTable' : formatHTML_EntityTreeTable(self.ontospy_graph.ontologyConceptTree())}
contents = self._renderTemplate("html-multi/browser/browser_entities_tree.html", extraContext=extra_context)
FILE_NAME = "entities-tree-concepts.html"
self._save2File(contents, FILE_NAME, browser_output_path)
# BROWSER PAGES - CONCEPTS ======
for entity in self.ontospy_graph.all_skos_concepts:
extra_context = {"main_entity": entity,
"main_entity_type": "concept",
"theme": self.theme,
"ontograph": self.ontospy_graph
}
extra_context.update(self.highlight_code(entity))
contents = self._renderTemplate("html-multi/browser/browser_conceptinfo.html", extraContext=extra_context)
FILE_NAME = entity.slug + ".html"
self._save2File(contents, FILE_NAME, browser_output_path)
if self.ontospy_graph.all_shapes:
# SHAPES = ENTITIES TREE
extra_context = {"ontograph": self.ontospy_graph, "theme": self.theme,
"treetype" : "shapes", 'treeTable' : formatHTML_EntityTreeTable(self.ontospy_graph.ontologyShapeTree()) }
contents = self._renderTemplate("html-multi/browser/browser_entities_tree.html", extraContext=extra_context)
FILE_NAME = "entities-tree-shapes.html"
self._save2File(contents, FILE_NAME, browser_output_path)
# BROWSER PAGES - SHAPES ======
for entity in self.ontospy_graph.all_shapes:
extra_context = {"main_entity": entity,
"main_entity_type": "shape",
"theme": self.theme,
"ontograph": self.ontospy_graph
}
extra_context.update(self.highlight_code(entity))
contents = self._renderTemplate("html-multi/browser/browser_shapeinfo.html", extraContext=extra_context)
FILE_NAME = entity.slug + ".html"
self._save2File(contents, FILE_NAME, browser_output_path)
if self.ontospy_graph.all_individuals:
# INDIVIDUALS (FLAT) TREE
extra_context = {
"ontograph": self.ontospy_graph,
"theme": self.theme,
"treetype" : "individuals",
'treeTable' : formatHTML_EntityTreeTable(self.ontospy_graph.ontologyIndividualsTree())}
contents = self._renderTemplate("html-multi/browser/browser_entities_tree.html", extraContext=extra_context)
FILE_NAME = "entities-tree-individuals.html"
self._save2File(contents, FILE_NAME, browser_output_path)
# BROWSER PAGES - INDIVIDUALS ======
for entity in self.ontospy_graph.all_individuals:
extra_context = {"main_entity": entity,
"main_entity_type": "individual",
"theme": self.theme,
"ontograph": self.ontospy_graph
}
extra_context.update(self.highlight_code(entity))
contents = self._renderTemplate(
"html-multi/browser/browser_individualinfo.html",
extraContext=extra_context)
FILE_NAME = entity.slug + ".html"
self._save2File(contents, FILE_NAME, browser_output_path)
return main_url | [
"def",
"_buildTemplates",
"(",
"self",
")",
":",
"# INDEX - MAIN PAGE",
"contents",
"=",
"self",
".",
"_renderTemplate",
"(",
"\"html-multi/index.html\"",
",",
"extraContext",
"=",
"{",
"\"theme\"",
":",
"self",
".",
"theme",
",",
"\"index_page_flag\"",
":",
"True",
"}",
")",
"FILE_NAME",
"=",
"\"index.html\"",
"main_url",
"=",
"self",
".",
"_save2File",
"(",
"contents",
",",
"FILE_NAME",
",",
"self",
".",
"output_path",
")",
"# DASHBOARD",
"contents",
"=",
"self",
".",
"_renderTemplate",
"(",
"\"html-multi/statistics.html\"",
",",
"extraContext",
"=",
"{",
"\"theme\"",
":",
"self",
".",
"theme",
"}",
")",
"FILE_NAME",
"=",
"\"statistics.html\"",
"self",
".",
"_save2File",
"(",
"contents",
",",
"FILE_NAME",
",",
"self",
".",
"output_path",
")",
"browser_output_path",
"=",
"self",
".",
"output_path",
"# ENTITIES A-Z",
"extra_context",
"=",
"{",
"\"ontograph\"",
":",
"self",
".",
"ontospy_graph",
",",
"\"theme\"",
":",
"self",
".",
"theme",
"}",
"contents",
"=",
"self",
".",
"_renderTemplate",
"(",
"\"html-multi/browser/browser_entities_az.html\"",
",",
"extraContext",
"=",
"extra_context",
")",
"FILE_NAME",
"=",
"\"entities-az.html\"",
"self",
".",
"_save2File",
"(",
"contents",
",",
"FILE_NAME",
",",
"browser_output_path",
")",
"if",
"self",
".",
"ontospy_graph",
".",
"all_classes",
":",
"# CLASSES = ENTITIES TREE",
"extra_context",
"=",
"{",
"\"ontograph\"",
":",
"self",
".",
"ontospy_graph",
",",
"\"theme\"",
":",
"self",
".",
"theme",
",",
"\"treetype\"",
":",
"\"classes\"",
",",
"'treeTable'",
":",
"formatHTML_EntityTreeTable",
"(",
"self",
".",
"ontospy_graph",
".",
"ontologyClassTree",
"(",
")",
")",
"}",
"contents",
"=",
"self",
".",
"_renderTemplate",
"(",
"\"html-multi/browser/browser_entities_tree.html\"",
",",
"extraContext",
"=",
"extra_context",
")",
"FILE_NAME",
"=",
"\"entities-tree-classes.html\"",
"self",
".",
"_save2File",
"(",
"contents",
",",
"FILE_NAME",
",",
"browser_output_path",
")",
"# BROWSER PAGES - CLASSES ======",
"for",
"entity",
"in",
"self",
".",
"ontospy_graph",
".",
"all_classes",
":",
"extra_context",
"=",
"{",
"\"main_entity\"",
":",
"entity",
",",
"\"main_entity_type\"",
":",
"\"class\"",
",",
"\"theme\"",
":",
"self",
".",
"theme",
",",
"\"ontograph\"",
":",
"self",
".",
"ontospy_graph",
"}",
"extra_context",
".",
"update",
"(",
"self",
".",
"highlight_code",
"(",
"entity",
")",
")",
"contents",
"=",
"self",
".",
"_renderTemplate",
"(",
"\"html-multi/browser/browser_classinfo.html\"",
",",
"extraContext",
"=",
"extra_context",
")",
"FILE_NAME",
"=",
"entity",
".",
"slug",
"+",
"\".html\"",
"self",
".",
"_save2File",
"(",
"contents",
",",
"FILE_NAME",
",",
"browser_output_path",
")",
"if",
"self",
".",
"ontospy_graph",
".",
"all_properties",
":",
"# PROPERTIES = ENTITIES TREE",
"extra_context",
"=",
"{",
"\"ontograph\"",
":",
"self",
".",
"ontospy_graph",
",",
"\"theme\"",
":",
"self",
".",
"theme",
",",
"\"treetype\"",
":",
"\"properties\"",
",",
"'treeTable'",
":",
"formatHTML_EntityTreeTable",
"(",
"self",
".",
"ontospy_graph",
".",
"ontologyPropTree",
"(",
")",
")",
"}",
"contents",
"=",
"self",
".",
"_renderTemplate",
"(",
"\"html-multi/browser/browser_entities_tree.html\"",
",",
"extraContext",
"=",
"extra_context",
")",
"FILE_NAME",
"=",
"\"entities-tree-properties.html\"",
"self",
".",
"_save2File",
"(",
"contents",
",",
"FILE_NAME",
",",
"browser_output_path",
")",
"# BROWSER PAGES - PROPERTIES ======",
"for",
"entity",
"in",
"self",
".",
"ontospy_graph",
".",
"all_properties",
":",
"extra_context",
"=",
"{",
"\"main_entity\"",
":",
"entity",
",",
"\"main_entity_type\"",
":",
"\"property\"",
",",
"\"theme\"",
":",
"self",
".",
"theme",
",",
"\"ontograph\"",
":",
"self",
".",
"ontospy_graph",
"}",
"extra_context",
".",
"update",
"(",
"self",
".",
"highlight_code",
"(",
"entity",
")",
")",
"contents",
"=",
"self",
".",
"_renderTemplate",
"(",
"\"html-multi/browser/browser_propinfo.html\"",
",",
"extraContext",
"=",
"extra_context",
")",
"FILE_NAME",
"=",
"entity",
".",
"slug",
"+",
"\".html\"",
"self",
".",
"_save2File",
"(",
"contents",
",",
"FILE_NAME",
",",
"browser_output_path",
")",
"if",
"self",
".",
"ontospy_graph",
".",
"all_skos_concepts",
":",
"# CONCEPTS = ENTITIES TREE",
"extra_context",
"=",
"{",
"\"ontograph\"",
":",
"self",
".",
"ontospy_graph",
",",
"\"theme\"",
":",
"self",
".",
"theme",
",",
"\"treetype\"",
":",
"\"concepts\"",
",",
"'treeTable'",
":",
"formatHTML_EntityTreeTable",
"(",
"self",
".",
"ontospy_graph",
".",
"ontologyConceptTree",
"(",
")",
")",
"}",
"contents",
"=",
"self",
".",
"_renderTemplate",
"(",
"\"html-multi/browser/browser_entities_tree.html\"",
",",
"extraContext",
"=",
"extra_context",
")",
"FILE_NAME",
"=",
"\"entities-tree-concepts.html\"",
"self",
".",
"_save2File",
"(",
"contents",
",",
"FILE_NAME",
",",
"browser_output_path",
")",
"# BROWSER PAGES - CONCEPTS ======",
"for",
"entity",
"in",
"self",
".",
"ontospy_graph",
".",
"all_skos_concepts",
":",
"extra_context",
"=",
"{",
"\"main_entity\"",
":",
"entity",
",",
"\"main_entity_type\"",
":",
"\"concept\"",
",",
"\"theme\"",
":",
"self",
".",
"theme",
",",
"\"ontograph\"",
":",
"self",
".",
"ontospy_graph",
"}",
"extra_context",
".",
"update",
"(",
"self",
".",
"highlight_code",
"(",
"entity",
")",
")",
"contents",
"=",
"self",
".",
"_renderTemplate",
"(",
"\"html-multi/browser/browser_conceptinfo.html\"",
",",
"extraContext",
"=",
"extra_context",
")",
"FILE_NAME",
"=",
"entity",
".",
"slug",
"+",
"\".html\"",
"self",
".",
"_save2File",
"(",
"contents",
",",
"FILE_NAME",
",",
"browser_output_path",
")",
"if",
"self",
".",
"ontospy_graph",
".",
"all_shapes",
":",
"# SHAPES = ENTITIES TREE",
"extra_context",
"=",
"{",
"\"ontograph\"",
":",
"self",
".",
"ontospy_graph",
",",
"\"theme\"",
":",
"self",
".",
"theme",
",",
"\"treetype\"",
":",
"\"shapes\"",
",",
"'treeTable'",
":",
"formatHTML_EntityTreeTable",
"(",
"self",
".",
"ontospy_graph",
".",
"ontologyShapeTree",
"(",
")",
")",
"}",
"contents",
"=",
"self",
".",
"_renderTemplate",
"(",
"\"html-multi/browser/browser_entities_tree.html\"",
",",
"extraContext",
"=",
"extra_context",
")",
"FILE_NAME",
"=",
"\"entities-tree-shapes.html\"",
"self",
".",
"_save2File",
"(",
"contents",
",",
"FILE_NAME",
",",
"browser_output_path",
")",
"# BROWSER PAGES - SHAPES ======",
"for",
"entity",
"in",
"self",
".",
"ontospy_graph",
".",
"all_shapes",
":",
"extra_context",
"=",
"{",
"\"main_entity\"",
":",
"entity",
",",
"\"main_entity_type\"",
":",
"\"shape\"",
",",
"\"theme\"",
":",
"self",
".",
"theme",
",",
"\"ontograph\"",
":",
"self",
".",
"ontospy_graph",
"}",
"extra_context",
".",
"update",
"(",
"self",
".",
"highlight_code",
"(",
"entity",
")",
")",
"contents",
"=",
"self",
".",
"_renderTemplate",
"(",
"\"html-multi/browser/browser_shapeinfo.html\"",
",",
"extraContext",
"=",
"extra_context",
")",
"FILE_NAME",
"=",
"entity",
".",
"slug",
"+",
"\".html\"",
"self",
".",
"_save2File",
"(",
"contents",
",",
"FILE_NAME",
",",
"browser_output_path",
")",
"if",
"self",
".",
"ontospy_graph",
".",
"all_individuals",
":",
"# INDIVIDUALS (FLAT) TREE",
"extra_context",
"=",
"{",
"\"ontograph\"",
":",
"self",
".",
"ontospy_graph",
",",
"\"theme\"",
":",
"self",
".",
"theme",
",",
"\"treetype\"",
":",
"\"individuals\"",
",",
"'treeTable'",
":",
"formatHTML_EntityTreeTable",
"(",
"self",
".",
"ontospy_graph",
".",
"ontologyIndividualsTree",
"(",
")",
")",
"}",
"contents",
"=",
"self",
".",
"_renderTemplate",
"(",
"\"html-multi/browser/browser_entities_tree.html\"",
",",
"extraContext",
"=",
"extra_context",
")",
"FILE_NAME",
"=",
"\"entities-tree-individuals.html\"",
"self",
".",
"_save2File",
"(",
"contents",
",",
"FILE_NAME",
",",
"browser_output_path",
")",
"# BROWSER PAGES - CLASSES ======",
"for",
"entity",
"in",
"self",
".",
"ontospy_graph",
".",
"all_individuals",
":",
"extra_context",
"=",
"{",
"\"main_entity\"",
":",
"entity",
",",
"\"main_entity_type\"",
":",
"\"individual\"",
",",
"\"theme\"",
":",
"self",
".",
"theme",
",",
"\"ontograph\"",
":",
"self",
".",
"ontospy_graph",
"}",
"extra_context",
".",
"update",
"(",
"self",
".",
"highlight_code",
"(",
"entity",
")",
")",
"contents",
"=",
"self",
".",
"_renderTemplate",
"(",
"\"html-multi/browser/browser_individualinfo.html\"",
",",
"extraContext",
"=",
"extra_context",
")",
"FILE_NAME",
"=",
"entity",
".",
"slug",
"+",
"\".html\"",
"self",
".",
"_save2File",
"(",
"contents",
",",
"FILE_NAME",
",",
"browser_output_path",
")",
"return",
"main_url"
] | https://github.com/lambdamusic/Ontospy/blob/534e408372edd392590e12839c32a403430aac23/ontospy/ontodocs/viz/viz_html_multi.py#L40-L187 |
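
Every branch of the method above repeats one shape: build an entity-specific context, render a template, write the result to `<slug>.html`. A condensed sketch of that per-entity loop (the render and save callables stand in for the class's own helpers):

def build_entity_pages(entities, entity_type, template, render, save, theme, graph):
    # One output page per entity, named after its slug, exactly as
    # each for-loop in the record above does.
    for entity in entities:
        context = {'main_entity': entity,
                   'main_entity_type': entity_type,
                   'theme': theme,
                   'ontograph': graph}
        save(render(template, context), entity.slug + '.html')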
|
atom-community/ide-python | c046f9c2421713b34baa22648235541c5bb284fe | lib/debugger/VendorLib/vs-py-debugger/pythonFiles/jedi/api/environment.py | python | create_environment | (path, safe=True) | return Environment(path, _get_executable_path(path, safe=safe)) | Make it possible to create an environment by hand.
:raises: :exc:`.InvalidPythonEnvironment`
:returns: :class:`Environment` | Make it possible to create an environment by hand. | [
"Make",
"it",
"possible",
"to",
"create",
"an",
"environment",
"by",
"hand",
"."
] | def create_environment(path, safe=True):
"""
Make it possible to create an environment by hand.
:raises: :exc:`.InvalidPythonEnvironment`
:returns: :class:`Environment`
"""
return Environment(path, _get_executable_path(path, safe=safe)) | [
"def",
"create_environment",
"(",
"path",
",",
"safe",
"=",
"True",
")",
":",
"return",
"Environment",
"(",
"path",
",",
"_get_executable_path",
"(",
"path",
",",
"safe",
"=",
"safe",
")",
")"
] | https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/lib/debugger/VendorLib/vs-py-debugger/pythonFiles/jedi/api/environment.py#L288-L295 |
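
A hedged usage sketch for the record above; the interpreter path is illustrative, and `InvalidPythonEnvironment` (exported from the same module) is raised when the path is not a usable Python:

from jedi.api.environment import InvalidPythonEnvironment, create_environment

try:
    env = create_environment('/usr/bin/python3', safe=True)
    print(env.executable)
except InvalidPythonEnvironment as exc:
    print('not a usable interpreter:', exc)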
|
nodejs/quic | 5baab3f3a05548d3b51bea98868412b08766e34d | tools/gyp/pylib/gyp/generator/cmake.py | python | SetFilesProperty | (output, variable, property_name, values, sep) | Given a set of source files, sets the given property on them. | Given a set of source files, sets the given property on them. | [
"Given",
"a",
"set",
"of",
"source",
"files",
"sets",
"the",
"given",
"property",
"on",
"them",
"."
] | def SetFilesProperty(output, variable, property_name, values, sep):
"""Given a set of source files, sets the given property on them."""
output.write('set_source_files_properties(')
WriteVariable(output, variable)
output.write(' PROPERTIES ')
output.write(property_name)
output.write(' "')
for value in values:
output.write(CMakeStringEscape(value))
output.write(sep)
output.write('")\n') | [
"def",
"SetFilesProperty",
"(",
"output",
",",
"variable",
",",
"property_name",
",",
"values",
",",
"sep",
")",
":",
"output",
".",
"write",
"(",
"'set_source_files_properties('",
")",
"WriteVariable",
"(",
"output",
",",
"variable",
")",
"output",
".",
"write",
"(",
"' PROPERTIES '",
")",
"output",
".",
"write",
"(",
"property_name",
")",
"output",
".",
"write",
"(",
"' \"'",
")",
"for",
"value",
"in",
"values",
":",
"output",
".",
"write",
"(",
"CMakeStringEscape",
"(",
"value",
")",
")",
"output",
".",
"write",
"(",
"sep",
")",
"output",
".",
"write",
"(",
"'\")\\n'",
")"
] | https://github.com/nodejs/quic/blob/5baab3f3a05548d3b51bea98868412b08766e34d/tools/gyp/pylib/gyp/generator/cmake.py#L156-L166 |
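
A sketch of the line the writer above emits. To keep it self-contained, `WriteVariable` is replaced with a stub that writes a `${var}` reference and `CMakeStringEscape` is skipped (both are assumptions about the surrounding module):

import io

def write_variable(output, variable):
    output.write('${' + variable + '}')

def set_files_property(output, variable, property_name, values, sep):
    output.write('set_source_files_properties(')
    write_variable(output, variable)
    output.write(' PROPERTIES ' + property_name + ' "')
    for value in values:
        output.write(value + sep)   # the real code escapes each value first
    output.write('")\n')

buf = io.StringIO()
set_files_property(buf, 'srcs', 'COMPILE_FLAGS', ['-Wall', '-Wextra'], ' ')
print(buf.getvalue(), end='')
# set_source_files_properties(${srcs} PROPERTIES COMPILE_FLAGS "-Wall -Wextra ")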
||
frenck/home-assistant-config | 91fb77e527bc470b557b6156fd1d60515e0b0be9 | custom_components/hacs/repositories/integration.py | python | HacsIntegrationRepository.__init__ | (self, hacs: HacsBase, full_name: str) | Initialize. | Initialize. | [
"Initialize",
"."
] | def __init__(self, hacs: HacsBase, full_name: str):
"""Initialize."""
super().__init__(hacs=hacs)
self.data.full_name = full_name
self.data.full_name_lower = full_name.lower()
self.data.category = HacsCategory.INTEGRATION
self.content.path.remote = "custom_components"
self.content.path.local = self.localpath | [
"def",
"__init__",
"(",
"self",
",",
"hacs",
":",
"HacsBase",
",",
"full_name",
":",
"str",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"hacs",
"=",
"hacs",
")",
"self",
".",
"data",
".",
"full_name",
"=",
"full_name",
"self",
".",
"data",
".",
"full_name_lower",
"=",
"full_name",
".",
"lower",
"(",
")",
"self",
".",
"data",
".",
"category",
"=",
"HacsCategory",
".",
"INTEGRATION",
"self",
".",
"content",
".",
"path",
".",
"remote",
"=",
"\"custom_components\"",
"self",
".",
"content",
".",
"path",
".",
"local",
"=",
"self",
".",
"localpath"
] | https://github.com/frenck/home-assistant-config/blob/91fb77e527bc470b557b6156fd1d60515e0b0be9/custom_components/hacs/repositories/integration.py#L24-L31 |
||
mozilla/spidernode | aafa9e5273f954f272bb4382fc007af14674b4c2 | deps/npm/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py | python | Writer._line | (self, text, indent=0) | Write 'text' word-wrapped at self.width characters. | Write 'text' word-wrapped at self.width characters. | [
"Write",
"text",
"word",
"-",
"wrapped",
"at",
"self",
".",
"width",
"characters",
"."
] | def _line(self, text, indent=0):
"""Write 'text' word-wrapped at self.width characters."""
leading_space = ' ' * indent
while len(leading_space) + len(text) > self.width:
# The text is too wide; wrap if possible.
# Find the rightmost space that would obey our width constraint and
# that's not an escaped space.
available_space = self.width - len(leading_space) - len(' $')
space = available_space
while True:
space = text.rfind(' ', 0, space)
if space < 0 or \
self._count_dollars_before_index(text, space) % 2 == 0:
break
if space < 0:
# No such space; just use the first unescaped space we can find.
space = available_space - 1
while True:
space = text.find(' ', space + 1)
if space < 0 or \
self._count_dollars_before_index(text, space) % 2 == 0:
break
if space < 0:
# Give up on breaking.
break
self.output.write(leading_space + text[0:space] + ' $\n')
text = text[space+1:]
# Subsequent lines are continuations, so indent them.
leading_space = ' ' * (indent+2)
self.output.write(leading_space + text + '\n') | [
"def",
"_line",
"(",
"self",
",",
"text",
",",
"indent",
"=",
"0",
")",
":",
"leading_space",
"=",
"' '",
"*",
"indent",
"while",
"len",
"(",
"leading_space",
")",
"+",
"len",
"(",
"text",
")",
">",
"self",
".",
"width",
":",
"# The text is too wide; wrap if possible.",
"# Find the rightmost space that would obey our width constraint and",
"# that's not an escaped space.",
"available_space",
"=",
"self",
".",
"width",
"-",
"len",
"(",
"leading_space",
")",
"-",
"len",
"(",
"' $'",
")",
"space",
"=",
"available_space",
"while",
"True",
":",
"space",
"=",
"text",
".",
"rfind",
"(",
"' '",
",",
"0",
",",
"space",
")",
"if",
"space",
"<",
"0",
"or",
"self",
".",
"_count_dollars_before_index",
"(",
"text",
",",
"space",
")",
"%",
"2",
"==",
"0",
":",
"break",
"if",
"space",
"<",
"0",
":",
"# No such space; just use the first unescaped space we can find.",
"space",
"=",
"available_space",
"-",
"1",
"while",
"True",
":",
"space",
"=",
"text",
".",
"find",
"(",
"' '",
",",
"space",
"+",
"1",
")",
"if",
"space",
"<",
"0",
"or",
"self",
".",
"_count_dollars_before_index",
"(",
"text",
",",
"space",
")",
"%",
"2",
"==",
"0",
":",
"break",
"if",
"space",
"<",
"0",
":",
"# Give up on breaking.",
"break",
"self",
".",
"output",
".",
"write",
"(",
"leading_space",
"+",
"text",
"[",
"0",
":",
"space",
"]",
"+",
"' $\\n'",
")",
"text",
"=",
"text",
"[",
"space",
"+",
"1",
":",
"]",
"# Subsequent lines are continuations, so indent them.",
"leading_space",
"=",
"' '",
"*",
"(",
"indent",
"+",
"2",
")",
"self",
".",
"output",
".",
"write",
"(",
"leading_space",
"+",
"text",
"+",
"'\\n'",
")"
] | https://github.com/mozilla/spidernode/blob/aafa9e5273f954f272bb4382fc007af14674b4c2/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py#L111-L145 |
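
The wrap loop above refuses to break on an escaped space, i.e. one preceded by an odd number of `$` characters (`$ ` escapes a space in Ninja syntax). A standalone sketch of that parity test, which the record implements as `_count_dollars_before_index`:

def is_breakable_space(text, index):
    # Count the run of '$' immediately before the space; an odd
    # count means the space itself is escaped and must not break.
    dollars = 0
    i = index - 1
    while i >= 0 and text[i] == '$':
        dollars += 1
        i -= 1
    return dollars % 2 == 0

print(is_breakable_space('build a$ b', 8))  # False: the space is escaped
print(is_breakable_space('build a b', 5))   # True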
||
mozilla/spidernode | aafa9e5273f954f272bb4382fc007af14674b4c2 | tools/gyp/pylib/gyp/msvs_emulation.py | python | PrecompiledHeader._PchHeader | (self) | return os.path.split(self.settings.msvs_precompiled_header[self.config])[1] | Get the header that will appear in an #include line for all source
files. | Get the header that will appear in an #include line for all source
files. | [
"Get",
"the",
"header",
"that",
"will",
"appear",
"in",
"an",
"#include",
"line",
"for",
"all",
"source",
"files",
"."
] | def _PchHeader(self):
"""Get the header that will appear in an #include line for all source
files."""
return os.path.split(self.settings.msvs_precompiled_header[self.config])[1] | [
"def",
"_PchHeader",
"(",
"self",
")",
":",
"return",
"os",
".",
"path",
".",
"split",
"(",
"self",
".",
"settings",
".",
"msvs_precompiled_header",
"[",
"self",
".",
"config",
"]",
")",
"[",
"1",
"]"
] | https://github.com/mozilla/spidernode/blob/aafa9e5273f954f272bb4382fc007af14674b4c2/tools/gyp/pylib/gyp/msvs_emulation.py#L888-L891 |
|
crits/crits | 6b357daa5c3060cf622d3a3b0c7b41a9ca69c049 | crits/exploits/views.py | python | edit_exploit_name | (request, id_) | Set exploit name. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param id_: The ObjectId of the Exploit.
:type id_: str
:returns: :class:`django.http.HttpResponseRedirect` | Set exploit name. Should be an AJAX POST. | [
"Set",
"exploit",
"name",
".",
"Should",
"be",
"an",
"AJAX",
"POST",
"."
] | def edit_exploit_name(request, id_):
"""
Set exploit name. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param id_: The ObjectId of the Exploit.
:type id_: str
:returns: :class:`django.http.HttpResponseRedirect`
"""
if request.method == "POST" and request.is_ajax():
user = request.user.username
name = request.POST.get('name', None)
if not name:
return HttpResponse(json.dumps({'success': False,
'message': 'Not all info provided.'}),
content_type="application/json")
result = set_exploit_name(id_,
name,
user)
return HttpResponse(json.dumps(result),
content_type="application/json")
else:
error = "Expected AJAX POST"
return render(request, "error.html", {"error" : error }) | [
"def",
"edit_exploit_name",
"(",
"request",
",",
"id_",
")",
":",
"if",
"request",
".",
"method",
"==",
"\"POST\"",
"and",
"request",
".",
"is_ajax",
"(",
")",
":",
"user",
"=",
"request",
".",
"user",
".",
"username",
"name",
"=",
"request",
".",
"POST",
".",
"get",
"(",
"'name'",
",",
"None",
")",
"if",
"not",
"name",
":",
"return",
"HttpResponse",
"(",
"json",
".",
"dumps",
"(",
"{",
"'success'",
":",
"False",
",",
"'message'",
":",
"'Not all info provided.'",
"}",
")",
",",
"content_type",
"=",
"\"application/json\"",
")",
"result",
"=",
"set_exploit_name",
"(",
"id_",
",",
"name",
",",
"user",
")",
"return",
"HttpResponse",
"(",
"json",
".",
"dumps",
"(",
"result",
")",
",",
"content_type",
"=",
"\"application/json\"",
")",
"else",
":",
"error",
"=",
"\"Expected AJAX POST\"",
"return",
"render",
"(",
"request",
",",
"\"error.html\"",
",",
"{",
"\"error\"",
":",
"error",
"}",
")"
] | https://github.com/crits/crits/blob/6b357daa5c3060cf622d3a3b0c7b41a9ca69c049/crits/exploits/views.py#L144-L169 |
||
hotosm/tasking-manager | 1a7b02c6ccd431029a96d709d4d786c83cb37f5e | backend/services/validator_service.py | python | ValidatorService.get_user_invalidated_tasks | (
as_validator,
username: str,
preferred_locale: str,
closed=None,
project_id=None,
page=1,
page_size=10,
sort_by="updated_date",
sort_direction="desc",
) | return invalidated_tasks_dto | Get invalidated tasks either mapped or invalidated by the user | Get invalidated tasks either mapped or invalidated by the user | [
"Get",
"invalidated",
"tasks",
"either",
"mapped",
"or",
"invalidated",
"by",
"the",
"user"
] | def get_user_invalidated_tasks(
as_validator,
username: str,
preferred_locale: str,
closed=None,
project_id=None,
page=1,
page_size=10,
sort_by="updated_date",
sort_direction="desc",
) -> InvalidatedTasks:
""" Get invalidated tasks either mapped or invalidated by the user """
user = UserService.get_user_by_username(username)
query = (
TaskInvalidationHistory.query.filter_by(invalidator_id=user.id)
if as_validator
else TaskInvalidationHistory.query.filter_by(mapper_id=user.id)
)
if closed is not None:
query = query.filter_by(is_closed=closed)
if project_id is not None:
query = query.filter_by(project_id=project_id)
results = query.order_by(text(sort_by + " " + sort_direction)).paginate(
page, page_size, True
)
project_names = {}
invalidated_tasks_dto = InvalidatedTasks()
for entry in results.items:
dto = InvalidatedTask()
dto.task_id = entry.task_id
dto.project_id = entry.project_id
dto.history_id = entry.invalidation_history_id
dto.closed = entry.is_closed
dto.updated_date = entry.updated_date
if dto.project_id not in project_names:
project_names[dto.project_id] = ProjectInfo.get_dto_for_locale(
dto.project_id, preferred_locale
).name
dto.project_name = project_names[dto.project_id]
invalidated_tasks_dto.invalidated_tasks.append(dto)
invalidated_tasks_dto.pagination = Pagination(results)
return invalidated_tasks_dto | [
"def",
"get_user_invalidated_tasks",
"(",
"as_validator",
",",
"username",
":",
"str",
",",
"preferred_locale",
":",
"str",
",",
"closed",
"=",
"None",
",",
"project_id",
"=",
"None",
",",
"page",
"=",
"1",
",",
"page_size",
"=",
"10",
",",
"sort_by",
"=",
"\"updated_date\"",
",",
"sort_direction",
"=",
"\"desc\"",
",",
")",
"->",
"InvalidatedTasks",
":",
"user",
"=",
"UserService",
".",
"get_user_by_username",
"(",
"username",
")",
"query",
"=",
"(",
"TaskInvalidationHistory",
".",
"query",
".",
"filter_by",
"(",
"invalidator_id",
"=",
"user",
".",
"id",
")",
"if",
"as_validator",
"else",
"TaskInvalidationHistory",
".",
"query",
".",
"filter_by",
"(",
"mapper_id",
"=",
"user",
".",
"id",
")",
")",
"if",
"closed",
"is",
"not",
"None",
":",
"query",
"=",
"query",
".",
"filter_by",
"(",
"is_closed",
"=",
"closed",
")",
"if",
"project_id",
"is",
"not",
"None",
":",
"query",
"=",
"query",
".",
"filter_by",
"(",
"project_id",
"=",
"project_id",
")",
"results",
"=",
"query",
".",
"order_by",
"(",
"text",
"(",
"sort_by",
"+",
"\" \"",
"+",
"sort_direction",
")",
")",
".",
"paginate",
"(",
"page",
",",
"page_size",
",",
"True",
")",
"project_names",
"=",
"{",
"}",
"invalidated_tasks_dto",
"=",
"InvalidatedTasks",
"(",
")",
"for",
"entry",
"in",
"results",
".",
"items",
":",
"dto",
"=",
"InvalidatedTask",
"(",
")",
"dto",
".",
"task_id",
"=",
"entry",
".",
"task_id",
"dto",
".",
"project_id",
"=",
"entry",
".",
"project_id",
"dto",
".",
"history_id",
"=",
"entry",
".",
"invalidation_history_id",
"dto",
".",
"closed",
"=",
"entry",
".",
"is_closed",
"dto",
".",
"updated_date",
"=",
"entry",
".",
"updated_date",
"if",
"dto",
".",
"project_id",
"not",
"in",
"project_names",
":",
"project_names",
"[",
"dto",
".",
"project_id",
"]",
"=",
"ProjectInfo",
".",
"get_dto_for_locale",
"(",
"dto",
".",
"project_id",
",",
"preferred_locale",
")",
".",
"name",
"dto",
".",
"project_name",
"=",
"project_names",
"[",
"dto",
".",
"project_id",
"]",
"invalidated_tasks_dto",
".",
"invalidated_tasks",
".",
"append",
"(",
"dto",
")",
"invalidated_tasks_dto",
".",
"pagination",
"=",
"Pagination",
"(",
"results",
")",
"return",
"invalidated_tasks_dto"
] | https://github.com/hotosm/tasking-manager/blob/1a7b02c6ccd431029a96d709d4d786c83cb37f5e/backend/services/validator_service.py#L276-L323 |
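
The service above grows its query incrementally: each optional filter is applied only when supplied, then ordering and pagination are added at the end. A framework-neutral sketch of the same shape (SQLAlchemy-style names, purely illustrative). Note that the record interpolates `sort_by`/`sort_direction` into raw SQL via `text()`, which is safe only if those values are validated upstream:

def filtered_invalidations(base_query, closed=None, project_id=None,
                           sort_by='updated_date', sort_direction='desc',
                           page=1, page_size=10):
    query = base_query
    if closed is not None:               # tri-state: None means "no filter"
        query = query.filter_by(is_closed=closed)
    if project_id is not None:
        query = query.filter_by(project_id=project_id)
    return query.order_by(sort_by + ' ' + sort_direction).paginate(page, page_size, True)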
|
mozilla/ichnaea | 63a2bf1ba057c1b90931f6bf0f88c570c21aaf27 | ichnaea/api/exceptions.py | python | BaseAPIError.json_body | (self) | return {
"error": {
"errors": [
{
"domain": self.domain,
"reason": self.reason,
"message": self.message,
}
],
"code": self.code,
"message": self.message,
}
} | A JSON representation of this response. | A JSON representation of this response. | [
"A",
"JSON",
"representation",
"of",
"this",
"response",
"."
] | def json_body(self):
"""A JSON representation of this response."""
return {
"error": {
"errors": [
{
"domain": self.domain,
"reason": self.reason,
"message": self.message,
}
],
"code": self.code,
"message": self.message,
}
} | [
"def",
"json_body",
"(",
"self",
")",
":",
"return",
"{",
"\"error\"",
":",
"{",
"\"errors\"",
":",
"[",
"{",
"\"domain\"",
":",
"self",
".",
"domain",
",",
"\"reason\"",
":",
"self",
".",
"reason",
",",
"\"message\"",
":",
"self",
".",
"message",
",",
"}",
"]",
",",
"\"code\"",
":",
"self",
".",
"code",
",",
"\"message\"",
":",
"self",
".",
"message",
",",
"}",
"}"
] | https://github.com/mozilla/ichnaea/blob/63a2bf1ba057c1b90931f6bf0f88c570c21aaf27/ichnaea/api/exceptions.py#L71-L85 |
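
For a concrete picture of the payload the property above builds, here is the structure it yields for an illustrative 400-class error (the domain, reason, and message values are invented):

import json

body = {
    'error': {
        'errors': [{
            'domain': 'geolocation',
            'reason': 'parseError',
            'message': 'Parse Error',
        }],
        'code': 400,
        'message': 'Parse Error',
    }
}
print(json.dumps(body, indent=2))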
|
mapillary/OpenSfM | 9766a11e11544fc71fe689f33b34d0610cca2944 | opensfm/config.py | python | load_config | (filepath) | DEPRECATED: Load config from a config.yaml filepath | DEPRECATED: Load config from a config.yaml filepath | [
"DEPRECATED",
":",
"Load",
"config",
"from",
"a",
"config",
".",
"yaml",
"filepath"
] | def load_config(filepath):
"""DEPRECATED: Load config from a config.yaml filepath"""
if not os.path.isfile(filepath):
return default_config()
with open(filepath) as fin:
return load_config_from_fileobject(fin) | [
"def",
"load_config",
"(",
"filepath",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"filepath",
")",
":",
"return",
"default_config",
"(",
")",
"with",
"open",
"(",
"filepath",
")",
"as",
"fin",
":",
"return",
"load_config_from_fileobject",
"(",
"fin",
")"
] | https://github.com/mapillary/OpenSfM/blob/9766a11e11544fc71fe689f33b34d0610cca2944/opensfm/config.py#L197-L203 |
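
load_config is a thin fallback wrapper: defaults when the file is missing, parsed YAML otherwise. A hedged stand-alone sketch of that idea using PyYAML; the DEFAULTS dict and the update-merge rule are assumptions for illustration, not OpenSfM's actual default_config and load_config_from_fileobject internals:

    import os
    import yaml  # PyYAML, assumed available

    DEFAULTS = {"processes": 1, "feature_type": "SIFT"}  # hypothetical defaults

    def load_config_sketch(filepath):
        config = dict(DEFAULTS)
        if os.path.isfile(filepath):
            with open(filepath) as fin:
                config.update(yaml.safe_load(fin) or {})  # file values override defaults
        return config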
||
crits/crits | 6b357daa5c3060cf622d3a3b0c7b41a9ca69c049 | crits/core/crits_mongoengine.py | python | CritsBaseAttributes.edit_relationship_reason | (self, rel_item=None, rel_id=None, type_=None,
rel_type=None, rel_date=None, new_reason="N/A",
analyst=None) | return self._modify_relationship(rel_item=rel_item, rel_id=rel_id,
type_=type_, rel_type=rel_type,
rel_date=rel_date, new_reason=new_reason,
modification="reason", analyst=analyst) | Modify a relationship reason for a relationship to this top-level object.
If rel_item is provided it will be used, otherwise rel_id and type_ must
be provided.
:param rel_item: The top-level object to relate to.
:type rel_item: class which inherits from
:class:`crits.core.crits_mongoengine.CritsBaseAttributes`
:param rel_id: The ObjectId of the top-level object to relate to.
:type rel_id: str
:param type_: The type of top-level object to relate to.
:type type_: str
:param rel_type: The type of relationship.
:type rel_type: str
:param rel_date: The date this relationship applies.
:type rel_date: datetime.datetime
:param new_reason: The new reason for the relationship.
:type new_reason: str
:param analyst: The user editing this relationship.
:type analyst: str
:returns: dict with keys "success" (boolean) and "message" (str) | Modify a relationship reason for a relationship to this top-level object.
If rel_item is provided it will be used, otherwise rel_id and type_ must
be provided. | [
"Modify",
"a",
"relationship",
"type",
"for",
"a",
"relationship",
"to",
"this",
"top",
"-",
"level",
"object",
".",
"If",
"rel_item",
"is",
"provided",
"it",
"will",
"be",
"used",
"otherwise",
"rel_id",
"and",
"type_",
"must",
"be",
"provided",
"."
] | def edit_relationship_reason(self, rel_item=None, rel_id=None, type_=None,
rel_type=None, rel_date=None, new_reason="N/A",
analyst=None):
"""
Modify a relationship reason for a relationship to this top-level object.
If rel_item is provided it will be used, otherwise rel_id and type_ must
be provided.
:param rel_item: The top-level object to relate to.
:type rel_item: class which inherits from
:class:`crits.core.crits_mongoengine.CritsBaseAttributes`
:param rel_id: The ObjectId of the top-level object to relate to.
:type rel_id: str
:param type_: The type of top-level object to relate to.
:type type_: str
:param rel_type: The type of relationship.
:type rel_type: str
:param rel_date: The date this relationship applies.
:type rel_date: datetime.datetime
:param new_reason: The new reason for the relationship.
:type new_reason: str
:param analyst: The user editing this relationship.
:type analyst: str
:returns: dict with keys "success" (boolean) and "message" (str)
"""
return self._modify_relationship(rel_item=rel_item, rel_id=rel_id,
type_=type_, rel_type=rel_type,
rel_date=rel_date, new_reason=new_reason,
modification="reason", analyst=analyst) | [
"def",
"edit_relationship_reason",
"(",
"self",
",",
"rel_item",
"=",
"None",
",",
"rel_id",
"=",
"None",
",",
"type_",
"=",
"None",
",",
"rel_type",
"=",
"None",
",",
"rel_date",
"=",
"None",
",",
"new_reason",
"=",
"\"N/A\"",
",",
"analyst",
"=",
"None",
")",
":",
"return",
"self",
".",
"_modify_relationship",
"(",
"rel_item",
"=",
"rel_item",
",",
"rel_id",
"=",
"rel_id",
",",
"type_",
"=",
"type_",
",",
"rel_type",
"=",
"rel_type",
",",
"rel_date",
"=",
"rel_date",
",",
"new_reason",
"=",
"new_reason",
",",
"modification",
"=",
"\"reason\"",
",",
"analyst",
"=",
"analyst",
")"
] | https://github.com/crits/crits/blob/6b357daa5c3060cf622d3a3b0c7b41a9ca69c049/crits/core/crits_mongoengine.py#L2225-L2253 |
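
edit_relationship_reason is one of several thin wrappers that delegate to a shared _modify_relationship with a modification discriminator. A self-contained sketch of that dispatch pattern; the class and field names here are illustrative, not CRITs' real internals:

    class RelationshipHolder:
        def __init__(self):
            self.rels = [{"type": "Related_To", "reason": "N/A", "confidence": "unknown"}]

        def _modify_relationship(self, modification, value):
            # Shared worker: each public wrapper just names the field to change.
            for rel in self.rels:
                rel[modification] = value
            return {"success": True, "message": "updated %s" % modification}

        def edit_relationship_reason(self, new_reason="N/A"):
            return self._modify_relationship("reason", new_reason)

        def edit_relationship_confidence(self, new_confidence="low"):
            return self._modify_relationship("confidence", new_confidence)

    holder = RelationshipHolder()
    print(holder.edit_relationship_reason("observed in same campaign"))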
|
nodejs/node-chakracore | 770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43 | tools/jinja2/environment.py | python | Template.generate | (self, *args, **kwargs) | For very large templates it can be useful to not render the whole
template at once but evaluate each statement after another and yield
piece for piece. This method basically does exactly that and returns
a generator that yields one item after another as unicode strings.
It accepts the same arguments as :meth:`render`. | For very large templates it can be useful to not render the whole
template at once but evaluate each statement after another and yield
piece for piece. This method basically does exactly that and returns
a generator that yields one item after another as unicode strings. | [
"For",
"very",
"large",
"templates",
"it",
"can",
"be",
"useful",
"to",
"not",
"render",
"the",
"whole",
"template",
"at",
"once",
"but",
"evaluate",
"each",
"statement",
"after",
"another",
"and",
"yield",
"piece",
"for",
"piece",
".",
"This",
"method",
"basically",
"does",
"exactly",
"that",
"and",
"returns",
"a",
"generator",
"that",
"yields",
"one",
"item",
"after",
"another",
"as",
"unicode",
"strings",
"."
] | def generate(self, *args, **kwargs):
"""For very large templates it can be useful to not render the whole
template at once but evaluate each statement after another and yield
piece for piece. This method basically does exactly that and returns
a generator that yields one item after another as unicode strings.
It accepts the same arguments as :meth:`render`.
"""
vars = dict(*args, **kwargs)
try:
for event in self.root_render_func(self.new_context(vars)):
yield event
except Exception:
exc_info = sys.exc_info()
else:
return
yield self.environment.handle_exception(exc_info, True) | [
"def",
"generate",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"vars",
"=",
"dict",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"try",
":",
"for",
"event",
"in",
"self",
".",
"root_render_func",
"(",
"self",
".",
"new_context",
"(",
"vars",
")",
")",
":",
"yield",
"event",
"except",
"Exception",
":",
"exc_info",
"=",
"sys",
".",
"exc_info",
"(",
")",
"else",
":",
"return",
"yield",
"self",
".",
"environment",
".",
"handle_exception",
"(",
"exc_info",
",",
"True",
")"
] | https://github.com/nodejs/node-chakracore/blob/770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43/tools/jinja2/environment.py#L1029-L1045 |
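
Template.generate returns a lazy iterator of rendered fragments rather than one big string, which is what makes streaming of very large templates possible. A short usage sketch against Jinja2's public API:

    from jinja2 import Template

    tmpl = Template("{% for i in items %}row {{ i }}\n{% endfor %}")
    for chunk in tmpl.generate(items=range(3)):
        print(repr(chunk))  # unicode fragments, yielded piece by piece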
||
atom-community/ide-python | c046f9c2421713b34baa22648235541c5bb284fe | lib/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/exit_handlers.py | python | ExitHandlers.add_atexit_handler | (self, handle_atexit, nodupe=True) | Add an atexit handler to the list managed here. | Add an atexit handler to the list managed here. | [
"Add",
"an",
"atexit",
"handler",
"to",
"the",
"list",
"managed",
"here",
"."
] | def add_atexit_handler(self, handle_atexit, nodupe=True):
"""Add an atexit handler to the list managed here."""
if nodupe and handle_atexit in self._atexit_handlers:
raise ValueError('atexit handler already added')
self._atexit_handlers.append(handle_atexit) | [
"def",
"add_atexit_handler",
"(",
"self",
",",
"handle_atexit",
",",
"nodupe",
"=",
"True",
")",
":",
"if",
"nodupe",
"and",
"handle_atexit",
"in",
"self",
".",
"_atexit_handlers",
":",
"raise",
"ValueError",
"(",
"'atexit handler alraedy added'",
")",
"self",
".",
"_atexit_handlers",
".",
"append",
"(",
"handle_atexit",
")"
] | https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/lib/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/exit_handlers.py#L67-L71 |
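
The nodupe guard above raises instead of silently re-registering a handler. A minimal stand-alone version of the same guarded-append idea:

    class ExitHandlersSketch:
        def __init__(self):
            self._atexit_handlers = []

        def add_atexit_handler(self, handler, nodupe=True):
            if nodupe and handler in self._atexit_handlers:
                raise ValueError('atexit handler already added')
            self._atexit_handlers.append(handler)

    h = ExitHandlersSketch()
    h.add_atexit_handler(print)
    # h.add_atexit_handler(print)  # would raise ValueError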
||
mceSystems/node-jsc | 90634f3064fab8e89a85b3942f0cc5054acc86fa | deps/v8/third_party/markupsafe/_native.py | python | soft_unicode | (s) | return s | Make a string unicode if it isn't already. That way a markup
string is not converted back to unicode. | Make a string unicode if it isn't already. That way a markup
string is not converted back to unicode. | [
"Make",
"a",
"string",
"unicode",
"if",
"it",
"isn",
"t",
"already",
".",
"That",
"way",
"a",
"markup",
"string",
"is",
"not",
"converted",
"back",
"to",
"unicode",
"."
] | def soft_unicode(s):
"""Make a string unicode if it isn't already. That way a markup
string is not converted back to unicode.
"""
if not isinstance(s, text_type):
s = text_type(s)
return s | [
"def",
"soft_unicode",
"(",
"s",
")",
":",
"if",
"not",
"isinstance",
"(",
"s",
",",
"text_type",
")",
":",
"s",
"=",
"text_type",
"(",
"s",
")",
"return",
"s"
] | https://github.com/mceSystems/node-jsc/blob/90634f3064fab8e89a85b3942f0cc5054acc86fa/deps/v8/third_party/markupsafe/_native.py#L40-L46 |
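
soft_unicode coerces only when needed, so Markup (a str subclass) passes through untouched rather than being downgraded to plain text. On Python 3, text_type is str; a tiny self-contained sketch:

    class MarkupSketch(str):
        """Stand-in for markupsafe.Markup: a str subclass tagged as safe."""

    def soft_str(s):
        if not isinstance(s, str):
            s = str(s)
        return s  # MarkupSketch instances come back unchanged

    m = MarkupSketch("<b>safe</b>")
    assert soft_str(m) is m          # markup survives the coercion
    assert soft_str(42) == "42"      # non-strings are converted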
|
jasonsanjose/brackets-sass | 88b351f2ebc3aaa514494eac368d197f63438caf | node/2.0.3/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/MSVSSettings.py | python | FixVCMacroSlashes | (s) | return s | Replace macros which have excessive following slashes.
These macros are known to have a built-in trailing slash. Furthermore, many
scripts hiccup on processing paths with extra slashes in the middle.
This list is probably not exhaustive. Add as needed. | Replace macros which have excessive following slashes. | [
"Replace",
"macros",
"which",
"have",
"excessive",
"following",
"slashes",
"."
] | def FixVCMacroSlashes(s):
"""Replace macros which have excessive following slashes.
These macros are known to have a built-in trailing slash. Furthermore, many
scripts hiccup on processing paths with extra slashes in the middle.
This list is probably not exhaustive. Add as needed.
"""
if '$' in s:
s = fix_vc_macro_slashes_regex.sub(r'\1', s)
return s | [
"def",
"FixVCMacroSlashes",
"(",
"s",
")",
":",
"if",
"'$'",
"in",
"s",
":",
"s",
"=",
"fix_vc_macro_slashes_regex",
".",
"sub",
"(",
"r'\\1'",
",",
"s",
")",
"return",
"s"
] | https://github.com/jasonsanjose/brackets-sass/blob/88b351f2ebc3aaa514494eac368d197f63438caf/node/2.0.3/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/MSVSSettings.py#L370-L380 |
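
The actual fix_vc_macro_slashes_regex is defined elsewhere in MSVSSettings.py; the pattern below is a plausible stand-in (an assumption, not the module's real regex) that shows the effect of collapsing the slashes that follow a $(...) macro:

    import re

    # Assumed shape of the pattern; the real module builds it from a macro list.
    fix_slashes = re.compile(r'(\$\([^)]+\))(?:[\\/]+)')

    def fix_vc_macro_slashes_sketch(s):
        if '$' in s:
            s = fix_slashes.sub(r'\1', s)
        return s

    print(fix_vc_macro_slashes_sketch(r'$(OutDir)\\bin'))  # -> $(OutDir)bin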
|
nodejs/node | ac3c33c1646bf46104c15ae035982c06364da9b8 | tools/inspector_protocol/jinja2/compiler.py | python | CodeGenerator.macro_body | (self, node, frame) | return frame, macro_ref | Dump the function def of a macro or call block. | Dump the function def of a macro or call block. | [
"Dump",
"the",
"function",
"def",
"of",
"a",
"macro",
"or",
"call",
"block",
"."
] | def macro_body(self, node, frame):
"""Dump the function def of a macro or call block."""
frame = frame.inner()
frame.symbols.analyze_node(node)
macro_ref = MacroRef(node)
explicit_caller = None
skip_special_params = set()
args = []
for idx, arg in enumerate(node.args):
if arg.name == 'caller':
explicit_caller = idx
if arg.name in ('kwargs', 'varargs'):
skip_special_params.add(arg.name)
args.append(frame.symbols.ref(arg.name))
undeclared = find_undeclared(node.body, ('caller', 'kwargs', 'varargs'))
if 'caller' in undeclared:
# In older Jinja2 versions there was a bug that allowed caller
# to retain the special behavior even if it was mentioned in
# the argument list. However thankfully this was only really
# working if it was the last argument. So we are explicitly
# checking this now and error out if it is anywhere else in
# the argument list.
if explicit_caller is not None:
try:
node.defaults[explicit_caller - len(node.args)]
except IndexError:
self.fail('When defining macros or call blocks the '
'special "caller" argument must be omitted '
'or be given a default.', node.lineno)
else:
args.append(frame.symbols.declare_parameter('caller'))
macro_ref.accesses_caller = True
if 'kwargs' in undeclared and not 'kwargs' in skip_special_params:
args.append(frame.symbols.declare_parameter('kwargs'))
macro_ref.accesses_kwargs = True
if 'varargs' in undeclared and not 'varargs' in skip_special_params:
args.append(frame.symbols.declare_parameter('varargs'))
macro_ref.accesses_varargs = True
# macros are delayed, they never require output checks
frame.require_output_check = False
frame.symbols.analyze_node(node)
self.writeline('%s(%s):' % (self.func('macro'), ', '.join(args)), node)
self.indent()
self.buffer(frame)
self.enter_frame(frame)
self.push_parameter_definitions(frame)
for idx, arg in enumerate(node.args):
ref = frame.symbols.ref(arg.name)
self.writeline('if %s is missing:' % ref)
self.indent()
try:
default = node.defaults[idx - len(node.args)]
except IndexError:
self.writeline('%s = undefined(%r, name=%r)' % (
ref,
'parameter %r was not provided' % arg.name,
arg.name))
else:
self.writeline('%s = ' % ref)
self.visit(default, frame)
self.mark_parameter_stored(ref)
self.outdent()
self.pop_parameter_definitions()
self.blockvisit(node.body, frame)
self.return_buffer_contents(frame, force_unescaped=True)
self.leave_frame(frame, with_python_scope=True)
self.outdent()
return frame, macro_ref | [
"def",
"macro_body",
"(",
"self",
",",
"node",
",",
"frame",
")",
":",
"frame",
"=",
"frame",
".",
"inner",
"(",
")",
"frame",
".",
"symbols",
".",
"analyze_node",
"(",
"node",
")",
"macro_ref",
"=",
"MacroRef",
"(",
"node",
")",
"explicit_caller",
"=",
"None",
"skip_special_params",
"=",
"set",
"(",
")",
"args",
"=",
"[",
"]",
"for",
"idx",
",",
"arg",
"in",
"enumerate",
"(",
"node",
".",
"args",
")",
":",
"if",
"arg",
".",
"name",
"==",
"'caller'",
":",
"explicit_caller",
"=",
"idx",
"if",
"arg",
".",
"name",
"in",
"(",
"'kwargs'",
",",
"'varargs'",
")",
":",
"skip_special_params",
".",
"add",
"(",
"arg",
".",
"name",
")",
"args",
".",
"append",
"(",
"frame",
".",
"symbols",
".",
"ref",
"(",
"arg",
".",
"name",
")",
")",
"undeclared",
"=",
"find_undeclared",
"(",
"node",
".",
"body",
",",
"(",
"'caller'",
",",
"'kwargs'",
",",
"'varargs'",
")",
")",
"if",
"'caller'",
"in",
"undeclared",
":",
"# In older Jinja2 versions there was a bug that allowed caller",
"# to retain the special behavior even if it was mentioned in",
"# the argument list. However thankfully this was only really",
"# working if it was the last argument. So we are explicitly",
"# checking this now and error out if it is anywhere else in",
"# the argument list.",
"if",
"explicit_caller",
"is",
"not",
"None",
":",
"try",
":",
"node",
".",
"defaults",
"[",
"explicit_caller",
"-",
"len",
"(",
"node",
".",
"args",
")",
"]",
"except",
"IndexError",
":",
"self",
".",
"fail",
"(",
"'When defining macros or call blocks the '",
"'special \"caller\" argument must be omitted '",
"'or be given a default.'",
",",
"node",
".",
"lineno",
")",
"else",
":",
"args",
".",
"append",
"(",
"frame",
".",
"symbols",
".",
"declare_parameter",
"(",
"'caller'",
")",
")",
"macro_ref",
".",
"accesses_caller",
"=",
"True",
"if",
"'kwargs'",
"in",
"undeclared",
"and",
"not",
"'kwargs'",
"in",
"skip_special_params",
":",
"args",
".",
"append",
"(",
"frame",
".",
"symbols",
".",
"declare_parameter",
"(",
"'kwargs'",
")",
")",
"macro_ref",
".",
"accesses_kwargs",
"=",
"True",
"if",
"'varargs'",
"in",
"undeclared",
"and",
"not",
"'varargs'",
"in",
"skip_special_params",
":",
"args",
".",
"append",
"(",
"frame",
".",
"symbols",
".",
"declare_parameter",
"(",
"'varargs'",
")",
")",
"macro_ref",
".",
"accesses_varargs",
"=",
"True",
"# macros are delayed, they never require output checks",
"frame",
".",
"require_output_check",
"=",
"False",
"frame",
".",
"symbols",
".",
"analyze_node",
"(",
"node",
")",
"self",
".",
"writeline",
"(",
"'%s(%s):'",
"%",
"(",
"self",
".",
"func",
"(",
"'macro'",
")",
",",
"', '",
".",
"join",
"(",
"args",
")",
")",
",",
"node",
")",
"self",
".",
"indent",
"(",
")",
"self",
".",
"buffer",
"(",
"frame",
")",
"self",
".",
"enter_frame",
"(",
"frame",
")",
"self",
".",
"push_parameter_definitions",
"(",
"frame",
")",
"for",
"idx",
",",
"arg",
"in",
"enumerate",
"(",
"node",
".",
"args",
")",
":",
"ref",
"=",
"frame",
".",
"symbols",
".",
"ref",
"(",
"arg",
".",
"name",
")",
"self",
".",
"writeline",
"(",
"'if %s is missing:'",
"%",
"ref",
")",
"self",
".",
"indent",
"(",
")",
"try",
":",
"default",
"=",
"node",
".",
"defaults",
"[",
"idx",
"-",
"len",
"(",
"node",
".",
"args",
")",
"]",
"except",
"IndexError",
":",
"self",
".",
"writeline",
"(",
"'%s = undefined(%r, name=%r)'",
"%",
"(",
"ref",
",",
"'parameter %r was not provided'",
"%",
"arg",
".",
"name",
",",
"arg",
".",
"name",
")",
")",
"else",
":",
"self",
".",
"writeline",
"(",
"'%s = '",
"%",
"ref",
")",
"self",
".",
"visit",
"(",
"default",
",",
"frame",
")",
"self",
".",
"mark_parameter_stored",
"(",
"ref",
")",
"self",
".",
"outdent",
"(",
")",
"self",
".",
"pop_parameter_definitions",
"(",
")",
"self",
".",
"blockvisit",
"(",
"node",
".",
"body",
",",
"frame",
")",
"self",
".",
"return_buffer_contents",
"(",
"frame",
",",
"force_unescaped",
"=",
"True",
")",
"self",
".",
"leave_frame",
"(",
"frame",
",",
"with_python_scope",
"=",
"True",
")",
"self",
".",
"outdent",
"(",
")",
"return",
"frame",
",",
"macro_ref"
] | https://github.com/nodejs/node/blob/ac3c33c1646bf46104c15ae035982c06364da9b8/tools/inspector_protocol/jinja2/compiler.py#L505-L580 |
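
macro_body only compiles the special caller, kwargs, and varargs parameters when the macro body actually references them. From the template side, that machinery looks like this, using standard Jinja2:

    from jinja2 import Environment

    env = Environment()
    tmpl = env.from_string(
        "{% macro wrap() %}<div>{{ caller() }}</div>{% endmacro %}"
        "{% call wrap() %}hello{% endcall %}"
    )
    print(tmpl.render())  # <div>hello</div>; `caller` was auto-declared for wrap()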
|
riolet/SAM | 3e372dbf533cf9a08fd5674017a212af16e0224a | sam/models/security/ruling_process.py | python | RuleJob.__init__ | (self, subscription_id, datasource_id, start, end, ruleset) | For use with the RuleProcessor subprocess
:param subscription_id: the subscription this job applies to
:type subscription_id: int
:param datasource_id: the datasource in which to analyze traffic
:type datasource_id: int
:param start: the start of the analysis timerange
:type start: datetime
:param end: the end of the analysis timerange
:type end: datetime
:param ruleset: a list of rules to check traffic against
:type ruleset: list[ rule.Rule ] | For use with the RuleProcessor subprocess
:param subscription_id: the subscription this job applies to
:type subscription_id: int
:param datasource_id: the datasource in which to analyze traffic
:type datasource_id: int
:param start: the start of the analysis timerange
:type start: datetime
:param end: the end of the analysis timerange
:type end: datetime
:param ruleset: a list of rules to check traffic against
:type ruleset: list[ rule.Rule ] | [
"For",
"use",
"with",
"the",
"RuleProcessor",
"subprocess",
":",
"param",
"subscription_id",
":",
"the",
"subscription",
"this",
"job",
"applies",
"to",
":",
"type",
"subscription_id",
":",
"int",
":",
"param",
"datasource_id",
":",
"the",
"datasource",
"in",
"which",
"to",
"analyze",
"traffic",
":",
"type",
"datasource_id",
":",
"int",
":",
"param",
"start",
":",
"the",
"start",
"of",
"the",
"analysis",
"timerange",
":",
"type",
"start",
":",
"datetime",
":",
"param",
"end",
":",
"the",
"end",
"of",
"the",
"analysis",
"timerange",
":",
"type",
"end",
":",
"datetime",
":",
"param",
"ruleset",
":",
"a",
"list",
"of",
"rules",
"to",
"check",
"traffic",
"against",
":",
"type",
"ruleset",
":",
"list",
"[",
"rule",
".",
"Rule",
"]"
] | def __init__(self, subscription_id, datasource_id, start, end, ruleset):
"""
For use with the RuleProcessor subprocess
:param subscription_id: the subscription this job applies to
:type subscription_id: int
:param datasource_id: the datasource in which to analyze traffic
:type datasource_id: int
:param start: the start of the analysis timerange
:type start: datetime
:param end: the end of the analysis timerange
:type end: datetime
:param ruleset: a list of rules to check traffic against
:type ruleset: list[ rule.Rule ]
"""
self.sub_id = subscription_id
self.ds_id = datasource_id
self.time_start = start
self.time_end = end
self.rules = ruleset
self.id = 0 # id to check on this job
self.status = "Created." # Created, Queued, Running rule_n / num_rules, Complete
self.completion = 0 | [
"def",
"__init__",
"(",
"self",
",",
"subscription_id",
",",
"datasource_id",
",",
"start",
",",
"end",
",",
"ruleset",
")",
":",
"self",
".",
"sub_id",
"=",
"subscription_id",
"self",
".",
"ds_id",
"=",
"datasource_id",
"self",
".",
"time_start",
"=",
"start",
"self",
".",
"time_end",
"=",
"end",
"self",
".",
"rules",
"=",
"ruleset",
"self",
".",
"id",
"=",
"0",
"# id to check on this job",
"self",
".",
"status",
"=",
"\"Created.\"",
"# Created, Queued, Running rule_n / num_rules, Complete",
"self",
".",
"completion",
"=",
"0"
] | https://github.com/riolet/SAM/blob/3e372dbf533cf9a08fd5674017a212af16e0224a/sam/models/security/ruling_process.py#L35-L56 |
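
RuleJob is a plain value object handed to the worker subprocess. The stand-in class below mirrors its fields purely for illustration, to show what a caller supplies and which bookkeeping attributes start out defaulted:

    from datetime import datetime, timedelta

    class RuleJobSketch:
        """Stand-in mirroring the fields RuleJob carries to the worker."""
        def __init__(self, subscription_id, datasource_id, start, end, ruleset):
            self.sub_id = subscription_id
            self.ds_id = datasource_id
            self.time_start = start
            self.time_end = end
            self.rules = ruleset
            self.id = 0              # assigned later, when the job is queued
            self.status = "Created."
            self.completion = 0

    end = datetime.now()
    job = RuleJobSketch(1, 3, end - timedelta(hours=1), end, ruleset=[])
    print(job.status, job.completion)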
||
opentraveldata/geobases | e9ef3708155cb320684aa710a11d5a228a7d80c0 | GeoBases/GeoBaseModule.py | python | GeoBase.distance | (self, key0, key1) | return haversine(self.getLocation(key0), self.getLocation(key1)) | Compute distance between two elements.
This is just a wrapper between the original haversine
function, but it is probably one of the most used feature :)
:param key0: the first key
:param key1: the second key
:returns: the distance (km)
>>> geo_t.distance('frnic', 'frpaz')
683.526... | Compute distance between two elements. | [
"Compute",
"distance",
"between",
"two",
"elements",
"."
] | def distance(self, key0, key1):
"""Compute distance between two elements.
This is just a wrapper between the original haversine
function, but it is probably one of the most used feature :)
:param key0: the first key
:param key1: the second key
:returns: the distance (km)
>>> geo_t.distance('frnic', 'frpaz')
683.526...
"""
return haversine(self.getLocation(key0), self.getLocation(key1)) | [
"def",
"distance",
"(",
"self",
",",
"key0",
",",
"key1",
")",
":",
"return",
"haversine",
"(",
"self",
".",
"getLocation",
"(",
"key0",
")",
",",
"self",
".",
"getLocation",
"(",
"key1",
")",
")"
] | https://github.com/opentraveldata/geobases/blob/e9ef3708155cb320684aa710a11d5a228a7d80c0/GeoBases/GeoBaseModule.py#L1698-L1711 |
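
distance delegates to haversine over two (lat, lon) locations in degrees. A self-contained haversine sketch using the mean Earth radius of 6371 km; the station coordinates below are approximate, which is why the result only lands near the doctest's 683.5 km:

    from math import radians, sin, cos, asin, sqrt

    def haversine_km(p0, p1):
        lat0, lon0, lat1, lon1 = map(radians, (*p0, *p1))
        a = sin((lat1 - lat0) / 2) ** 2 + cos(lat0) * cos(lat1) * sin((lon1 - lon0) / 2) ** 2
        return 2 * 6371 * asin(sqrt(a))

    # Nice-Ville -> Paris-Austerlitz, near the 683.526 km quoted in the doctest
    print(round(haversine_km((43.7046, 7.2619), (48.8439, 2.3654)), 1))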
|
Southpaw-TACTIC/TACTIC | ba9b87aef0ee3b3ea51446f25b285ebbca06f62c | src/pyasm/prod/web/app_load_wdg.py | python | MayaNamespaceWdg.init | (self) | insert = IconButtonWdg('insert', icon=IconWdg.INSERT, long=True)
insert_txt = TextWdg('new_namespace')
remove = IconButtonWdg('remove', icon=IconWdg.DELETE, long=True )
remove_div = DivWdg(remove)
row_div.add(insert)
row_div.add(insert_txt)
row_div.add(HtmlElement.br(2))
row_div.add(remove_div) | insert = IconButtonWdg('insert', icon=IconWdg.INSERT, long=True)
insert_txt = TextWdg('new_namespace')
remove = IconButtonWdg('remove', icon=IconWdg.DELETE, long=True )
remove_div = DivWdg(remove)
row_div.add(insert)
row_div.add(insert_txt)
row_div.add(HtmlElement.br(2))
row_div.add(remove_div) | [
"insert",
"=",
"IconButtonWdg",
"(",
"insert",
"icon",
"=",
"IconWdg",
".",
"INSERT",
"long",
"=",
"True",
")",
"insert_txt",
"=",
"TextWdg",
"(",
"new_namespace",
")",
"remove",
"=",
"IconButtonWdg",
"(",
"remove",
"icon",
"=",
"IconWdg",
".",
"DELETE",
"long",
"=",
"True",
")",
"remove_div",
"=",
"DivWdg",
"(",
"remove",
")",
"row_div",
".",
"add",
"(",
"insert",
")",
"row_div",
".",
"add",
"(",
"insert_txt",
")",
"row_div",
".",
"add",
"(",
"HtmlElement",
".",
"br",
"(",
"2",
"))",
"row_div",
".",
"add",
"(",
"remove_div",
")"
] | def init(self):
intro = IntrospectWdg()
intro.add_style("float", "right")
session = SessionContents.get()
if not session:
row_div = DivWdg(intro)
row_div.add("Click on Introspect to start")
self.add(row_div)
return
namespace_dict = session.get_namespace()
current = namespace_dict.get('current')
namespaces = namespace_dict.get('child')
if 'UI' in namespaces:
namespaces.remove('UI')
div = DivWdg()
row_div = DivWdg()
ns_span = SpanWdg('Namespace: ')
ns_select = SelectWdg(self.NS_SELECT)
ns_span.add(ns_select)
ns_select.append_option('%s (current)' %current, current)
if namespaces:
ns_select.set_option('values', '|'.join(namespaces))
# append root namespace if not found
if ':' not in namespaces and ':' not in current:
ns_select.append_option(':',':')
add_node = ProdIconButtonWdg('Assign Selected', long=True)
add_node.add_event('onclick', "add_node_to_namespace('%s')" \
%self.NS_SELECT )
set_node = ProdIconSubmitWdg('Set Namespace', long=True)
set_node.add_event('onclick', "set_namespace('%s')" \
%self.NS_SELECT )
hint = HintWdg("After selecting the top node of objects in the session,"\
" click on [ Assign Selected ]")
row_div.add(intro)
row_div.add(ns_span)
row_div.add(HtmlElement.br(2))
row_div.add(set_node)
row_div.add(add_node)
row_div.add(hint)
row_div.add(HtmlElement.br(2))
# TODO: add these add/remove namespace functions
'''
insert = IconButtonWdg('insert', icon=IconWdg.INSERT, long=True)
insert_txt = TextWdg('new_namespace')
remove = IconButtonWdg('remove', icon=IconWdg.DELETE, long=True )
remove_div = DivWdg(remove)
row_div.add(insert)
row_div.add(insert_txt)
row_div.add(HtmlElement.br(2))
row_div.add(remove_div)
'''
div.add(row_div)
self.add(div)
hidden = HiddenWdg("namespace_info")
self.add(hidden)
insert = ProdIconButtonWdg('Contents')
insert.add_event("onclick", "get_namespace_contents()" )
self.add(insert)
contents = hidden.get_value().split("\t")
table = Table()
for content in contents:
table.add_row()
table.add_cell(content)
self.add(table) | [
"def",
"init",
"(",
"self",
")",
":",
"intro",
"=",
"IntrospectWdg",
"(",
")",
"intro",
".",
"add_style",
"(",
"\"float\"",
",",
"\"right\"",
")",
"session",
"=",
"SessionContents",
".",
"get",
"(",
")",
"if",
"not",
"session",
":",
"row_div",
"=",
"DivWdg",
"(",
"intro",
")",
"row_div",
".",
"add",
"(",
"\"Click on Introspect to start\"",
")",
"self",
".",
"add",
"(",
"row_div",
")",
"return",
"namespace_dict",
"=",
"session",
".",
"get_namespace",
"(",
")",
"current",
"=",
"namespace_dict",
".",
"get",
"(",
"'current'",
")",
"namespaces",
"=",
"namespace_dict",
".",
"get",
"(",
"'child'",
")",
"if",
"'UI'",
"in",
"namespaces",
":",
"namespaces",
".",
"remove",
"(",
"'UI'",
")",
"div",
"=",
"DivWdg",
"(",
")",
"row_div",
"=",
"DivWdg",
"(",
")",
"ns_span",
"=",
"SpanWdg",
"(",
"'Namespace: '",
")",
"ns_select",
"=",
"SelectWdg",
"(",
"self",
".",
"NS_SELECT",
")",
"ns_span",
".",
"add",
"(",
"ns_select",
")",
"ns_select",
".",
"append_option",
"(",
"'%s (current)'",
"%",
"current",
",",
"current",
")",
"if",
"namespaces",
":",
"ns_select",
".",
"set_option",
"(",
"'values'",
",",
"'|'",
".",
"join",
"(",
"namespaces",
")",
")",
"# append root namespace if not found",
"if",
"':'",
"not",
"in",
"namespaces",
"and",
"':'",
"not",
"in",
"current",
":",
"ns_select",
".",
"append_option",
"(",
"':'",
",",
"':'",
")",
"add_node",
"=",
"ProdIconButtonWdg",
"(",
"'Assign Selected'",
",",
"long",
"=",
"True",
")",
"add_node",
".",
"add_event",
"(",
"'onclick'",
",",
"\"add_node_to_namespace('%s')\"",
"%",
"self",
".",
"NS_SELECT",
")",
"set_node",
"=",
"ProdIconSubmitWdg",
"(",
"'Set Namespace'",
",",
"long",
"=",
"True",
")",
"set_node",
".",
"add_event",
"(",
"'onclick'",
",",
"\"set_namespace('%s')\"",
"%",
"self",
".",
"NS_SELECT",
")",
"hint",
"=",
"HintWdg",
"(",
"\"After selecting the top node of objects in the session,\"",
"\" click on [ Assign Selected ]\"",
")",
"row_div",
".",
"add",
"(",
"intro",
")",
"row_div",
".",
"add",
"(",
"ns_span",
")",
"row_div",
".",
"add",
"(",
"HtmlElement",
".",
"br",
"(",
"2",
")",
")",
"row_div",
".",
"add",
"(",
"set_node",
")",
"row_div",
".",
"add",
"(",
"add_node",
")",
"row_div",
".",
"add",
"(",
"hint",
")",
"row_div",
".",
"add",
"(",
"HtmlElement",
".",
"br",
"(",
"2",
")",
")",
"# TODO: add these add/remove namespace functions",
"div",
".",
"add",
"(",
"row_div",
")",
"self",
".",
"add",
"(",
"div",
")",
"hidden",
"=",
"HiddenWdg",
"(",
"\"namespace_info\"",
")",
"self",
".",
"add",
"(",
"hidden",
")",
"insert",
"=",
"ProdIconButtonWdg",
"(",
"'Contents'",
")",
"insert",
".",
"add_event",
"(",
"\"onclick\"",
",",
"\"get_namespace_contents()\"",
")",
"self",
".",
"add",
"(",
"insert",
")",
"contents",
"=",
"hidden",
".",
"get_value",
"(",
")",
".",
"split",
"(",
"\"\\t\"",
")",
"table",
"=",
"Table",
"(",
")",
"for",
"content",
"in",
"contents",
":",
"table",
".",
"add_row",
"(",
")",
"table",
".",
"add_cell",
"(",
"content",
")",
"self",
".",
"add",
"(",
"table",
")"
] | https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/src/pyasm/prod/web/app_load_wdg.py#L635-L719 |
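
The init method above is builder-style UI assembly: construct widgets, set options and events, then nest them with add(). A generic, framework-free sketch of that compose-then-render pattern (not TACTIC's real widget API):

    class Elem:
        """Tiny stand-in for widget classes like DivWdg or SelectWdg."""
        def __init__(self, tag, text=""):
            self.tag, self.text, self.children, self.attrs = tag, text, [], {}
        def add(self, child):
            self.children.append(child)
        def add_event(self, name, handler):
            self.attrs[name] = handler
        def render(self):
            inner = self.text + "".join(c.render() for c in self.children)
            attrs = "".join(' %s="%s"' % (k, v) for k, v in self.attrs.items())
            return "<%s%s>%s</%s>" % (self.tag, attrs, inner, self.tag)

    row = Elem("div")
    btn = Elem("button", "Set Namespace")
    btn.add_event("onclick", "set_namespace('ns_select')")
    row.add(btn)
    print(row.render())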
||
replit-archive/jsrepl | 36d79b6288ca5d26208e8bade2a168c6ebcb2376 | extern/python/closured/lib/python2.7/imaplib.py | python | IMAP4_stream.open | (self, host = None, port = None) | Setup a stream connection.
This connection will be used by the routines:
read, readline, send, shutdown. | Setup a stream connection.
This connection will be used by the routines:
read, readline, send, shutdown. | [
"Setup",
"a",
"stream",
"connection",
".",
"This",
"connection",
"will",
"be",
"used",
"by",
"the",
"routines",
":",
"read",
"readline",
"send",
"shutdown",
"."
] | def open(self, host = None, port = None):
"""Setup a stream connection.
This connection will be used by the routines:
read, readline, send, shutdown.
"""
self.host = None # For compatibility with parent class
self.port = None
self.sock = None
self.file = None
self.process = subprocess.Popen(self.command,
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
shell=True, close_fds=True)
self.writefile = self.process.stdin
self.readfile = self.process.stdout | [
"def",
"open",
"(",
"self",
",",
"host",
"=",
"None",
",",
"port",
"=",
"None",
")",
":",
"self",
".",
"host",
"=",
"None",
"# For compatibility with parent class",
"self",
".",
"port",
"=",
"None",
"self",
".",
"sock",
"=",
"None",
"self",
".",
"file",
"=",
"None",
"self",
".",
"process",
"=",
"subprocess",
".",
"Popen",
"(",
"self",
".",
"command",
",",
"stdin",
"=",
"subprocess",
".",
"PIPE",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"shell",
"=",
"True",
",",
"close_fds",
"=",
"True",
")",
"self",
".",
"writefile",
"=",
"self",
".",
"process",
".",
"stdin",
"self",
".",
"readfile",
"=",
"self",
".",
"process",
".",
"stdout"
] | https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/closured/lib/python2.7/imaplib.py#L1226-L1239 |
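
open() swaps the socket for a child process, so the IMAP dialogue flows over the child's stdin and stdout pipes. A self-contained sketch of the same transport; it uses a Python one-liner as a portable echo child and shell=False, unlike the original:

    import subprocess
    import sys

    # A portable echo child: reads one line from stdin, writes it to stdout.
    child = subprocess.Popen(
        [sys.executable, "-c", "import sys; sys.stdout.write(sys.stdin.readline())"],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE,
    )
    writefile, readfile = child.stdin, child.stdout  # the pair open() keeps
    writefile.write(b"a001 CAPABILITY\r\n")
    writefile.flush()
    print(readfile.readline())  # the child echoes the line back
    writefile.close()
    child.wait()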
||
nodejs/node | ac3c33c1646bf46104c15ae035982c06364da9b8 | deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py | python | MakefileWriter.WriteDoCmd | (
self, outputs, inputs, command, part_of_all, comment=None, postbuilds=False
) | Write a Makefile rule that uses do_cmd.
This makes the outputs dependent on the command line that was run,
as well as supporting the V= make command line flag. | Write a Makefile rule that uses do_cmd. | [
"Write",
"a",
"Makefile",
"rule",
"that",
"uses",
"do_cmd",
"."
] | def WriteDoCmd(
self, outputs, inputs, command, part_of_all, comment=None, postbuilds=False
):
"""Write a Makefile rule that uses do_cmd.
This makes the outputs dependent on the command line that was run,
as well as supporting the V= make command line flag.
"""
suffix = ""
if postbuilds:
assert "," not in command
suffix = ",,1" # Tell do_cmd to honor $POSTBUILDS
self.WriteMakeRule(
outputs,
inputs,
actions=[f"$(call do_cmd,{command}{suffix})"],
comment=comment,
command=command,
force=True,
)
# Add our outputs to the list of targets we read depfiles from.
# all_deps is only used for deps file reading, and for deps files we replace
# spaces with ? because escaping doesn't work with make's $(sort) and
# other functions.
outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs]
self.WriteLn("all_deps += %s" % " ".join(outputs)) | [
"def",
"WriteDoCmd",
"(",
"self",
",",
"outputs",
",",
"inputs",
",",
"command",
",",
"part_of_all",
",",
"comment",
"=",
"None",
",",
"postbuilds",
"=",
"False",
")",
":",
"suffix",
"=",
"\"\"",
"if",
"postbuilds",
":",
"assert",
"\",\"",
"not",
"in",
"command",
"suffix",
"=",
"\",,1\"",
"# Tell do_cmd to honor $POSTBUILDS",
"self",
".",
"WriteMakeRule",
"(",
"outputs",
",",
"inputs",
",",
"actions",
"=",
"[",
"f\"$(call do_cmd,{command}{suffix})\"",
"]",
",",
"comment",
"=",
"comment",
",",
"command",
"=",
"command",
",",
"force",
"=",
"True",
",",
")",
"# Add our outputs to the list of targets we read depfiles from.",
"# all_deps is only used for deps file reading, and for deps files we replace",
"# spaces with ? because escaping doesn't work with make's $(sort) and",
"# other functions.",
"outputs",
"=",
"[",
"QuoteSpaces",
"(",
"o",
",",
"SPACE_REPLACEMENT",
")",
"for",
"o",
"in",
"outputs",
"]",
"self",
".",
"WriteLn",
"(",
"\"all_deps += %s\"",
"%",
"\" \"",
".",
"join",
"(",
"outputs",
")",
")"
] | https://github.com/nodejs/node/blob/ac3c33c1646bf46104c15ae035982c06364da9b8/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py#L1912-L1937 |
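
The space-quoting matters because escaping does not survive make's $(sort); outputs get spaces swapped for a placeholder before landing in all_deps. A tiny sketch of that substitution, taking the '?' replacement from the comment in the code above:

    SPACE_REPLACEMENT = "?"  # per the comment in the generator above

    def quote_spaces(path, replacement=SPACE_REPLACEMENT):
        return path.replace(" ", replacement)

    outputs = ["out/My App/app.o", "out/lib.o"]
    print("all_deps += " + " ".join(quote_spaces(o) for o in outputs))
    # all_deps += out/My?App/app.o out/lib.o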
||
atom-community/ide-python | c046f9c2421713b34baa22648235541c5bb284fe | dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/_pydev_imps/_pydev_SocketServer.py | python | TCPServer.close_request | (self, request) | Called to clean up an individual request. | Called to clean up an individual request. | [
"Called",
"to",
"clean",
"up",
"an",
"individual",
"request",
"."
] | def close_request(self, request):
"""Called to clean up an individual request."""
request.close() | [
"def",
"close_request",
"(",
"self",
",",
"request",
")",
":",
"request",
".",
"close",
"(",
")"
] | https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/_pydev_imps/_pydev_SocketServer.py#L464-L466 |
||
xixiaoyao/CS224n-winter-together | f1fbcd4db284a804cb9dfc24b65481ba66e7d32c | Assignments/assignment4/Herais/nmt_model.py | python | NMT.forward | (self, source: List[List[str]], target: List[List[str]]) | return scores | Take a mini-batch of source and target sentences, compute the log-likelihood of
target sentences under the language models learned by the NMT system.
@param source (List[List[str]]): list of source sentence tokens
@param target (List[List[str]]): list of target sentence tokens, wrapped by `<s>` and `</s>`
@returns scores (Tensor): a variable/tensor of shape (b, ) representing the
log-likelihood of generating the gold-standard target sentence for
each example in the input batch. Here b = batch size. | Take a mini-batch of source and target sentences, compute the log-likelihood of
target sentences under the language models learned by the NMT system. | [
"Take",
"a",
"mini",
"-",
"batch",
"of",
"source",
"and",
"target",
"sentences",
"compute",
"the",
"log",
"-",
"likelihood",
"of",
"target",
"sentences",
"under",
"the",
"language",
"models",
"learned",
"by",
"the",
"NMT",
"system",
"."
] | def forward(self, source: List[List[str]], target: List[List[str]]) -> torch.Tensor:
""" Take a mini-batch of source and target sentences, compute the log-likelihood of
target sentences under the language models learned by the NMT system.
@param source (List[List[str]]): list of source sentence tokens
@param target (List[List[str]]): list of target sentence tokens, wrapped by `<s>` and `</s>`
@returns scores (Tensor): a variable/tensor of shape (b, ) representing the
log-likelihood of generating the gold-standard target sentence for
each example in the input batch. Here b = batch size.
"""
# Compute sentence lengths
source_lengths = [len(s) for s in source]
# Convert list of lists into tensors
source_padded = self.vocab.src.to_input_tensor(source, device=self.device) # Tensor: (src_len, b)
target_padded = self.vocab.tgt.to_input_tensor(target, device=self.device) # Tensor: (tgt_len, b)
### Run the network forward:
### 1. Apply the encoder to `source_padded` by calling `self.encode()`
### 2. Generate sentence masks for `source_padded` by calling `self.generate_sent_masks()`
### 3. Apply the decoder to compute combined-output by calling `self.decode()`
### 4. Compute log probability distribution over the target vocabulary using the
### combined_outputs returned by the `self.decode()` function.
enc_hiddens, dec_init_state = self.encode(source_padded, source_lengths)
enc_masks = self.generate_sent_masks(enc_hiddens, source_lengths)
combined_outputs = self.decode(enc_hiddens, enc_masks, dec_init_state, target_padded)
P = F.log_softmax(self.target_vocab_projection(combined_outputs), dim=-1)
# Zero out probabilities for which we have nothing in the target text
target_masks = (target_padded != self.vocab.tgt['<pad>']).float()
# Compute log probability of generating true target words
target_gold_words_log_prob = torch.gather(P, index=target_padded[1:].unsqueeze(-1), dim=-1).squeeze(-1) * target_masks[1:]
scores = target_gold_words_log_prob.sum(dim=0)
return scores | [
"def",
"forward",
"(",
"self",
",",
"source",
":",
"List",
"[",
"List",
"[",
"str",
"]",
"]",
",",
"target",
":",
"List",
"[",
"List",
"[",
"str",
"]",
"]",
")",
"->",
"torch",
".",
"Tensor",
":",
"# Compute sentence lengths",
"source_lengths",
"=",
"[",
"len",
"(",
"s",
")",
"for",
"s",
"in",
"source",
"]",
"# Convert list of lists into tensors",
"source_padded",
"=",
"self",
".",
"vocab",
".",
"src",
".",
"to_input_tensor",
"(",
"source",
",",
"device",
"=",
"self",
".",
"device",
")",
"# Tensor: (src_len, b)",
"target_padded",
"=",
"self",
".",
"vocab",
".",
"tgt",
".",
"to_input_tensor",
"(",
"target",
",",
"device",
"=",
"self",
".",
"device",
")",
"# Tensor: (tgt_len, b)",
"### Run the network forward:",
"### 1. Apply the encoder to `source_padded` by calling `self.encode()`",
"### 2. Generate sentence masks for `source_padded` by calling `self.generate_sent_masks()`",
"### 3. Apply the decoder to compute combined-output by calling `self.decode()`",
"### 4. Compute log probability distribution over the target vocabulary using the",
"### combined_outputs returned by the `self.decode()` function.",
"enc_hiddens",
",",
"dec_init_state",
"=",
"self",
".",
"encode",
"(",
"source_padded",
",",
"source_lengths",
")",
"enc_masks",
"=",
"self",
".",
"generate_sent_masks",
"(",
"enc_hiddens",
",",
"source_lengths",
")",
"combined_outputs",
"=",
"self",
".",
"decode",
"(",
"enc_hiddens",
",",
"enc_masks",
",",
"dec_init_state",
",",
"target_padded",
")",
"P",
"=",
"F",
".",
"log_softmax",
"(",
"self",
".",
"target_vocab_projection",
"(",
"combined_outputs",
")",
",",
"dim",
"=",
"-",
"1",
")",
"# Zero out, probabilities for which we have nothing in the target text",
"target_masks",
"=",
"(",
"target_padded",
"!=",
"self",
".",
"vocab",
".",
"tgt",
"[",
"'<pad>'",
"]",
")",
".",
"float",
"(",
")",
"# Compute log probability of generating true target words",
"target_gold_words_log_prob",
"=",
"torch",
".",
"gather",
"(",
"P",
",",
"index",
"=",
"target_padded",
"[",
"1",
":",
"]",
".",
"unsqueeze",
"(",
"-",
"1",
")",
",",
"dim",
"=",
"-",
"1",
")",
".",
"squeeze",
"(",
"-",
"1",
")",
"*",
"target_masks",
"[",
"1",
":",
"]",
"scores",
"=",
"target_gold_words_log_prob",
".",
"sum",
"(",
"dim",
"=",
"0",
")",
"return",
"scores"
] | https://github.com/xixiaoyao/CS224n-winter-together/blob/f1fbcd4db284a804cb9dfc24b65481ba66e7d32c/Assignments/assignment4/Herais/nmt_model.py#L91-L127 |
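
The torch.gather line above picks each gold token's log-probability, and the mask zeroes out padding before the per-sentence sum. A toy-shaped, runnable sketch of just that step; the real model also drops the first target row (the <s> shift via [1:]), omitted here for brevity:

    import torch
    import torch.nn.functional as F

    torch.manual_seed(0)
    tgt_len, b, vocab = 3, 2, 5
    P = F.log_softmax(torch.randn(tgt_len, b, vocab), dim=-1)  # (tgt_len, b, V)
    target = torch.tensor([[1, 2], [3, 0], [4, 4]])            # (tgt_len, b); 0 = <pad>
    mask = (target != 0).float()

    # pick log P(gold token) at every position, zero out the padded ones
    gold = torch.gather(P, index=target.unsqueeze(-1), dim=-1).squeeze(-1) * mask
    scores = gold.sum(dim=0)  # (b,) sentence log-likelihoods
    print(scores.shape)       # torch.Size([2])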
|
54xingzhe/weixin_crawler | 7e255e49717b7555424103398e16e2da40af4d38 | project/Application/report/gzh_report/__init__.py | python | GZHReportData._add_option_data | (self) | | :return: adds each data item needed by the official-account (gzh) report tables to self.option_data | :return: adds each data item needed by the official-account (gzh) report tables to self.option_data | [
":",
"return",
":",
"将公众号表格中需要的数据逐个添加到self",
".",
"option_data"
] | def _add_option_data(self):
"""
:return: adds each data item needed by the official-account (gzh) report tables to self.option_data
"""
# Read counts for all historical articles
df = self.gzh.allMainDateRead()
all_mian_date_read = draw_all_mian_date_read(df)
self.option_data['all_mian_date_read'] = all_mian_date_read
# Statistics for main and secondary articles
df = self.gzh.allStatistic()
all_statistic = draw_all_statistic(df)
self.option_data['all_statistic'] = all_statistic
# Post-count statistics by main/secondary slot, hour, and weekday
dfd = self.gzh.dirPostsNumRelated()
dir_posts_num_related = draw_dir_posts_num_related(dfd)
self.option_data['dir_posts_num_related'] = dir_posts_num_related
# How reads relate to likes and to the previous article's reads
df = self.gzh.factorsAndRead()
self.option_data['read_vs_factors'] = draw_read_vs_factors(df)
# Explore the best posting hour, weekday, title word count, image count and video count
self.option_data['find_best_factors'] = draw_find_best_factors(df)
# Article list data
self.option_data['table'] = {}
# Table 9: the 10 most-read articles, excluding 100001 (100k+) ones
df = self.gzh.getNumNExcept(mov='all')
self.option_data['table']['particular_most_read_10_except_100001'] = df2table(df, ['like','点赞量'])
# Table 10: the 10 articles with the lowest non-zero read counts
df = self.gzh.getNumNExcept(com='read', exce=0, asc=1)
self.option_data['table']['particular_least_read_10_except_0'] = df2table(df, ['like','点赞量'])
# Table 11: the 10 articles with the highest deep index.
df = self.gzh.getNumN(com='deep_index')
self.option_data['table']['particular_most_deep_10'] = df2table(df, ['deep_index','深度指数'])
# Table 12: the 10 articles with the lowest deep index
df = self.gzh.getNumN(com='deep_index', asc=1)
self.option_data['table']['particular_least_deep_10'] = df2table(df, ['deep_index','深度指数'])
# Table 13: the 10 articles with the lowest fall index
df = self.gzh.getNumN(com='fall_index', asc=1)
self.option_data['table']['particular_least_fall_10'] = df2table(df, ['fall_index','落差指数'])
# Table 14: all 100001 (100k+) articles
df = self.gzh.getNumCondition(mov='all')
self.option_data['table']['particular_all_10001'] = df2table(df, ['like','点赞量']) | [
"def",
"_add_option_data",
"(",
"self",
")",
":",
"# 历史所有文章阅读数据",
"df",
"=",
"self",
".",
"gzh",
".",
"allMainDateRead",
"(",
")",
"all_mian_date_read",
"=",
"draw_all_mian_date_read",
"(",
"df",
")",
"self",
".",
"option_data",
"[",
"'all_mian_date_read'",
"]",
"=",
"all_mian_date_read",
"# 主副文章统计数据",
"df",
"=",
"self",
".",
"gzh",
".",
"allStatistic",
"(",
")",
"all_statistic",
"=",
"draw_all_statistic",
"(",
"df",
")",
"self",
".",
"option_data",
"[",
"'all_statistic'",
"]",
"=",
"all_statistic",
"# 不同主副/小时/星期类别文章发文数量统计",
"dfd",
"=",
"self",
".",
"gzh",
".",
"dirPostsNumRelated",
"(",
")",
"dir_posts_num_related",
"=",
"draw_dir_posts_num_related",
"(",
"dfd",
")",
"self",
".",
"option_data",
"[",
"'dir_posts_num_related'",
"]",
"=",
"dir_posts_num_related",
"# 阅读量分别和点赞量/上一次阅读量之间的关系",
"df",
"=",
"self",
".",
"gzh",
".",
"factorsAndRead",
"(",
")",
"self",
".",
"option_data",
"[",
"'read_vs_factors'",
"]",
"=",
"draw_read_vs_factors",
"(",
"df",
")",
"# 探索最佳推文小时 推文星期 标题词数 插图数量 视频数量",
"self",
".",
"option_data",
"[",
"'find_best_factors'",
"]",
"=",
"draw_find_best_factors",
"(",
"df",
")",
"# 文章列表数据",
"self",
".",
"option_data",
"[",
"'table'",
"]",
"=",
"{",
"}",
"# 表9 100001除外阅读来那个最高的10篇文章",
"df",
"=",
"self",
".",
"gzh",
".",
"getNumNExcept",
"(",
"mov",
"=",
"'all'",
")",
"self",
".",
"option_data",
"[",
"'table'",
"]",
"[",
"'particular_most_read_10_except_100001'",
"]",
"=",
"df2table",
"(",
"df",
",",
"[",
"'like'",
",",
"'点赞量'])",
"",
"",
"# 表10 阅读量最低且为非0的10篇文章",
"df",
"=",
"self",
".",
"gzh",
".",
"getNumNExcept",
"(",
"com",
"=",
"'read'",
",",
"exce",
"=",
"0",
",",
"asc",
"=",
"1",
")",
"self",
".",
"option_data",
"[",
"'table'",
"]",
"[",
"'particular_least_read_10_except_0'",
"]",
"=",
"df2table",
"(",
"df",
",",
"[",
"'like'",
",",
"'点赞量'])",
"",
"",
"# 表11 深度指数最高的10篇文章.",
"df",
"=",
"self",
".",
"gzh",
".",
"getNumN",
"(",
"com",
"=",
"'deep_index'",
")",
"self",
".",
"option_data",
"[",
"'table'",
"]",
"[",
"'particular_most_deep_10'",
"]",
"=",
"df2table",
"(",
"df",
",",
"[",
"'deep_index'",
",",
"'深度指数'])",
"",
"",
"# 表12 深度指数最低的10篇文章",
"df",
"=",
"self",
".",
"gzh",
".",
"getNumN",
"(",
"com",
"=",
"'deep_index'",
",",
"asc",
"=",
"1",
")",
"self",
".",
"option_data",
"[",
"'table'",
"]",
"[",
"'particular_least_deep_10'",
"]",
"=",
"df2table",
"(",
"df",
",",
"[",
"'deep_index'",
",",
"'深度指数'])",
"",
"",
"# 表13 落差指数最低的10篇文章",
"df",
"=",
"self",
".",
"gzh",
".",
"getNumN",
"(",
"com",
"=",
"'fall_index'",
",",
"asc",
"=",
"1",
")",
"self",
".",
"option_data",
"[",
"'table'",
"]",
"[",
"'particular_least_fall_10'",
"]",
"=",
"df2table",
"(",
"df",
",",
"[",
"'fall_index'",
",",
"'落差指数'])",
"",
"",
"# 表14 所有的100001文章",
"df",
"=",
"self",
".",
"gzh",
".",
"getNumCondition",
"(",
"mov",
"=",
"'all'",
")",
"self",
".",
"option_data",
"[",
"'table'",
"]",
"[",
"'particular_all_10001'",
"]",
"=",
"df2table",
"(",
"df",
",",
"[",
"'like'",
",",
"'点赞量'])",
"",
""
] | https://github.com/54xingzhe/weixin_crawler/blob/7e255e49717b7555424103398e16e2da40af4d38/project/Application/report/gzh_report/__init__.py#L42-L86 |
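
The body above mostly funnels DataFrames through a df2table helper before storing them in option_data. df2table's real signature is not shown in this record, so the stand-in below is hypothetical, sketching one plausible shape: rename a column to its display label and emit a header row plus data rows:

    import pandas as pd

    def df2table_sketch(df, col_label):
        """Hypothetical stand-in for df2table: rename one column to a display
        label, then emit [header, *rows] for the report front end."""
        col, label = col_label
        renamed = df.rename(columns={col: label})
        return [renamed.columns.tolist()] + renamed.values.tolist()

    df = pd.DataFrame({"title": ["a", "b"], "like": [10, 3]})
    print(df2table_sketch(df, ["like", "likes"]))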
||
atom-community/ide-python | c046f9c2421713b34baa22648235541c5bb284fe | dist/debugger/VendorLib/vs-py-debugger/pythonFiles/isort/natural.py | python | nsorted | (to_sort, key=None) | return sorted(to_sort, key=key_callback) | Returns a naturally sorted list | Returns a naturally sorted list | [
"Returns",
"a",
"naturally",
"sorted",
"list"
] | def nsorted(to_sort, key=None):
"""Returns a naturally sorted list"""
if key is None:
key_callback = _natural_keys
else:
def key_callback(item):
return _natural_keys(key(item))
return sorted(to_sort, key=key_callback) | [
"def",
"nsorted",
"(",
"to_sort",
",",
"key",
"=",
"None",
")",
":",
"if",
"key",
"is",
"None",
":",
"key_callback",
"=",
"_natural_keys",
"else",
":",
"def",
"key_callback",
"(",
"item",
")",
":",
"return",
"_natural_keys",
"(",
"key",
"(",
"item",
")",
")",
"return",
"sorted",
"(",
"to_sort",
",",
"key",
"=",
"key_callback",
")"
] | https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/dist/debugger/VendorLib/vs-py-debugger/pythonFiles/isort/natural.py#L39-L47 |
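
_natural_keys, defined alongside nsorted, is what makes "file2" sort before "file10". A common stand-in implementation, hedged as an assumption about its exact shape:

    import re

    def natural_keys_sketch(text):
        # digit runs compare numerically, everything else lexically
        return [int(part) if part.isdigit() else part for part in re.split(r'(\d+)', text)]

    print(sorted(["file10", "file2", "file1"], key=natural_keys_sketch))
    # ['file1', 'file2', 'file10']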