Dataset columns (dtype and value-length range):

nwo: stringlengths (5 to 58)
sha: stringlengths (40 to 40)
path: stringlengths (5 to 172)
language: stringclasses (1 value)
identifier: stringlengths (1 to 100)
parameters: stringlengths (2 to 3.5k)
argument_list: stringclasses (1 value)
return_statement: stringlengths (0 to 21.5k)
docstring: stringlengths (2 to 17k)
docstring_summary: stringlengths (0 to 6.58k)
docstring_tokens: sequence
function: stringlengths (35 to 55.6k)
function_tokens: sequence
url: stringlengths (89 to 269)
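Read as a table, the preview below is one row per extracted function, keyed by the columns above. As a minimal sketch of how such a dump could be loaded and inspected with the Hugging Face datasets library, assuming the rows are stored as JSON lines under these column names (the file name below is hypothetical, not given anywhere in this preview):

```python
from datasets import load_dataset

# Minimal sketch: assumes the preview rows are available locally as a JSON-lines
# dump with the columns listed in the schema. "code_functions.jsonl" is a
# hypothetical file name chosen for illustration.
ds = load_dataset("json", data_files="code_functions.jsonl", split="train")

row = ds[0]
print(row["nwo"], row["sha"][:8])       # repository and commit the function was mined from
print(row["path"], row["language"])     # source file and language (always "python" here)
print(row["identifier"], row["parameters"])
print(row["docstring_summary"])         # short form of the docstring
print(len(row["function_tokens"]), "tokens")
print(row["url"])                       # GitHub permalink with the exact line range
```

Each record carries both the raw source of the function (`function`) and its pre-tokenized form (`function_tokens`), alongside the docstring in raw, summary, and token form, plus a `url` pointing back to the exact lines on GitHub.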
mozilla/spidernode
aafa9e5273f954f272bb4382fc007af14674b4c2
deps/spidershim/spidermonkey/python/mozbuild/mozbuild/preprocessor.py
python
Preprocessor.handleCommandLine
(self, args, defaultToStdin = False)
Parse a commandline into this parser. Uses OptionParser internally, no args mean sys.argv[1:].
Parse a commandline into this parser. Uses OptionParser internally, no args mean sys.argv[1:].
[ "Parse", "a", "commandline", "into", "this", "parser", ".", "Uses", "OptionParser", "internally", "no", "args", "mean", "sys", ".", "argv", "[", "1", ":", "]", "." ]
def handleCommandLine(self, args, defaultToStdin = False): """ Parse a commandline into this parser. Uses OptionParser internally, no args mean sys.argv[1:]. """ def get_output_file(path): dir = os.path.dirname(path) if dir: try: os.makedirs(dir) except OSError as error: if error.errno != errno.EEXIST: raise return open(path, 'wb') p = self.getCommandLineParser() options, args = p.parse_args(args=args) out = self.out depfile = None if options.output: out = get_output_file(options.output) if defaultToStdin and len(args) == 0: args = [sys.stdin] if options.depend: raise Preprocessor.Error(self, "--depend doesn't work with stdin", None) if options.depend: if not options.output: raise Preprocessor.Error(self, "--depend doesn't work with stdout", None) try: from makeutil import Makefile except: raise Preprocessor.Error(self, "--depend requires the " "mozbuild.makeutil module", None) depfile = get_output_file(options.depend) if args: for f in args: with open(f, 'rU') as input: self.processFile(input=input, output=out) if depfile: mk = Makefile() mk.create_rule([options.output]).add_dependencies(self.includes) mk.dump(depfile) depfile.close() if options.output: out.close()
[ "def", "handleCommandLine", "(", "self", ",", "args", ",", "defaultToStdin", "=", "False", ")", ":", "def", "get_output_file", "(", "path", ")", ":", "dir", "=", "os", ".", "path", ".", "dirname", "(", "path", ")", "if", "dir", ":", "try", ":", "os", ".", "makedirs", "(", "dir", ")", "except", "OSError", "as", "error", ":", "if", "error", ".", "errno", "!=", "errno", ".", "EEXIST", ":", "raise", "return", "open", "(", "path", ",", "'wb'", ")", "p", "=", "self", ".", "getCommandLineParser", "(", ")", "options", ",", "args", "=", "p", ".", "parse_args", "(", "args", "=", "args", ")", "out", "=", "self", ".", "out", "depfile", "=", "None", "if", "options", ".", "output", ":", "out", "=", "get_output_file", "(", "options", ".", "output", ")", "if", "defaultToStdin", "and", "len", "(", "args", ")", "==", "0", ":", "args", "=", "[", "sys", ".", "stdin", "]", "if", "options", ".", "depend", ":", "raise", "Preprocessor", ".", "Error", "(", "self", ",", "\"--depend doesn't work with stdin\"", ",", "None", ")", "if", "options", ".", "depend", ":", "if", "not", "options", ".", "output", ":", "raise", "Preprocessor", ".", "Error", "(", "self", ",", "\"--depend doesn't work with stdout\"", ",", "None", ")", "try", ":", "from", "makeutil", "import", "Makefile", "except", ":", "raise", "Preprocessor", ".", "Error", "(", "self", ",", "\"--depend requires the \"", "\"mozbuild.makeutil module\"", ",", "None", ")", "depfile", "=", "get_output_file", "(", "options", ".", "depend", ")", "if", "args", ":", "for", "f", "in", "args", ":", "with", "open", "(", "f", ",", "'rU'", ")", "as", "input", ":", "self", ".", "processFile", "(", "input", "=", "input", ",", "output", "=", "out", ")", "if", "depfile", ":", "mk", "=", "Makefile", "(", ")", "mk", ".", "create_rule", "(", "[", "options", ".", "output", "]", ")", ".", "add_dependencies", "(", "self", ".", "includes", ")", "mk", ".", "dump", "(", "depfile", ")", "depfile", ".", "close", "(", ")", "if", "options", ".", "output", ":", "out", ".", "close", "(", ")" ]
https://github.com/mozilla/spidernode/blob/aafa9e5273f954f272bb4382fc007af14674b4c2/deps/spidershim/spidermonkey/python/mozbuild/mozbuild/preprocessor.py#L440-L489
jly8866/archer
cc41736aede07246d2ad9f26e7b0b6de537a6cc2
src/docker/pymysql/connections.py
python
Connection._force_close
(self)
Close connection without QUIT message
Close connection without QUIT message
[ "Close", "connection", "without", "QUIT", "message" ]
def _force_close(self): """Close connection without QUIT message""" if self._sock: try: self._sock.close() except: pass self._sock = None self._rfile = None
[ "def", "_force_close", "(", "self", ")", ":", "if", "self", ".", "_sock", ":", "try", ":", "self", ".", "_sock", ".", "close", "(", ")", "except", ":", "pass", "self", ".", "_sock", "=", "None", "self", ".", "_rfile", "=", "None" ]
https://github.com/jly8866/archer/blob/cc41736aede07246d2ad9f26e7b0b6de537a6cc2/src/docker/pymysql/connections.py#L744-L752
yueyongyue/saltshaker
d32cdd0aa13098bdc77bb3abd4a92c10fa517dd1
minions/views.py
python
minions_asset_info
(request)
return render(request, 'minions/minions_asset_info.html', {'asset': asset_list})
sapi = SaltAPI() up_host = sapi.runner_status('status')['up'] jid = [] disk_all = {} for hostname in up_host: info_all = sapi.remote_noarg_execution(hostname, 'grains.items') disk_use = sapi.remote_noarg_execution(hostname, 'disk.usage') for key in disk_use: disk_info = {key: int(disk_use[key]['capacity'][:-1])} disk_all.update(disk_info) disk_dic = {'disk': disk_all} info_all.update(disk_dic) disk_all = {} jid += [info_all] return render(request, 'minions/minions_asset_info.html', {'jyp': jid})
sapi = SaltAPI() up_host = sapi.runner_status('status')['up'] jid = [] disk_all = {} for hostname in up_host: info_all = sapi.remote_noarg_execution(hostname, 'grains.items') disk_use = sapi.remote_noarg_execution(hostname, 'disk.usage') for key in disk_use: disk_info = {key: int(disk_use[key]['capacity'][:-1])} disk_all.update(disk_info) disk_dic = {'disk': disk_all} info_all.update(disk_dic) disk_all = {} jid += [info_all] return render(request, 'minions/minions_asset_info.html', {'jyp': jid})
[ "sapi", "=", "SaltAPI", "()", "up_host", "=", "sapi", ".", "runner_status", "(", "status", ")", "[", "up", "]", "jid", "=", "[]", "disk_all", "=", "{}", "for", "hostname", "in", "up_host", ":", "info_all", "=", "sapi", ".", "remote_noarg_execution", "(", "hostname", "grains", ".", "items", ")", "disk_use", "=", "sapi", ".", "remote_noarg_execution", "(", "hostname", "disk", ".", "usage", ")", "for", "key", "in", "disk_use", ":", "disk_info", "=", "{", "key", ":", "int", "(", "disk_use", "[", "key", "]", "[", "capacity", "]", "[", ":", "-", "1", "]", ")", "}", "disk_all", ".", "update", "(", "disk_info", ")", "disk_dic", "=", "{", "disk", ":", "disk_all", "}", "info_all", ".", "update", "(", "disk_dic", ")", "disk_all", "=", "{}", "jid", "+", "=", "[", "info_all", "]", "return", "render", "(", "request", "minions", "/", "minions_asset_info", ".", "html", "{", "jyp", ":", "jid", "}", ")" ]
def minions_asset_info(request): ''' sapi = SaltAPI() up_host = sapi.runner_status('status')['up'] jid = [] disk_all = {} for hostname in up_host: info_all = sapi.remote_noarg_execution(hostname, 'grains.items') disk_use = sapi.remote_noarg_execution(hostname, 'disk.usage') for key in disk_use: disk_info = {key: int(disk_use[key]['capacity'][:-1])} disk_all.update(disk_info) disk_dic = {'disk': disk_all} info_all.update(disk_dic) disk_all = {} jid += [info_all] return render(request, 'minions/minions_asset_info.html', {'jyp': jid}) ''' salt_grains = Salt_grains.objects.all() asset_list = [] for asset in salt_grains: asset_dic = {asset.minion_id.decode('string-escape'): eval(asset.grains)} asset_dics = asset_dic.copy() asset_list.append(asset_dics) return render(request, 'minions/minions_asset_info.html', {'asset': asset_list})
[ "def", "minions_asset_info", "(", "request", ")", ":", "salt_grains", "=", "Salt_grains", ".", "objects", ".", "all", "(", ")", "asset_list", "=", "[", "]", "for", "asset", "in", "salt_grains", ":", "asset_dic", "=", "{", "asset", ".", "minion_id", ".", "decode", "(", "'string-escape'", ")", ":", "eval", "(", "asset", ".", "grains", ")", "}", "asset_dics", "=", "asset_dic", ".", "copy", "(", ")", "asset_list", ".", "append", "(", "asset_dics", ")", "return", "render", "(", "request", ",", "'minions/minions_asset_info.html'", ",", "{", "'asset'", ":", "asset_list", "}", ")" ]
https://github.com/yueyongyue/saltshaker/blob/d32cdd0aa13098bdc77bb3abd4a92c10fa517dd1/minions/views.py#L60-L84
salsoftware/legacy-sal
f0d5eaf4363a4f42b86db04542ff7730ed14f4bf
server/templatetags/includeifapp.py
python
do_include_ifapp
(parser, token)
return IncludeNode(path)
Loads a template and renders it with the current context if the specified application is in settings.INSTALLED_APPS. Example:: {% includeifapp app_label "foo/some_include" %}
Loads a template and renders it with the current context if the specified application is in settings.INSTALLED_APPS.
[ "Loads", "a", "template", "and", "renders", "it", "with", "the", "current", "context", "if", "the", "specified", "application", "is", "in", "settings", ".", "INSTALLED_APPS", "." ]
def do_include_ifapp(parser, token): """ Loads a template and renders it with the current context if the specified application is in settings.INSTALLED_APPS. Example:: {% includeifapp app_label "foo/some_include" %} """ bits = token.split_contents() if len(bits) != 3: raise TemplateSyntaxError, "%r tag takes two argument: the application label and the name of the template to be included" % bits[0] app_name, path = bits[1:] app_name = app_name.strip('"\'') try: models = get_app(app_name) except ImproperlyConfigured: return template.Node() if path[0] in ('"', "'") and path[-1] == path[0]: return ConstantIncludeNode(path[1:-1]) return IncludeNode(path)
[ "def", "do_include_ifapp", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "if", "len", "(", "bits", ")", "!=", "3", ":", "raise", "TemplateSyntaxError", ",", "\"%r tag takes two argument: the application label and the name of the template to be included\"", "%", "bits", "[", "0", "]", "app_name", ",", "path", "=", "bits", "[", "1", ":", "]", "app_name", "=", "app_name", ".", "strip", "(", "'\"\\''", ")", "try", ":", "models", "=", "get_app", "(", "app_name", ")", "except", "ImproperlyConfigured", ":", "return", "template", ".", "Node", "(", ")", "if", "path", "[", "0", "]", "in", "(", "'\"'", ",", "\"'\"", ")", "and", "path", "[", "-", "1", "]", "==", "path", "[", "0", "]", ":", "return", "ConstantIncludeNode", "(", "path", "[", "1", ":", "-", "1", "]", ")", "return", "IncludeNode", "(", "path", ")" ]
https://github.com/salsoftware/legacy-sal/blob/f0d5eaf4363a4f42b86db04542ff7730ed14f4bf/server/templatetags/includeifapp.py#L8-L31
nodejs/node-chakracore
770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43
deps/chakrashim/third_party/jinja2/ext/django2jinja/django2jinja.py
python
Writer.body
(self, nodes)
Calls node() for every node in the iterable passed.
Calls node() for every node in the iterable passed.
[ "Calls", "node", "()", "for", "every", "node", "in", "the", "iterable", "passed", "." ]
def body(self, nodes): """Calls node() for every node in the iterable passed.""" for node in nodes: self.node(node)
[ "def", "body", "(", "self", ",", "nodes", ")", ":", "for", "node", "in", "nodes", ":", "self", ".", "node", "(", "node", ")" ]
https://github.com/nodejs/node-chakracore/blob/770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43/deps/chakrashim/third_party/jinja2/ext/django2jinja/django2jinja.py#L347-L350
Southpaw-TACTIC/TACTIC
ba9b87aef0ee3b3ea51446f25b285ebbca06f62c
src/pyasm/prod/web/asset_history_wdg.py
python
AssetHistoryWdg.get_master_checkbox
(self, select)
return master
turn them all off first, then turn on the one for the selected context
turn them all off first, then turn on the one for the selected context
[ "turn", "them", "all", "off", "first", "then", "turn", "on", "the", "one", "for", "the", "selected", "context" ]
def get_master_checkbox(self, select): '''turn them all off first, then turn on the one for the selected context''' master = SpanWdg(css='med') master.add_color("color", "color") cb_name = '%s_%s' %(self.base_search_type, self.CB_NAME) value = select.get_value() if value: master_cb = CheckboxWdg('master_control', label='Toggle all current [%s]' %value) master_cb.add_behavior({'type': 'click_up', 'propagate_evt': True, 'cbjs_action': ''' var context_sel = bvr.src_el.getParent('.spt_history_wdg').getElement('.spt_context_select') var filter = '.spt_history_' + context_sel.value; var inputs = spt.api.Utility.get_inputs(bvr.src_el.getParent('.spt_table'),'%s','.spt_history'); for (var i = 0; i < inputs.length; i++) inputs[i].checked = false; var inputs = spt.api.Utility.get_inputs(bvr.src_el.getParent('.spt_table'),'%s', filter); for (var i = 0; i < inputs.length; i++) inputs[i].checked = bvr.src_el.checked; ''' %(cb_name, cb_name)}) master.add(master_cb) return master
[ "def", "get_master_checkbox", "(", "self", ",", "select", ")", ":", "master", "=", "SpanWdg", "(", "css", "=", "'med'", ")", "master", ".", "add_color", "(", "\"color\"", ",", "\"color\"", ")", "cb_name", "=", "'%s_%s'", "%", "(", "self", ".", "base_search_type", ",", "self", ".", "CB_NAME", ")", "value", "=", "select", ".", "get_value", "(", ")", "if", "value", ":", "master_cb", "=", "CheckboxWdg", "(", "'master_control'", ",", "label", "=", "'Toggle all current [%s]'", "%", "value", ")", "master_cb", ".", "add_behavior", "(", "{", "'type'", ":", "'click_up'", ",", "'propagate_evt'", ":", "True", ",", "'cbjs_action'", ":", "'''\n var context_sel = bvr.src_el.getParent('.spt_history_wdg').getElement('.spt_context_select')\n var filter = '.spt_history_' + context_sel.value;\n var inputs = spt.api.Utility.get_inputs(bvr.src_el.getParent('.spt_table'),'%s','.spt_history');\n for (var i = 0; i < inputs.length; i++)\n inputs[i].checked = false;\n var inputs = spt.api.Utility.get_inputs(bvr.src_el.getParent('.spt_table'),'%s', filter);\n for (var i = 0; i < inputs.length; i++)\n inputs[i].checked = bvr.src_el.checked;\n '''", "%", "(", "cb_name", ",", "cb_name", ")", "}", ")", "master", ".", "add", "(", "master_cb", ")", "return", "master" ]
https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/src/pyasm/prod/web/asset_history_wdg.py#L65-L88
Sefaria/Sefaria-Project
506752f49394fadebae283d525af8276eb2e241e
sefaria/model/text.py
python
Ref.precedes
(self, other)
return False
Does this Ref completely precede ``other`` Ref? :param other: :return bool:
Does this Ref completely precede ``other`` Ref?
[ "Does", "this", "Ref", "completely", "precede", "other", "Ref?" ]
def precedes(self, other) -> bool: """ Does this Ref completely precede ``other`` Ref? :param other: :return bool: """ assert isinstance(other, Ref) if not self.index_node == other.index_node: return False my_end = self.ending_ref() other_start = other.starting_ref() smallest_section_len = min([len(my_end.sections), len(other_start.sections)]) # Bare book references never precede or follow if smallest_section_len == 0: return False # Compare all but last section for i in range(smallest_section_len - 1): if my_end.sections[i] < other_start.sections[i]: return True if my_end.sections[i] > other_start.sections[i]: return False # Compare last significant section if my_end.sections[smallest_section_len - 1] < other_start.sections[smallest_section_len - 1]: return True return False
[ "def", "precedes", "(", "self", ",", "other", ")", "->", "bool", ":", "assert", "isinstance", "(", "other", ",", "Ref", ")", "if", "not", "self", ".", "index_node", "==", "other", ".", "index_node", ":", "return", "False", "my_end", "=", "self", ".", "ending_ref", "(", ")", "other_start", "=", "other", ".", "starting_ref", "(", ")", "smallest_section_len", "=", "min", "(", "[", "len", "(", "my_end", ".", "sections", ")", ",", "len", "(", "other_start", ".", "sections", ")", "]", ")", "# Bare book references never precede or follow", "if", "smallest_section_len", "==", "0", ":", "return", "False", "# Compare all but last section", "for", "i", "in", "range", "(", "smallest_section_len", "-", "1", ")", ":", "if", "my_end", ".", "sections", "[", "i", "]", "<", "other_start", ".", "sections", "[", "i", "]", ":", "return", "True", "if", "my_end", ".", "sections", "[", "i", "]", ">", "other_start", ".", "sections", "[", "i", "]", ":", "return", "False", "# Compare last significant section", "if", "my_end", ".", "sections", "[", "smallest_section_len", "-", "1", "]", "<", "other_start", ".", "sections", "[", "smallest_section_len", "-", "1", "]", ":", "return", "True", "return", "False" ]
https://github.com/Sefaria/Sefaria-Project/blob/506752f49394fadebae283d525af8276eb2e241e/sefaria/model/text.py#L4037-L4068
nodejs/node-chakracore
770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43
tools/gyp/pylib/gyp/msvs_emulation.py
python
MsvsSettings._HasExplicitRuleForExtension
(self, spec, extension)
return False
Determine if there's an explicit rule for a particular extension.
Determine if there's an explicit rule for a particular extension.
[ "Determine", "if", "there", "s", "an", "explicit", "rule", "for", "a", "particular", "extension", "." ]
def _HasExplicitRuleForExtension(self, spec, extension): """Determine if there's an explicit rule for a particular extension.""" for rule in spec.get('rules', []): if rule['extension'] == extension: return True return False
[ "def", "_HasExplicitRuleForExtension", "(", "self", ",", "spec", ",", "extension", ")", ":", "for", "rule", "in", "spec", ".", "get", "(", "'rules'", ",", "[", "]", ")", ":", "if", "rule", "[", "'extension'", "]", "==", "extension", ":", "return", "True", "return", "False" ]
https://github.com/nodejs/node-chakracore/blob/770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43/tools/gyp/pylib/gyp/msvs_emulation.py#L829-L834
vdutor/TF-rex
69a5565f437de38cccc862734a561baf5f47b9bc
tf-rex/environment.py
python
Environment.do_action
(self, action)
return self.get_state(action)
Performs action and returns the updated status :param action: Must come from the class Action. The only allowed actions are Action.UP, Action.Down and Action.FORWARD. :return: return the image of the game after performing the action, the reward (after the action) and whether the TRex crashed or not.
Performs action and returns the updated status
[ "Performs", "action", "and", "returns", "the", "updated", "status" ]
def do_action(self, action): """ Performs action and returns the updated status :param action: Must come from the class Action. The only allowed actions are Action.UP, Action.Down and Action.FORWARD. :return: return the image of the game after performing the action, the reward (after the action) and whether the TRex crashed or not. """ if action != Action.FORWARD: # noting needs to be send when the action is going forward self.server.send_message(self.game_client, self.actions[action]); time.sleep(.05) return self.get_state(action)
[ "def", "do_action", "(", "self", ",", "action", ")", ":", "if", "action", "!=", "Action", ".", "FORWARD", ":", "# noting needs to be send when the action is going forward", "self", ".", "server", ".", "send_message", "(", "self", ".", "game_client", ",", "self", ".", "actions", "[", "action", "]", ")", "time", ".", "sleep", "(", ".05", ")", "return", "self", ".", "get_state", "(", "action", ")" ]
https://github.com/vdutor/TF-rex/blob/69a5565f437de38cccc862734a561baf5f47b9bc/tf-rex/environment.py#L74-L88
KhronosGroup/OpenCL-Docs
2f8b8140b71cfbc9698678f74fb35b6ab6d46f66
scripts/generator.py
python
OutputGenerator.genCmd
(self, cmd, cmdinfo, alias)
Generate interface for a command. - cmdinfo - CmdInfo for a command Extend to generate as desired in your derived class.
Generate interface for a command.
[ "Generate", "interface", "for", "a", "command", "." ]
def genCmd(self, cmd, cmdinfo, alias): """Generate interface for a command. - cmdinfo - CmdInfo for a command Extend to generate as desired in your derived class.""" self.validateFeature('command', cmdinfo)
[ "def", "genCmd", "(", "self", ",", "cmd", ",", "cmdinfo", ",", "alias", ")", ":", "self", ".", "validateFeature", "(", "'command'", ",", "cmdinfo", ")" ]
https://github.com/KhronosGroup/OpenCL-Docs/blob/2f8b8140b71cfbc9698678f74fb35b6ab6d46f66/scripts/generator.py#L874-L880
hyperledger/cello
000905d29e502a38d7576f20884de3c5aa371307
kconfig-lib/kconfiglib.py
python
MenuNode.orig_selects
(self)
return [(select, self._strip_dep(cond)) for select, cond in self.selects]
See the class documentation.
See the class documentation.
[ "See", "the", "class", "documentation", "." ]
def orig_selects(self): """ See the class documentation. """ return [(select, self._strip_dep(cond)) for select, cond in self.selects]
[ "def", "orig_selects", "(", "self", ")", ":", "return", "[", "(", "select", ",", "self", ".", "_strip_dep", "(", "cond", ")", ")", "for", "select", ",", "cond", "in", "self", ".", "selects", "]" ]
https://github.com/hyperledger/cello/blob/000905d29e502a38d7576f20884de3c5aa371307/kconfig-lib/kconfiglib.py#L5774-L5778
cuckoosandbox/cuckoo
50452a39ff7c3e0c4c94d114bc6317101633b958
cuckoo/processing/platform/windows.py
python
MonitorProcessLog._api_CIFrameElement_CreateElement
(self, event)
Lowercases the attribute keys.
Lowercases the attribute keys.
[ "Lowercases", "the", "attribute", "keys", "." ]
def _api_CIFrameElement_CreateElement(self, event): """Lowercases the attribute keys.""" attrs = {} for key, value in event["arguments"]["attributes"].items(): attrs[key.lower()] = value event["arguments"]["attributes"] = attrs
[ "def", "_api_CIFrameElement_CreateElement", "(", "self", ",", "event", ")", ":", "attrs", "=", "{", "}", "for", "key", ",", "value", "in", "event", "[", "\"arguments\"", "]", "[", "\"attributes\"", "]", ".", "items", "(", ")", ":", "attrs", "[", "key", ".", "lower", "(", ")", "]", "=", "value", "event", "[", "\"arguments\"", "]", "[", "\"attributes\"", "]", "=", "attrs" ]
https://github.com/cuckoosandbox/cuckoo/blob/50452a39ff7c3e0c4c94d114bc6317101633b958/cuckoo/processing/platform/windows.py#L41-L47
windmill/windmill
994bd992b17f3f2d6f6b276fe17391fea08f32c3
windmill/dep/_uuid.py
python
_ipconfig_getnode
()
Get the hardware address on Windows by running ipconfig.exe.
Get the hardware address on Windows by running ipconfig.exe.
[ "Get", "the", "hardware", "address", "on", "Windows", "by", "running", "ipconfig", ".", "exe", "." ]
def _ipconfig_getnode(): """Get the hardware address on Windows by running ipconfig.exe.""" import os, re dirs = ['', r'c:\windows\system32', r'c:\winnt\system32'] try: import ctypes buffer = ctypes.create_string_buffer(300) ctypes.windll.kernel32.GetSystemDirectoryA(buffer, 300) dirs.insert(0, buffer.value.decode('mbcs')) except: pass for dir in dirs: try: pipe = os.popen(os.path.join(dir, 'ipconfig') + ' /all') except IOError: continue for line in pipe: value = line.split(':')[-1].strip().lower() if re.match('([0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]', value): return int(value.replace('-', ''), 16)
[ "def", "_ipconfig_getnode", "(", ")", ":", "import", "os", ",", "re", "dirs", "=", "[", "''", ",", "r'c:\\windows\\system32'", ",", "r'c:\\winnt\\system32'", "]", "try", ":", "import", "ctypes", "buffer", "=", "ctypes", ".", "create_string_buffer", "(", "300", ")", "ctypes", ".", "windll", ".", "kernel32", ".", "GetSystemDirectoryA", "(", "buffer", ",", "300", ")", "dirs", ".", "insert", "(", "0", ",", "buffer", ".", "value", ".", "decode", "(", "'mbcs'", ")", ")", "except", ":", "pass", "for", "dir", "in", "dirs", ":", "try", ":", "pipe", "=", "os", ".", "popen", "(", "os", ".", "path", ".", "join", "(", "dir", ",", "'ipconfig'", ")", "+", "' /all'", ")", "except", "IOError", ":", "continue", "for", "line", "in", "pipe", ":", "value", "=", "line", ".", "split", "(", "':'", ")", "[", "-", "1", "]", ".", "strip", "(", ")", ".", "lower", "(", ")", "if", "re", ".", "match", "(", "'([0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]'", ",", "value", ")", ":", "return", "int", "(", "value", ".", "replace", "(", "'-'", ",", "''", ")", ",", "16", ")" ]
https://github.com/windmill/windmill/blob/994bd992b17f3f2d6f6b276fe17391fea08f32c3/windmill/dep/_uuid.py#L288-L307
aurora-opensource/xviz
bde0931a75a12f557867fa71e0a824c3f593f4fd
python/xviz_avs/builder/primitive.py
python
XVIZPrimitiveBuilder.image
(self, data)
return self
Add image data
Add image data
[ "Add", "image", "data" ]
def image(self, data): ''' Add image data ''' if self._type: self._flush() if not isinstance(data, (bytes, np.ndarray, str)): # TODO: support PILLOW and other image types # should save raw data and preserve mimetype? self._logger.error("An image data must be a string or numpy array") self._validate_prop_set_once("_image") self._type = PRIMITIVE_TYPES.IMAGE self._image = Image(data=data) return self
[ "def", "image", "(", "self", ",", "data", ")", ":", "if", "self", ".", "_type", ":", "self", ".", "_flush", "(", ")", "if", "not", "isinstance", "(", "data", ",", "(", "bytes", ",", "np", ".", "ndarray", ",", "str", ")", ")", ":", "# TODO: support PILLOW and other image types", "# should save raw data and preserve mimetype?", "self", ".", "_logger", ".", "error", "(", "\"An image data must be a string or numpy array\"", ")", "self", ".", "_validate_prop_set_once", "(", "\"_image\"", ")", "self", ".", "_type", "=", "PRIMITIVE_TYPES", ".", "IMAGE", "self", ".", "_image", "=", "Image", "(", "data", "=", "data", ")", "return", "self" ]
https://github.com/aurora-opensource/xviz/blob/bde0931a75a12f557867fa71e0a824c3f593f4fd/python/xviz_avs/builder/primitive.py#L21-L36
Southpaw-TACTIC/TACTIC
ba9b87aef0ee3b3ea51446f25b285ebbca06f62c
3rd_party/python3/site-packages/cherrypy-18.1.2/cherrypy/_cpnative_server.py
python
CPHTTPServer.__init__
(self, server_adapter=cherrypy.server)
Initialize CPHTTPServer.
Initialize CPHTTPServer.
[ "Initialize", "CPHTTPServer", "." ]
def __init__(self, server_adapter=cherrypy.server): """Initialize CPHTTPServer.""" self.server_adapter = server_adapter server_name = (self.server_adapter.socket_host or self.server_adapter.socket_file or None) cheroot.server.HTTPServer.__init__( self, server_adapter.bind_addr, NativeGateway, minthreads=server_adapter.thread_pool, maxthreads=server_adapter.thread_pool_max, server_name=server_name) self.max_request_header_size = ( self.server_adapter.max_request_header_size or 0) self.max_request_body_size = ( self.server_adapter.max_request_body_size or 0) self.request_queue_size = self.server_adapter.socket_queue_size self.timeout = self.server_adapter.socket_timeout self.shutdown_timeout = self.server_adapter.shutdown_timeout self.protocol = self.server_adapter.protocol_version self.nodelay = self.server_adapter.nodelay ssl_module = self.server_adapter.ssl_module or 'pyopenssl' if self.server_adapter.ssl_context: adapter_class = cheroot.server.get_ssl_adapter_class(ssl_module) self.ssl_adapter = adapter_class( self.server_adapter.ssl_certificate, self.server_adapter.ssl_private_key, self.server_adapter.ssl_certificate_chain, self.server_adapter.ssl_ciphers) self.ssl_adapter.context = self.server_adapter.ssl_context elif self.server_adapter.ssl_certificate: adapter_class = cheroot.server.get_ssl_adapter_class(ssl_module) self.ssl_adapter = adapter_class( self.server_adapter.ssl_certificate, self.server_adapter.ssl_private_key, self.server_adapter.ssl_certificate_chain, self.server_adapter.ssl_ciphers)
[ "def", "__init__", "(", "self", ",", "server_adapter", "=", "cherrypy", ".", "server", ")", ":", "self", ".", "server_adapter", "=", "server_adapter", "server_name", "=", "(", "self", ".", "server_adapter", ".", "socket_host", "or", "self", ".", "server_adapter", ".", "socket_file", "or", "None", ")", "cheroot", ".", "server", ".", "HTTPServer", ".", "__init__", "(", "self", ",", "server_adapter", ".", "bind_addr", ",", "NativeGateway", ",", "minthreads", "=", "server_adapter", ".", "thread_pool", ",", "maxthreads", "=", "server_adapter", ".", "thread_pool_max", ",", "server_name", "=", "server_name", ")", "self", ".", "max_request_header_size", "=", "(", "self", ".", "server_adapter", ".", "max_request_header_size", "or", "0", ")", "self", ".", "max_request_body_size", "=", "(", "self", ".", "server_adapter", ".", "max_request_body_size", "or", "0", ")", "self", ".", "request_queue_size", "=", "self", ".", "server_adapter", ".", "socket_queue_size", "self", ".", "timeout", "=", "self", ".", "server_adapter", ".", "socket_timeout", "self", ".", "shutdown_timeout", "=", "self", ".", "server_adapter", ".", "shutdown_timeout", "self", ".", "protocol", "=", "self", ".", "server_adapter", ".", "protocol_version", "self", ".", "nodelay", "=", "self", ".", "server_adapter", ".", "nodelay", "ssl_module", "=", "self", ".", "server_adapter", ".", "ssl_module", "or", "'pyopenssl'", "if", "self", ".", "server_adapter", ".", "ssl_context", ":", "adapter_class", "=", "cheroot", ".", "server", ".", "get_ssl_adapter_class", "(", "ssl_module", ")", "self", ".", "ssl_adapter", "=", "adapter_class", "(", "self", ".", "server_adapter", ".", "ssl_certificate", ",", "self", ".", "server_adapter", ".", "ssl_private_key", ",", "self", ".", "server_adapter", ".", "ssl_certificate_chain", ",", "self", ".", "server_adapter", ".", "ssl_ciphers", ")", "self", ".", "ssl_adapter", ".", "context", "=", "self", ".", "server_adapter", ".", "ssl_context", "elif", "self", ".", "server_adapter", ".", "ssl_certificate", ":", "adapter_class", "=", "cheroot", ".", "server", ".", "get_ssl_adapter_class", "(", "ssl_module", ")", "self", ".", "ssl_adapter", "=", "adapter_class", "(", "self", ".", "server_adapter", ".", "ssl_certificate", ",", "self", ".", "server_adapter", ".", "ssl_private_key", ",", "self", ".", "server_adapter", ".", "ssl_certificate_chain", ",", "self", ".", "server_adapter", ".", "ssl_ciphers", ")" ]
https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/3rd_party/python3/site-packages/cherrypy-18.1.2/cherrypy/_cpnative_server.py#L129-L168
odoo/odoo
8de8c196a137f4ebbf67d7c7c83fee36f873f5c8
addons/mass_mailing/models/mailing_trace.py
python
MailingTrace.set_opened
(self, domain=None)
return traces
Reply / Open are a bit shared in various processes: reply implies open, click implies open. Let us avoid status override by skipping traces that are not already opened or replied.
Reply / Open are a bit shared in various processes: reply implies open, click implies open. Let us avoid status override by skipping traces that are not already opened or replied.
[ "Reply", "/", "Open", "are", "a", "bit", "shared", "in", "various", "processes", ":", "reply", "implies", "open", "click", "implies", "open", ".", "Let", "us", "avoid", "status", "override", "by", "skipping", "traces", "that", "are", "not", "already", "opened", "or", "replied", "." ]
def set_opened(self, domain=None): """ Reply / Open are a bit shared in various processes: reply implies open, click implies open. Let us avoid status override by skipping traces that are not already opened or replied. """ traces = self + (self.search(domain) if domain else self.env['mailing.trace']) traces.filtered(lambda t: t.trace_status not in ('open', 'reply')).write({'trace_status': 'open', 'open_datetime': fields.Datetime.now()}) return traces
[ "def", "set_opened", "(", "self", ",", "domain", "=", "None", ")", ":", "traces", "=", "self", "+", "(", "self", ".", "search", "(", "domain", ")", "if", "domain", "else", "self", ".", "env", "[", "'mailing.trace'", "]", ")", "traces", ".", "filtered", "(", "lambda", "t", ":", "t", ".", "trace_status", "not", "in", "(", "'open'", ",", "'reply'", ")", ")", ".", "write", "(", "{", "'trace_status'", ":", "'open'", ",", "'open_datetime'", ":", "fields", ".", "Datetime", ".", "now", "(", ")", "}", ")", "return", "traces" ]
https://github.com/odoo/odoo/blob/8de8c196a137f4ebbf67d7c7c83fee36f873f5c8/addons/mass_mailing/models/mailing_trace.py#L133-L139
boltsparts/BOLTS
c4ff7d8e03c55342497bd9ef7c223c8bfcb22ce5
bolttools/common.py
python
Links.__init__
(self, N=None)
Create a new Links instance. N gives an upper bound for the number of destinations for a source, None represents infinity.
Create a new Links instance. N gives an upper bound for the number of destinations for a source, None represents infinity.
[ "Create", "a", "new", "Links", "instance", ".", "N", "gives", "an", "upper", "bound", "for", "the", "number", "of", "destinations", "for", "a", "source", "None", "represents", "infinity", "." ]
def __init__(self, N=None): """ Create a new Links instance. N gives an upper bound for the number of destinations for a source, None represents infinity. """ assert(N is None or N > 0) self.N = N self.srctodsts = {} self.dsttosrc = {}
[ "def", "__init__", "(", "self", ",", "N", "=", "None", ")", ":", "assert", "(", "N", "is", "None", "or", "N", ">", "0", ")", "self", ".", "N", "=", "N", "self", ".", "srctodsts", "=", "{", "}", "self", ".", "dsttosrc", "=", "{", "}" ]
https://github.com/boltsparts/BOLTS/blob/c4ff7d8e03c55342497bd9ef7c223c8bfcb22ce5/bolttools/common.py#L165-L173
korolr/dotfiles
8e46933503ecb8d8651739ffeb1d2d4f0f5c6524
.config/sublime-text-3/Backup/20170602095117/mdpopups/st3/mdpopups/st_scheme_template.py
python
Scheme2CSS.grayscale
(self, css)
return css
Apply grayscale filter.
Apply grayscale filter.
[ "Apply", "grayscale", "filter", "." ]
def grayscale(self, css): """Apply grayscale filter.""" parts = [c.strip('; ') for c in css.split(':')] if len(parts) == 2 and parts[0] in ('background-color', 'color'): rgba = RGBA(parts[1]) rgba.grayscale() parts[1] = "%s; " % rgba.get_rgb() return '%s: %s ' % (parts[0], parts[1]) return css
[ "def", "grayscale", "(", "self", ",", "css", ")", ":", "parts", "=", "[", "c", ".", "strip", "(", "'; '", ")", "for", "c", "in", "css", ".", "split", "(", "':'", ")", "]", "if", "len", "(", "parts", ")", "==", "2", "and", "parts", "[", "0", "]", "in", "(", "'background-color'", ",", "'color'", ")", ":", "rgba", "=", "RGBA", "(", "parts", "[", "1", "]", ")", "rgba", ".", "grayscale", "(", ")", "parts", "[", "1", "]", "=", "\"%s; \"", "%", "rgba", ".", "get_rgb", "(", ")", "return", "'%s: %s '", "%", "(", "parts", "[", "0", "]", ",", "parts", "[", "1", "]", ")", "return", "css" ]
https://github.com/korolr/dotfiles/blob/8e46933503ecb8d8651739ffeb1d2d4f0f5c6524/.config/sublime-text-3/Backup/20170602095117/mdpopups/st3/mdpopups/st_scheme_template.py#L446-L455
googleglass/mirror-quickstart-python
e34077bae91657170c305702471f5c249eb1b686
lib/httplib2/__init__.py
python
safename
(filename)
return ",".join((filename, filemd5))
Return a filename suitable for the cache. Strips dangerous and common characters to create a filename we can use to store the cache in.
Return a filename suitable for the cache.
[ "Return", "a", "filename", "suitable", "for", "the", "cache", "." ]
def safename(filename): """Return a filename suitable for the cache. Strips dangerous and common characters to create a filename we can use to store the cache in. """ try: if re_url_scheme.match(filename): if isinstance(filename,str): filename = filename.decode('utf-8') filename = filename.encode('idna') else: filename = filename.encode('idna') except UnicodeError: pass if isinstance(filename,unicode): filename=filename.encode('utf-8') filemd5 = _md5(filename).hexdigest() filename = re_url_scheme.sub("", filename) filename = re_slash.sub(",", filename) # limit length of filename if len(filename)>200: filename=filename[:200] return ",".join((filename, filemd5))
[ "def", "safename", "(", "filename", ")", ":", "try", ":", "if", "re_url_scheme", ".", "match", "(", "filename", ")", ":", "if", "isinstance", "(", "filename", ",", "str", ")", ":", "filename", "=", "filename", ".", "decode", "(", "'utf-8'", ")", "filename", "=", "filename", ".", "encode", "(", "'idna'", ")", "else", ":", "filename", "=", "filename", ".", "encode", "(", "'idna'", ")", "except", "UnicodeError", ":", "pass", "if", "isinstance", "(", "filename", ",", "unicode", ")", ":", "filename", "=", "filename", ".", "encode", "(", "'utf-8'", ")", "filemd5", "=", "_md5", "(", "filename", ")", ".", "hexdigest", "(", ")", "filename", "=", "re_url_scheme", ".", "sub", "(", "\"\"", ",", "filename", ")", "filename", "=", "re_slash", ".", "sub", "(", "\",\"", ",", "filename", ")", "# limit length of filename", "if", "len", "(", "filename", ")", ">", "200", ":", "filename", "=", "filename", "[", ":", "200", "]", "return", "\",\"", ".", "join", "(", "(", "filename", ",", "filemd5", ")", ")" ]
https://github.com/googleglass/mirror-quickstart-python/blob/e34077bae91657170c305702471f5c249eb1b686/lib/httplib2/__init__.py#L235-L260
hotosm/tasking-manager
1a7b02c6ccd431029a96d709d4d786c83cb37f5e
backend/models/postgis/project.py
python
Project.delete
(self)
Deletes the current model from the DB
Deletes the current model from the DB
[ "Deletes", "the", "current", "model", "from", "the", "DB" ]
def delete(self): """ Deletes the current model from the DB """ db.session.delete(self) db.session.commit()
[ "def", "delete", "(", "self", ")", ":", "db", ".", "session", ".", "delete", "(", "self", ")", "db", ".", "session", ".", "commit", "(", ")" ]
https://github.com/hotosm/tasking-manager/blob/1a7b02c6ccd431029a96d709d4d786c83cb37f5e/backend/models/postgis/project.py#L499-L502
UWFlow/rmc
00bcc1450ffbec3a6c8d956a2a5d1bb3a04bfcb9
models/user.py
python
User.add_course
(self, course_id, term_id, program_year_id=None)
return user_course
Creates a UserCourse and adds it to the user's course_history. Idempotent. Returns the resulting UserCourse.
Creates a UserCourse and adds it to the user's course_history.
[ "Creates", "a", "UserCourse", "and", "adds", "it", "to", "the", "user", "s", "course_history", "." ]
def add_course(self, course_id, term_id, program_year_id=None): """Creates a UserCourse and adds it to the user's course_history. Idempotent. Returns the resulting UserCourse. """ user_course = _user_course.UserCourse.objects( user_id=self.id, course_id=course_id).first() if user_course is None: if _course.Course.objects.with_id(course_id) is None: # Non-existant course according to our data rmclogger.log_event( rmclogger.LOG_CATEGORY_DATA_MODEL, rmclogger.LOG_EVENT_UNKNOWN_COURSE_ID, course_id ) return None user_course = _user_course.UserCourse( user_id=self.id, course_id=course_id, term_id=term_id, program_year_id=program_year_id, ) else: # Record only the latest attempt for duplicate/failed courses if (term_id > user_course.term_id or user_course.term_id == _term.Term.SHORTLIST_TERM_ID): user_course.term_id = term_id user_course.program_year_id = program_year_id user_course.save() if user_course.id not in self.course_history: self.course_history.append(user_course.id) self.save() return user_course
[ "def", "add_course", "(", "self", ",", "course_id", ",", "term_id", ",", "program_year_id", "=", "None", ")", ":", "user_course", "=", "_user_course", ".", "UserCourse", ".", "objects", "(", "user_id", "=", "self", ".", "id", ",", "course_id", "=", "course_id", ")", ".", "first", "(", ")", "if", "user_course", "is", "None", ":", "if", "_course", ".", "Course", ".", "objects", ".", "with_id", "(", "course_id", ")", "is", "None", ":", "# Non-existant course according to our data", "rmclogger", ".", "log_event", "(", "rmclogger", ".", "LOG_CATEGORY_DATA_MODEL", ",", "rmclogger", ".", "LOG_EVENT_UNKNOWN_COURSE_ID", ",", "course_id", ")", "return", "None", "user_course", "=", "_user_course", ".", "UserCourse", "(", "user_id", "=", "self", ".", "id", ",", "course_id", "=", "course_id", ",", "term_id", "=", "term_id", ",", "program_year_id", "=", "program_year_id", ",", ")", "else", ":", "# Record only the latest attempt for duplicate/failed courses", "if", "(", "term_id", ">", "user_course", ".", "term_id", "or", "user_course", ".", "term_id", "==", "_term", ".", "Term", ".", "SHORTLIST_TERM_ID", ")", ":", "user_course", ".", "term_id", "=", "term_id", "user_course", ".", "program_year_id", "=", "program_year_id", "user_course", ".", "save", "(", ")", "if", "user_course", ".", "id", "not", "in", "self", ".", "course_history", ":", "self", ".", "course_history", ".", "append", "(", "user_course", ".", "id", ")", "self", ".", "save", "(", ")", "return", "user_course" ]
https://github.com/UWFlow/rmc/blob/00bcc1450ffbec3a6c8d956a2a5d1bb3a04bfcb9/models/user.py#L516-L555
mozilla/spidernode
aafa9e5273f954f272bb4382fc007af14674b4c2
tools/cpplint.py
python
CheckPosixThreading
(filename, clean_lines, linenum, error)
Checks for calls to thread-unsafe functions. Much code has been originally written without consideration of multi-threading. Also, engineers are relying on their old experience; they have learned posix before threading extensions were added. These tests guide the engineers to use thread-safe functions (when using posix directly). Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found.
Checks for calls to thread-unsafe functions.
[ "Checks", "for", "calls", "to", "thread", "-", "unsafe", "functions", "." ]
def CheckPosixThreading(filename, clean_lines, linenum, error): """Checks for calls to thread-unsafe functions. Much code has been originally written without consideration of multi-threading. Also, engineers are relying on their old experience; they have learned posix before threading extensions were added. These tests guide the engineers to use thread-safe functions (when using posix directly). Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found. """ line = clean_lines.elided[linenum] for single_thread_func, multithread_safe_func, pattern in _THREADING_LIST: # Additional pattern matching check to confirm that this is the # function we are looking for if Search(pattern, line): error(filename, linenum, 'runtime/threadsafe_fn', 2, 'Consider using ' + multithread_safe_func + '...) instead of ' + single_thread_func + '...) for improved thread safety.')
[ "def", "CheckPosixThreading", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "for", "single_thread_func", ",", "multithread_safe_func", ",", "pattern", "in", "_THREADING_LIST", ":", "# Additional pattern matching check to confirm that this is the", "# function we are looking for", "if", "Search", "(", "pattern", ",", "line", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'runtime/threadsafe_fn'", ",", "2", ",", "'Consider using '", "+", "multithread_safe_func", "+", "'...) instead of '", "+", "single_thread_func", "+", "'...) for improved thread safety.'", ")" ]
https://github.com/mozilla/spidernode/blob/aafa9e5273f954f272bb4382fc007af14674b4c2/tools/cpplint.py#L1979-L2002
att/rcloud
392489d6ef88ece10f5999a832fb729da01116f2
rcloud.packages/rcloud.python/inst/python/rcloud_kernel.py
python
IPKernelApp.write_connection_file
(self)
write connection info to JSON file
write connection info to JSON file
[ "write", "connection", "info", "to", "JSON", "file" ]
def write_connection_file(self): """write connection info to JSON file""" cf = self.abs_connection_file self.log.debug("Writing connection file: %s", cf) write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport, shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port, iopub_port=self.iopub_port, control_port=self.control_port)
[ "def", "write_connection_file", "(", "self", ")", ":", "cf", "=", "self", ".", "abs_connection_file", "self", ".", "log", ".", "debug", "(", "\"Writing connection file: %s\"", ",", "cf", ")", "write_connection_file", "(", "cf", ",", "ip", "=", "self", ".", "ip", ",", "key", "=", "self", ".", "session", ".", "key", ",", "transport", "=", "self", ".", "transport", ",", "shell_port", "=", "self", ".", "shell_port", ",", "stdin_port", "=", "self", ".", "stdin_port", ",", "hb_port", "=", "self", ".", "hb_port", ",", "iopub_port", "=", "self", ".", "iopub_port", ",", "control_port", "=", "self", ".", "control_port", ")" ]
https://github.com/att/rcloud/blob/392489d6ef88ece10f5999a832fb729da01116f2/rcloud.packages/rcloud.python/inst/python/rcloud_kernel.py#L266-L272
ayojs/ayo
45a1c8cf6384f5bcc81d834343c3ed9d78b97df3
deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
python
NinjaWriter.GenerateDescription
(self, verb, message, fallback)
Generate and return a description of a build step. |verb| is the short summary, e.g. ACTION or RULE. |message| is a hand-written description, or None if not available. |fallback| is the gyp-level name of the step, usable as a fallback.
Generate and return a description of a build step.
[ "Generate", "and", "return", "a", "description", "of", "a", "build", "step", "." ]
def GenerateDescription(self, verb, message, fallback): """Generate and return a description of a build step. |verb| is the short summary, e.g. ACTION or RULE. |message| is a hand-written description, or None if not available. |fallback| is the gyp-level name of the step, usable as a fallback. """ if self.toolset != 'target': verb += '(%s)' % self.toolset if message: return '%s %s' % (verb, self.ExpandSpecial(message)) else: return '%s %s: %s' % (verb, self.name, fallback)
[ "def", "GenerateDescription", "(", "self", ",", "verb", ",", "message", ",", "fallback", ")", ":", "if", "self", ".", "toolset", "!=", "'target'", ":", "verb", "+=", "'(%s)'", "%", "self", ".", "toolset", "if", "message", ":", "return", "'%s %s'", "%", "(", "verb", ",", "self", ".", "ExpandSpecial", "(", "message", ")", ")", "else", ":", "return", "'%s %s: %s'", "%", "(", "verb", ",", "self", ".", "name", ",", "fallback", ")" ]
https://github.com/ayojs/ayo/blob/45a1c8cf6384f5bcc81d834343c3ed9d78b97df3/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py#L572-L584
mceSystems/node-jsc
90634f3064fab8e89a85b3942f0cc5054acc86fa
deps/v8/third_party/jinja2/visitor.py
python
NodeVisitor.generic_visit
(self, node, *args, **kwargs)
Called if no explicit visitor function exists for a node.
Called if no explicit visitor function exists for a node.
[ "Called", "if", "no", "explicit", "visitor", "function", "exists", "for", "a", "node", "." ]
def generic_visit(self, node, *args, **kwargs): """Called if no explicit visitor function exists for a node.""" for node in node.iter_child_nodes(): self.visit(node, *args, **kwargs)
[ "def", "generic_visit", "(", "self", ",", "node", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "for", "node", "in", "node", ".", "iter_child_nodes", "(", ")", ":", "self", ".", "visit", "(", "node", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/mceSystems/node-jsc/blob/90634f3064fab8e89a85b3942f0cc5054acc86fa/deps/v8/third_party/jinja2/visitor.py#L41-L44
replit-archive/jsrepl
36d79b6288ca5d26208e8bade2a168c6ebcb2376
extern/python/unclosured/lib/python2.7/inspect.py
python
getsourcefile
(object)
Return the filename that can be used to locate an object's source. Return None if no way can be identified to get the source.
Return the filename that can be used to locate an object's source. Return None if no way can be identified to get the source.
[ "Return", "the", "filename", "that", "can", "be", "used", "to", "locate", "an", "object", "s", "source", ".", "Return", "None", "if", "no", "way", "can", "be", "identified", "to", "get", "the", "source", "." ]
def getsourcefile(object): """Return the filename that can be used to locate an object's source. Return None if no way can be identified to get the source. """ filename = getfile(object) if string.lower(filename[-4:]) in ('.pyc', '.pyo'): filename = filename[:-4] + '.py' for suffix, mode, kind in imp.get_suffixes(): if 'b' in mode and string.lower(filename[-len(suffix):]) == suffix: # Looks like a binary file. We want to only return a text file. return None if os.path.exists(filename): return filename # only return a non-existent filename if the module has a PEP 302 loader if hasattr(getmodule(object, filename), '__loader__'): return filename # or it is in the linecache if filename in linecache.cache: return filename
[ "def", "getsourcefile", "(", "object", ")", ":", "filename", "=", "getfile", "(", "object", ")", "if", "string", ".", "lower", "(", "filename", "[", "-", "4", ":", "]", ")", "in", "(", "'.pyc'", ",", "'.pyo'", ")", ":", "filename", "=", "filename", "[", ":", "-", "4", "]", "+", "'.py'", "for", "suffix", ",", "mode", ",", "kind", "in", "imp", ".", "get_suffixes", "(", ")", ":", "if", "'b'", "in", "mode", "and", "string", ".", "lower", "(", "filename", "[", "-", "len", "(", "suffix", ")", ":", "]", ")", "==", "suffix", ":", "# Looks like a binary file. We want to only return a text file.", "return", "None", "if", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "return", "filename", "# only return a non-existent filename if the module has a PEP 302 loader", "if", "hasattr", "(", "getmodule", "(", "object", ",", "filename", ")", ",", "'__loader__'", ")", ":", "return", "filename", "# or it is in the linecache", "if", "filename", "in", "linecache", ".", "cache", ":", "return", "filename" ]
https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/unclosured/lib/python2.7/inspect.py#L442-L460
sbrshk/whatever
f7ba72effd6f836ca701ed889c747db804d5ea8f
node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
python
EscapeXcodeDefine
(s)
return re.sub(_xcode_define_re, r'\\\1', s)
We must escape the defines that we give to XCode so that it knows not to split on spaces and to respect backslash and quote literals. However, we must not quote the define, or Xcode will incorrectly intepret variables especially $(inherited).
We must escape the defines that we give to XCode so that it knows not to split on spaces and to respect backslash and quote literals. However, we must not quote the define, or Xcode will incorrectly intepret variables especially $(inherited).
[ "We", "must", "escape", "the", "defines", "that", "we", "give", "to", "XCode", "so", "that", "it", "knows", "not", "to", "split", "on", "spaces", "and", "to", "respect", "backslash", "and", "quote", "literals", ".", "However", "we", "must", "not", "quote", "the", "define", "or", "Xcode", "will", "incorrectly", "intepret", "variables", "especially", "$", "(", "inherited", ")", "." ]
def EscapeXcodeDefine(s): """We must escape the defines that we give to XCode so that it knows not to split on spaces and to respect backslash and quote literals. However, we must not quote the define, or Xcode will incorrectly intepret variables especially $(inherited).""" return re.sub(_xcode_define_re, r'\\\1', s)
[ "def", "EscapeXcodeDefine", "(", "s", ")", ":", "return", "re", ".", "sub", "(", "_xcode_define_re", ",", "r'\\\\\\1'", ",", "s", ")" ]
https://github.com/sbrshk/whatever/blob/f7ba72effd6f836ca701ed889c747db804d5ea8f/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py#L557-L562
googleglass/mirror-quickstart-python
e34077bae91657170c305702471f5c249eb1b686
lib/apiclient/http.py
python
BatchHttpRequest._deserialize_response
(self, payload)
return resp, content
Convert string into httplib2 response and content. Args: payload: string, headers and body as a string. Returns: A pair (resp, content), such as would be returned from httplib2.request.
Convert string into httplib2 response and content.
[ "Convert", "string", "into", "httplib2", "response", "and", "content", "." ]
def _deserialize_response(self, payload): """Convert string into httplib2 response and content. Args: payload: string, headers and body as a string. Returns: A pair (resp, content), such as would be returned from httplib2.request. """ # Strip off the status line status_line, payload = payload.split('\n', 1) protocol, status, reason = status_line.split(' ', 2) # Parse the rest of the response parser = FeedParser() parser.feed(payload) msg = parser.close() msg['status'] = status # Create httplib2.Response from the parsed headers. resp = httplib2.Response(msg) resp.reason = reason resp.version = int(protocol.split('/', 1)[1].replace('.', '')) content = payload.split('\r\n\r\n', 1)[1] return resp, content
[ "def", "_deserialize_response", "(", "self", ",", "payload", ")", ":", "# Strip off the status line", "status_line", ",", "payload", "=", "payload", ".", "split", "(", "'\\n'", ",", "1", ")", "protocol", ",", "status", ",", "reason", "=", "status_line", ".", "split", "(", "' '", ",", "2", ")", "# Parse the rest of the response", "parser", "=", "FeedParser", "(", ")", "parser", ".", "feed", "(", "payload", ")", "msg", "=", "parser", ".", "close", "(", ")", "msg", "[", "'status'", "]", "=", "status", "# Create httplib2.Response from the parsed headers.", "resp", "=", "httplib2", ".", "Response", "(", "msg", ")", "resp", ".", "reason", "=", "reason", "resp", ".", "version", "=", "int", "(", "protocol", ".", "split", "(", "'/'", ",", "1", ")", "[", "1", "]", ".", "replace", "(", "'.'", ",", "''", ")", ")", "content", "=", "payload", ".", "split", "(", "'\\r\\n\\r\\n'", ",", "1", ")", "[", "1", "]", "return", "resp", ",", "content" ]
https://github.com/googleglass/mirror-quickstart-python/blob/e34077bae91657170c305702471f5c249eb1b686/lib/apiclient/http.py#L1050-L1076
kresusapp/kresus
7b528816a633f7966af55e2b6f9febff4c1ff9b7
scripts/banks_list_generator.py
python
format_kresus
(backend, module, is_deprecated=False, module_loader=None)
return kresus_module
Export the bank module to kresus format name : module.description uuid: module.name backend: backend customFields: [ name: type: ]
Export the bank module to kresus format name : module.description uuid: module.name backend: backend customFields: [ name: type: ]
[ "Export", "the", "bank", "module", "to", "kresus", "format", "name", ":", "module", ".", "description", "uuid", ":", "module", ".", "name", "backend", ":", "backend", "customFields", ":", "[", "name", ":", "type", ":", "]" ]
def format_kresus(backend, module, is_deprecated=False, module_loader=None): ''' Export the bank module to kresus format name : module.description uuid: module.name backend: backend customFields: [ name: type: ] ''' kresus_module = { 'name': module.description, 'uuid': module.name, 'backend': backend, 'deprecated': is_deprecated } # If the module is deprecated, just dump it. if is_deprecated: return kresus_module fields = [] config = module.config.items() for key, value in config: # Kresus does not expect login and password to be part of the custom fields, it is then not necessary to add them to the file. if key in ('login', 'username', 'password'): continue # We don't want transient items (mainly used for 2FA). if isinstance(value, ValueTransient): continue optional = not value.required and key not in ['website', 'auth_type'] if optional and key in IGNORE_FIELDS_LIST: print_error('Skipping optional key "%s" for module "%s".' % (key, module.name)) continue field = { 'name': key } if optional: field['optional'] = True if value.choices: field['type'] = 'select' if value.default: field['default'] = value.default choices = [] try: for k, label in iteritems(value.choices): if module.name != 'banquepopulaire' or\ k not in BANQUE_POPULAIRE_DEPRECATED_WEBSITES: choices.append(dict(label=label, value=k)) except AttributeError: # Handle the case where the choices would not be a dict, but a list. for k in value.choices: if module.name != 'banquepopulaire' or\ k not in BANQUE_POPULAIRE_DEPRECATED_WEBSITES: choices.append(dict(label=k, value=k)) choices.sort(key=lambda choice: choice["value"]) field['values'] = choices else: if value.masked: field['type'] = 'password' else: field['type'] = 'text' if key in NEEDS_PLACEHOLDER: field['placeholderKey'] = 'client.settings.%s_placeholder' % key fields.append(field) if fields: fields.sort(key=lambda field: field["name"]) kresus_module['customFields'] = fields return kresus_module
[ "def", "format_kresus", "(", "backend", ",", "module", ",", "is_deprecated", "=", "False", ",", "module_loader", "=", "None", ")", ":", "kresus_module", "=", "{", "'name'", ":", "module", ".", "description", ",", "'uuid'", ":", "module", ".", "name", ",", "'backend'", ":", "backend", ",", "'deprecated'", ":", "is_deprecated", "}", "# If the module is deprecated, just dump it.", "if", "is_deprecated", ":", "return", "kresus_module", "fields", "=", "[", "]", "config", "=", "module", ".", "config", ".", "items", "(", ")", "for", "key", ",", "value", "in", "config", ":", "# Kresus does not expect login and password to be part of the custom fields, it is then not necessary to add them to the file.", "if", "key", "in", "(", "'login'", ",", "'username'", ",", "'password'", ")", ":", "continue", "# We don't want transient items (mainly used for 2FA).", "if", "isinstance", "(", "value", ",", "ValueTransient", ")", ":", "continue", "optional", "=", "not", "value", ".", "required", "and", "key", "not", "in", "[", "'website'", ",", "'auth_type'", "]", "if", "optional", "and", "key", "in", "IGNORE_FIELDS_LIST", ":", "print_error", "(", "'Skipping optional key \"%s\" for module \"%s\".'", "%", "(", "key", ",", "module", ".", "name", ")", ")", "continue", "field", "=", "{", "'name'", ":", "key", "}", "if", "optional", ":", "field", "[", "'optional'", "]", "=", "True", "if", "value", ".", "choices", ":", "field", "[", "'type'", "]", "=", "'select'", "if", "value", ".", "default", ":", "field", "[", "'default'", "]", "=", "value", ".", "default", "choices", "=", "[", "]", "try", ":", "for", "k", ",", "label", "in", "iteritems", "(", "value", ".", "choices", ")", ":", "if", "module", ".", "name", "!=", "'banquepopulaire'", "or", "k", "not", "in", "BANQUE_POPULAIRE_DEPRECATED_WEBSITES", ":", "choices", ".", "append", "(", "dict", "(", "label", "=", "label", ",", "value", "=", "k", ")", ")", "except", "AttributeError", ":", "# Handle the case where the choices would not be a dict, but a list.", "for", "k", "in", "value", ".", "choices", ":", "if", "module", ".", "name", "!=", "'banquepopulaire'", "or", "k", "not", "in", "BANQUE_POPULAIRE_DEPRECATED_WEBSITES", ":", "choices", ".", "append", "(", "dict", "(", "label", "=", "k", ",", "value", "=", "k", ")", ")", "choices", ".", "sort", "(", "key", "=", "lambda", "choice", ":", "choice", "[", "\"value\"", "]", ")", "field", "[", "'values'", "]", "=", "choices", "else", ":", "if", "value", ".", "masked", ":", "field", "[", "'type'", "]", "=", "'password'", "else", ":", "field", "[", "'type'", "]", "=", "'text'", "if", "key", "in", "NEEDS_PLACEHOLDER", ":", "field", "[", "'placeholderKey'", "]", "=", "'client.settings.%s_placeholder'", "%", "key", "fields", ".", "append", "(", "field", ")", "if", "fields", ":", "fields", ".", "sort", "(", "key", "=", "lambda", "field", ":", "field", "[", "\"name\"", "]", ")", "kresus_module", "[", "'customFields'", "]", "=", "fields", "return", "kresus_module" ]
https://github.com/kresusapp/kresus/blob/7b528816a633f7966af55e2b6f9febff4c1ff9b7/scripts/banks_list_generator.py#L89-L172
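For orientation, the dictionary returned for a non-deprecated module has roughly this shape; the module name, labels and field values below are invented for illustration, not taken from a real weboob module:

{
    'name': 'Example Bank',          # module.description
    'uuid': 'examplebank',           # module.name
    'backend': 'banks',              # the backend argument
    'deprecated': False,
    'customFields': [                # sorted by field name
        {'name': 'secret', 'type': 'password'},
        {'name': 'website', 'type': 'select', 'default': 'pro',
         'values': [{'label': 'Particuliers', 'value': 'par'},    # sorted by value
                    {'label': 'Professionnels', 'value': 'pro'}]},
    ],
}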
mceSystems/node-jsc
90634f3064fab8e89a85b3942f0cc5054acc86fa
deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
python
_SuffixName
(name, suffix)
return '#'.join(parts)
Add a suffix to the end of a target. Arguments: name: name of the target (foo#target) suffix: the suffix to be added Returns: Target name with suffix added (foo_suffix#target)
Add a suffix to the end of a target.
[ "Add", "a", "suffix", "to", "the", "end", "of", "a", "target", "." ]
def _SuffixName(name, suffix): """Add a suffix to the end of a target. Arguments: name: name of the target (foo#target) suffix: the suffix to be added Returns: Target name with suffix added (foo_suffix#target) """ parts = name.rsplit('#', 1) parts[0] = '%s_%s' % (parts[0], suffix) return '#'.join(parts)
[ "def", "_SuffixName", "(", "name", ",", "suffix", ")", ":", "parts", "=", "name", ".", "rsplit", "(", "'#'", ",", "1", ")", "parts", "[", "0", "]", "=", "'%s_%s'", "%", "(", "parts", "[", "0", "]", ",", "suffix", ")", "return", "'#'", ".", "join", "(", "parts", ")" ]
https://github.com/mceSystems/node-jsc/blob/90634f3064fab8e89a85b3942f0cc5054acc86fa/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py#L47-L58
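A quick check of the helper above on both forms of target name (illustrative calls only; results follow from the rsplit('#', 1) shown in the function):

_SuffixName('foo#target', 'shim')   # -> 'foo_shim#target'
_SuffixName('foo', 'shim')          # -> 'foo_shim'  (no '#' qualifier to preserve)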
Nexedi/erp5
44df1959c0e21576cf5e9803d602d95efb4b695b
product/ERP5Type/Tool/TypesTool.py
python
TypesTool.getPortalTypeClass
(self, context, temp=False)
Infer a portal type class from the context. Context can be a portal type string, or an object, or a class. This is the proper API to retrieve a portal type class, and no one should hack anything anywhere else.
Infer a portal type class from the context. Context can be a portal type string, or an object, or a class.
[ "Infer", "a", "portal", "type", "class", "from", "the", "context", ".", "Context", "can", "be", "a", "portal", "type", "string", "or", "an", "object", "or", "a", "class", "." ]
def getPortalTypeClass(self, context, temp=False): """ Infer a portal type class from the context. Context can be a portal type string, or an object, or a class. This is the proper API to retrieve a portal type class, and no one should hack anything anywhere else. """ portal_type = None if isinstance(context, type): if context.__module__ in ('erp5.portal_type', 'erp5.temp_portal_type'): portal_type = context.__name__ else: portal_type = getattr(context, 'portal_type', None) elif isinstance(context, str): portal_type = context else: portal_type = getattr(context, 'portal_type', None) if portal_type is not None: import erp5 if temp: module = erp5.temp_portal_type else: module = erp5.portal_type return getattr(module, portal_type, None)
[ "def", "getPortalTypeClass", "(", "self", ",", "context", ",", "temp", "=", "False", ")", ":", "portal_type", "=", "None", "if", "isinstance", "(", "context", ",", "type", ")", ":", "if", "context", ".", "__module__", "in", "(", "'erp5.portal_type'", ",", "'erp5.temp_portal_type'", ")", ":", "portal_type", "=", "context", ".", "__name__", "else", ":", "portal_type", "=", "getattr", "(", "context", ",", "'portal_type'", ",", "None", ")", "elif", "isinstance", "(", "context", ",", "str", ")", ":", "portal_type", "=", "context", "else", ":", "portal_type", "=", "getattr", "(", "context", ",", "'portal_type'", ",", "None", ")", "if", "portal_type", "is", "not", "None", ":", "import", "erp5", "if", "temp", ":", "module", "=", "erp5", ".", "temp_portal_type", "else", ":", "module", "=", "erp5", ".", "portal_type", "return", "getattr", "(", "module", ",", "portal_type", ",", "None", ")" ]
https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ERP5Type/Tool/TypesTool.py#L229-L254
Southpaw-TACTIC/TACTIC
ba9b87aef0ee3b3ea51446f25b285ebbca06f62c
src/tactic/ui/tools/repo_browser_wdg.py
python
RepoBrowserCbk.move_parent
(self, parent, relative_dir, snapshot=None)
Moves an asset and all related snapshots
Moves an asset and all related snapshots
[ "Moves", "an", "asset", "and", "all", "related", "snapshots" ]
def move_parent(self, parent, relative_dir, snapshot=None): '''Moves an asset and all related snapshots''' base_dir = Environment.get_asset_dir() search = Search("sthpw/snapshot") search.add_parent_filter(parent) if snapshot: context = snapshot.get_value("context") search.add_filter("context", context) search.add_order_by("version") snapshots = search.get_sobjects() # For each snapshot, update all files relative_dir. # Also record versionless to update latest after for-loop all_files = [] # find highest version highest_snapshot = {} highest_version = {} for snapshot in snapshots: # Record highest context = snapshot.get("context") version = snapshot.get_value("version") if version == -1: file_types = snapshot.get_all_file_types() for file_type in file_types: snapshot.remove_file(file_type) continue if version > highest_version.get(context): highest_version[context] = version highest_snapshot[context] = snapshot # move all of the files from this snapshot files = snapshot.get_all_file_objects() for file in files: # Get the old relative_dir file_relative_dir = file.get_value("relative_dir") file_name = file.get_value("file_name") # Build the paths and check if new path already exists old_path = "%s/%s/%s" % (base_dir, file_relative_dir, file_name) if not os.path.exists(old_path): print("WARNING: [%s] not found." % old_path) new_path = "%s/%s/%s" % (base_dir, relative_dir, file_name) if os.path.exists(new_path): raise Exception("[%s] already exists in [%s]." % (file_name, relative_dir)) file.set_value("relative_dir", relative_dir) file.commit() FileUndo.move(old_path, new_path) all_files.extend(files) # Some assumed behavior for this mode: # 1) all snapshots in this context exist in the same folder # and should remain so # 2) all sobjects have a column called {relative_dir} # This should all fail cleanly if these assumptions are not the # case unless the sobject has a column called "relative_dir" # used for some other purpose if parent.column_exists("relative_dir"): parent.set_value("relative_dir", relative_dir) parent.commit() # Update the versionless snapshot for snapshot in highest_snapshot.values(): snapshot.update_versionless("latest")
[ "def", "move_parent", "(", "self", ",", "parent", ",", "relative_dir", ",", "snapshot", "=", "None", ")", ":", "base_dir", "=", "Environment", ".", "get_asset_dir", "(", ")", "search", "=", "Search", "(", "\"sthpw/snapshot\"", ")", "search", ".", "add_parent_filter", "(", "parent", ")", "if", "snapshot", ":", "context", "=", "snapshot", ".", "get_value", "(", "\"context\"", ")", "search", ".", "add_filter", "(", "\"context\"", ",", "context", ")", "search", ".", "add_order_by", "(", "\"version\"", ")", "snapshots", "=", "search", ".", "get_sobjects", "(", ")", "# For each snapshot, update all files relative_dir.", "# Also record versionless to update latest after for-loop", "all_files", "=", "[", "]", "# find highest version", "highest_snapshot", "=", "{", "}", "highest_version", "=", "{", "}", "for", "snapshot", "in", "snapshots", ":", "# Record highest ", "context", "=", "snapshot", ".", "get", "(", "\"context\"", ")", "version", "=", "snapshot", ".", "get_value", "(", "\"version\"", ")", "if", "version", "==", "-", "1", ":", "file_types", "=", "snapshot", ".", "get_all_file_types", "(", ")", "for", "file_type", "in", "file_types", ":", "snapshot", ".", "remove_file", "(", "file_type", ")", "continue", "if", "version", ">", "highest_version", ".", "get", "(", "context", ")", ":", "highest_version", "[", "context", "]", "=", "version", "highest_snapshot", "[", "context", "]", "=", "snapshot", "# move all of the files from this snapshot", "files", "=", "snapshot", ".", "get_all_file_objects", "(", ")", "for", "file", "in", "files", ":", "# Get the old relative_dir", "file_relative_dir", "=", "file", ".", "get_value", "(", "\"relative_dir\"", ")", "file_name", "=", "file", ".", "get_value", "(", "\"file_name\"", ")", "# Build the paths and check if new path already exists", "old_path", "=", "\"%s/%s/%s\"", "%", "(", "base_dir", ",", "file_relative_dir", ",", "file_name", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "old_path", ")", ":", "print", "(", "\"WARNING: [%s] not found.\"", "%", "old_path", ")", "new_path", "=", "\"%s/%s/%s\"", "%", "(", "base_dir", ",", "relative_dir", ",", "file_name", ")", "if", "os", ".", "path", ".", "exists", "(", "new_path", ")", ":", "raise", "Exception", "(", "\"[%s] already exists in [%s].\"", "%", "(", "file_name", ",", "relative_dir", ")", ")", "file", ".", "set_value", "(", "\"relative_dir\"", ",", "relative_dir", ")", "file", ".", "commit", "(", ")", "FileUndo", ".", "move", "(", "old_path", ",", "new_path", ")", "all_files", ".", "extend", "(", "files", ")", "# Some assumed behavior for this mode:", "# 1) all snapshots in this context exist in the same folder", "# and should remain so", "# 2) all sobjects have a column called {relative_dir}", "# This should all fail cleanly if these assumptions are not the", "# case unless the sobject has a column called \"relative_dir\"", "# used for some other purpose", "if", "parent", ".", "column_exists", "(", "\"relative_dir\"", ")", ":", "parent", ".", "set_value", "(", "\"relative_dir\"", ",", "relative_dir", ")", "parent", ".", "commit", "(", ")", "# Update the versionless snapshot", "for", "snapshot", "in", "highest_snapshot", ".", "values", "(", ")", ":", "snapshot", ".", "update_versionless", "(", "\"latest\"", ")" ]
https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/src/tactic/ui/tools/repo_browser_wdg.py#L3235-L3307
clemesha/hotdot
83f4bfa7d80489d48b297fdda031a8733fcfc46e
djangoweb/registration/forms.py
python
RegistrationFormUniqueEmail.clean_email
(self)
return self.cleaned_data['email']
Validate that the supplied email address is unique for the site.
Validate that the supplied email address is unique for the site.
[ "Validate", "that", "the", "supplied", "email", "address", "is", "unique", "for", "the", "site", "." ]
def clean_email(self): """ Validate that the supplied email address is unique for the site. """ if User.objects.filter(email__iexact=self.cleaned_data['email']): raise forms.ValidationError(_(u'This email address is already in use. Please supply a different email address.')) return self.cleaned_data['email']
[ "def", "clean_email", "(", "self", ")", ":", "if", "User", ".", "objects", ".", "filter", "(", "email__iexact", "=", "self", ".", "cleaned_data", "[", "'email'", "]", ")", ":", "raise", "forms", ".", "ValidationError", "(", "_", "(", "u'This email address is already in use. Please supply a different email address.'", ")", ")", "return", "self", ".", "cleaned_data", "[", "'email'", "]" ]
https://github.com/clemesha/hotdot/blob/83f4bfa7d80489d48b297fdda031a8733fcfc46e/djangoweb/registration/forms.py#L116-L124
jam-py/jam-py
0821492cdff8665928e0f093a4435aa64285a45c
jam/third_party/sqlalchemy/engine/row.py
python
Row.keys
(self)
return [k for k in self._parent.keys if k is not None]
Return the list of keys as strings represented by this :class:`.Row`. This method is analogous to the Python dictionary ``.keys()`` method, except that it returns a list, not an iterator. .. seealso:: :attr:`.Row._fields` :attr:`.Row._mapping`
Return the list of keys as strings represented by this :class:`.Row`.
[ "Return", "the", "list", "of", "keys", "as", "strings", "represented", "by", "this", ":", "class", ":", ".", "Row", "." ]
def keys(self): """Return the list of keys as strings represented by this :class:`.Row`. This method is analogous to the Python dictionary ``.keys()`` method, except that it returns a list, not an iterator. .. seealso:: :attr:`.Row._fields` :attr:`.Row._mapping` """ return [k for k in self._parent.keys if k is not None]
[ "def", "keys", "(", "self", ")", ":", "return", "[", "k", "for", "k", "in", "self", ".", "_parent", ".", "keys", "if", "k", "is", "not", "None", "]" ]
https://github.com/jam-py/jam-py/blob/0821492cdff8665928e0f093a4435aa64285a45c/jam/third_party/sqlalchemy/engine/row.py#L231-L245
silklabs/silk
08c273949086350aeddd8e23e92f0f79243f446f
node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
python
MacTool.ExecPackageFramework
(self, framework, version)
Takes a path to Something.framework and the Current version of that and sets up all the symlinks.
Takes a path to Something.framework and the Current version of that and sets up all the symlinks.
[ "Takes", "a", "path", "to", "Something", ".", "framework", "and", "the", "Current", "version", "of", "that", "and", "sets", "up", "all", "the", "symlinks", "." ]
def ExecPackageFramework(self, framework, version): """Takes a path to Something.framework and the Current version of that and sets up all the symlinks.""" # Find the name of the binary based on the part before the ".framework". binary = os.path.basename(framework).split('.')[0] CURRENT = 'Current' RESOURCES = 'Resources' VERSIONS = 'Versions' if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): # Binary-less frameworks don't seem to contain symlinks (see e.g. # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). return # Move into the framework directory to set the symlinks correctly. pwd = os.getcwd() os.chdir(framework) # Set up the Current version. self._Relink(version, os.path.join(VERSIONS, CURRENT)) # Set up the root symlinks. self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) # Back to where we were before! os.chdir(pwd)
[ "def", "ExecPackageFramework", "(", "self", ",", "framework", ",", "version", ")", ":", "# Find the name of the binary based on the part before the \".framework\".", "binary", "=", "os", ".", "path", ".", "basename", "(", "framework", ")", ".", "split", "(", "'.'", ")", "[", "0", "]", "CURRENT", "=", "'Current'", "RESOURCES", "=", "'Resources'", "VERSIONS", "=", "'Versions'", "if", "not", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "framework", ",", "VERSIONS", ",", "version", ",", "binary", ")", ")", ":", "# Binary-less frameworks don't seem to contain symlinks (see e.g.", "# chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).", "return", "# Move into the framework directory to set the symlinks correctly.", "pwd", "=", "os", ".", "getcwd", "(", ")", "os", ".", "chdir", "(", "framework", ")", "# Set up the Current version.", "self", ".", "_Relink", "(", "version", ",", "os", ".", "path", ".", "join", "(", "VERSIONS", ",", "CURRENT", ")", ")", "# Set up the root symlinks.", "self", ".", "_Relink", "(", "os", ".", "path", ".", "join", "(", "VERSIONS", ",", "CURRENT", ",", "binary", ")", ",", "binary", ")", "self", ".", "_Relink", "(", "os", ".", "path", ".", "join", "(", "VERSIONS", ",", "CURRENT", ",", "RESOURCES", ")", ",", "RESOURCES", ")", "# Back to where we were before!", "os", ".", "chdir", "(", "pwd", ")" ]
https://github.com/silklabs/silk/blob/08c273949086350aeddd8e23e92f0f79243f446f/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py#L256-L283
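A sketch of the layout the method leaves behind for a framework packaged as version 'A' (paths illustrative, derived from the three _Relink calls above):

# Something.framework/
#   Versions/A/Something          (real binary, already present)
#   Versions/A/Resources          (already present)
#   Versions/Current -> A         (symlink created first)
#   Something  -> Versions/Current/Something
#   Resources  -> Versions/Current/Resources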
Opentrons/opentrons
466e0567065d8773a81c25cd1b5c7998e00adf2c
api/src/opentrons/hardware_control/api.py
python
API._critical_point_for
( self, mount: top_types.Mount, cp_override: CriticalPoint = None )
Return the current critical point of the specified mount. The mount's critical point is the position of the mount itself, if no pipette is attached, or the pipette's critical point (which depends on tip status). If `cp_override` is specified, and that critical point actually exists, it will be used instead. Invalid `cp_override`s are ignored.
Return the current critical point of the specified mount.
[ "Return", "the", "current", "critical", "point", "of", "the", "specified", "mount", "." ]
def _critical_point_for( self, mount: top_types.Mount, cp_override: CriticalPoint = None ) -> top_types.Point: """Return the current critical point of the specified mount. The mount's critical point is the position of the mount itself, if no pipette is attached, or the pipette's critical point (which depends on tip status). If `cp_override` is specified, and that critical point actually exists, it will be used instead. Invalid `cp_override`s are ignored. """ pip = self._attached_instruments[mount] if pip is not None and cp_override != CriticalPoint.MOUNT: return pip.critical_point(cp_override) else: # This offset is required because the motor driver coordinate system is # configured such that the end of a p300 single gen1's tip is 0. return top_types.Point(0, 0, 30)
[ "def", "_critical_point_for", "(", "self", ",", "mount", ":", "top_types", ".", "Mount", ",", "cp_override", ":", "CriticalPoint", "=", "None", ")", "->", "top_types", ".", "Point", ":", "pip", "=", "self", ".", "_attached_instruments", "[", "mount", "]", "if", "pip", "is", "not", "None", "and", "cp_override", "!=", "CriticalPoint", ".", "MOUNT", ":", "return", "pip", ".", "critical_point", "(", "cp_override", ")", "else", ":", "# This offset is required because the motor driver coordinate system is", "# configured such that the end of a p300 single gen1's tip is 0.", "return", "top_types", ".", "Point", "(", "0", ",", "0", ",", "30", ")" ]
https://github.com/Opentrons/opentrons/blob/466e0567065d8773a81c25cd1b5c7998e00adf2c/api/src/opentrons/hardware_control/api.py#L1333-L1351
nodejs/node-convergence-archive
e11fe0c2777561827cdb7207d46b0917ef3c42a7
deps/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
python
WinTool.ExecRecursiveMirror
(self, source, dest)
Emulation of rm -rf out && cp -af in out.
Emulation of rm -rf out && cp -af in out.
[ "Emulation", "of", "rm", "-", "rf", "out", "&&", "cp", "-", "af", "in", "out", "." ]
def ExecRecursiveMirror(self, source, dest): """Emulation of rm -rf out && cp -af in out.""" if os.path.exists(dest): if os.path.isdir(dest): shutil.rmtree(dest) else: os.unlink(dest) if os.path.isdir(source): shutil.copytree(source, dest) else: shutil.copy2(source, dest)
[ "def", "ExecRecursiveMirror", "(", "self", ",", "source", ",", "dest", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "dest", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "dest", ")", ":", "shutil", ".", "rmtree", "(", "dest", ")", "else", ":", "os", ".", "unlink", "(", "dest", ")", "if", "os", ".", "path", ".", "isdir", "(", "source", ")", ":", "shutil", ".", "copytree", "(", "source", ",", "dest", ")", "else", ":", "shutil", ".", "copy2", "(", "source", ",", "dest", ")" ]
https://github.com/nodejs/node-convergence-archive/blob/e11fe0c2777561827cdb7207d46b0917ef3c42a7/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py#L87-L97
booktype/Booktype
a1fe45e873ca4381b0eee8ea1b2ee3489b8fbbc2
lib/booktype/convert/epub/writerplugins/base_writerplugin.py
python
WriterPlugin._fix_text
(self, root)
Find text in the chapter's root which is not covered with tags and cover it with "p" :param root: lxml node tree with the chapter content
Find text in the chapter's root which is not covered with tags and cover it with "p" :param root: lxml node tree with the chapter content
[ "Find", "text", "in", "the", "chapter", "s", "root", "which", "is", "not", "covered", "with", "tags", "and", "cover", "it", "with", "p", ":", "param", "root", ":", "lxml", "node", "tree", "with", "the", "chapter", "content" ]
def _fix_text(self, root): """ Find text in the chapter's root which is not covered with tags and cover it with "p" :param root: lxml node tree with the chapter content """ for element in root.xpath('//body')[0].getchildren(): if element.tail: if len(element.tail.strip()): p = etree.Element("p") p.text = element.tail element.tail = None element.addnext(p) else: element.tail = None
[ "def", "_fix_text", "(", "self", ",", "root", ")", ":", "for", "element", "in", "root", ".", "xpath", "(", "'//body'", ")", "[", "0", "]", ".", "getchildren", "(", ")", ":", "if", "element", ".", "tail", ":", "if", "len", "(", "element", ".", "tail", ".", "strip", "(", ")", ")", ":", "p", "=", "etree", ".", "Element", "(", "\"p\"", ")", "p", ".", "text", "=", "element", ".", "tail", "element", ".", "tail", "=", "None", "element", ".", "addnext", "(", "p", ")", "else", ":", "element", ".", "tail", "=", "None" ]
https://github.com/booktype/Booktype/blob/a1fe45e873ca4381b0eee8ea1b2ee3489b8fbbc2/lib/booktype/convert/epub/writerplugins/base_writerplugin.py#L63-L76
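The tail-text behaviour above is easier to see on a tiny document; a minimal standalone sketch using lxml directly, independent of the Booktype plugin:

from lxml import etree

root = etree.fromstring('<html><body><h1>Title</h1>stray text<p>kept</p></body></html>')

# Text that follows a closing tag lives in the previous element's .tail,
# so it belongs to no element of its own until we wrap it in a <p>.
for element in root.xpath('//body')[0].getchildren():
    if element.tail and element.tail.strip():
        p = etree.Element('p')
        p.text = element.tail
        element.tail = None
        element.addnext(p)
    elif element.tail:
        element.tail = None

print(etree.tostring(root))
# b'<html><body><h1>Title</h1><p>stray text</p><p>kept</p></body></html>'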
mozilla/django-browserid
c9583c096a12d638f857032af84aec3bea83e415
django_browserid/views.py
python
_get_next
(request)
Get the next parameter from the request's POST arguments and validate it. :returns: The next parameter or None if it was not found or invalid.
Get the next parameter from the request's POST arguments and validate it.
[ "Get", "the", "next", "parameter", "from", "the", "request", "s", "POST", "arguments", "and", "validate", "it", "." ]
def _get_next(request): """ Get the next parameter from the request's POST arguments and validate it. :returns: The next parameter or None if it was not found or invalid. """ next = request.POST.get('next') if is_safe_url(next, host=request.get_host()): return next else: return None
[ "def", "_get_next", "(", "request", ")", ":", "next", "=", "request", ".", "POST", ".", "get", "(", "'next'", ")", "if", "is_safe_url", "(", "next", ",", "host", "=", "request", ".", "get_host", "(", ")", ")", ":", "return", "next", "else", ":", "return", "None" ]
https://github.com/mozilla/django-browserid/blob/c9583c096a12d638f857032af84aec3bea83e415/django_browserid/views.py#L30-L42
replit-archive/jsrepl
36d79b6288ca5d26208e8bade2a168c6ebcb2376
extern/python/closured/lib/python2.7/_pyio.py
python
BufferedIOBase.read1
(self, n=None)
Read up to n bytes with at most one read() system call.
Read up to n bytes with at most one read() system call.
[ "Read", "up", "to", "n", "bytes", "with", "at", "most", "one", "read", "()", "system", "call", "." ]
def read1(self, n=None): """Read up to n bytes with at most one read() system call.""" self._unsupported("read1")
[ "def", "read1", "(", "self", ",", "n", "=", "None", ")", ":", "self", ".", "_unsupported", "(", "\"read1\"", ")" ]
https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/closured/lib/python2.7/_pyio.py#L626-L628
Opentrons/opentrons
466e0567065d8773a81c25cd1b5c7998e00adf2c
api/docs/v1/api_cache/robot.py
python
Robot.pause
(self, msg=None)
Pauses execution of the protocol. Use :meth:`resume` to resume
Pauses execution of the protocol. Use :meth:`resume` to resume
[ "Pauses", "execution", "of", "the", "protocol", ".", "Use", ":", "meth", ":", "resume", "to", "resume" ]
def pause(self, msg=None): """ Pauses execution of the protocol. Use :meth:`resume` to resume """ pass
[ "def", "pause", "(", "self", ",", "msg", "=", "None", ")", ":", "pass" ]
https://github.com/Opentrons/opentrons/blob/466e0567065d8773a81c25cd1b5c7998e00adf2c/api/docs/v1/api_cache/robot.py#L195-L199
odoo/odoo
8de8c196a137f4ebbf67d7c7c83fee36f873f5c8
addons/base_vat/models/res_partner.py
python
ResPartner.check_vat_ch
(self, vat)
return False
Check Switzerland VAT number.
Check Switzerland VAT number.
[ "Check", "Switzerland", "VAT", "number", "." ]
def check_vat_ch(self, vat): ''' Check Switzerland VAT number. ''' # A new VAT number format in Switzerland has been introduced between 2011 and 2013 # https://www.estv.admin.ch/estv/fr/home/mehrwertsteuer/fachinformationen/steuerpflicht/unternehmens-identifikationsnummer--uid-.html # The old format "TVA 123456" is not valid since 2014 # Accepted format are: (spaces are ignored) # CHE#########MWST # CHE#########TVA # CHE#########IVA # CHE-###.###.### MWST # CHE-###.###.### TVA # CHE-###.###.### IVA # # /!\ The english abbreviation VAT is not valid /!\ match = self.__check_vat_ch_re.match(vat) if match: # For new TVA numbers, the last digit is a MOD11 checksum digit build with weighting pattern: 5,4,3,2,7,6,5,4 num = [s for s in match.group(1) if s.isdigit()] # get the digits only factor = (5, 4, 3, 2, 7, 6, 5, 4) csum = sum([int(num[i]) * factor[i] for i in range(8)]) check = (11 - (csum % 11)) % 11 return check == int(num[8]) return False
[ "def", "check_vat_ch", "(", "self", ",", "vat", ")", ":", "# A new VAT number format in Switzerland has been introduced between 2011 and 2013", "# https://www.estv.admin.ch/estv/fr/home/mehrwertsteuer/fachinformationen/steuerpflicht/unternehmens-identifikationsnummer--uid-.html", "# The old format \"TVA 123456\" is not valid since 2014", "# Accepted format are: (spaces are ignored)", "# CHE#########MWST", "# CHE#########TVA", "# CHE#########IVA", "# CHE-###.###.### MWST", "# CHE-###.###.### TVA", "# CHE-###.###.### IVA", "#", "# /!\\ The english abbreviation VAT is not valid /!\\", "match", "=", "self", ".", "__check_vat_ch_re", ".", "match", "(", "vat", ")", "if", "match", ":", "# For new TVA numbers, the last digit is a MOD11 checksum digit build with weighting pattern: 5,4,3,2,7,6,5,4", "num", "=", "[", "s", "for", "s", "in", "match", ".", "group", "(", "1", ")", "if", "s", ".", "isdigit", "(", ")", "]", "# get the digits only", "factor", "=", "(", "5", ",", "4", ",", "3", ",", "2", ",", "7", ",", "6", ",", "5", ",", "4", ")", "csum", "=", "sum", "(", "[", "int", "(", "num", "[", "i", "]", ")", "*", "factor", "[", "i", "]", "for", "i", "in", "range", "(", "8", ")", "]", ")", "check", "=", "(", "11", "-", "(", "csum", "%", "11", ")", ")", "%", "11", "return", "check", "==", "int", "(", "num", "[", "8", "]", ")", "return", "False" ]
https://github.com/odoo/odoo/blob/8de8c196a137f4ebbf67d7c7c83fee36f873f5c8/addons/base_vat/models/res_partner.py#L243-L269
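The MOD11 arithmetic in the comments can be checked in isolation; a minimal standalone sketch of just the checksum step (the digit string below is made up for illustration and is not a real Swiss UID/VAT number):

def che_mod11_check(digits):
    # 'digits' is the 9-digit block after the CHE prefix; the 9th digit must equal
    # the MOD11 check digit computed over the first 8 with weights 5,4,3,2,7,6,5,4.
    factor = (5, 4, 3, 2, 7, 6, 5, 4)
    csum = sum(int(digits[i]) * factor[i] for i in range(8))
    return (11 - (csum % 11)) % 11 == int(digits[8])

che_mod11_check('123456788')   # True: weighted sum 168, 168 % 11 = 3, check digit (11 - 3) % 11 = 8
che_mod11_check('123456780')   # False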
p2k/pygowave-legacy
60329cff7d7c626004b9b9280f0058d3903fcef1
pygowave_client/views.py
python
view_combined
(request)
return response
Return a concatenation of all pygowave_client scripts.
Return a concatenation of all pygowave_client scripts.
[ "Return", "a", "concatenation", "of", "all", "pygowave_client", "scripts", "." ]
def view_combined(request): """ Return a concatenation of all pygowave_client scripts. """ # Compile all changed = False for package, modules in STATIC_LOAD_ORDER: for module in modules: namespace = "pygowave.%s" % (package) outfile, result, mtime = compile_and_cache(package, module, namespace) if result == "error": return HttpResponse(mtime, mimetype="text/javascript") elif result == "changed": changed = True outfile = CACHE_FOLDER + "pygowave_client_combined.js" # Combine if changed or not os.path.exists(outfile): cf = open(outfile, 'w') first = True for package, modules in STATIC_LOAD_ORDER: for module in modules: modulecachefile = CACHE_FOLDER + package + os.path.sep + module + ".js" mcf = open(modulecachefile, 'r') infoline = "/* --- pygowave.%s.%s --- */\n\n" % (package, module) if first: first = False # Leave license information; stop after it line = mcf.readline() while (line.startswith("/*") and not line.startswith("/**")) or line.startswith(" *") or line == "\n": cf.write(line) line = mcf.readline() cf.write(infoline) cf.write(line) else: # Strip license information line = mcf.readline() while (line.startswith("/*") and not line.startswith("/**")) or line.startswith(" *") or line == "\n": line = mcf.readline() cf.write("\n" + infoline) cf.write(line) cf.write(mcf.read()) mcf.close() cf.close() # Handle If-Modified-Since mtime = datetime.utcfromtimestamp(os.path.getmtime(outfile)) if request.META.has_key("HTTP_IF_MODIFIED_SINCE"): try: mstime = datetime.strptime(request.META["HTTP_IF_MODIFIED_SINCE"], RFC_1123_DATETIME) if mtime <= mstime: return HttpResponseNotModified() except ValueError: pass response = HttpResponse(mimetype="text/javascript") response["Last-Modified"] = mtime.strftime(RFC_1123_DATETIME) # Support for gzip if "gzip" in request.META["HTTP_ACCEPT_ENCODING"].split(","): if not os.path.exists(outfile + ".gz") or os.path.getmtime(outfile) > os.path.getmtime(outfile + ".gz"): gzip.open(outfile + ".gz", 'wb').write(open(outfile, 'r').read()) outfile = outfile + ".gz" response["Content-Encoding"] = "gzip" response.write(open(outfile, "r").read()) return response
[ "def", "view_combined", "(", "request", ")", ":", "# Compile all", "changed", "=", "False", "for", "package", ",", "modules", "in", "STATIC_LOAD_ORDER", ":", "for", "module", "in", "modules", ":", "namespace", "=", "\"pygowave.%s\"", "%", "(", "package", ")", "outfile", ",", "result", ",", "mtime", "=", "compile_and_cache", "(", "package", ",", "module", ",", "namespace", ")", "if", "result", "==", "\"error\"", ":", "return", "HttpResponse", "(", "mtime", ",", "mimetype", "=", "\"text/javascript\"", ")", "elif", "result", "==", "\"changed\"", ":", "changed", "=", "True", "outfile", "=", "CACHE_FOLDER", "+", "\"pygowave_client_combined.js\"", "# Combine", "if", "changed", "or", "not", "os", ".", "path", ".", "exists", "(", "outfile", ")", ":", "cf", "=", "open", "(", "outfile", ",", "'w'", ")", "first", "=", "True", "for", "package", ",", "modules", "in", "STATIC_LOAD_ORDER", ":", "for", "module", "in", "modules", ":", "modulecachefile", "=", "CACHE_FOLDER", "+", "package", "+", "os", ".", "path", ".", "sep", "+", "module", "+", "\".js\"", "mcf", "=", "open", "(", "modulecachefile", ",", "'r'", ")", "infoline", "=", "\"/* --- pygowave.%s.%s --- */\\n\\n\"", "%", "(", "package", ",", "module", ")", "if", "first", ":", "first", "=", "False", "# Leave license information; stop after it", "line", "=", "mcf", ".", "readline", "(", ")", "while", "(", "line", ".", "startswith", "(", "\"/*\"", ")", "and", "not", "line", ".", "startswith", "(", "\"/**\"", ")", ")", "or", "line", ".", "startswith", "(", "\" *\"", ")", "or", "line", "==", "\"\\n\"", ":", "cf", ".", "write", "(", "line", ")", "line", "=", "mcf", ".", "readline", "(", ")", "cf", ".", "write", "(", "infoline", ")", "cf", ".", "write", "(", "line", ")", "else", ":", "# Strip license information", "line", "=", "mcf", ".", "readline", "(", ")", "while", "(", "line", ".", "startswith", "(", "\"/*\"", ")", "and", "not", "line", ".", "startswith", "(", "\"/**\"", ")", ")", "or", "line", ".", "startswith", "(", "\" *\"", ")", "or", "line", "==", "\"\\n\"", ":", "line", "=", "mcf", ".", "readline", "(", ")", "cf", ".", "write", "(", "\"\\n\"", "+", "infoline", ")", "cf", ".", "write", "(", "line", ")", "cf", ".", "write", "(", "mcf", ".", "read", "(", ")", ")", "mcf", ".", "close", "(", ")", "cf", ".", "close", "(", ")", "# Handle If-Modified-Since", "mtime", "=", "datetime", ".", "utcfromtimestamp", "(", "os", ".", "path", ".", "getmtime", "(", "outfile", ")", ")", "if", "request", ".", "META", ".", "has_key", "(", "\"HTTP_IF_MODIFIED_SINCE\"", ")", ":", "try", ":", "mstime", "=", "datetime", ".", "strptime", "(", "request", ".", "META", "[", "\"HTTP_IF_MODIFIED_SINCE\"", "]", ",", "RFC_1123_DATETIME", ")", "if", "mtime", "<=", "mstime", ":", "return", "HttpResponseNotModified", "(", ")", "except", "ValueError", ":", "pass", "response", "=", "HttpResponse", "(", "mimetype", "=", "\"text/javascript\"", ")", "response", "[", "\"Last-Modified\"", "]", "=", "mtime", ".", "strftime", "(", "RFC_1123_DATETIME", ")", "# Support for gzip", "if", "\"gzip\"", "in", "request", ".", "META", "[", "\"HTTP_ACCEPT_ENCODING\"", "]", ".", "split", "(", "\",\"", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "outfile", "+", "\".gz\"", ")", "or", "os", ".", "path", ".", "getmtime", "(", "outfile", ")", ">", "os", ".", "path", ".", "getmtime", "(", "outfile", "+", "\".gz\"", ")", ":", "gzip", ".", "open", "(", "outfile", "+", "\".gz\"", ",", "'wb'", ")", ".", "write", "(", "open", "(", "outfile", ",", "'r'", ")", ".", "read", "(", ")", ")", 
"outfile", "=", "outfile", "+", "\".gz\"", "response", "[", "\"Content-Encoding\"", "]", "=", "\"gzip\"", "response", ".", "write", "(", "open", "(", "outfile", ",", "\"r\"", ")", ".", "read", "(", ")", ")", "return", "response" ]
https://github.com/p2k/pygowave-legacy/blob/60329cff7d7c626004b9b9280f0058d3903fcef1/pygowave_client/views.py#L110-L182
Jumpscale/go-raml
f151e1e143c47282b294fe70c5e56f113988ed10
docs/tutorial/python/sanic/types/client_support.py
python
UUIDHandler.flatten
(cls, obj)
return str(obj)
flatten
flatten
[ "flatten" ]
def flatten(cls, obj): """flatten""" return str(obj)
[ "def", "flatten", "(", "cls", ",", "obj", ")", ":", "return", "str", "(", "obj", ")" ]
https://github.com/Jumpscale/go-raml/blob/f151e1e143c47282b294fe70c5e56f113988ed10/docs/tutorial/python/sanic/types/client_support.py#L199-L201
idank/showthedocs
b432aba098662f9924e853e3a97373f29e508cb5
showdocs/repos/manager.py
python
RepositoryManager.generate
(self)
generates all repos. Each repository's output files end up under self.outdir/repo.name. Overwrites existing files.
generates all repos. Each repository's output files end up under self.outdir/repo.name. Overwrites existing files.
[ "generates", "all", "repos", ".", "Each", "repository", "s", "output", "files", "end", "up", "under", "self", ".", "outdir", "/", "repo", ".", "name", ".", "Overwrites", "existing", "files", "." ]
def generate(self): '''generates all repos. Each repository's output files end up under self.outdir/repo.name. Overwrites existing files.''' for repocls in self.reposcls: repostagingdir = os.path.join(self.stagingdir, repocls.name) if not os.path.exists(repostagingdir): os.mkdir(repostagingdir) repo = repocls(repostagingdir) repo.build() files = list(repo.files()) if not len(files): raise errors.RepoBuildError('repo %r build returned no files' % repocls.name) repo.filter() repo.clean() repooutdir = os.path.join(self.outdir, repocls.name) if not os.path.exists(repooutdir): os.mkdir(repooutdir) for f in files: relpath = os.path.relpath(f, repo.stagingdir) outpath = repo.outputpath(relpath) destpath = os.path.join(self.outdir, repo.name, outpath) logger.info('copying staging file %r -> %r', f, destpath) dirname = os.path.dirname(destpath) if not os.path.exists(dirname): os.makedirs(dirname) shutil.copyfile(f, destpath)
[ "def", "generate", "(", "self", ")", ":", "for", "repocls", "in", "self", ".", "reposcls", ":", "repostagingdir", "=", "os", ".", "path", ".", "join", "(", "self", ".", "stagingdir", ",", "repocls", ".", "name", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "repostagingdir", ")", ":", "os", ".", "mkdir", "(", "repostagingdir", ")", "repo", "=", "repocls", "(", "repostagingdir", ")", "repo", ".", "build", "(", ")", "files", "=", "list", "(", "repo", ".", "files", "(", ")", ")", "if", "not", "len", "(", "files", ")", ":", "raise", "errors", ".", "RepoBuildError", "(", "'repo %r build returned no files'", "%", "repocls", ".", "name", ")", "repo", ".", "filter", "(", ")", "repo", ".", "clean", "(", ")", "repooutdir", "=", "os", ".", "path", ".", "join", "(", "self", ".", "outdir", ",", "repocls", ".", "name", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "repooutdir", ")", ":", "os", ".", "mkdir", "(", "repooutdir", ")", "for", "f", "in", "files", ":", "relpath", "=", "os", ".", "path", ".", "relpath", "(", "f", ",", "repo", ".", "stagingdir", ")", "outpath", "=", "repo", ".", "outputpath", "(", "relpath", ")", "destpath", "=", "os", ".", "path", ".", "join", "(", "self", ".", "outdir", ",", "repo", ".", "name", ",", "outpath", ")", "logger", ".", "info", "(", "'copying staging file %r -> %r'", ",", "f", ",", "destpath", ")", "dirname", "=", "os", ".", "path", ".", "dirname", "(", "destpath", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "dirname", ")", ":", "os", ".", "makedirs", "(", "dirname", ")", "shutil", ".", "copyfile", "(", "f", ",", "destpath", ")" ]
https://github.com/idank/showthedocs/blob/b432aba098662f9924e853e3a97373f29e508cb5/showdocs/repos/manager.py#L27-L57
XRPLF/xrpl-dev-portal
4a75fe3579fa7938acf4fc3fe48fc01c8eb5a38c
content/_code-samples/tx-serialization/serialize.py
python
field_sort_key
(field_name)
return (DEFINITIONS["TYPES"][field_type_name], DEFINITIONS["FIELDS"][field_name]["nth"])
Return a tuple sort key for a given field name
Return a tuple sort key for a given field name
[ "Return", "a", "tuple", "sort", "key", "for", "a", "given", "field", "name" ]
def field_sort_key(field_name): """Return a tuple sort key for a given field name""" field_type_name = DEFINITIONS["FIELDS"][field_name]["type"] return (DEFINITIONS["TYPES"][field_type_name], DEFINITIONS["FIELDS"][field_name]["nth"])
[ "def", "field_sort_key", "(", "field_name", ")", ":", "field_type_name", "=", "DEFINITIONS", "[", "\"FIELDS\"", "]", "[", "field_name", "]", "[", "\"type\"", "]", "return", "(", "DEFINITIONS", "[", "\"TYPES\"", "]", "[", "field_type_name", "]", ",", "DEFINITIONS", "[", "\"FIELDS\"", "]", "[", "field_name", "]", "[", "\"nth\"", "]", ")" ]
https://github.com/XRPLF/xrpl-dev-portal/blob/4a75fe3579fa7938acf4fc3fe48fc01c8eb5a38c/content/_code-samples/tx-serialization/serialize.py#L46-L49
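The sort key above orders fields by type code first, then field code ("nth"); a sketch with a made-up slice of DEFINITIONS (the real values are loaded from definitions.json elsewhere in the sample):

# Hypothetical excerpt; actual codes come from definitions.json.
DEFINITIONS = {
    "TYPES": {"UInt32": 2, "Amount": 6, "AccountID": 8},
    "FIELDS": {
        "Sequence":    {"type": "UInt32",    "nth": 4},
        "Fee":         {"type": "Amount",    "nth": 8},
        "Account":     {"type": "AccountID", "nth": 1},
        "Destination": {"type": "AccountID", "nth": 3},
    },
}

def field_sort_key(field_name):
    """Return a tuple sort key for a given field name"""
    field_type_name = DEFINITIONS["FIELDS"][field_name]["type"]
    return (DEFINITIONS["TYPES"][field_type_name], DEFINITIONS["FIELDS"][field_name]["nth"])

sorted(["Destination", "Account", "Fee", "Sequence"], key=field_sort_key)
# ['Sequence', 'Fee', 'Account', 'Destination']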
Southpaw-TACTIC/TACTIC
ba9b87aef0ee3b3ea51446f25b285ebbca06f62c
src/pyasm/prod/service/api_xmlrpc.py
python
ApiXMLRPC.group_checkin
(self, ticket, search_key, context, file_path, file_range_val, snapshot_type='sequence', description="", file_type='main', metadata={}, mode=None, is_revision=False, info={}, version=None, process=None)
return result
checkin a range of files @params ticket - authentication ticket search_key - a unique identifier key representing an sobject context - the context of the checkin file_path - expression for file range: ./blah.####.jpg file_range - string describing range of frames in the from '1-5/1' snapshot_type - the snapshot type of the checkin description - a descriptive comment that pertains to this checkin file_type - the type of file this is checked in as. Default = 'main' metadata - a dictionary of values that will be stored as metadata on the snapshot mode - determines whether the files passed in should be preallocate, copied, move or uploaded. By default, this is a manual process (for backwards compatibility) metadata - add metadata to snapshot is_revision - flag to set this as a revision instead of a version info - dictionary for ApiClientCmd version - explicitly set a version process - explicitly set a process @return snapshot dictionary
checkin a range of files
[ "checkin", "a", "range", "of", "files" ]
def group_checkin(self, ticket, search_key, context, file_path, file_range_val, snapshot_type='sequence', description="", file_type='main', metadata={}, mode=None, is_revision=False, info={}, version=None, process=None): '''checkin a range of files @params ticket - authentication ticket search_key - a unique identifier key representing an sobject context - the context of the checkin file_path - expression for file range: ./blah.####.jpg file_range - string describing range of frames in the from '1-5/1' snapshot_type - the snapshot type of the checkin description - a descriptive comment that pertains to this checkin file_type - the type of file this is checked in as. Default = 'main' metadata - a dictionary of values that will be stored as metadata on the snapshot mode - determines whether the files passed in should be preallocate, copied, move or uploaded. By default, this is a manual process (for backwards compatibility) metadata - add metadata to snapshot is_revision - flag to set this as a revision instead of a version info - dictionary for ApiClientCmd version - explicitly set a version process - explicitly set a process @return snapshot dictionary ''' if not file_range_val: # try to extract from paths file_path, file_range_val = FileGroup.extract_template_and_range(file_path) file_range = FileRange.get(file_range_val) sobject = SearchKey.get_by_search_key(search_key) if not sobject: raise ApiException("SObject for [%s] does not exist" % search_key) # get the handoff dir and remap web = WebContainer.get_web() if mode == 'inplace': base_dir = os.path.dirname(file_path) elif mode in ['upload','uploaded']: base_dir = web.get_upload_dir() else: base_dir = web.get_server_handoff_dir() if not isinstance(file_path, list): file_path = file_path.replace("\\", "/") filename = os.path.basename(file_path) file_paths = ["%s/%s" % (base_dir, filename)] else: file_paths = [x.replace("\\", "/") for x in file_path] # if file type is not specified, use file_type if not file_type: file_type = "main" file_types = [] for file_path in file_paths: file_types.append(file_type) if not description: if file_range: description = 'File Group Publish [%s] files' %file_range.get_num_frames() else: description = 'File Group Publish [%s] files' % len(file_paths) checkin = FileGroupCheckin(sobject, file_paths, file_types,\ file_range, context=context, snapshot_type=snapshot_type, \ description=description, is_revision=is_revision, mode=mode, \ version=version, process=process) checkin.execute() # cleanup expanded_paths = checkin.get_expanded_paths() for path in expanded_paths: if os.path.exists(path): os.unlink(path) # get values from snapshot snapshot = checkin.get_snapshot() # add metadata to the snapshot if metadata: snapshot.add_metadata(metadata) snapshot.commit() self.set_sobject(sobject) info['snapshot'] = snapshot self.update_info(info) search = Search("sthpw/snapshot") columns = search.get_columns() result = {} for column in columns: value = str(snapshot.get_value(column)) result[column] = value result['__search_key__'] = SearchKey.build_by_sobject(snapshot) return result
[ "def", "group_checkin", "(", "self", ",", "ticket", ",", "search_key", ",", "context", ",", "file_path", ",", "file_range_val", ",", "snapshot_type", "=", "'sequence'", ",", "description", "=", "\"\"", ",", "file_type", "=", "'main'", ",", "metadata", "=", "{", "}", ",", "mode", "=", "None", ",", "is_revision", "=", "False", ",", "info", "=", "{", "}", ",", "version", "=", "None", ",", "process", "=", "None", ")", ":", "if", "not", "file_range_val", ":", "# try to extract from paths", "file_path", ",", "file_range_val", "=", "FileGroup", ".", "extract_template_and_range", "(", "file_path", ")", "file_range", "=", "FileRange", ".", "get", "(", "file_range_val", ")", "sobject", "=", "SearchKey", ".", "get_by_search_key", "(", "search_key", ")", "if", "not", "sobject", ":", "raise", "ApiException", "(", "\"SObject for [%s] does not exist\"", "%", "search_key", ")", "# get the handoff dir and remap", "web", "=", "WebContainer", ".", "get_web", "(", ")", "if", "mode", "==", "'inplace'", ":", "base_dir", "=", "os", ".", "path", ".", "dirname", "(", "file_path", ")", "elif", "mode", "in", "[", "'upload'", ",", "'uploaded'", "]", ":", "base_dir", "=", "web", ".", "get_upload_dir", "(", ")", "else", ":", "base_dir", "=", "web", ".", "get_server_handoff_dir", "(", ")", "if", "not", "isinstance", "(", "file_path", ",", "list", ")", ":", "file_path", "=", "file_path", ".", "replace", "(", "\"\\\\\"", ",", "\"/\"", ")", "filename", "=", "os", ".", "path", ".", "basename", "(", "file_path", ")", "file_paths", "=", "[", "\"%s/%s\"", "%", "(", "base_dir", ",", "filename", ")", "]", "else", ":", "file_paths", "=", "[", "x", ".", "replace", "(", "\"\\\\\"", ",", "\"/\"", ")", "for", "x", "in", "file_path", "]", "# if file type is not specified, use file_type", "if", "not", "file_type", ":", "file_type", "=", "\"main\"", "file_types", "=", "[", "]", "for", "file_path", "in", "file_paths", ":", "file_types", ".", "append", "(", "file_type", ")", "if", "not", "description", ":", "if", "file_range", ":", "description", "=", "'File Group Publish [%s] files'", "%", "file_range", ".", "get_num_frames", "(", ")", "else", ":", "description", "=", "'File Group Publish [%s] files'", "%", "len", "(", "file_paths", ")", "checkin", "=", "FileGroupCheckin", "(", "sobject", ",", "file_paths", ",", "file_types", ",", "file_range", ",", "context", "=", "context", ",", "snapshot_type", "=", "snapshot_type", ",", "description", "=", "description", ",", "is_revision", "=", "is_revision", ",", "mode", "=", "mode", ",", "version", "=", "version", ",", "process", "=", "process", ")", "checkin", ".", "execute", "(", ")", "# cleanup", "expanded_paths", "=", "checkin", ".", "get_expanded_paths", "(", ")", "for", "path", "in", "expanded_paths", ":", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "os", ".", "unlink", "(", "path", ")", "# get values from snapshot", "snapshot", "=", "checkin", ".", "get_snapshot", "(", ")", "# add metadata to the snapshot", "if", "metadata", ":", "snapshot", ".", "add_metadata", "(", "metadata", ")", "snapshot", ".", "commit", "(", ")", "self", ".", "set_sobject", "(", "sobject", ")", "info", "[", "'snapshot'", "]", "=", "snapshot", "self", ".", "update_info", "(", "info", ")", "search", "=", "Search", "(", "\"sthpw/snapshot\"", ")", "columns", "=", "search", ".", "get_columns", "(", ")", "result", "=", "{", "}", "for", "column", "in", "columns", ":", "value", "=", "str", "(", "snapshot", ".", "get_value", "(", "column", ")", ")", "result", "[", "column", "]", "=", "value", "result", 
"[", "'__search_key__'", "]", "=", "SearchKey", ".", "build_by_sobject", "(", "snapshot", ")", "return", "result" ]
https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/src/pyasm/prod/service/api_xmlrpc.py#L4034-L4145
stdlib-js/stdlib
e3c14dd9a7985ed1cd1cc80e83b6659aeabeb7df
lib/node_modules/@stdlib/math/base/special/erfc/benchmark/python/scipy/benchmark.py
python
print_version
()
Print the TAP version.
Print the TAP version.
[ "Print", "the", "TAP", "version", "." ]
def print_version(): """Print the TAP version.""" print("TAP version 13")
[ "def", "print_version", "(", ")", ":", "print", "(", "\"TAP version 13\"", ")" ]
https://github.com/stdlib-js/stdlib/blob/e3c14dd9a7985ed1cd1cc80e83b6659aeabeb7df/lib/node_modules/@stdlib/math/base/special/erfc/benchmark/python/scipy/benchmark.py#L29-L31
Opentrons/opentrons
466e0567065d8773a81c25cd1b5c7998e00adf2c
api/src/opentrons/hardware_control/emulation/connection_handler.py
python
ConnectionHandler.__call__
( self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter )
New connection callback.
New connection callback.
[ "New", "connection", "callback", "." ]
async def __call__( self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter ) -> None: """New connection callback.""" emulator_name = self._emulator.__class__.__name__ logger.debug("%s Connected.", emulator_name) while True: line = await reader.readuntil(self._emulator.get_terminator()) logger.debug("%s Received: %s", emulator_name, line) try: response = self._emulator.handle(line.decode().strip()) if response: response = f"{response}\r\n" logger.debug("%s Sending: %s", emulator_name, response) writer.write(response.encode()) except Exception as e: logger.exception("%s exception", emulator_name) writer.write(f"Error: {str(e)}\r\n".encode()) writer.write(self._emulator.get_ack()) await writer.drain()
[ "async", "def", "__call__", "(", "self", ",", "reader", ":", "asyncio", ".", "StreamReader", ",", "writer", ":", "asyncio", ".", "StreamWriter", ")", "->", "None", ":", "emulator_name", "=", "self", ".", "_emulator", ".", "__class__", ".", "__name__", "logger", ".", "debug", "(", "\"%s Connected.\"", ",", "emulator_name", ")", "while", "True", ":", "line", "=", "await", "reader", ".", "readuntil", "(", "self", ".", "_emulator", ".", "get_terminator", "(", ")", ")", "logger", ".", "debug", "(", "\"%s Received: %s\"", ",", "emulator_name", ",", "line", ")", "try", ":", "response", "=", "self", ".", "_emulator", ".", "handle", "(", "line", ".", "decode", "(", ")", ".", "strip", "(", ")", ")", "if", "response", ":", "response", "=", "f\"{response}\\r\\n\"", "logger", ".", "debug", "(", "\"%s Sending: %s\"", ",", "emulator_name", ",", "response", ")", "writer", ".", "write", "(", "response", ".", "encode", "(", ")", ")", "except", "Exception", "as", "e", ":", "logger", ".", "exception", "(", "\"%s exception\"", ",", "emulator_name", ")", "writer", ".", "write", "(", "f\"Error: {str(e)}\\r\\n\"", ".", "encode", "(", ")", ")", "writer", ".", "write", "(", "self", ".", "_emulator", ".", "get_ack", "(", ")", ")", "await", "writer", ".", "drain", "(", ")" ]
https://github.com/Opentrons/opentrons/blob/466e0567065d8773a81c25cd1b5c7998e00adf2c/api/src/opentrons/hardware_control/emulation/connection_handler.py#L18-L38
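A plausible way to plug a handler like this into asyncio.start_server, with a throwaway echo "emulator" standing in for the real ones; the names here are illustrative, and it assumes ConnectionHandler.__init__ (not shown in this record) simply stores the emulator on self._emulator:

import asyncio

class EchoEmulator:
    # Minimal stand-in: line terminator, ack bytes, and a handle() that echoes the line back.
    def get_terminator(self) -> bytes:
        return b"\r\n"
    def get_ack(self) -> bytes:
        return b"ok\r\n"
    def handle(self, line: str):
        return f"echo: {line}"

async def main():
    handler = ConnectionHandler(EchoEmulator())
    # start_server calls the handler with (reader, writer), matching __call__ above.
    server = await asyncio.start_server(handler, host="127.0.0.1", port=9999)
    async with server:
        await server.serve_forever()

# asyncio.run(main())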
mozilla/spidernode
aafa9e5273f954f272bb4382fc007af14674b4c2
deps/spidershim/spidermonkey/python/mozbuild/mozbuild/shellutil.py
python
_ClineSplitter._next
(self)
Finalize current argument, effectively adding it to the list.
Finalize current argument, effectively adding it to the list.
[ "Finalize", "current", "argument", "effectively", "adding", "it", "to", "the", "list", "." ]
def _next(self): ''' Finalize current argument, effectively adding it to the list. ''' if self.arg is None: return self.result.append(self.arg) self.arg = None
[ "def", "_next", "(", "self", ")", ":", "if", "self", ".", "arg", "is", "None", ":", "return", "self", ".", "result", ".", "append", "(", "self", ".", "arg", ")", "self", ".", "arg", "=", "None" ]
https://github.com/mozilla/spidernode/blob/aafa9e5273f954f272bb4382fc007af14674b4c2/deps/spidershim/spidermonkey/python/mozbuild/mozbuild/shellutil.py#L71-L78
Nexedi/erp5
44df1959c0e21576cf5e9803d602d95efb4b695b
product/CMFActivity/ActivityTool.py
python
ActivityTool.getActivityBuffer
(self, create_if_not_found=True)
Get activity buffer for this thread for this activity tool. If no activity buffer is found at lowest level and create_if_not_found is True, create one. Intermediate level is unconditionally created if non-existent because chances are it will be used in the instance life.
Get activity buffer for this thread for this activity tool. If no activity buffer is found at lowest level and create_if_not_found is True, create one. Intermediate level is unconditionally created if non-existent because chances are it will be used in the instance life.
[ "Get", "activtity", "buffer", "for", "this", "thread", "for", "this", "activity", "tool", ".", "If", "no", "activity", "buffer", "is", "found", "at", "lowest", "level", "and", "create_if_not_found", "is", "True", "create", "one", ".", "Intermediate", "level", "is", "unconditionaly", "created", "if", "non", "existant", "because", "chances", "are", "it", "will", "be", "used", "in", "the", "instance", "life", "." ]
def getActivityBuffer(self, create_if_not_found=True): """ Get activtity buffer for this thread for this activity tool. If no activity buffer is found at lowest level and create_if_not_found is True, create one. Intermediate level is unconditionaly created if non existant because chances are it will be used in the instance life. """ # XXX: using a volatile attribute to cache getPhysicalPath result. # This cache may need invalidation if all the following is # simultaneously true: # - ActivityTool instances can be moved in object tree # - moved instance is used to get access to its activity buffer # - another instance is put in the place of the original, and used to # access its activity buffer # ...which seems currently unlikely, and as such is left out. try: my_instance_key = self._v_physical_path except AttributeError: # Safeguard: make sure we are wrapped in acquisition context before # using our path as an activity tool instance-wide identifier. assert getattr(self, 'aq_self', None) is not None self._v_physical_path = my_instance_key = self.getPhysicalPath() thread_activity_buffer = global_activity_buffer[my_instance_key] my_thread_key = get_ident() try: return thread_activity_buffer[my_thread_key] except KeyError: if create_if_not_found: buffer = ActivityBuffer() else: buffer = None thread_activity_buffer[my_thread_key] = buffer return buffer
[ "def", "getActivityBuffer", "(", "self", ",", "create_if_not_found", "=", "True", ")", ":", "# XXX: using a volatile attribute to cache getPhysicalPath result.", "# This cache may need invalidation if all the following is", "# simultaneously true:", "# - ActivityTool instances can be moved in object tree", "# - moved instance is used to get access to its activity buffer", "# - another instance is put in the place of the original, and used to", "# access its activity buffer", "# ...which seems currently unlikely, and as such is left out.", "try", ":", "my_instance_key", "=", "self", ".", "_v_physical_path", "except", "AttributeError", ":", "# Safeguard: make sure we are wrapped in acquisition context before", "# using our path as an activity tool instance-wide identifier.", "assert", "getattr", "(", "self", ",", "'aq_self'", ",", "None", ")", "is", "not", "None", "self", ".", "_v_physical_path", "=", "my_instance_key", "=", "self", ".", "getPhysicalPath", "(", ")", "thread_activity_buffer", "=", "global_activity_buffer", "[", "my_instance_key", "]", "my_thread_key", "=", "get_ident", "(", ")", "try", ":", "return", "thread_activity_buffer", "[", "my_thread_key", "]", "except", "KeyError", ":", "if", "create_if_not_found", ":", "buffer", "=", "ActivityBuffer", "(", ")", "else", ":", "buffer", "=", "None", "thread_activity_buffer", "[", "my_thread_key", "]", "=", "buffer", "return", "buffer" ]
https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/CMFActivity/ActivityTool.py#L1396-L1429
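Stripped of the Zope specifics, the lookup described in the comments is a two-level dictionary keyed by instance path and thread id; a generic standalone sketch of that pattern (not ERP5 code):

from collections import defaultdict
from threading import get_ident

global_buffers = defaultdict(dict)   # instance key -> {thread id -> buffer}

def get_buffer(instance_key, create_if_not_found=True):
    thread_buffers = global_buffers[instance_key]
    thread_key = get_ident()
    try:
        return thread_buffers[thread_key]
    except KeyError:
        buf = [] if create_if_not_found else None   # a plain list stands in for ActivityBuffer()
        thread_buffers[thread_key] = buf
        return buf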
Nexedi/erp5
44df1959c0e21576cf5e9803d602d95efb4b695b
product/ZSQLCatalog/SQLCatalog.py
python
Catalog.manage_catalogFoundItems
(self, REQUEST, RESPONSE, URL2, URL1, obj_metatypes=None, obj_ids=None, obj_searchterm=None, obj_expr=None, obj_mtime=None, obj_mspec=None, obj_roles=None, obj_permission=None)
Find objects according to search criteria and catalog them.
Find objects according to search criteria and catalog them.
[ "Find", "object", "according", "to", "search", "criteria", "and", "Catalog", "them" ]
def manage_catalogFoundItems(self, REQUEST, RESPONSE, URL2, URL1, obj_metatypes=None, obj_ids=None, obj_searchterm=None, obj_expr=None, obj_mtime=None, obj_mspec=None, obj_roles=None, obj_permission=None): """ Find object according to search criteria and Catalog them """ elapse = time.time() c_elapse = time.clock() words = 0 obj = REQUEST.PARENTS[1] path = string.join(obj.getPhysicalPath(), '/') results = self.aq_parent.ZopeFindAndApply(obj, obj_metatypes=obj_metatypes, obj_ids=obj_ids, obj_searchterm=obj_searchterm, obj_expr=obj_expr, obj_mtime=obj_mtime, obj_mspec=obj_mspec, obj_permission=obj_permission, obj_roles=obj_roles, search_sub=1, REQUEST=REQUEST, apply_func=self.aq_parent.catalog_object, apply_path=path, sql_catalog_id=self.id) elapse = time.time() - elapse c_elapse = time.clock() - c_elapse RESPONSE.redirect(URL1 + '/manage_catalogView?manage_tabs_message=' + urllib.quote('Catalog Updated<br>Total time: %s<br>Total CPU time: %s' % (`elapse`, `c_elapse`)))
[ "def", "manage_catalogFoundItems", "(", "self", ",", "REQUEST", ",", "RESPONSE", ",", "URL2", ",", "URL1", ",", "obj_metatypes", "=", "None", ",", "obj_ids", "=", "None", ",", "obj_searchterm", "=", "None", ",", "obj_expr", "=", "None", ",", "obj_mtime", "=", "None", ",", "obj_mspec", "=", "None", ",", "obj_roles", "=", "None", ",", "obj_permission", "=", "None", ")", ":", "elapse", "=", "time", ".", "time", "(", ")", "c_elapse", "=", "time", ".", "clock", "(", ")", "words", "=", "0", "obj", "=", "REQUEST", ".", "PARENTS", "[", "1", "]", "path", "=", "string", ".", "join", "(", "obj", ".", "getPhysicalPath", "(", ")", ",", "'/'", ")", "results", "=", "self", ".", "aq_parent", ".", "ZopeFindAndApply", "(", "obj", ",", "obj_metatypes", "=", "obj_metatypes", ",", "obj_ids", "=", "obj_ids", ",", "obj_searchterm", "=", "obj_searchterm", ",", "obj_expr", "=", "obj_expr", ",", "obj_mtime", "=", "obj_mtime", ",", "obj_mspec", "=", "obj_mspec", ",", "obj_permission", "=", "obj_permission", ",", "obj_roles", "=", "obj_roles", ",", "search_sub", "=", "1", ",", "REQUEST", "=", "REQUEST", ",", "apply_func", "=", "self", ".", "aq_parent", ".", "catalog_object", ",", "apply_path", "=", "path", ",", "sql_catalog_id", "=", "self", ".", "id", ")", "elapse", "=", "time", ".", "time", "(", ")", "-", "elapse", "c_elapse", "=", "time", ".", "clock", "(", ")", "-", "c_elapse", "RESPONSE", ".", "redirect", "(", "URL1", "+", "'/manage_catalogView?manage_tabs_message='", "+", "urllib", ".", "quote", "(", "'Catalog Updated<br>Total time: %s<br>Total CPU time: %s'", "%", "(", "`elapse`", ",", "`c_elapse`", ")", ")", ")" ]
https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ZSQLCatalog/SQLCatalog.py#L1201-L1235
acmbvp/Hacktoberfest
79b2785fc597e214f30eae965e25bf239288a92d
Languages/Python/Multithreading.py
python
find_pi
(n)
return pi
Function to estimate the value of Pi
Function to estimate the value of Pi
[ "Function", "to", "estimate", "the", "value", "of", "Pi" ]
def find_pi(n): """ Function to estimate the value of Pi """ inside=0 for i in range(0,n): x=random() y=random() if (x*x+y*y)**(0.5)<=1: # if i falls inside the circle inside+=1 pi=4*inside/n return pi
[ "def", "find_pi", "(", "n", ")", ":", "inside", "=", "0", "for", "i", "in", "range", "(", "0", ",", "n", ")", ":", "x", "=", "random", "(", ")", "y", "=", "random", "(", ")", "if", "(", "x", "*", "x", "+", "y", "*", "y", ")", "**", "(", "0.5", ")", "<=", "1", ":", "# if i falls inside the circle", "inside", "+=", "1", "pi", "=", "4", "*", "inside", "/", "n", "return", "pi" ]
https://github.com/acmbvp/Hacktoberfest/blob/79b2785fc597e214f30eae965e25bf239288a92d/Languages/Python/Multithreading.py#L5-L18
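The find_pi record above is the classic Monte Carlo estimate: of n random points drawn in the unit square, the fraction that lands inside the quarter circle tends toward pi/4. Below is a minimal self-contained sketch of the same idea; find_pi mirrors the record's logic, while run_threaded is a hypothetical driver added for illustration (the source file is named Multithreading.py, but its actual thread setup is not shown in this record).

from random import random
from concurrent.futures import ThreadPoolExecutor

def find_pi(n):
    """Estimate Pi from n uniform random points in the unit square."""
    inside = 0
    for _ in range(n):
        x, y = random(), random()
        if x * x + y * y <= 1.0:      # the point falls inside the quarter circle
            inside += 1
    return 4 * inside / n

def run_threaded(total_points, workers=4):
    """Hypothetical driver: average partial estimates computed in worker threads."""
    per_worker = total_points // workers
    with ThreadPoolExecutor(max_workers=workers) as pool:
        estimates = list(pool.map(find_pi, [per_worker] * workers))
    return sum(estimates) / len(estimates)

if __name__ == "__main__":
    print(run_threaded(1_000_000))    # typically prints a value near 3.1416

Note that CPython's GIL means threads add no real speed-up for this pure-Python loop; the split is shown only to illustrate how the estimator composes.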
COVID-19-electronic-health-system/Corona-tracker
80cab55a7a03c43fa4a3412daa6e4ff1f0a45d8c
src/python/pull_gsheets_translations_mvp.py
python
convert_to_camelCase
(value)
Converts a string to camelCase and removes punctuation. Arguments: value {[string]} -- [string from the 'value' column in language_df] Returns: [string] -- [returns a string with no punctuation, in camelCase format.]
Converts a string to camelCase and removes punctuation.
[ "Converts", "a", "string", "to", "camelCase", "and", "removes", "punctuation", "." ]
def convert_to_camelCase(value): """ Converts a string to camelCase and removes punctuation. Arguments: value {[string]} -- [string from the 'value' column in language_df] Returns: [string] -- [returns a string with no punctuation, in camelCase format.] """ # if value is a string with more than one word if len(value.split()) > 1: #converts value to camelCase camelCase = value.split()[0].lower() + " ".join(value.split()[1:]).title().replace(" ","") #removes punctuation return camelCase.translate(str.maketrans('', '', punctuation)) else: return value.lower().translate(str.maketrans('', '', punctuation))
[ "def", "convert_to_camelCase", "(", "value", ")", ":", "# if value is a string with more than one word", "if", "len", "(", "value", ".", "split", "(", ")", ")", ">", "1", ":", "#converts value to camelCase", "camelCase", "=", "value", ".", "split", "(", ")", "[", "0", "]", ".", "lower", "(", ")", "+", "\" \"", ".", "join", "(", "value", ".", "split", "(", ")", "[", "1", ":", "]", ")", ".", "title", "(", ")", ".", "replace", "(", "\" \"", ",", "\"\"", ")", "#removes punctuation", "return", "camelCase", ".", "translate", "(", "str", ".", "maketrans", "(", "''", ",", "''", ",", "punctuation", ")", ")", "else", ":", "return", "value", ".", "lower", "(", ")", ".", "translate", "(", "str", ".", "maketrans", "(", "''", ",", "''", ",", "punctuation", ")", ")" ]
https://github.com/COVID-19-electronic-health-system/Corona-tracker/blob/80cab55a7a03c43fa4a3412daa6e4ff1f0a45d8c/src/python/pull_gsheets_translations_mvp.py#L64-L80
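The convert_to_camelCase record above promises lower-camelCase output with punctuation removed. A self-contained sketch with a couple of invented example strings follows; string.punctuation stands in for the module-level punctuation name the record assumes, and the logic mirrors the original.

from string import punctuation

def convert_to_camelCase(value):
    """Lower-case the first word, TitleCase the rest, then strip punctuation."""
    words = value.split()
    if len(words) > 1:
        camel = words[0].lower() + "".join(w.title() for w in words[1:])
    else:
        camel = value.lower()
    return camel.translate(str.maketrans('', '', punctuation))

print(convert_to_camelCase("Do you have a fever?"))   # -> doYouHaveAFever
print(convert_to_camelCase("Headache"))               # -> headache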
ayojs/ayo
45a1c8cf6384f5bcc81d834343c3ed9d78b97df3
tools/cpplint.py
python
_IncludeState.IsInAlphabeticalOrder
(self, clean_lines, linenum, header_path)
return True
Check if a header is in alphabetical order with the previous header. Args: clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. header_path: Canonicalized header to be checked. Returns: Returns true if the header is in alphabetical order.
Check if a header is in alphabetical order with the previous header.
[ "Check", "if", "a", "header", "is", "in", "alphabetical", "order", "with", "the", "previous", "header", "." ]
def IsInAlphabeticalOrder(self, clean_lines, linenum, header_path): """Check if a header is in alphabetical order with the previous header. Args: clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. header_path: Canonicalized header to be checked. Returns: Returns true if the header is in alphabetical order. """ # If previous section is different from current section, _last_header will # be reset to empty string, so it's always less than current header. # # If previous line was a blank line, assume that the headers are # intentionally sorted the way they are. if (self._last_header > header_path and Match(r'^\s*#\s*include\b', clean_lines.elided[linenum - 1])): return False return True
[ "def", "IsInAlphabeticalOrder", "(", "self", ",", "clean_lines", ",", "linenum", ",", "header_path", ")", ":", "# If previous section is different from current section, _last_header will", "# be reset to empty string, so it's always less than current header.", "#", "# If previous line was a blank line, assume that the headers are", "# intentionally sorted the way they are.", "if", "(", "self", ".", "_last_header", ">", "header_path", "and", "Match", "(", "r'^\\s*#\\s*include\\b'", ",", "clean_lines", ".", "elided", "[", "linenum", "-", "1", "]", ")", ")", ":", "return", "False", "return", "True" ]
https://github.com/ayojs/ayo/blob/45a1c8cf6384f5bcc81d834343c3ed9d78b97df3/tools/cpplint.py#L756-L775
voilet/cmdb
b4c8452ac341dc4b9848e2eefa50f1c34ff688e2
monitor/views.py
python
HttpMonitorDel
(request, uuid)
return HttpResponse(json.dumps({"retCode": 200, "retMsg": "ok"}, ensure_ascii=False, indent=4))
HTTP monitoring list
HTTP monitoring list
[ "HTTP", "monitoring", "list" ]
def HttpMonitorDel(request, uuid): """ http监控列表 """ data = MonitorHttp.objects.get(pk=uuid).delete() return HttpResponse(json.dumps({"retCode": 200, "retMsg": "ok"}, ensure_ascii=False, indent=4))
[ "def", "HttpMonitorDel", "(", "request", ",", "uuid", ")", ":", "data", "=", "MonitorHttp", ".", "objects", ".", "get", "(", "pk", "=", "uuid", ")", ".", "delete", "(", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "{", "\"retCode\"", ":", "200", ",", "\"retMsg\"", ":", "\"ok\"", "}", ",", "ensure_ascii", "=", "False", ",", "indent", "=", "4", ")", ")" ]
https://github.com/voilet/cmdb/blob/b4c8452ac341dc4b9848e2eefa50f1c34ff688e2/monitor/views.py#L66-L69
korolr/dotfiles
8e46933503ecb8d8651739ffeb1d2d4f0f5c6524
.config/sublime-text-3/Packages/python-markdown/st3/markdown/extensions/fenced_code.py
python
FencedCodeExtension.extendMarkdown
(self, md, md_globals)
Add FencedBlockPreprocessor to the Markdown instance.
Add FencedBlockPreprocessor to the Markdown instance.
[ "Add", "FencedBlockPreprocessor", "to", "the", "Markdown", "instance", "." ]
def extendMarkdown(self, md, md_globals): """ Add FencedBlockPreprocessor to the Markdown instance. """ md.registerExtension(self) md.preprocessors.add('fenced_code_block', FencedBlockPreprocessor(md), ">normalize_whitespace")
[ "def", "extendMarkdown", "(", "self", ",", "md", ",", "md_globals", ")", ":", "md", ".", "registerExtension", "(", "self", ")", "md", ".", "preprocessors", ".", "add", "(", "'fenced_code_block'", ",", "FencedBlockPreprocessor", "(", "md", ")", ",", "\">normalize_whitespace\"", ")" ]
https://github.com/korolr/dotfiles/blob/8e46933503ecb8d8651739ffeb1d2d4f0f5c6524/.config/sublime-text-3/Packages/python-markdown/st3/markdown/extensions/fenced_code.py#L28-L34
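extendMarkdown above only registers the FencedBlockPreprocessor; the visible effect comes when the extension is enabled on a conversion. Below is a short, illustrative usage sketch against the python-markdown package (the sample text is invented; 'fenced_code' is the extension's standard registration name).

import markdown

text = """Some prose, then a fenced block:

~~~
print("kept verbatim by the fenced_code preprocessor")
~~~
"""

html = markdown.markdown(text, extensions=['fenced_code'])
print(html)   # the fenced block is emitted as <pre><code>...</code></pre>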
Southpaw-TACTIC/TACTIC
ba9b87aef0ee3b3ea51446f25b285ebbca06f62c
3rd_party/python3/site-packages/cherrypy-18.1.2/cherrypy/lib/static.py
python
staticdir
(section, dir, root='', match='', content_types=None, index='', debug=False)
return handled
Serve a static resource from the given (root +) dir. match If given, request.path_info will be searched for the given regular expression before attempting to serve static content. content_types If given, it should be a Python dictionary of {file-extension: content-type} pairs, where 'file-extension' is a string (e.g. "gif") and 'content-type' is the value to write out in the Content-Type response header (e.g. "image/gif"). index If provided, it should be the (relative) name of a file to serve for directory requests. For example, if the dir argument is '/home/me', the Request-URI is 'myapp', and the index arg is 'index.html', the file '/home/me/myapp/index.html' will be sought.
Serve a static resource from the given (root +) dir.
[ "Serve", "a", "static", "resource", "from", "the", "given", "(", "root", "+", ")", "dir", "." ]
def staticdir(section, dir, root='', match='', content_types=None, index='', debug=False): """Serve a static resource from the given (root +) dir. match If given, request.path_info will be searched for the given regular expression before attempting to serve static content. content_types If given, it should be a Python dictionary of {file-extension: content-type} pairs, where 'file-extension' is a string (e.g. "gif") and 'content-type' is the value to write out in the Content-Type response header (e.g. "image/gif"). index If provided, it should be the (relative) name of a file to serve for directory requests. For example, if the dir argument is '/home/me', the Request-URI is 'myapp', and the index arg is 'index.html', the file '/home/me/myapp/index.html' will be sought. """ request = cherrypy.serving.request if request.method not in ('GET', 'HEAD'): if debug: cherrypy.log('request.method not GET or HEAD', 'TOOLS.STATICDIR') return False if match and not re.search(match, request.path_info): if debug: cherrypy.log('request.path_info %r does not match pattern %r' % (request.path_info, match), 'TOOLS.STATICDIR') return False # Allow the use of '~' to refer to a user's home directory. dir = os.path.expanduser(dir) # If dir is relative, make absolute using "root". if not os.path.isabs(dir): if not root: msg = 'Static dir requires an absolute dir (or root).' if debug: cherrypy.log(msg, 'TOOLS.STATICDIR') raise ValueError(msg) dir = os.path.join(root, dir) # Determine where we are in the object tree relative to 'section' # (where the static tool was defined). if section == 'global': section = '/' section = section.rstrip(r'\/') branch = request.path_info[len(section) + 1:] branch = urllib.parse.unquote(branch.lstrip(r'\/')) # Requesting a file in sub-dir of the staticdir results # in mixing of delimiter styles, e.g. C:\static\js/script.js. # Windows accepts this form except not when the path is # supplied in extended-path notation, e.g. \\?\C:\static\js/script.js. # http://bit.ly/1vdioCX if platform.system() == 'Windows': branch = branch.replace('/', '\\') # If branch is "", filename will end in a slash filename = os.path.join(dir, branch) if debug: cherrypy.log('Checking file %r to fulfill %r' % (filename, request.path_info), 'TOOLS.STATICDIR') # There's a chance that the branch pulled from the URL might # have ".." or similar uplevel attacks in it. Check that the final # filename is a child of dir. if not os.path.normpath(filename).startswith(os.path.normpath(dir)): raise cherrypy.HTTPError(403) # Forbidden handled = _attempt(filename, content_types) if not handled: # Check for an index file if a folder was requested. if index: handled = _attempt(os.path.join(filename, index), content_types) if handled: request.is_index = filename[-1] in (r'\/') return handled
[ "def", "staticdir", "(", "section", ",", "dir", ",", "root", "=", "''", ",", "match", "=", "''", ",", "content_types", "=", "None", ",", "index", "=", "''", ",", "debug", "=", "False", ")", ":", "request", "=", "cherrypy", ".", "serving", ".", "request", "if", "request", ".", "method", "not", "in", "(", "'GET'", ",", "'HEAD'", ")", ":", "if", "debug", ":", "cherrypy", ".", "log", "(", "'request.method not GET or HEAD'", ",", "'TOOLS.STATICDIR'", ")", "return", "False", "if", "match", "and", "not", "re", ".", "search", "(", "match", ",", "request", ".", "path_info", ")", ":", "if", "debug", ":", "cherrypy", ".", "log", "(", "'request.path_info %r does not match pattern %r'", "%", "(", "request", ".", "path_info", ",", "match", ")", ",", "'TOOLS.STATICDIR'", ")", "return", "False", "# Allow the use of '~' to refer to a user's home directory.", "dir", "=", "os", ".", "path", ".", "expanduser", "(", "dir", ")", "# If dir is relative, make absolute using \"root\".", "if", "not", "os", ".", "path", ".", "isabs", "(", "dir", ")", ":", "if", "not", "root", ":", "msg", "=", "'Static dir requires an absolute dir (or root).'", "if", "debug", ":", "cherrypy", ".", "log", "(", "msg", ",", "'TOOLS.STATICDIR'", ")", "raise", "ValueError", "(", "msg", ")", "dir", "=", "os", ".", "path", ".", "join", "(", "root", ",", "dir", ")", "# Determine where we are in the object tree relative to 'section'", "# (where the static tool was defined).", "if", "section", "==", "'global'", ":", "section", "=", "'/'", "section", "=", "section", ".", "rstrip", "(", "r'\\/'", ")", "branch", "=", "request", ".", "path_info", "[", "len", "(", "section", ")", "+", "1", ":", "]", "branch", "=", "urllib", ".", "parse", ".", "unquote", "(", "branch", ".", "lstrip", "(", "r'\\/'", ")", ")", "# Requesting a file in sub-dir of the staticdir results", "# in mixing of delimiter styles, e.g. C:\\static\\js/script.js.", "# Windows accepts this form except not when the path is", "# supplied in extended-path notation, e.g. \\\\?\\C:\\static\\js/script.js.", "# http://bit.ly/1vdioCX", "if", "platform", ".", "system", "(", ")", "==", "'Windows'", ":", "branch", "=", "branch", ".", "replace", "(", "'/'", ",", "'\\\\'", ")", "# If branch is \"\", filename will end in a slash", "filename", "=", "os", ".", "path", ".", "join", "(", "dir", ",", "branch", ")", "if", "debug", ":", "cherrypy", ".", "log", "(", "'Checking file %r to fulfill %r'", "%", "(", "filename", ",", "request", ".", "path_info", ")", ",", "'TOOLS.STATICDIR'", ")", "# There's a chance that the branch pulled from the URL might", "# have \"..\" or similar uplevel attacks in it. Check that the final", "# filename is a child of dir.", "if", "not", "os", ".", "path", ".", "normpath", "(", "filename", ")", ".", "startswith", "(", "os", ".", "path", ".", "normpath", "(", "dir", ")", ")", ":", "raise", "cherrypy", ".", "HTTPError", "(", "403", ")", "# Forbidden", "handled", "=", "_attempt", "(", "filename", ",", "content_types", ")", "if", "not", "handled", ":", "# Check for an index file if a folder was requested.", "if", "index", ":", "handled", "=", "_attempt", "(", "os", ".", "path", ".", "join", "(", "filename", ",", "index", ")", ",", "content_types", ")", "if", "handled", ":", "request", ".", "is_index", "=", "filename", "[", "-", "1", "]", "in", "(", "r'\\/'", ")", "return", "handled" ]
https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/3rd_party/python3/site-packages/cherrypy-18.1.2/cherrypy/lib/static.py#L271-L350
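staticdir above is the body of CherryPy's tools.staticdir tool; applications normally reach it through config rather than by calling it directly. A hedged configuration sketch follows (the tools.staticdir.* keys are CherryPy's standard option names, while the class, paths, and mount point are made up for illustration).

import cherrypy

class Root:
    @cherrypy.expose
    def index(self):
        return "hello"

config = {
    '/static': {
        'tools.staticdir.on': True,
        'tools.staticdir.dir': '/var/www/myapp/static',   # must be absolute unless .root is set
        'tools.staticdir.index': 'index.html',            # served for directory requests
    }
}

if __name__ == '__main__':
    cherrypy.quickstart(Root(), '/', config)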
mceSystems/node-jsc
90634f3064fab8e89a85b3942f0cc5054acc86fa
tools/gyp/pylib/gyp/MSVSProject.py
python
Writer.WriteIfChanged
(self)
Writes the project file.
Writes the project file.
[ "Writes", "the", "project", "file", "." ]
def WriteIfChanged(self): """Writes the project file.""" # First create XML content definition content = [ 'VisualStudioProject', {'ProjectType': 'Visual C++', 'Version': self.version.ProjectVersion(), 'Name': self.name, 'ProjectGUID': self.guid, 'RootNamespace': self.name, 'Keyword': 'Win32Proj' }, self.platform_section, self.tool_files_section, self.configurations_section, ['References'], # empty section self.files_section, ['Globals'] # empty section ] easy_xml.WriteXmlIfChanged(content, self.project_path, encoding="Windows-1252")
[ "def", "WriteIfChanged", "(", "self", ")", ":", "# First create XML content definition", "content", "=", "[", "'VisualStudioProject'", ",", "{", "'ProjectType'", ":", "'Visual C++'", ",", "'Version'", ":", "self", ".", "version", ".", "ProjectVersion", "(", ")", ",", "'Name'", ":", "self", ".", "name", ",", "'ProjectGUID'", ":", "self", ".", "guid", ",", "'RootNamespace'", ":", "self", ".", "name", ",", "'Keyword'", ":", "'Win32Proj'", "}", ",", "self", ".", "platform_section", ",", "self", ".", "tool_files_section", ",", "self", ".", "configurations_section", ",", "[", "'References'", "]", ",", "# empty section", "self", ".", "files_section", ",", "[", "'Globals'", "]", "# empty section", "]", "easy_xml", ".", "WriteXmlIfChanged", "(", "content", ",", "self", ".", "project_path", ",", "encoding", "=", "\"Windows-1252\"", ")" ]
https://github.com/mceSystems/node-jsc/blob/90634f3064fab8e89a85b3942f0cc5054acc86fa/tools/gyp/pylib/gyp/MSVSProject.py#L188-L208
PostHog/posthog
6071cfd053b2b51c75182709e7bb237eb6b7f866
posthog/email.py
python
inline_css
(value: str)
return lxml.html.tostring(tree, doctype="<!DOCTYPE html>").decode("utf-8")
Returns an HTML document with inline CSS. Forked from getsentry/sentry
Returns an HTML document with inline CSS. Forked from getsentry/sentry
[ "Returns", "an", "HTML", "document", "with", "inline", "CSS", ".", "Forked", "from", "getsentry", "/", "sentry" ]
def inline_css(value: str) -> str: """ Returns an HTML document with inline CSS. Forked from getsentry/sentry """ tree = lxml.html.document_fromstring(value) toronado.inline(tree) # CSS media query support is inconsistent when the DOCTYPE declaration is # missing, so we force it to HTML5 here. return lxml.html.tostring(tree, doctype="<!DOCTYPE html>").decode("utf-8")
[ "def", "inline_css", "(", "value", ":", "str", ")", "->", "str", ":", "tree", "=", "lxml", ".", "html", ".", "document_fromstring", "(", "value", ")", "toronado", ".", "inline", "(", "tree", ")", "# CSS media query support is inconsistent when the DOCTYPE declaration is", "# missing, so we force it to HTML5 here.", "return", "lxml", ".", "html", ".", "tostring", "(", "tree", ",", "doctype", "=", "\"<!DOCTYPE html>\"", ")", ".", "decode", "(", "\"utf-8\"", ")" ]
https://github.com/PostHog/posthog/blob/6071cfd053b2b51c75182709e7bb237eb6b7f866/posthog/email.py#L17-L26
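inline_css above exists because many email clients drop <style> blocks, so the rules are pushed into per-element style attributes before sending. A small usage sketch with an invented HTML snippet follows; it assumes lxml and toronado are installed, exactly as the record does.

import lxml.html
import toronado

def inline_css(value: str) -> str:
    """Return the document with its CSS inlined (mirrors the record above)."""
    tree = lxml.html.document_fromstring(value)
    toronado.inline(tree)                                  # rewrites the tree in place
    return lxml.html.tostring(tree, doctype="<!DOCTYPE html>").decode("utf-8")

html = """
<html>
  <head><style>p.greeting { color: red; }</style></head>
  <body><p class="greeting">Hi there</p></body>
</html>
"""
print(inline_css(html))   # the <p> now carries style="color: red"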
wotermelon/toJump
3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f
lib/mac/systrace/catapult/devil/devil/android/sdk/fastboot.py
python
Fastboot.Flash
(self, partition, image, timeout=None, retries=None)
Flash partition with img. Args: partition: Partition to be flashed. image: location of image to flash with.
Flash partition with img.
[ "Flash", "partition", "with", "img", "." ]
def Flash(self, partition, image, timeout=None, retries=None): """Flash partition with img. Args: partition: Partition to be flashed. image: location of image to flash with. """ self._RunFastbootCommand(['flash', partition, image])
[ "def", "Flash", "(", "self", ",", "partition", ",", "image", ",", "timeout", "=", "None", ",", "retries", "=", "None", ")", ":", "self", ".", "_RunFastbootCommand", "(", "[", "'flash'", ",", "partition", ",", "image", "]", ")" ]
https://github.com/wotermelon/toJump/blob/3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f/lib/mac/systrace/catapult/devil/devil/android/sdk/fastboot.py#L65-L72
jupyterlab/jupyterlab
29266626af0702ff093806df7d3a7348014d0450
jupyterlab/commands.py
python
_AppHandler._check_common_extension
(self, extension, info, check_installed_only)
return True
Check if a common (non-core) extension is enabled or disabled
Check if a common (non-core) extension is enabled or disabled
[ "Check", "if", "a", "common", "(", "non", "-", "core", ")", "extension", "is", "enabled", "or", "disabled" ]
def _check_common_extension(self, extension, info, check_installed_only): """Check if a common (non-core) extension is enabled or disabled """ if extension not in info['extensions']: self.logger.info('%s:%s' % (extension, RED_X)) return False errors = self._get_extension_compat()[extension] if errors: self.logger.info('%s:%s (compatibility errors)' % (extension, RED_X)) return False if check_installed_only: self.logger.info('%s: %s' % (extension, GREEN_OK)) return True if _is_disabled(extension, info['disabled']): self.logger.info('%s: %s' % (extension, RED_DISABLED)) return False self.logger.info('%s:%s' % (extension, GREEN_ENABLED)) return True
[ "def", "_check_common_extension", "(", "self", ",", "extension", ",", "info", ",", "check_installed_only", ")", ":", "if", "extension", "not", "in", "info", "[", "'extensions'", "]", ":", "self", ".", "logger", ".", "info", "(", "'%s:%s'", "%", "(", "extension", ",", "RED_X", ")", ")", "return", "False", "errors", "=", "self", ".", "_get_extension_compat", "(", ")", "[", "extension", "]", "if", "errors", ":", "self", ".", "logger", ".", "info", "(", "'%s:%s (compatibility errors)'", "%", "(", "extension", ",", "RED_X", ")", ")", "return", "False", "if", "check_installed_only", ":", "self", ".", "logger", ".", "info", "(", "'%s: %s'", "%", "(", "extension", ",", "GREEN_OK", ")", ")", "return", "True", "if", "_is_disabled", "(", "extension", ",", "info", "[", "'disabled'", "]", ")", ":", "self", ".", "logger", ".", "info", "(", "'%s: %s'", "%", "(", "extension", ",", "RED_DISABLED", ")", ")", "return", "False", "self", ".", "logger", ".", "info", "(", "'%s:%s'", "%", "(", "extension", ",", "GREEN_ENABLED", ")", ")", "return", "True" ]
https://github.com/jupyterlab/jupyterlab/blob/29266626af0702ff093806df7d3a7348014d0450/jupyterlab/commands.py#L1085-L1107
almonk/Bind
03e9e98fb8b30a58cb4fc2829f06289fa9958897
public/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
python
MakefileWriter.WriteList
(self, value_list, variable=None, prefix='', quoter=QuoteIfNecessary)
Write a variable definition that is a list of values. E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out foo = blaha blahb but in a pretty-printed style.
Write a variable definition that is a list of values.
[ "Write", "a", "variable", "definition", "that", "is", "a", "list", "of", "values", "." ]
def WriteList(self, value_list, variable=None, prefix='', quoter=QuoteIfNecessary): """Write a variable definition that is a list of values. E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out foo = blaha blahb but in a pretty-printed style. """ values = '' if value_list: value_list = [quoter(prefix + l) for l in value_list] values = ' \\\n\t' + ' \\\n\t'.join(value_list) self.fp.write('%s :=%s\n\n' % (variable, values))
[ "def", "WriteList", "(", "self", ",", "value_list", ",", "variable", "=", "None", ",", "prefix", "=", "''", ",", "quoter", "=", "QuoteIfNecessary", ")", ":", "values", "=", "''", "if", "value_list", ":", "value_list", "=", "[", "quoter", "(", "prefix", "+", "l", ")", "for", "l", "in", "value_list", "]", "values", "=", "' \\\\\\n\\t'", "+", "' \\\\\\n\\t'", ".", "join", "(", "value_list", ")", "self", ".", "fp", ".", "write", "(", "'%s :=%s\\n\\n'", "%", "(", "variable", ",", "values", ")", ")" ]
https://github.com/almonk/Bind/blob/03e9e98fb8b30a58cb4fc2829f06289fa9958897/public/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py#L1630-L1642
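WriteList above pretty-prints a make variable using backslash line continuations. The tiny standalone sketch below reproduces the documented output, writing to io.StringIO instead of the generator's Makefile handle and simplifying the quoter to a pass-through (both are assumptions for illustration).

import io

def write_list(fp, value_list, variable, prefix='', quoter=lambda s: s):
    """Write 'variable := value ...' with one value per continued line."""
    values = ''
    if value_list:
        value_list = [quoter(prefix + l) for l in value_list]
        values = ' \\\n\t' + ' \\\n\t'.join(value_list)
    fp.write('%s :=%s\n\n' % (variable, values))

buf = io.StringIO()
write_list(buf, ['a', 'b'], 'foo', prefix='blah')
print(buf.getvalue())
# foo := \
#     blaha \
#     blahb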
almonk/Bind
03e9e98fb8b30a58cb4fc2829f06289fa9958897
public/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
python
_GetXcodeEnv
(xcode_settings, built_products_dir, srcroot, configuration, additional_settings=None)
return additional_settings
Return the environment variables that Xcode would set. See http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153 for a full list. Args: xcode_settings: An XcodeSettings object. If this is None, this function returns an empty dict. built_products_dir: Absolute path to the built products dir. srcroot: Absolute path to the source root. configuration: The build configuration name. additional_settings: An optional dict with more values to add to the result.
Return the environment variables that Xcode would set. See http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153 for a full list.
[ "Return", "the", "environment", "variables", "that", "Xcode", "would", "set", ".", "See", "http", ":", "//", "developer", ".", "apple", ".", "com", "/", "library", "/", "mac", "/", "#documentation", "/", "DeveloperTools", "/", "Reference", "/", "XcodeBuildSettingRef", "/", "1", "-", "Build_Setting_Reference", "/", "build_setting_ref", ".", "html#", "//", "apple_ref", "/", "doc", "/", "uid", "/", "TP40003931", "-", "CH3", "-", "SW153", "for", "a", "full", "list", "." ]
def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, additional_settings=None): """Return the environment variables that Xcode would set. See http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153 for a full list. Args: xcode_settings: An XcodeSettings object. If this is None, this function returns an empty dict. built_products_dir: Absolute path to the built products dir. srcroot: Absolute path to the source root. configuration: The build configuration name. additional_settings: An optional dict with more values to add to the result. """ if not xcode_settings: return {} # This function is considered a friend of XcodeSettings, so let it reach into # its implementation details. spec = xcode_settings.spec # These are filled in on a as-needed basis. env = { 'BUILT_PRODUCTS_DIR' : built_products_dir, 'CONFIGURATION' : configuration, 'PRODUCT_NAME' : xcode_settings.GetProductName(), # See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME 'SRCROOT' : srcroot, 'SOURCE_ROOT': '${SRCROOT}', # This is not true for static libraries, but currently the env is only # written for bundles: 'TARGET_BUILD_DIR' : built_products_dir, 'TEMP_DIR' : '${TMPDIR}', } if xcode_settings.GetPerConfigSetting('SDKROOT', configuration): env['SDKROOT'] = xcode_settings._SdkPath(configuration) else: env['SDKROOT'] = '' if spec['type'] in ( 'executable', 'static_library', 'shared_library', 'loadable_module'): env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName() env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath() env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName() mach_o_type = xcode_settings.GetMachOType() if mach_o_type: env['MACH_O_TYPE'] = mach_o_type env['PRODUCT_TYPE'] = xcode_settings.GetProductType() if xcode_settings._IsBundle(): env['CONTENTS_FOLDER_PATH'] = \ xcode_settings.GetBundleContentsFolderPath() env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \ xcode_settings.GetBundleResourceFolder() env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath() env['WRAPPER_NAME'] = xcode_settings.GetWrapperName() install_name = xcode_settings.GetInstallName() if install_name: env['LD_DYLIB_INSTALL_NAME'] = install_name install_name_base = xcode_settings.GetInstallNameBase() if install_name_base: env['DYLIB_INSTALL_NAME_BASE'] = install_name_base if not additional_settings: additional_settings = {} else: # Flatten lists to strings. for k in additional_settings: if not isinstance(additional_settings[k], str): additional_settings[k] = ' '.join(additional_settings[k]) additional_settings.update(env) for k in additional_settings: additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k]) return additional_settings
[ "def", "_GetXcodeEnv", "(", "xcode_settings", ",", "built_products_dir", ",", "srcroot", ",", "configuration", ",", "additional_settings", "=", "None", ")", ":", "if", "not", "xcode_settings", ":", "return", "{", "}", "# This function is considered a friend of XcodeSettings, so let it reach into", "# its implementation details.", "spec", "=", "xcode_settings", ".", "spec", "# These are filled in on a as-needed basis.", "env", "=", "{", "'BUILT_PRODUCTS_DIR'", ":", "built_products_dir", ",", "'CONFIGURATION'", ":", "configuration", ",", "'PRODUCT_NAME'", ":", "xcode_settings", ".", "GetProductName", "(", ")", ",", "# See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\\ Product\\ Types.xcspec for FULL_PRODUCT_NAME", "'SRCROOT'", ":", "srcroot", ",", "'SOURCE_ROOT'", ":", "'${SRCROOT}'", ",", "# This is not true for static libraries, but currently the env is only", "# written for bundles:", "'TARGET_BUILD_DIR'", ":", "built_products_dir", ",", "'TEMP_DIR'", ":", "'${TMPDIR}'", ",", "}", "if", "xcode_settings", ".", "GetPerConfigSetting", "(", "'SDKROOT'", ",", "configuration", ")", ":", "env", "[", "'SDKROOT'", "]", "=", "xcode_settings", ".", "_SdkPath", "(", "configuration", ")", "else", ":", "env", "[", "'SDKROOT'", "]", "=", "''", "if", "spec", "[", "'type'", "]", "in", "(", "'executable'", ",", "'static_library'", ",", "'shared_library'", ",", "'loadable_module'", ")", ":", "env", "[", "'EXECUTABLE_NAME'", "]", "=", "xcode_settings", ".", "GetExecutableName", "(", ")", "env", "[", "'EXECUTABLE_PATH'", "]", "=", "xcode_settings", ".", "GetExecutablePath", "(", ")", "env", "[", "'FULL_PRODUCT_NAME'", "]", "=", "xcode_settings", ".", "GetFullProductName", "(", ")", "mach_o_type", "=", "xcode_settings", ".", "GetMachOType", "(", ")", "if", "mach_o_type", ":", "env", "[", "'MACH_O_TYPE'", "]", "=", "mach_o_type", "env", "[", "'PRODUCT_TYPE'", "]", "=", "xcode_settings", ".", "GetProductType", "(", ")", "if", "xcode_settings", ".", "_IsBundle", "(", ")", ":", "env", "[", "'CONTENTS_FOLDER_PATH'", "]", "=", "xcode_settings", ".", "GetBundleContentsFolderPath", "(", ")", "env", "[", "'UNLOCALIZED_RESOURCES_FOLDER_PATH'", "]", "=", "xcode_settings", ".", "GetBundleResourceFolder", "(", ")", "env", "[", "'INFOPLIST_PATH'", "]", "=", "xcode_settings", ".", "GetBundlePlistPath", "(", ")", "env", "[", "'WRAPPER_NAME'", "]", "=", "xcode_settings", ".", "GetWrapperName", "(", ")", "install_name", "=", "xcode_settings", ".", "GetInstallName", "(", ")", "if", "install_name", ":", "env", "[", "'LD_DYLIB_INSTALL_NAME'", "]", "=", "install_name", "install_name_base", "=", "xcode_settings", ".", "GetInstallNameBase", "(", ")", "if", "install_name_base", ":", "env", "[", "'DYLIB_INSTALL_NAME_BASE'", "]", "=", "install_name_base", "if", "not", "additional_settings", ":", "additional_settings", "=", "{", "}", "else", ":", "# Flatten lists to strings.", "for", "k", "in", "additional_settings", ":", "if", "not", "isinstance", "(", "additional_settings", "[", "k", "]", ",", "str", ")", ":", "additional_settings", "[", "k", "]", "=", "' '", ".", "join", "(", "additional_settings", "[", "k", "]", ")", "additional_settings", ".", "update", "(", "env", ")", "for", "k", "in", "additional_settings", ":", "additional_settings", "[", "k", "]", "=", "_NormalizeEnvVarReferences", "(", "additional_settings", "[", "k", "]", ")", "return", "additional_settings" ]
https://github.com/almonk/Bind/blob/03e9e98fb8b30a58cb4fc2829f06289fa9958897/public/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py#L1251-L1326
jxcore/jxcore
b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410
deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
python
XcodeSettings.GetCflagsC
(self, configname)
return cflags_c
Returns flags that need to be added to .c, and .m compilations.
Returns flags that need to be added to .c, and .m compilations.
[ "Returns", "flags", "that", "need", "to", "be", "added", "to", ".", "c", "and", ".", "m", "compilations", "." ]
def GetCflagsC(self, configname): """Returns flags that need to be added to .c, and .m compilations.""" self.configname = configname cflags_c = [] if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi': cflags_c.append('-ansi') else: self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s') cflags_c += self._Settings().get('OTHER_CFLAGS', []) self.configname = None return cflags_c
[ "def", "GetCflagsC", "(", "self", ",", "configname", ")", ":", "self", ".", "configname", "=", "configname", "cflags_c", "=", "[", "]", "if", "self", ".", "_Settings", "(", ")", ".", "get", "(", "'GCC_C_LANGUAGE_STANDARD'", ",", "''", ")", "==", "'ansi'", ":", "cflags_c", ".", "append", "(", "'-ansi'", ")", "else", ":", "self", ".", "_Appendf", "(", "cflags_c", ",", "'GCC_C_LANGUAGE_STANDARD'", ",", "'-std=%s'", ")", "cflags_c", "+=", "self", ".", "_Settings", "(", ")", ".", "get", "(", "'OTHER_CFLAGS'", ",", "[", "]", ")", "self", ".", "configname", "=", "None", "return", "cflags_c" ]
https://github.com/jxcore/jxcore/blob/b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py#L576-L586
replit-archive/jsrepl
36d79b6288ca5d26208e8bade2a168c6ebcb2376
extern/python/unclosured/lib/python2.7/lib2to3/pygram.py
python
Symbols.__init__
(self, grammar)
Initializer. Creates an attribute for each grammar symbol (nonterminal), whose value is the symbol's type (an int >= 256).
Initializer.
[ "Initializer", "." ]
def __init__(self, grammar): """Initializer. Creates an attribute for each grammar symbol (nonterminal), whose value is the symbol's type (an int >= 256). """ for name, symbol in grammar.symbol2number.iteritems(): setattr(self, name, symbol)
[ "def", "__init__", "(", "self", ",", "grammar", ")", ":", "for", "name", ",", "symbol", "in", "grammar", ".", "symbol2number", ".", "iteritems", "(", ")", ":", "setattr", "(", "self", ",", "name", ",", "symbol", ")" ]
https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/unclosured/lib/python2.7/lib2to3/pygram.py#L22-L29
nodejs/node-convergence-archive
e11fe0c2777561827cdb7207d46b0917ef3c42a7
tools/gyp/pylib/gyp/generator/analyzer.py
python
_NamesNotIn
(names, mapping)
return [name for name in names if name not in mapping]
Returns a list of the values in |names| that are not in |mapping|.
Returns a list of the values in |names| that are not in |mapping|.
[ "Returns", "a", "list", "of", "the", "values", "in", "|names|", "that", "are", "not", "in", "|mapping|", "." ]
def _NamesNotIn(names, mapping): """Returns a list of the values in |names| that are not in |mapping|.""" return [name for name in names if name not in mapping]
[ "def", "_NamesNotIn", "(", "names", ",", "mapping", ")", ":", "return", "[", "name", "for", "name", "in", "names", "if", "name", "not", "in", "mapping", "]" ]
https://github.com/nodejs/node-convergence-archive/blob/e11fe0c2777561827cdb7207d46b0917ef3c42a7/tools/gyp/pylib/gyp/generator/analyzer.py#L482-L484
replit-archive/jsrepl
36d79b6288ca5d26208e8bade2a168c6ebcb2376
extern/python/reloop-closured/lib/python2.7/atexit.py
python
register
(func, *targs, **kargs)
return func
register a function to be executed upon normal program termination func - function to be called at exit targs - optional arguments to pass to func kargs - optional keyword arguments to pass to func func is returned to facilitate usage as a decorator.
register a function to be executed upon normal program termination
[ "register", "a", "function", "to", "be", "executed", "upon", "normal", "program", "termination" ]
def register(func, *targs, **kargs): """register a function to be executed upon normal program termination func - function to be called at exit targs - optional arguments to pass to func kargs - optional keyword arguments to pass to func func is returned to facilitate usage as a decorator. """ _exithandlers.append((func, targs, kargs)) return func
[ "def", "register", "(", "func", ",", "*", "targs", ",", "*", "*", "kargs", ")", ":", "_exithandlers", ".", "append", "(", "(", "func", ",", "targs", ",", "kargs", ")", ")", "return", "func" ]
https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/reloop-closured/lib/python2.7/atexit.py#L37-L47
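The atexit.register docstring above points out that the function is returned so it can double as a decorator. A quick usage sketch of both call styles against the standard-library module follows (the handler names and arguments are invented).

import atexit

@atexit.register
def goodbye():
    print("interpreter shutting down")

def cleanup(path, verbose=False):
    if verbose:
        print("removing", path)

# Positional and keyword arguments are stored now and passed back at exit time.
atexit.register(cleanup, "/tmp/scratch.dat", verbose=True)

Handlers run in last-in, first-out order on normal interpreter shutdown.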
wotermelon/toJump
3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f
lib/win/systrace/catapult/devil/devil/utils/parallelizer.py
python
Parallelizer.__getattr__
(self, name)
return r
Emulate getting the |name| attribute of |self|. Args: name: The name of the attribute to retrieve. Returns: A Parallelizer emulating the |name| attribute of |self|.
Emulate getting the |name| attribute of |self|.
[ "Emulate", "getting", "the", "|name|", "attribute", "of", "|self|", "." ]
def __getattr__(self, name): """Emulate getting the |name| attribute of |self|. Args: name: The name of the attribute to retrieve. Returns: A Parallelizer emulating the |name| attribute of |self|. """ self.pGet(None) r = type(self)(self._orig_objs) r._objs = [getattr(o, name) for o in self._objs] return r
[ "def", "__getattr__", "(", "self", ",", "name", ")", ":", "self", ".", "pGet", "(", "None", ")", "r", "=", "type", "(", "self", ")", "(", "self", ".", "_orig_objs", ")", "r", ".", "_objs", "=", "[", "getattr", "(", "o", ",", "name", ")", "for", "o", "in", "self", ".", "_objs", "]", "return", "r" ]
https://github.com/wotermelon/toJump/blob/3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f/lib/win/systrace/catapult/devil/devil/utils/parallelizer.py#L71-L83
Southpaw-TACTIC/TACTIC
ba9b87aef0ee3b3ea51446f25b285ebbca06f62c
src/client/tactic_client_lib/application/common/application_old.py
python
Application.import_static
(self, buffer, node_name)
import unkeyed values
import unkeyed values
[ "import", "unkeyed", "values" ]
def import_static(self, buffer, node_name): '''import unkeyed values''' pass
[ "def", "import_static", "(", "self", ",", "buffer", ",", "node_name", ")", ":", "pass" ]
https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/src/client/tactic_client_lib/application/common/application_old.py#L128-L130
odoo/odoo
8de8c196a137f4ebbf67d7c7c83fee36f873f5c8
addons/microsoft_calendar/models/microsoft_sync.py
python
MicrosoftSync._microsoft_values
(self, fields_to_sync)
Implements this method to return a dict with values formatted according to the Microsoft Calendar API :return: dict of Microsoft formatted values
Implements this method to return a dict with values formatted according to the Microsoft Calendar API :return: dict of Microsoft formatted values
[ "Implements", "this", "method", "to", "return", "a", "dict", "with", "values", "formatted", "according", "to", "the", "Microsoft", "Calendar", "API", ":", "return", ":", "dict", "of", "Microsoft", "formatted", "values" ]
def _microsoft_values(self, fields_to_sync): """Implements this method to return a dict with values formatted according to the Microsoft Calendar API :return: dict of Microsoft formatted values """ raise NotImplementedError()
[ "def", "_microsoft_values", "(", "self", ",", "fields_to_sync", ")", ":", "raise", "NotImplementedError", "(", ")" ]
https://github.com/odoo/odoo/blob/8de8c196a137f4ebbf67d7c7c83fee36f873f5c8/addons/microsoft_calendar/models/microsoft_sync.py#L362-L367
crits/crits
6b357daa5c3060cf622d3a3b0c7b41a9ca69c049
crits/core/handlers.py
python
do_add_preferred_actions
(obj_type, obj_id, username)
return {'success': True, 'object': actions}
Add all preferred actions to an object. :param obj_type: The type of object to update. :type obj_type: str :param obj_id: The ObjectId of the object to update. :type obj_id: str :param username: The user adding the preferred actions. :type username: str :returns: dict with keys: "success" (boolean), "message" (str) if failed, "object" (list of dicts) if successful.
Add all preferred actions to an object.
[ "Add", "all", "preferred", "actions", "to", "an", "object", "." ]
def do_add_preferred_actions(obj_type, obj_id, username): """ Add all preferred actions to an object. :param obj_type: The type of object to update. :type obj_type: str :param obj_id: The ObjectId of the object to update. :type obj_id: str :param username: The user adding the preferred actions. :type username: str :returns: dict with keys: "success" (boolean), "message" (str) if failed, "object" (list of dicts) if successful. """ klass = class_from_type(obj_type) if not klass: return {'success': False, 'message': 'Invalid type'} preferred_actions = Action.objects(preferred__object_type=obj_type, active='on') if not preferred_actions: return {'success': False, 'message': 'No preferred actions'} sources = user_sources(username) obj = klass.objects(id=obj_id, source__name__in=sources).first() if not obj: return {'success': False, 'message': 'Could not find object'} actions = [] # Get preferred actions and add them. for a in preferred_actions: for p in a.preferred: if (p.object_type == obj_type and obj.__getattribute__(p.object_field) == p.object_value): now = datetime.datetime.now() action = {'action_type': a.name, 'active': 'on', 'analyst': username, 'begin_date': now, 'end_date': None, 'performed_date': now, 'reason': 'Preferred action toggle', 'date': now} obj.add_action(action['action_type'], action['active'], action['analyst'], action['begin_date'], action['end_date'], action['performed_date'], action['reason'], action['date']) actions.append(action) if len(actions) < 1: return {'success': False, 'message': 'No preferred actions'} # Change status to In Progress if it is currently 'New' if obj.status == 'New': obj.set_status('In Progress') try: obj.save(username=username) except ValidationError, e: return {'success': False, 'message': e} return {'success': True, 'object': actions}
[ "def", "do_add_preferred_actions", "(", "obj_type", ",", "obj_id", ",", "username", ")", ":", "klass", "=", "class_from_type", "(", "obj_type", ")", "if", "not", "klass", ":", "return", "{", "'success'", ":", "False", ",", "'message'", ":", "'Invalid type'", "}", "preferred_actions", "=", "Action", ".", "objects", "(", "preferred__object_type", "=", "obj_type", ",", "active", "=", "'on'", ")", "if", "not", "preferred_actions", ":", "return", "{", "'success'", ":", "False", ",", "'message'", ":", "'No preferred actions'", "}", "sources", "=", "user_sources", "(", "username", ")", "obj", "=", "klass", ".", "objects", "(", "id", "=", "obj_id", ",", "source__name__in", "=", "sources", ")", ".", "first", "(", ")", "if", "not", "obj", ":", "return", "{", "'success'", ":", "False", ",", "'message'", ":", "'Could not find object'", "}", "actions", "=", "[", "]", "# Get preferred actions and add them.", "for", "a", "in", "preferred_actions", ":", "for", "p", "in", "a", ".", "preferred", ":", "if", "(", "p", ".", "object_type", "==", "obj_type", "and", "obj", ".", "__getattribute__", "(", "p", ".", "object_field", ")", "==", "p", ".", "object_value", ")", ":", "now", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "action", "=", "{", "'action_type'", ":", "a", ".", "name", ",", "'active'", ":", "'on'", ",", "'analyst'", ":", "username", ",", "'begin_date'", ":", "now", ",", "'end_date'", ":", "None", ",", "'performed_date'", ":", "now", ",", "'reason'", ":", "'Preferred action toggle'", ",", "'date'", ":", "now", "}", "obj", ".", "add_action", "(", "action", "[", "'action_type'", "]", ",", "action", "[", "'active'", "]", ",", "action", "[", "'analyst'", "]", ",", "action", "[", "'begin_date'", "]", ",", "action", "[", "'end_date'", "]", ",", "action", "[", "'performed_date'", "]", ",", "action", "[", "'reason'", "]", ",", "action", "[", "'date'", "]", ")", "actions", ".", "append", "(", "action", ")", "if", "len", "(", "actions", ")", "<", "1", ":", "return", "{", "'success'", ":", "False", ",", "'message'", ":", "'No preferred actions'", "}", "# Change status to In Progress if it is currently 'New'", "if", "obj", ".", "status", "==", "'New'", ":", "obj", ".", "set_status", "(", "'In Progress'", ")", "try", ":", "obj", ".", "save", "(", "username", "=", "username", ")", "except", "ValidationError", ",", "e", ":", "return", "{", "'success'", ":", "False", ",", "'message'", ":", "e", "}", "return", "{", "'success'", ":", "True", ",", "'object'", ":", "actions", "}" ]
https://github.com/crits/crits/blob/6b357daa5c3060cf622d3a3b0c7b41a9ca69c049/crits/core/handlers.py#L1474-L1542
facebookarchive/nuclide
2a2a0a642d136768b7d2a6d35a652dc5fb77d70a
modules/atom-ide-debugger-python/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/third_party/pep8/autopep8.py
python
FixPEP8.fix_e303
(self, result)
return modified_lines
Remove extra blank lines.
Remove extra blank lines.
[ "Remove", "extra", "blank", "lines", "." ]
def fix_e303(self, result): """Remove extra blank lines.""" delete_linenum = int(result['info'].split('(')[1].split(')')[0]) - 2 delete_linenum = max(1, delete_linenum) # We need to count because pycodestyle reports an offset line number if # there are comments. cnt = 0 line = result['line'] - 2 modified_lines = [] while cnt < delete_linenum and line >= 0: if not self.source[line].strip(): self.source[line] = '' modified_lines.append(1 + line) # Line indexed at 1 cnt += 1 line -= 1 return modified_lines
[ "def", "fix_e303", "(", "self", ",", "result", ")", ":", "delete_linenum", "=", "int", "(", "result", "[", "'info'", "]", ".", "split", "(", "'('", ")", "[", "1", "]", ".", "split", "(", "')'", ")", "[", "0", "]", ")", "-", "2", "delete_linenum", "=", "max", "(", "1", ",", "delete_linenum", ")", "# We need to count because pycodestyle reports an offset line number if", "# there are comments.", "cnt", "=", "0", "line", "=", "result", "[", "'line'", "]", "-", "2", "modified_lines", "=", "[", "]", "while", "cnt", "<", "delete_linenum", "and", "line", ">=", "0", ":", "if", "not", "self", ".", "source", "[", "line", "]", ".", "strip", "(", ")", ":", "self", ".", "source", "[", "line", "]", "=", "''", "modified_lines", ".", "append", "(", "1", "+", "line", ")", "# Line indexed at 1", "cnt", "+=", "1", "line", "-=", "1", "return", "modified_lines" ]
https://github.com/facebookarchive/nuclide/blob/2a2a0a642d136768b7d2a6d35a652dc5fb77d70a/modules/atom-ide-debugger-python/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/third_party/pep8/autopep8.py#L760-L777
weitechen/anafora
ce2ca2c20b3f432280eb5041a089f7713ec82537
src/anafora/projectSetting.py
python
Schema.__init__
(self, name, modes = {})
@type name: str @type modes: dict
[]
def __init__(self, name, modes = {}): """ @type name: str @type modes: dict """ self.name = name self.modes = modes
[ "def", "__init__", "(", "self", ",", "name", ",", "modes", "=", "{", "}", ")", ":", "self", ".", "name", "=", "name", "self", ".", "modes", "=", "modes" ]
https://github.com/weitechen/anafora/blob/ce2ca2c20b3f432280eb5041a089f7713ec82537/src/anafora/projectSetting.py#L149-L155
wotermelon/toJump
3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f
lib/win/systrace/catapult/devil/devil/android/device_utils.py
python
DeviceUtils.screen_density
(self)
return DPI_TO_DENSITY.get(self.pixel_density, 'tvdpi')
Returns the screen density of the device.
Returns the screen density of the device.
[ "Returns", "the", "screen", "density", "of", "the", "device", "." ]
def screen_density(self): """Returns the screen density of the device.""" DPI_TO_DENSITY = { 120: 'ldpi', 160: 'mdpi', 240: 'hdpi', 320: 'xhdpi', 480: 'xxhdpi', 640: 'xxxhdpi', } return DPI_TO_DENSITY.get(self.pixel_density, 'tvdpi')
[ "def", "screen_density", "(", "self", ")", ":", "DPI_TO_DENSITY", "=", "{", "120", ":", "'ldpi'", ",", "160", ":", "'mdpi'", ",", "240", ":", "'hdpi'", ",", "320", ":", "'xhdpi'", ",", "480", ":", "'xxhdpi'", ",", "640", ":", "'xxxhdpi'", ",", "}", "return", "DPI_TO_DENSITY", ".", "get", "(", "self", ".", "pixel_density", ",", "'tvdpi'", ")" ]
https://github.com/wotermelon/toJump/blob/3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f/lib/win/systrace/catapult/devil/devil/android/device_utils.py#L1945-L1955
pinterest/pinball
c54a206cf6e3dbadb056c189f741d75828c02f98
pinball_ext/executor/cluster_executor.py
python
ClusterExecutor.does_table_exist
(self, table, database='default')
return True
Return True if the table exists in the database, else False.
Return True if the table exists in the database, else False.
[ "Return", "True", "if", "the", "table", "exists", "in", "the", "database", "else", "False", "." ]
def does_table_exist(self, table, database='default'): """Return True if the table exists in the database, else False.""" rows, stderr, job_ids = self.run_hive_query( "USE %s; DESCRIBE EXTENDED %s;" % (database, table), upload_archive=False) for row in rows[0]: if row.startswith('Table %s does not exist' % table): return False return True
[ "def", "does_table_exist", "(", "self", ",", "table", ",", "database", "=", "'default'", ")", ":", "rows", ",", "stderr", ",", "job_ids", "=", "self", ".", "run_hive_query", "(", "\"USE %s; DESCRIBE EXTENDED %s;\"", "%", "(", "database", ",", "table", ")", ",", "upload_archive", "=", "False", ")", "for", "row", "in", "rows", "[", "0", "]", ":", "if", "row", ".", "startswith", "(", "'Table %s does not exist'", "%", "table", ")", ":", "return", "False", "return", "True" ]
https://github.com/pinterest/pinball/blob/c54a206cf6e3dbadb056c189f741d75828c02f98/pinball_ext/executor/cluster_executor.py#L343-L351
korolr/dotfiles
8e46933503ecb8d8651739ffeb1d2d4f0f5c6524
.config/sublime-text-3/Packages/python-jinja2/all/jinja2/compiler.py
python
find_undeclared
(nodes, names)
return visitor.undeclared
Check if the names passed are accessed undeclared. The return value is a set of all the undeclared names from the sequence of names found.
Check if the names passed are accessed undeclared. The return value is a set of all the undeclared names from the sequence of names found.
[ "Check", "if", "the", "names", "passed", "are", "accessed", "undeclared", ".", "The", "return", "value", "is", "a", "set", "of", "all", "the", "undeclared", "names", "from", "the", "sequence", "of", "names", "found", "." ]
def find_undeclared(nodes, names): """Check if the names passed are accessed undeclared. The return value is a set of all the undeclared names from the sequence of names found. """ visitor = UndeclaredNameVisitor(names) try: for node in nodes: visitor.visit(node) except VisitorExit: pass return visitor.undeclared
[ "def", "find_undeclared", "(", "nodes", ",", "names", ")", ":", "visitor", "=", "UndeclaredNameVisitor", "(", "names", ")", "try", ":", "for", "node", "in", "nodes", ":", "visitor", ".", "visit", "(", "node", ")", "except", "VisitorExit", ":", "pass", "return", "visitor", ".", "undeclared" ]
https://github.com/korolr/dotfiles/blob/8e46933503ecb8d8651739ffeb1d2d4f0f5c6524/.config/sublime-text-3/Packages/python-jinja2/all/jinja2/compiler.py#L89-L99
mozilla/chromeless
4e6c980479b0f91b76830dc7b58ff6ae9a0b3978
impl/markdown/__init__.py
python
Markdown.registerExtension
(self, extension)
This gets called by the extension
This gets called by the extension
[ "This", "gets", "called", "by", "the", "extension" ]
def registerExtension(self, extension): """ This gets called by the extension """ self.registeredExtensions.append(extension)
[ "def", "registerExtension", "(", "self", ",", "extension", ")", ":", "self", ".", "registeredExtensions", ".", "append", "(", "extension", ")" ]
https://github.com/mozilla/chromeless/blob/4e6c980479b0f91b76830dc7b58ff6ae9a0b3978/impl/markdown/__init__.py#L337-L339
atom-community/ide-python
c046f9c2421713b34baa22648235541c5bb284fe
dist/debugger/VendorLib/vs-py-debugger/pythonFiles/parso/pgen2/pgen.py
python
generate_grammar
(bnf_text, token_namespace)
return p.make_grammar()
``bnf_text`` is a grammar in extended BNF (using * for repetition, + for at-least-once repetition, [] for optional parts, | for alternatives and () for grouping). It's not EBNF according to ISO/IEC 14977. It's a dialect Python uses in its own parser.
``bnf_text`` is a grammar in extended BNF (using * for repetition, + for at-least-once repetition, [] for optional parts, | for alternatives and () for grouping).
[ "bnf_text", "is", "a", "grammar", "in", "extended", "BNF", "(", "using", "*", "for", "repetition", "+", "for", "at", "-", "least", "-", "once", "repetition", "[]", "for", "optional", "parts", "|", "for", "alternatives", "and", "()", "for", "grouping", ")", "." ]
def generate_grammar(bnf_text, token_namespace): """ ``bnf_text`` is a grammar in extended BNF (using * for repetition, + for at-least-once repetition, [] for optional parts, | for alternatives and () for grouping). It's not EBNF according to ISO/IEC 14977. It's a dialect Python uses in its own parser. """ p = ParserGenerator(bnf_text, token_namespace) return p.make_grammar()
[ "def", "generate_grammar", "(", "bnf_text", ",", "token_namespace", ")", ":", "p", "=", "ParserGenerator", "(", "bnf_text", ",", "token_namespace", ")", "return", "p", ".", "make_grammar", "(", ")" ]
https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/dist/debugger/VendorLib/vs-py-debugger/pythonFiles/parso/pgen2/pgen.py#L390-L400
wsvincent/djangoforprofessionals
347e307035abae1ad33bf511fba17553e88f07b7
ch3-books/manage.py
python
main
()
Run administrative tasks.
Run administrative tasks.
[ "Run", "administrative", "tasks", "." ]
def main(): """Run administrative tasks.""" os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings') try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc execute_from_command_line(sys.argv)
[ "def", "main", "(", ")", ":", "os", ".", "environ", ".", "setdefault", "(", "'DJANGO_SETTINGS_MODULE'", ",", "'config.settings'", ")", "try", ":", "from", "django", ".", "core", ".", "management", "import", "execute_from_command_line", "except", "ImportError", "as", "exc", ":", "raise", "ImportError", "(", "\"Couldn't import Django. Are you sure it's installed and \"", "\"available on your PYTHONPATH environment variable? Did you \"", "\"forget to activate a virtual environment?\"", ")", "from", "exc", "execute_from_command_line", "(", "sys", ".", "argv", ")" ]
https://github.com/wsvincent/djangoforprofessionals/blob/347e307035abae1ad33bf511fba17553e88f07b7/ch3-books/manage.py#L7-L18
Kitware/tangelo
470034ee9b3d7a01becc1ce5fddc7adc1d5263ef
tangelo/tangelo/__init__.py
python
types
(**typefuncs)
return wrap
Decorate a function that takes strings to one that takes typed values. The decorator's arguments are functions to perform type conversion. The positional and keyword arguments will be mapped to the positional and keyword arguments of the decoratored function. This allows web-based service functions, which by design always are passed string arguments, to be declared as functions taking typed arguments instead, eliminating the overhead of having to perform type conversions manually. If type conversion fails for any argument, the wrapped function will return a dict describing the exception that was raised.
Decorate a function that takes strings to one that takes typed values.
[ "Decorate", "a", "function", "that", "takes", "strings", "to", "one", "that", "takes", "typed", "values", "." ]
def types(**typefuncs): """ Decorate a function that takes strings to one that takes typed values. The decorator's arguments are functions to perform type conversion. The positional and keyword arguments will be mapped to the positional and keyword arguments of the decoratored function. This allows web-based service functions, which by design always are passed string arguments, to be declared as functions taking typed arguments instead, eliminating the overhead of having to perform type conversions manually. If type conversion fails for any argument, the wrapped function will return a dict describing the exception that was raised. """ def wrap(f): @functools.wraps(f) def typed_func(*pargs, **kwargs): # Analyze the incoming arguments so we know how to apply the # type-conversion functions in `typefuncs`. argspec = inspect.getargspec(f) # The `args` property contains the list of named arguments passed to # f. Construct a dict mapping from these names to the values that # were passed. # # It is possible that `args` contains names that are not represented # in `pargs`, if some of the arguments are passed as keyword # arguments. In this case, the relative shortness of `pargs` will # cause the call to zip() to truncate the `args` list, and the # keyword-style passed arguments will simply be present in `kwargs`. pargs_dict = {name: value for (name, value) in zip(argspec.args, pargs)} # Begin converting arguments according to the functions given in # `typefuncs`. If a given name does not appear in `typefuncs`, # simply leave it unchanged. If a name appears in `typefuncs` that # does not appear in the argument list, this is considered an error. try: for name, func in typefuncs.iteritems(): if name in pargs_dict: pargs_dict[name] = func(pargs_dict[name]) elif name in kwargs: kwargs[name] = func(kwargs[name]) else: http_status(400, "Unknown Argument Name") content_type("application/json") return {"error": "'%s' was registered for type conversion but did not appear in the arguments list" % (name)} except ValueError as e: http_status(400, "Input Value Conversion Failed") content_type("application/json") return {"error": str(e)} # Unroll `pargs` into a list of arguments that are in the correct # order. pargs = [] for name in argspec.args: try: pargs.append(pargs_dict[name]) except KeyError: break # Call the wrapped function using the converted arguments. return f(*pargs, **kwargs) typed_func.typefuncs = typefuncs return typed_func return wrap
[ "def", "types", "(", "*", "*", "typefuncs", ")", ":", "def", "wrap", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "typed_func", "(", "*", "pargs", ",", "*", "*", "kwargs", ")", ":", "# Analyze the incoming arguments so we know how to apply the", "# type-conversion functions in `typefuncs`.", "argspec", "=", "inspect", ".", "getargspec", "(", "f", ")", "# The `args` property contains the list of named arguments passed to", "# f. Construct a dict mapping from these names to the values that", "# were passed.", "#", "# It is possible that `args` contains names that are not represented", "# in `pargs`, if some of the arguments are passed as keyword", "# arguments. In this case, the relative shortness of `pargs` will", "# cause the call to zip() to truncate the `args` list, and the", "# keyword-style passed arguments will simply be present in `kwargs`.", "pargs_dict", "=", "{", "name", ":", "value", "for", "(", "name", ",", "value", ")", "in", "zip", "(", "argspec", ".", "args", ",", "pargs", ")", "}", "# Begin converting arguments according to the functions given in", "# `typefuncs`. If a given name does not appear in `typefuncs`,", "# simply leave it unchanged. If a name appears in `typefuncs` that", "# does not appear in the argument list, this is considered an error.", "try", ":", "for", "name", ",", "func", "in", "typefuncs", ".", "iteritems", "(", ")", ":", "if", "name", "in", "pargs_dict", ":", "pargs_dict", "[", "name", "]", "=", "func", "(", "pargs_dict", "[", "name", "]", ")", "elif", "name", "in", "kwargs", ":", "kwargs", "[", "name", "]", "=", "func", "(", "kwargs", "[", "name", "]", ")", "else", ":", "http_status", "(", "400", ",", "\"Unknown Argument Name\"", ")", "content_type", "(", "\"application/json\"", ")", "return", "{", "\"error\"", ":", "\"'%s' was registered for type conversion but did not appear in the arguments list\"", "%", "(", "name", ")", "}", "except", "ValueError", "as", "e", ":", "http_status", "(", "400", ",", "\"Input Value Conversion Failed\"", ")", "content_type", "(", "\"application/json\"", ")", "return", "{", "\"error\"", ":", "str", "(", "e", ")", "}", "# Unroll `pargs` into a list of arguments that are in the correct", "# order.", "pargs", "=", "[", "]", "for", "name", "in", "argspec", ".", "args", ":", "try", ":", "pargs", ".", "append", "(", "pargs_dict", "[", "name", "]", ")", "except", "KeyError", ":", "break", "# Call the wrapped function using the converted arguments.", "return", "f", "(", "*", "pargs", ",", "*", "*", "kwargs", ")", "typed_func", ".", "typefuncs", "=", "typefuncs", "return", "typed_func", "return", "wrap" ]
https://github.com/Kitware/tangelo/blob/470034ee9b3d7a01becc1ce5fddc7adc1d5263ef/tangelo/tangelo/__init__.py#L221-L286
xtk/X
04c1aa856664a8517d23aefd94c470d47130aead
lib/selenium/selenium/webdriver/remote/webdriver.py
python
WebDriver.execute
(self, driver_command, params=None)
return {'success': 0, 'value': None, 'sessionId': self.session_id}
Sends a command to be executed by a command.CommandExecutor. :Args: - driver_command: The name of the command to execute as a string. - params: A dictionary of named parameters to send with the command. :Returns: The command's JSON response loaded into a dictionary object.
Sends a command to be executed by a command.CommandExecutor.
[ "Sends", "a", "command", "to", "be", "executed", "by", "a", "command", ".", "CommandExecutor", "." ]
def execute(self, driver_command, params=None): """ Sends a command to be executed by a command.CommandExecutor. :Args: - driver_command: The name of the command to execute as a string. - params: A dictionary of named parameters to send with the command. :Returns: The command's JSON response loaded into a dictionary object. """ if not params: params = {'sessionId': self.session_id} elif 'sessionId' not in params: params['sessionId'] = self.session_id params = self._wrap_value(params) response = self.command_executor.execute(driver_command, params) if response: self.error_handler.check_response(response) response['value'] = self._unwrap_value( response.get('value', None)) return response # If the server doesn't send a response, assume the command was # a success return {'success': 0, 'value': None, 'sessionId': self.session_id}
[ "def", "execute", "(", "self", ",", "driver_command", ",", "params", "=", "None", ")", ":", "if", "not", "params", ":", "params", "=", "{", "'sessionId'", ":", "self", ".", "session_id", "}", "elif", "'sessionId'", "not", "in", "params", ":", "params", "[", "'sessionId'", "]", "=", "self", ".", "session_id", "params", "=", "self", ".", "_wrap_value", "(", "params", ")", "response", "=", "self", ".", "command_executor", ".", "execute", "(", "driver_command", ",", "params", ")", "if", "response", ":", "self", ".", "error_handler", ".", "check_response", "(", "response", ")", "response", "[", "'value'", "]", "=", "self", ".", "_unwrap_value", "(", "response", ".", "get", "(", "'value'", ",", "None", ")", ")", "return", "response", "# If the server doesn't send a response, assume the command was", "# a success", "return", "{", "'success'", ":", "0", ",", "'value'", ":", "None", ",", "'sessionId'", ":", "self", ".", "session_id", "}" ]
https://github.com/xtk/X/blob/04c1aa856664a8517d23aefd94c470d47130aead/lib/selenium/selenium/webdriver/remote/webdriver.py#L141-L166
facebookarchive/nuclide
2a2a0a642d136768b7d2a6d35a652dc5fb77d70a
modules/atom-ide-debugger-python/VendorLib/vs-py-debugger/pythonFiles/PythonTools/visualstudio_py_repl.py
python
ReplBackend._cmd_inpl
(self)
handles the input command which returns a string of input
handles the input command which returns a string of input
[ "handles", "the", "input", "command", "which", "returns", "a", "string", "of", "input" ]
def _cmd_inpl(self): """handles the input command which returns a string of input""" self.input_string = read_string(self.conn) self.input_event.release()
[ "def", "_cmd_inpl", "(", "self", ")", ":", "self", ".", "input_string", "=", "read_string", "(", "self", ".", "conn", ")", "self", ".", "input_event", ".", "release", "(", ")" ]
https://github.com/facebookarchive/nuclide/blob/2a2a0a642d136768b7d2a6d35a652dc5fb77d70a/modules/atom-ide-debugger-python/VendorLib/vs-py-debugger/pythonFiles/PythonTools/visualstudio_py_repl.py#L335-L338
zhouxinkai/awesome-python3-webapp
3535cb65e7b4acaca31a4c32a8ca706d6d6253f3
www/markdown2.py
python
Markdown.convert
(self, text)
return rv
Convert the given text.
Convert the given text.
[ "Convert", "the", "given", "text", "." ]
def convert(self, text): """Convert the given text.""" # Main function. The order in which other subs are called here is # essential. Link and image substitutions need to happen before # _EscapeSpecialChars(), so that any *'s or _'s in the <a> # and <img> tags get encoded. # Clear the global hashes. If we don't clear these, you get conflicts # from other articles when generating a page which contains more than # one article (e.g. an index page that shows the N most recent # articles): self.reset() if not isinstance(text, unicode): #TODO: perhaps shouldn't presume UTF-8 for string input? text = unicode(text, 'utf-8') if self.use_file_vars: # Look for emacs-style file variable hints. emacs_vars = self._get_emacs_vars(text) if "markdown-extras" in emacs_vars: splitter = re.compile("[ ,]+") for e in splitter.split(emacs_vars["markdown-extras"]): if '=' in e: ename, earg = e.split('=', 1) try: earg = int(earg) except ValueError: pass else: ename, earg = e, None self.extras[ename] = earg # Standardize line endings: text = re.sub("\r\n|\r", "\n", text) # Make sure $text ends with a couple of newlines: text += "\n\n" # Convert all tabs to spaces. text = self._detab(text) # Strip any lines consisting only of spaces and tabs. # This makes subsequent regexen easier to write, because we can # match consecutive blank lines with /\n+/ instead of something # contorted like /[ \t]*\n+/ . text = self._ws_only_line_re.sub("", text) # strip metadata from head and extract if "metadata" in self.extras: text = self._extract_metadata(text) text = self.preprocess(text) if "fenced-code-blocks" in self.extras and not self.safe_mode: text = self._do_fenced_code_blocks(text) if self.safe_mode: text = self._hash_html_spans(text) # Turn block-level HTML blocks into hash entries text = self._hash_html_blocks(text, raw=True) if "fenced-code-blocks" in self.extras and self.safe_mode: text = self._do_fenced_code_blocks(text) # Strip link definitions, store in hashes. if "footnotes" in self.extras: # Must do footnotes first because an unlucky footnote defn # looks like a link defn: # [^4]: this "looks like a link defn" text = self._strip_footnote_definitions(text) text = self._strip_link_definitions(text) text = self._run_block_gamut(text) if "footnotes" in self.extras: text = self._add_footnotes(text) text = self.postprocess(text) text = self._unescape_special_chars(text) if self.safe_mode: text = self._unhash_html_spans(text) if "nofollow" in self.extras: text = self._a_nofollow.sub(r'<\1 rel="nofollow"\2', text) text += "\n" rv = UnicodeWithAttrs(text) if "toc" in self.extras: rv._toc = self._toc if "metadata" in self.extras: rv.metadata = self.metadata return rv
[ "def", "convert", "(", "self", ",", "text", ")", ":", "# Main function. The order in which other subs are called here is", "# essential. Link and image substitutions need to happen before", "# _EscapeSpecialChars(), so that any *'s or _'s in the <a>", "# and <img> tags get encoded.", "# Clear the global hashes. If we don't clear these, you get conflicts", "# from other articles when generating a page which contains more than", "# one article (e.g. an index page that shows the N most recent", "# articles):", "self", ".", "reset", "(", ")", "if", "not", "isinstance", "(", "text", ",", "unicode", ")", ":", "#TODO: perhaps shouldn't presume UTF-8 for string input?", "text", "=", "unicode", "(", "text", ",", "'utf-8'", ")", "if", "self", ".", "use_file_vars", ":", "# Look for emacs-style file variable hints.", "emacs_vars", "=", "self", ".", "_get_emacs_vars", "(", "text", ")", "if", "\"markdown-extras\"", "in", "emacs_vars", ":", "splitter", "=", "re", ".", "compile", "(", "\"[ ,]+\"", ")", "for", "e", "in", "splitter", ".", "split", "(", "emacs_vars", "[", "\"markdown-extras\"", "]", ")", ":", "if", "'='", "in", "e", ":", "ename", ",", "earg", "=", "e", ".", "split", "(", "'='", ",", "1", ")", "try", ":", "earg", "=", "int", "(", "earg", ")", "except", "ValueError", ":", "pass", "else", ":", "ename", ",", "earg", "=", "e", ",", "None", "self", ".", "extras", "[", "ename", "]", "=", "earg", "# Standardize line endings:", "text", "=", "re", ".", "sub", "(", "\"\\r\\n|\\r\"", ",", "\"\\n\"", ",", "text", ")", "# Make sure $text ends with a couple of newlines:", "text", "+=", "\"\\n\\n\"", "# Convert all tabs to spaces.", "text", "=", "self", ".", "_detab", "(", "text", ")", "# Strip any lines consisting only of spaces and tabs.", "# This makes subsequent regexen easier to write, because we can", "# match consecutive blank lines with /\\n+/ instead of something", "# contorted like /[ \\t]*\\n+/ .", "text", "=", "self", ".", "_ws_only_line_re", ".", "sub", "(", "\"\"", ",", "text", ")", "# strip metadata from head and extract", "if", "\"metadata\"", "in", "self", ".", "extras", ":", "text", "=", "self", ".", "_extract_metadata", "(", "text", ")", "text", "=", "self", ".", "preprocess", "(", "text", ")", "if", "\"fenced-code-blocks\"", "in", "self", ".", "extras", "and", "not", "self", ".", "safe_mode", ":", "text", "=", "self", ".", "_do_fenced_code_blocks", "(", "text", ")", "if", "self", ".", "safe_mode", ":", "text", "=", "self", ".", "_hash_html_spans", "(", "text", ")", "# Turn block-level HTML blocks into hash entries", "text", "=", "self", ".", "_hash_html_blocks", "(", "text", ",", "raw", "=", "True", ")", "if", "\"fenced-code-blocks\"", "in", "self", ".", "extras", "and", "self", ".", "safe_mode", ":", "text", "=", "self", ".", "_do_fenced_code_blocks", "(", "text", ")", "# Strip link definitions, store in hashes.", "if", "\"footnotes\"", "in", "self", ".", "extras", ":", "# Must do footnotes first because an unlucky footnote defn", "# looks like a link defn:", "# [^4]: this \"looks like a link defn\"", "text", "=", "self", ".", "_strip_footnote_definitions", "(", "text", ")", "text", "=", "self", ".", "_strip_link_definitions", "(", "text", ")", "text", "=", "self", ".", "_run_block_gamut", "(", "text", ")", "if", "\"footnotes\"", "in", "self", ".", "extras", ":", "text", "=", "self", ".", "_add_footnotes", "(", "text", ")", "text", "=", "self", ".", "postprocess", "(", "text", ")", "text", "=", "self", ".", "_unescape_special_chars", "(", "text", ")", "if", "self", ".", "safe_mode", 
":", "text", "=", "self", ".", "_unhash_html_spans", "(", "text", ")", "if", "\"nofollow\"", "in", "self", ".", "extras", ":", "text", "=", "self", ".", "_a_nofollow", ".", "sub", "(", "r'<\\1 rel=\"nofollow\"\\2'", ",", "text", ")", "text", "+=", "\"\\n\"", "rv", "=", "UnicodeWithAttrs", "(", "text", ")", "if", "\"toc\"", "in", "self", ".", "extras", ":", "rv", ".", "_toc", "=", "self", ".", "_toc", "if", "\"metadata\"", "in", "self", ".", "extras", ":", "rv", ".", "metadata", "=", "self", ".", "metadata", "return", "rv" ]
https://github.com/zhouxinkai/awesome-python3-webapp/blob/3535cb65e7b4acaca31a4c32a8ca706d6d6253f3/www/markdown2.py#L260-L356
redapple0204/my-boring-python
1ab378e9d4f39ad920ff542ef3b2db68f0575a98
pythonenv3.8/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/tarfile.py
python
copyfileobj
(src, dst, length=None)
return
Copy length bytes from fileobj src to fileobj dst. If length is None, copy the entire content.
Copy length bytes from fileobj src to fileobj dst. If length is None, copy the entire content.
[ "Copy", "length", "bytes", "from", "fileobj", "src", "to", "fileobj", "dst", ".", "If", "length", "is", "None", "copy", "the", "entire", "content", "." ]
def copyfileobj(src, dst, length=None): """Copy length bytes from fileobj src to fileobj dst. If length is None, copy the entire content. """ if length == 0: return if length is None: while True: buf = src.read(16*1024) if not buf: break dst.write(buf) return BUFSIZE = 16 * 1024 blocks, remainder = divmod(length, BUFSIZE) for b in range(blocks): buf = src.read(BUFSIZE) if len(buf) < BUFSIZE: raise IOError("end of file reached") dst.write(buf) if remainder != 0: buf = src.read(remainder) if len(buf) < remainder: raise IOError("end of file reached") dst.write(buf) return
[ "def", "copyfileobj", "(", "src", ",", "dst", ",", "length", "=", "None", ")", ":", "if", "length", "==", "0", ":", "return", "if", "length", "is", "None", ":", "while", "True", ":", "buf", "=", "src", ".", "read", "(", "16", "*", "1024", ")", "if", "not", "buf", ":", "break", "dst", ".", "write", "(", "buf", ")", "return", "BUFSIZE", "=", "16", "*", "1024", "blocks", ",", "remainder", "=", "divmod", "(", "length", ",", "BUFSIZE", ")", "for", "b", "in", "range", "(", "blocks", ")", ":", "buf", "=", "src", ".", "read", "(", "BUFSIZE", ")", "if", "len", "(", "buf", ")", "<", "BUFSIZE", ":", "raise", "IOError", "(", "\"end of file reached\"", ")", "dst", ".", "write", "(", "buf", ")", "if", "remainder", "!=", "0", ":", "buf", "=", "src", ".", "read", "(", "remainder", ")", "if", "len", "(", "buf", ")", "<", "remainder", ":", "raise", "IOError", "(", "\"end of file reached\"", ")", "dst", ".", "write", "(", "buf", ")", "return" ]
https://github.com/redapple0204/my-boring-python/blob/1ab378e9d4f39ad920ff542ef3b2db68f0575a98/pythonenv3.8/lib/python3.8/site-packages/pip/_vendor/distlib/_backport/tarfile.py#L256-L283
nodejs/node
ac3c33c1646bf46104c15ae035982c06364da9b8
deps/v8/tools/run_perf.py
python
TraceConfig.ConsumeOutput
(self, output, result_tracker)
return result
Extracts trace results from the output. Args: output: Output object from the test run. result_tracker: Result tracker to be updated. Returns: The raw extracted result value or None if an error occurred.
Extracts trace results from the output.
[ "Extracts", "trace", "results", "from", "the", "output", "." ]
def ConsumeOutput(self, output, result_tracker): """Extracts trace results from the output. Args: output: Output object from the test run. result_tracker: Result tracker to be updated. Returns: The raw extracted result value or None if an error occurred. """ result = None stddev = None try: result = float( re.search(self.results_regexp, output.stdout, re.M).group(1)) except ValueError: result_tracker.AddError( 'Regexp "%s" returned a non-numeric for test %s.' % (self.results_regexp, self.name)) except: result_tracker.AddError( 'Regexp "%s" did not match for test %s.' % (self.results_regexp, self.name)) try: if self.stddev_regexp: if result_tracker.TraceHasStdDev(self): result_tracker.AddError( 'Test %s should only run once since a stddev is provided by the ' 'test.' % self.name) stddev = re.search(self.stddev_regexp, output.stdout, re.M).group(1) except: result_tracker.AddError( 'Regexp "%s" did not match for test %s.' % (self.stddev_regexp, self.name)) if result: result_tracker.AddTraceResult(self, result, stddev) return result
[ "def", "ConsumeOutput", "(", "self", ",", "output", ",", "result_tracker", ")", ":", "result", "=", "None", "stddev", "=", "None", "try", ":", "result", "=", "float", "(", "re", ".", "search", "(", "self", ".", "results_regexp", ",", "output", ".", "stdout", ",", "re", ".", "M", ")", ".", "group", "(", "1", ")", ")", "except", "ValueError", ":", "result_tracker", ".", "AddError", "(", "'Regexp \"%s\" returned a non-numeric for test %s.'", "%", "(", "self", ".", "results_regexp", ",", "self", ".", "name", ")", ")", "except", ":", "result_tracker", ".", "AddError", "(", "'Regexp \"%s\" did not match for test %s.'", "%", "(", "self", ".", "results_regexp", ",", "self", ".", "name", ")", ")", "try", ":", "if", "self", ".", "stddev_regexp", ":", "if", "result_tracker", ".", "TraceHasStdDev", "(", "self", ")", ":", "result_tracker", ".", "AddError", "(", "'Test %s should only run once since a stddev is provided by the '", "'test.'", "%", "self", ".", "name", ")", "stddev", "=", "re", ".", "search", "(", "self", ".", "stddev_regexp", ",", "output", ".", "stdout", ",", "re", ".", "M", ")", ".", "group", "(", "1", ")", "except", ":", "result_tracker", ".", "AddError", "(", "'Regexp \"%s\" did not match for test %s.'", "%", "(", "self", ".", "stddev_regexp", ",", "self", ".", "name", ")", ")", "if", "result", ":", "result_tracker", ".", "AddTraceResult", "(", "self", ",", "result", ",", "stddev", ")", "return", "result" ]
https://github.com/nodejs/node/blob/ac3c33c1646bf46104c15ae035982c06364da9b8/deps/v8/tools/run_perf.py#L402-L441
demi6od/ChromeFuzzer
4eaf1084d5f8fd20897706edf8b67bfbbd6380fc
PocSmplify/psutil-master/psutil/__init__.py
python
Process.num_threads
(self)
return self._proc.num_threads()
Return the number of threads used by this process.
Return the number of threads used by this process.
[ "Return", "the", "number", "of", "threads", "used", "by", "this", "process", "." ]
def num_threads(self): """Return the number of threads used by this process.""" return self._proc.num_threads()
[ "def", "num_threads", "(", "self", ")", ":", "return", "self", ".", "_proc", ".", "num_threads", "(", ")" ]
https://github.com/demi6od/ChromeFuzzer/blob/4eaf1084d5f8fd20897706edf8b67bfbbd6380fc/PocSmplify/psutil-master/psutil/__init__.py#L697-L699
erlerobot/robot_blockly
c2b334502a7dea035a6aadf5ad65ab0e733f7f03
frontend/closure-library/closure/bin/calcdeps.py
python
GetInputsFromOptions
(options)
return FilterByExcludes(options, inputs)
Generates the inputs from flag options. Args: options: The flags to calcdeps. Returns: A list of inputs (strings).
Generates the inputs from flag options.
[ "Generates", "the", "inputs", "from", "flag", "options", "." ]
def GetInputsFromOptions(options): """Generates the inputs from flag options. Args: options: The flags to calcdeps. Returns: A list of inputs (strings). """ inputs = options.inputs if not inputs: # Parse stdin logging.info('No inputs specified. Reading from stdin...') inputs = filter(None, [line.strip('\n') for line in sys.stdin.readlines()]) logging.info('Scanning files...') inputs = ExpandDirectories(inputs) return FilterByExcludes(options, inputs)
[ "def", "GetInputsFromOptions", "(", "options", ")", ":", "inputs", "=", "options", ".", "inputs", "if", "not", "inputs", ":", "# Parse stdin", "logging", ".", "info", "(", "'No inputs specified. Reading from stdin...'", ")", "inputs", "=", "filter", "(", "None", ",", "[", "line", ".", "strip", "(", "'\\n'", ")", "for", "line", "in", "sys", ".", "stdin", ".", "readlines", "(", ")", "]", ")", "logging", ".", "info", "(", "'Scanning files...'", ")", "inputs", "=", "ExpandDirectories", "(", "inputs", ")", "return", "FilterByExcludes", "(", "options", ",", "inputs", ")" ]
https://github.com/erlerobot/robot_blockly/blob/c2b334502a7dea035a6aadf5ad65ab0e733f7f03/frontend/closure-library/closure/bin/calcdeps.py#L432-L448
Southpaw-TACTIC/TACTIC
ba9b87aef0ee3b3ea51446f25b285ebbca06f62c
3rd_party/python2/site-packages/cheroot/server.py
python
ChunkedRFile.read
(self, size=None)
Read a chunk from rfile buffer and return it. Args: size (int): amount of data to read Returns: bytes: Chunk from rfile, limited by size if specified.
Read a chunk from rfile buffer and return it.
[ "Read", "a", "chunk", "from", "rfile", "buffer", "and", "return", "it", "." ]
def read(self, size=None): """Read a chunk from rfile buffer and return it. Args: size (int): amount of data to read Returns: bytes: Chunk from rfile, limited by size if specified. """ data = EMPTY if size == 0: return data while True: if size and len(data) >= size: return data if not self.buffer: self._fetch() if not self.buffer: # EOF return data if size: remaining = size - len(data) data += self.buffer[:remaining] self.buffer = self.buffer[remaining:] else: data += self.buffer self.buffer = EMPTY
[ "def", "read", "(", "self", ",", "size", "=", "None", ")", ":", "data", "=", "EMPTY", "if", "size", "==", "0", ":", "return", "data", "while", "True", ":", "if", "size", "and", "len", "(", "data", ")", ">=", "size", ":", "return", "data", "if", "not", "self", ".", "buffer", ":", "self", ".", "_fetch", "(", ")", "if", "not", "self", ".", "buffer", ":", "# EOF", "return", "data", "if", "size", ":", "remaining", "=", "size", "-", "len", "(", "data", ")", "data", "+=", "self", ".", "buffer", "[", ":", "remaining", "]", "self", ".", "buffer", "=", "self", ".", "buffer", "[", "remaining", ":", "]", "else", ":", "data", "+=", "self", ".", "buffer", "self", ".", "buffer", "=", "EMPTY" ]
https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/3rd_party/python2/site-packages/cheroot/server.py#L436-L466
nodejs/node-convergence-archive
e11fe0c2777561827cdb7207d46b0917ef3c42a7
tools/gyp/pylib/gyp/msvs_emulation.py
python
_GenericRetrieve
(root, default, path)
return _GenericRetrieve(root.get(path[0]), default, path[1:])
Given a list of dictionary keys |path| and a tree of dicts |root|, find value at path, or return |default| if any of the path doesn't exist.
Given a list of dictionary keys |path| and a tree of dicts |root|, find value at path, or return |default| if any of the path doesn't exist.
[ "Given", "a", "list", "of", "dictionary", "keys", "|path|", "and", "a", "tree", "of", "dicts", "|root|", "find", "value", "at", "path", "or", "return", "|default|", "if", "any", "of", "the", "path", "doesn", "t", "exist", "." ]
def _GenericRetrieve(root, default, path): """Given a list of dictionary keys |path| and a tree of dicts |root|, find value at path, or return |default| if any of the path doesn't exist.""" if not root: return default if not path: return root return _GenericRetrieve(root.get(path[0]), default, path[1:])
[ "def", "_GenericRetrieve", "(", "root", ",", "default", ",", "path", ")", ":", "if", "not", "root", ":", "return", "default", "if", "not", "path", ":", "return", "root", "return", "_GenericRetrieve", "(", "root", ".", "get", "(", "path", "[", "0", "]", ")", ",", "default", ",", "path", "[", "1", ":", "]", ")" ]
https://github.com/nodejs/node-convergence-archive/blob/e11fe0c2777561827cdb7207d46b0917ef3c42a7/tools/gyp/pylib/gyp/msvs_emulation.py#L69-L76
lambdamusic/Ontospy
534e408372edd392590e12839c32a403430aac23
ontospy/extras/shell_lib.py
python
Shell.complete_serialize
(self, text, line, begidx, endidx)
return completions
completion for serialize command
completion for serialize command
[ "completion", "for", "serialize", "command" ]
def complete_serialize(self, text, line, begidx, endidx): """completion for serialize command""" opts = self.SERIALIZE_OPTS if not text: completions = opts else: completions = [f for f in opts if f.startswith(text) ] return completions
[ "def", "complete_serialize", "(", "self", ",", "text", ",", "line", ",", "begidx", ",", "endidx", ")", ":", "opts", "=", "self", ".", "SERIALIZE_OPTS", "if", "not", "text", ":", "completions", "=", "opts", "else", ":", "completions", "=", "[", "f", "for", "f", "in", "opts", "if", "f", ".", "startswith", "(", "text", ")", "]", "return", "completions" ]
https://github.com/lambdamusic/Ontospy/blob/534e408372edd392590e12839c32a403430aac23/ontospy/extras/shell_lib.py#L1276-L1288
django-cms/djangocms-text-ckeditor
5d672e317f619f43b958cde81be8b1cf34b3c8e0
djangocms_text_ckeditor/models.py
python
AbstractText.post_copy
(self, old_instance, ziplist)
Fix references to plugins
Fix references to plugins
[ "Fix", "references", "to", "plugins" ]
def post_copy(self, old_instance, ziplist): """ Fix references to plugins """ replace_ids = {} for new, old in ziplist: replace_ids[old.pk] = new.pk old_text = old_instance.get_plugin_instance()[0].body self.body = replace_plugin_tags(old_text, replace_ids) self.save()
[ "def", "post_copy", "(", "self", ",", "old_instance", ",", "ziplist", ")", ":", "replace_ids", "=", "{", "}", "for", "new", ",", "old", "in", "ziplist", ":", "replace_ids", "[", "old", ".", "pk", "]", "=", "new", ".", "pk", "old_text", "=", "old_instance", ".", "get_plugin_instance", "(", ")", "[", "0", "]", ".", "body", "self", ".", "body", "=", "replace_plugin_tags", "(", "old_text", ",", "replace_ids", ")", "self", ".", "save", "(", ")" ]
https://github.com/django-cms/djangocms-text-ckeditor/blob/5d672e317f619f43b958cde81be8b1cf34b3c8e0/djangocms_text_ckeditor/models.py#L110-L120
mdipierro/web2py-appliances
f97658293d51519e5f06e1ed503ee85f8154fcf3
FacebookConnectExample/modules/plugin_fbconnect/facebook.py
python
require_add
(request, settings, next=None)
return fb
Used for redirecting the user to the Facebook add-app page. Usage: require_add(request,settings)
Used for redirecting the user to the Facebook add-app page.
[ "Used", "for", "redirecting", "the", "user", "to", "the", "Facebook", "add", "-", "app", "page", "." ]
def require_add(request, settings, next=None): """ Used for redirecting the user to the Facebook add-app page. Usage: require_add(request,settings) """ if not request.facebook: add_instance(request,settings) fb = request.facebook if not fb.check_session(request): redirect(fb.get_login_url(next=next)) if not fb.added: redirect(fb.get_add_url(next=next)) return fb
[ "def", "require_add", "(", "request", ",", "settings", ",", "next", "=", "None", ")", ":", "if", "not", "request", ".", "facebook", ":", "add_instance", "(", "request", ",", "settings", ")", "fb", "=", "request", ".", "facebook", "if", "not", "fb", ".", "check_session", "(", "request", ")", ":", "redirect", "(", "fb", ".", "get_login_url", "(", "next", "=", "next", ")", ")", "if", "not", "fb", ".", "added", ":", "redirect", "(", "fb", ".", "get_add_url", "(", "next", "=", "next", ")", ")", "return", "fb" ]
https://github.com/mdipierro/web2py-appliances/blob/f97658293d51519e5f06e1ed503ee85f8154fcf3/FacebookConnectExample/modules/plugin_fbconnect/facebook.py#L983-L997
aaPanel/aaPanel
d2a66661dbd66948cce5a074214257550aec91ee
class/php_execute_deny.py
python
PhpExecuteDeny.del_php_deny
(self,args)
return public.returnMsg(True,'Delete Successfully')
# Add a rule forbidding a website from running PHP author: zhwen<zhw@bt.cn> :param args: website site name str :param args: deny_name rule name str :return:
# Add a rule forbidding a website from running PHP author: zhwen<zhw
[ "#", "Add", "a", "rule", "forbidding", "a", "website", "from", "running", "PHP", "author", ":", "zhwen<zhw" ]
def del_php_deny(self,args): ''' # Add a rule forbidding a website from running PHP author: zhwen<zhw@bt.cn> :param args: website site name str :param args: deny_name rule name str :return: ''' self._init_conf(args.website) deny_name = args.deny_name self._set_nginx_php_deny(deny_name) self._set_apache_php_deny(deny_name) self._set_ols_php_deny(deny_name) public.serviceReload() return public.returnMsg(True,'Delete Successfully')
[ "def", "del_php_deny", "(", "self", ",", "args", ")", ":", "self", ".", "_init_conf", "(", "args", ".", "website", ")", "deny_name", "=", "args", ".", "deny_name", "self", ".", "_set_nginx_php_deny", "(", "deny_name", ")", "self", ".", "_set_apache_php_deny", "(", "deny_name", ")", "self", ".", "_set_ols_php_deny", "(", "deny_name", ")", "public", ".", "serviceReload", "(", ")", "return", "public", ".", "returnMsg", "(", "True", ",", "'Delete Successfully'", ")" ]
https://github.com/aaPanel/aaPanel/blob/d2a66661dbd66948cce5a074214257550aec91ee/class/php_execute_deny.py#L183-L197
webrtc/apprtc
db975e22ea07a0c11a4179d4beb2feb31cf344f4
src/third_party/apiclient/http.py
python
MediaDownloadProgress.__init__
(self, resumable_progress, total_size)
Constructor. Args: resumable_progress: int, bytes received so far. total_size: int, total bytes in complete download.
Constructor.
[ "Constructor", "." ]
def __init__(self, resumable_progress, total_size): """Constructor. Args: resumable_progress: int, bytes received so far. total_size: int, total bytes in complete download. """ self.resumable_progress = resumable_progress self.total_size = total_size
[ "def", "__init__", "(", "self", ",", "resumable_progress", ",", "total_size", ")", ":", "self", ".", "resumable_progress", "=", "resumable_progress", "self", ".", "total_size", "=", "total_size" ]
https://github.com/webrtc/apprtc/blob/db975e22ea07a0c11a4179d4beb2feb31cf344f4/src/third_party/apiclient/http.py#L90-L98
jupyterlab/jupyterlab
29266626af0702ff093806df7d3a7348014d0450
jupyterlab/commands.py
python
_AppHandler._get_local_extensions
(self)
return self._get_local_data('local_extensions')
Get the locally installed extensions.
Get the locally installed extensions.
[ "Get", "the", "locally", "installed", "extensions", "." ]
def _get_local_extensions(self): """Get the locally installed extensions. """ return self._get_local_data('local_extensions')
[ "def", "_get_local_extensions", "(", "self", ")", ":", "return", "self", ".", "_get_local_data", "(", "'local_extensions'", ")" ]
https://github.com/jupyterlab/jupyterlab/blob/29266626af0702ff093806df7d3a7348014d0450/jupyterlab/commands.py#L1502-L1505
ayojs/ayo
45a1c8cf6384f5bcc81d834343c3ed9d78b97df3
deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
python
OpenOutput
(path, mode='w')
return open(path, mode)
Open |path| for writing, creating directories if necessary.
Open |path| for writing, creating directories if necessary.
[ "Open", "|path|", "for", "writing", "creating", "directories", "if", "necessary", "." ]
def OpenOutput(path, mode='w'): """Open |path| for writing, creating directories if necessary.""" gyp.common.EnsureDirExists(path) return open(path, mode)
[ "def", "OpenOutput", "(", "path", ",", "mode", "=", "'w'", ")", ":", "gyp", ".", "common", ".", "EnsureDirExists", "(", "path", ")", "return", "open", "(", "path", ",", "mode", ")" ]
https://github.com/ayojs/ayo/blob/45a1c8cf6384f5bcc81d834343c3ed9d78b97df3/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py#L1673-L1676
redapple0204/my-boring-python
1ab378e9d4f39ad920ff542ef3b2db68f0575a98
pythonenv3.8/lib/python3.8/site-packages/pip/_vendor/pkg_resources/__init__.py
python
_always_object
(classes)
return classes
Ensure object appears in the mro even for old-style classes.
Ensure object appears in the mro even for old-style classes.
[ "Ensure", "object", "appears", "in", "the", "mro", "even", "for", "old", "-", "style", "classes", "." ]
def _always_object(classes): """ Ensure object appears in the mro even for old-style classes. """ if object not in classes: return classes + (object,) return classes
[ "def", "_always_object", "(", "classes", ")", ":", "if", "object", "not", "in", "classes", ":", "return", "classes", "+", "(", "object", ",", ")", "return", "classes" ]
https://github.com/redapple0204/my-boring-python/blob/1ab378e9d4f39ad920ff542ef3b2db68f0575a98/pythonenv3.8/lib/python3.8/site-packages/pip/_vendor/pkg_resources/__init__.py#L3142-L3149
defunctzombie/libuv.js
04a76a470dfdcad14ea8f19b6f215f205a9214f8
tools/gyp/pylib/gyp/generator/make.py
python
MakefileWriter.WriteSubMake
(self, output_filename, makefile_path, targets, build_dir)
Write a "sub-project" Makefile. This is a small, wrapper Makefile that calls the top-level Makefile to build the targets from a single gyp file (i.e. a sub-project). Arguments: output_filename: sub-project Makefile name to write makefile_path: path to the top-level Makefile targets: list of "all" targets for this sub-project build_dir: build output directory, relative to the sub-project
Write a "sub-project" Makefile.
[ "Write", "a", "sub", "-", "project", "Makefile", "." ]
def WriteSubMake(self, output_filename, makefile_path, targets, build_dir): """Write a "sub-project" Makefile. This is a small, wrapper Makefile that calls the top-level Makefile to build the targets from a single gyp file (i.e. a sub-project). Arguments: output_filename: sub-project Makefile name to write makefile_path: path to the top-level Makefile targets: list of "all" targets for this sub-project build_dir: build output directory, relative to the sub-project """ ensure_directory_exists(output_filename) self.fp = open(output_filename, 'w') self.fp.write(header) # For consistency with other builders, put sub-project build output in the # sub-project dir (see test/subdirectory/gyptest-subdir-all.py). self.WriteLn('export builddir_name ?= %s' % os.path.join(os.path.dirname(output_filename), build_dir)) self.WriteLn('.PHONY: all') self.WriteLn('all:') if makefile_path: makefile_path = ' -C ' + makefile_path self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets))) self.fp.close()
[ "def", "WriteSubMake", "(", "self", ",", "output_filename", ",", "makefile_path", ",", "targets", ",", "build_dir", ")", ":", "ensure_directory_exists", "(", "output_filename", ")", "self", ".", "fp", "=", "open", "(", "output_filename", ",", "'w'", ")", "self", ".", "fp", ".", "write", "(", "header", ")", "# For consistency with other builders, put sub-project build output in the", "# sub-project dir (see test/subdirectory/gyptest-subdir-all.py).", "self", ".", "WriteLn", "(", "'export builddir_name ?= %s'", "%", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "output_filename", ")", ",", "build_dir", ")", ")", "self", ".", "WriteLn", "(", "'.PHONY: all'", ")", "self", ".", "WriteLn", "(", "'all:'", ")", "if", "makefile_path", ":", "makefile_path", "=", "' -C '", "+", "makefile_path", "self", ".", "WriteLn", "(", "'\\t$(MAKE)%s %s'", "%", "(", "makefile_path", ",", "' '", ".", "join", "(", "targets", ")", ")", ")", "self", ".", "fp", ".", "close", "(", ")" ]
https://github.com/defunctzombie/libuv.js/blob/04a76a470dfdcad14ea8f19b6f215f205a9214f8/tools/gyp/pylib/gyp/generator/make.py#L798-L822
googleglass/mirror-quickstart-python
e34077bae91657170c305702471f5c249eb1b686
lib/gflags.py
python
GetHelpWidth
()
Returns: an integer, the width of help lines that is used in TextWrap.
Returns: an integer, the width of help lines that is used in TextWrap.
[ "Returns", ":", "an", "integer", "the", "width", "of", "help", "lines", "that", "is", "used", "in", "TextWrap", "." ]
def GetHelpWidth(): """Returns: an integer, the width of help lines that is used in TextWrap.""" if (not sys.stdout.isatty()) or (termios is None) or (fcntl is None): return _help_width try: data = fcntl.ioctl(sys.stdout, termios.TIOCGWINSZ, '1234') columns = struct.unpack('hh', data)[1] # Emacs mode returns 0. # Here we assume that any value below 40 is unreasonable if columns >= 40: return columns # Returning an int as default is fine, int(int) just return the int. return int(os.getenv('COLUMNS', _help_width)) except (TypeError, IOError, struct.error): return _help_width
[ "def", "GetHelpWidth", "(", ")", ":", "if", "(", "not", "sys", ".", "stdout", ".", "isatty", "(", ")", ")", "or", "(", "termios", "is", "None", ")", "or", "(", "fcntl", "is", "None", ")", ":", "return", "_help_width", "try", ":", "data", "=", "fcntl", ".", "ioctl", "(", "sys", ".", "stdout", ",", "termios", ".", "TIOCGWINSZ", ",", "'1234'", ")", "columns", "=", "struct", ".", "unpack", "(", "'hh'", ",", "data", ")", "[", "1", "]", "# Emacs mode returns 0.", "# Here we assume that any value below 40 is unreasonable", "if", "columns", ">=", "40", ":", "return", "columns", "# Returning an int as default is fine, int(int) just return the int.", "return", "int", "(", "os", ".", "getenv", "(", "'COLUMNS'", ",", "_help_width", ")", ")", "except", "(", "TypeError", ",", "IOError", ",", "struct", ".", "error", ")", ":", "return", "_help_width" ]
https://github.com/googleglass/mirror-quickstart-python/blob/e34077bae91657170c305702471f5c249eb1b686/lib/gflags.py#L532-L547