Dataset schema (column, feature type, value-length range):

column              type            min      max
nwo                 stringlengths   5        58
sha                 stringlengths   40       40
path                stringlengths   5        172
language            stringclasses   1 value
identifier          stringlengths   1        100
parameters          stringlengths   2        3.5k
argument_list       stringclasses   1 value
return_statement    stringlengths   0        21.5k
docstring           stringlengths   2        17k
docstring_summary   stringlengths   0        6.58k
docstring_tokens    sequence
function            stringlengths   35       55.6k
function_tokens     sequence
url                 stringlengths   89       269
nodejs/node-chakracore
770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43
tools/gyp/pylib/gyp/input.py
python
DependencyGraphNode.DependenciesForLinkSettings
(self, targets)
return self._LinkDependenciesInternal(targets, include_shared_libraries)
Returns a list of dependency targets whose link_settings should be merged into this target.
Returns a list of dependency targets whose link_settings should be merged into this target.
[ "Returns", "a", "list", "of", "dependency", "targets", "whose", "link_settings", "should", "be", "merged", "into", "this", "target", "." ]
def DependenciesForLinkSettings(self, targets):
    """
    Returns a list of dependency targets whose link_settings should
    be merged into this target.
    """

    # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
    # link_settings are propagated. So for now, we will allow it, unless the
    # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
    # False. Once chrome is fixed, we can remove this flag.
    include_shared_libraries = \
        targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
    return self._LinkDependenciesInternal(targets, include_shared_libraries)
[ "def", "DependenciesForLinkSettings", "(", "self", ",", "targets", ")", ":", "# TODO(sbaig) Currently, chrome depends on the bug that shared libraries'", "# link_settings are propagated. So for now, we will allow it, unless the", "# 'allow_sharedlib_linksettings_propagation' flag is explicitly set to", "# False. Once chrome is fixed, we can remove this flag.", "include_shared_libraries", "=", "targets", "[", "self", ".", "ref", "]", ".", "get", "(", "'allow_sharedlib_linksettings_propagation'", ",", "True", ")", "return", "self", ".", "_LinkDependenciesInternal", "(", "targets", ",", "include_shared_libraries", ")" ]
https://github.com/nodejs/node-chakracore/blob/770c8dcd1bc3e0fce2d4497b4eec3fe49d829d43/tools/gyp/pylib/gyp/input.py#L1772-L1784
nodejs/node-convergence-archive
e11fe0c2777561827cdb7207d46b0917ef3c42a7
deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
python
_RuleInputsAndOutputs
(rule, trigger_file)
return (inputs, outputs)
Find the inputs and outputs generated by a rule. Arguments: rule: the rule in question. trigger_file: the main trigger for this rule. Returns: The pair of (inputs, outputs) involved in this rule.
Find the inputs and outputs generated by a rule.
[ "Find", "the", "inputs", "and", "outputs", "generated", "by", "a", "rule", "." ]
def _RuleInputsAndOutputs(rule, trigger_file):
  """Find the inputs and outputs generated by a rule.

  Arguments:
    rule: the rule in question.
    trigger_file: the main trigger for this rule.
  Returns:
    The pair of (inputs, outputs) involved in this rule.
  """
  raw_inputs = _FixPaths(rule.get('inputs', []))
  raw_outputs = _FixPaths(rule.get('outputs', []))
  inputs = OrderedSet()
  outputs = OrderedSet()
  inputs.add(trigger_file)
  for i in raw_inputs:
    inputs.add(_RuleExpandPath(i, trigger_file))
  for o in raw_outputs:
    outputs.add(_RuleExpandPath(o, trigger_file))
  return (inputs, outputs)
[ "def", "_RuleInputsAndOutputs", "(", "rule", ",", "trigger_file", ")", ":", "raw_inputs", "=", "_FixPaths", "(", "rule", ".", "get", "(", "'inputs'", ",", "[", "]", ")", ")", "raw_outputs", "=", "_FixPaths", "(", "rule", ".", "get", "(", "'outputs'", ",", "[", "]", ")", ")", "inputs", "=", "OrderedSet", "(", ")", "outputs", "=", "OrderedSet", "(", ")", "inputs", ".", "add", "(", "trigger_file", ")", "for", "i", "in", "raw_inputs", ":", "inputs", ".", "add", "(", "_RuleExpandPath", "(", "i", ",", "trigger_file", ")", ")", "for", "o", "in", "raw_outputs", ":", "outputs", ".", "add", "(", "_RuleExpandPath", "(", "o", ",", "trigger_file", ")", ")", "return", "(", "inputs", ",", "outputs", ")" ]
https://github.com/nodejs/node-convergence-archive/blob/e11fe0c2777561827cdb7207d46b0917ef3c42a7/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py#L533-L551
catmaid/CATMAID
9f3312f2eacfc6fab48e4c6f1bd24672cc9c9ecf
django/applications/catmaid/control/skeleton.py
python
skeletons_by_node_labels
(request:HttpRequest, project_id=None)
return JsonResponse(cursor.fetchall(), safe=False)
Return relationship between label IDs and skeleton IDs --- parameters: - name: label_ids[] description: IDs of the labels to find skeletons associated with required: true type: array items: type: integer paramType: form - name: label_names[] description: Alternative to `label_ids` to pass in a list label names. required: true type: array items: type: string paramType: form type: - type: array items: type: integer description: array of [label_id, [skel_id1, skel_id2, skel_id3, ...]] tuples required: true
Return relationship between label IDs and skeleton IDs --- parameters: - name: label_ids[] description: IDs of the labels to find skeletons associated with required: true type: array items: type: integer paramType: form - name: label_names[] description: Alternative to `label_ids` to pass in a list label names. required: true type: array items: type: string paramType: form type: - type: array items: type: integer description: array of [label_id, [skel_id1, skel_id2, skel_id3, ...]] tuples required: true
[ "Return", "relationship", "between", "label", "IDs", "and", "skeleton", "IDs", "---", "parameters", ":", "-", "name", ":", "label_ids", "[]", "description", ":", "IDs", "of", "the", "labels", "to", "find", "skeletons", "associated", "with", "required", ":", "true", "type", ":", "array", "items", ":", "type", ":", "integer", "paramType", ":", "form", "-", "name", ":", "label_names", "[]", "description", ":", "Alternative", "to", "label_ids", "to", "pass", "in", "a", "list", "label", "names", ".", "required", ":", "true", "type", ":", "array", "items", ":", "type", ":", "string", "paramType", ":", "form", "type", ":", "-", "type", ":", "array", "items", ":", "type", ":", "integer", "description", ":", "array", "of", "[", "label_id", "[", "skel_id1", "skel_id2", "skel_id3", "...", "]]", "tuples", "required", ":", "true" ]
def skeletons_by_node_labels(request:HttpRequest, project_id=None) -> JsonResponse:
    """Return relationship between label IDs and skeleton IDs
    ---
    parameters:
        - name: label_ids[]
          description: IDs of the labels to find skeletons associated with
          required: true
          type: array
          items:
            type: integer
          paramType: form
        - name: label_names[]
          description: Alternative to `label_ids` to pass in a list label names.
          required: true
          type: array
          items:
            type: string
          paramType: form
    type:
        - type: array
          items:
            type: integer
          description: array of [label_id, [skel_id1, skel_id2, skel_id3, ...]] tuples
          required: true
    """
    label_ids = get_request_list(request.POST, 'label_ids', default=[], map_fn=int)
    label_names = get_request_list(request.POST, 'label_names', default=[])

    if not label_ids and not label_names:
        return JsonResponse([], safe=False)

    label_class = Class.objects.get(project=project_id, class_name='label')
    labeled_as_relation = Relation.objects.get(project=project_id,
            relation_name='labeled_as')

    if label_names:
        extra_label_ids = ClassInstance.objects.filter(project_id=project_id,
                class_column=label_class,
                name__in=label_names).values_list('id', flat=True)
        label_ids.extend(extra_label_ids)

    cursor = connection.cursor()
    cursor.execute("""
        SELECT ci.id, array_agg(DISTINCT t.skeleton_id)
        FROM treenode t
        JOIN treenode_class_instance tci
            ON t.id = tci.treenode_id
        JOIN class_instance ci
            ON tci.class_instance_id = ci.id
        JOIN UNNEST(%(label_ids)s::bigint[]) label(id)
            ON label.id = ci.id
        WHERE ci.project_id = %(project_id)s
            AND tci.relation_id = %(labeled_as)s
        GROUP BY ci.id;
    """, {
        'label_ids': label_ids,
        'project_id': int(project_id),
        'labeled_as': labeled_as_relation.id
    })

    return JsonResponse(cursor.fetchall(), safe=False)
[ "def", "skeletons_by_node_labels", "(", "request", ":", "HttpRequest", ",", "project_id", "=", "None", ")", "->", "JsonResponse", ":", "label_ids", "=", "get_request_list", "(", "request", ".", "POST", ",", "'label_ids'", ",", "default", "=", "[", "]", ",", "map_fn", "=", "int", ")", "label_names", "=", "get_request_list", "(", "request", ".", "POST", ",", "'label_names'", ",", "default", "=", "[", "]", ")", "if", "not", "label_ids", "and", "not", "label_names", ":", "return", "JsonResponse", "(", "[", "]", ",", "safe", "=", "False", ")", "label_class", "=", "Class", ".", "objects", ".", "get", "(", "project", "=", "project_id", ",", "class_name", "=", "'label'", ")", "labeled_as_relation", "=", "Relation", ".", "objects", ".", "get", "(", "project", "=", "project_id", ",", "relation_name", "=", "'labeled_as'", ")", "if", "label_names", ":", "extra_label_ids", "=", "ClassInstance", ".", "objects", ".", "filter", "(", "project_id", "=", "project_id", ",", "class_column", "=", "label_class", ",", "name__in", "=", "label_names", ")", ".", "values_list", "(", "'id'", ",", "flat", "=", "True", ")", "label_ids", ".", "extend", "(", "extra_label_ids", ")", "cursor", "=", "connection", ".", "cursor", "(", ")", "cursor", ".", "execute", "(", "\"\"\"\n SELECT ci.id, array_agg(DISTINCT t.skeleton_id)\n FROM treenode t\n JOIN treenode_class_instance tci\n ON t.id = tci.treenode_id\n JOIN class_instance ci\n ON tci.class_instance_id = ci.id\n JOIN UNNEST(%(label_ids)s::bigint[]) label(id)\n ON label.id = ci.id\n WHERE ci.project_id = %(project_id)s\n AND tci.relation_id = %(labeled_as)s\n GROUP BY ci.id;\n \"\"\"", ",", "{", "'label_ids'", ":", "label_ids", ",", "'project_id'", ":", "int", "(", "project_id", ")", ",", "'labeled_as'", ":", "labeled_as_relation", ".", "id", "}", ")", "return", "JsonResponse", "(", "cursor", ".", "fetchall", "(", ")", ",", "safe", "=", "False", ")" ]
https://github.com/catmaid/CATMAID/blob/9f3312f2eacfc6fab48e4c6f1bd24672cc9c9ecf/django/applications/catmaid/control/skeleton.py#L3992-L4050
hotosm/osm-tasking-manager2
281eb61a98ca6a70e2baa5231cab88abf7e434e2
osmtm/views/project.py
python
passes_project_id_challenge
(challenge_id, project_id)
return True
Checks if challenge id is the same as project id. Returns True if yes, False if not
Checks if challenge id is the same as project id. Returns True if yes, False if not
[ "Checks", "if", "challenge", "id", "is", "the", "same", "as", "project", "id", ".", "Returns", "True", "if", "yes", "False", "if", "not" ]
def passes_project_id_challenge(challenge_id, project_id):
    """ Checks if challenge id is the same as project id.
        Returns True if yes, False if not
    """
    if not challenge_id:
        return False
    try:
        challenge_id_int = int(challenge_id)
        project_id_int = int(project_id)
    except Exception:
        return False
    if not challenge_id_int == project_id_int:
        return False
    return True
[ "def", "passes_project_id_challenge", "(", "challenge_id", ",", "project_id", ")", ":", "if", "not", "challenge_id", ":", "return", "False", "try", ":", "challenge_id_int", "=", "int", "(", "challenge_id", ")", "project_id_int", "=", "int", "(", "project_id", ")", "except", "Exception", ":", "return", "False", "if", "not", "challenge_id_int", "==", "project_id_int", ":", "return", "False", "return", "True" ]
https://github.com/hotosm/osm-tasking-manager2/blob/281eb61a98ca6a70e2baa5231cab88abf7e434e2/osmtm/views/project.py#L667-L681
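A quick usage sketch for passes_project_id_challenge above; the calls and values are hypothetical, and assume the osmtm package shown in the path is importable.

from osmtm.views.project import passes_project_id_challenge

assert passes_project_id_challenge('42', 42) is True   # string ids are coerced via int()
assert passes_project_id_challenge('41', 42) is False  # mismatched ids fail
assert passes_project_id_challenge(None, 42) is False  # a missing challenge id fails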
NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application
b9b1f9e9aa84e379c573063fc5622ef50d38898d
tracker/code/mctrack/mctracker.py
python
MulticamTracker.xfer_attr_from_1vehicle
(self, act_record, json_list)
Transfer vehicle attributes such as license plate, make, color from one record (act_record) to all the records in the list (json_list) Arguments: act_record {[dict]} -- Record from which the attributes need to be transferred json_list {[list]} -- List of records to which the attributes need to be transferred
Transfer vehicle attributes such as license plate, make, color from one record (act_record) to all the records in the list (json_list)
[ "Transfer", "vehicle", "attributes", "such", "as", "license", "plate", "make", "color", "from", "one", "record", "(", "act_record", ")", "to", "all", "the", "records", "in", "the", "list", "(", "json_list", ")" ]
def xfer_attr_from_1vehicle(self, act_record, json_list):
    """
    Transfer vehicle attributes such as license plate, make, color
    from one record (act_record) to all the records in the list (json_list)

    Arguments:
        act_record {[dict]} -- Record from which the attributes need to be transferred
        json_list {[list]} -- List of records to which the attributes need to be transferred
    """
    for rec in json_list:
        if rec != act_record:
            rec['object']['vehicle'] = act_record['object']['vehicle'].copy()
            rec['object']['id'] = act_record['object']['id']
[ "def", "xfer_attr_from_1vehicle", "(", "self", ",", "act_record", ",", "json_list", ")", ":", "for", "rec", "in", "json_list", ":", "if", "rec", "!=", "act_record", ":", "rec", "[", "'object'", "]", "[", "'vehicle'", "]", "=", "act_record", "[", "'object'", "]", "[", "'vehicle'", "]", ".", "copy", "(", ")", "rec", "[", "'object'", "]", "[", "'id'", "]", "=", "act_record", "[", "'object'", "]", "[", "'id'", "]" ]
https://github.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/blob/b9b1f9e9aa84e379c573063fc5622ef50d38898d/tracker/code/mctrack/mctracker.py#L1102-L1118
philogb/jit
9966fbb6843538e6019fa8b233068ba0e7d283a8
webpy/web/db.py
python
DB.insert
(self, tablename, seqname=None, _test=False, **values)
return out
Inserts `values` into `tablename`. Returns current sequence ID. Set `seqname` to the ID if it's not the default, or to `False` if there isn't one. >>> db = DB(None, {}) >>> q = db.insert('foo', name='bob', age=2, created=SQLLiteral('NOW()'), _test=True) >>> q <sql: "INSERT INTO foo (age, name, created) VALUES (2, 'bob', NOW())"> >>> q.query() 'INSERT INTO foo (age, name, created) VALUES (%s, %s, NOW())' >>> q.values() [2, 'bob']
Inserts `values` into `tablename`. Returns current sequence ID. Set `seqname` to the ID if it's not the default, or to `False` if there isn't one. >>> db = DB(None, {}) >>> q = db.insert('foo', name='bob', age=2, created=SQLLiteral('NOW()'), _test=True) >>> q <sql: "INSERT INTO foo (age, name, created) VALUES (2, 'bob', NOW())"> >>> q.query() 'INSERT INTO foo (age, name, created) VALUES (%s, %s, NOW())' >>> q.values() [2, 'bob']
[ "Inserts", "values", "into", "tablename", ".", "Returns", "current", "sequence", "ID", ".", "Set", "seqname", "to", "the", "ID", "if", "it", "s", "not", "the", "default", "or", "to", "False", "if", "there", "isn", "t", "one", ".", ">>>", "db", "=", "DB", "(", "None", "{}", ")", ">>>", "q", "=", "db", ".", "insert", "(", "foo", "name", "=", "bob", "age", "=", "2", "created", "=", "SQLLiteral", "(", "NOW", "()", ")", "_test", "=", "True", ")", ">>>", "q", "<sql", ":", "INSERT", "INTO", "foo", "(", "age", "name", "created", ")", "VALUES", "(", "2", "bob", "NOW", "()", ")", ">", ">>>", "q", ".", "query", "()", "INSERT", "INTO", "foo", "(", "age", "name", "created", ")", "VALUES", "(", "%s", "%s", "NOW", "()", ")", ">>>", "q", ".", "values", "()", "[", "2", "bob", "]" ]
def insert(self, tablename, seqname=None, _test=False, **values):
    """
    Inserts `values` into `tablename`. Returns current sequence ID.
    Set `seqname` to the ID if it's not the default, or to `False`
    if there isn't one.

        >>> db = DB(None, {})
        >>> q = db.insert('foo', name='bob', age=2, created=SQLLiteral('NOW()'), _test=True)
        >>> q
        <sql: "INSERT INTO foo (age, name, created) VALUES (2, 'bob', NOW())">
        >>> q.query()
        'INSERT INTO foo (age, name, created) VALUES (%s, %s, NOW())'
        >>> q.values()
        [2, 'bob']
    """
    def q(x):
        return "(" + x + ")"

    if values:
        _keys = SQLQuery.join(values.keys(), ', ')
        _values = SQLQuery.join([sqlparam(v) for v in values.values()], ', ')
        sql_query = "INSERT INTO %s " % tablename + q(_keys) + ' VALUES ' + q(_values)
    else:
        sql_query = SQLQuery("INSERT INTO %s DEFAULT VALUES" % tablename)

    if _test:
        return sql_query

    db_cursor = self._db_cursor()
    if seqname is not False:
        sql_query = self._process_insert_query(sql_query, tablename, seqname)

    if isinstance(sql_query, tuple):
        # for some databases, a separate query has to be made to find
        # the id of the inserted row.
        q1, q2 = sql_query
        self._db_execute(db_cursor, q1)
        self._db_execute(db_cursor, q2)
    else:
        self._db_execute(db_cursor, sql_query)

    try:
        out = db_cursor.fetchone()[0]
    except Exception:
        out = None

    if not self.ctx.transactions:
        self.ctx.commit()
    return out
[ "def", "insert", "(", "self", ",", "tablename", ",", "seqname", "=", "None", ",", "_test", "=", "False", ",", "*", "*", "values", ")", ":", "def", "q", "(", "x", ")", ":", "return", "\"(\"", "+", "x", "+", "\")\"", "if", "values", ":", "_keys", "=", "SQLQuery", ".", "join", "(", "values", ".", "keys", "(", ")", ",", "', '", ")", "_values", "=", "SQLQuery", ".", "join", "(", "[", "sqlparam", "(", "v", ")", "for", "v", "in", "values", ".", "values", "(", ")", "]", ",", "', '", ")", "sql_query", "=", "\"INSERT INTO %s \"", "%", "tablename", "+", "q", "(", "_keys", ")", "+", "' VALUES '", "+", "q", "(", "_values", ")", "else", ":", "sql_query", "=", "SQLQuery", "(", "\"INSERT INTO %s DEFAULT VALUES\"", "%", "tablename", ")", "if", "_test", ":", "return", "sql_query", "db_cursor", "=", "self", ".", "_db_cursor", "(", ")", "if", "seqname", "is", "not", "False", ":", "sql_query", "=", "self", ".", "_process_insert_query", "(", "sql_query", ",", "tablename", ",", "seqname", ")", "if", "isinstance", "(", "sql_query", ",", "tuple", ")", ":", "# for some databases, a separate query has to be made to find ", "# the id of the inserted row.", "q1", ",", "q2", "=", "sql_query", "self", ".", "_db_execute", "(", "db_cursor", ",", "q1", ")", "self", ".", "_db_execute", "(", "db_cursor", ",", "q2", ")", "else", ":", "self", ".", "_db_execute", "(", "db_cursor", ",", "sql_query", ")", "try", ":", "out", "=", "db_cursor", ".", "fetchone", "(", ")", "[", "0", "]", "except", "Exception", ":", "out", "=", "None", "if", "not", "self", ".", "ctx", ".", "transactions", ":", "self", ".", "ctx", ".", "commit", "(", ")", "return", "out" ]
https://github.com/philogb/jit/blob/9966fbb6843538e6019fa8b233068ba0e7d283a8/webpy/web/db.py#L667-L713
stdlib-js/stdlib
e3c14dd9a7985ed1cd1cc80e83b6659aeabeb7df
lib/node_modules/@stdlib/stats/base/dists/lognormal/ctor/benchmark/python/benchmark.scipy.py
python
print_version
()
Print the TAP version.
Print the TAP version.
[ "Print", "the", "TAP", "version", "." ]
def print_version():
    """Print the TAP version."""
    print("TAP version 13")
[ "def", "print_version", "(", ")", ":", "print", "(", "\"TAP version 13\"", ")" ]
https://github.com/stdlib-js/stdlib/blob/e3c14dd9a7985ed1cd1cc80e83b6659aeabeb7df/lib/node_modules/@stdlib/stats/base/dists/lognormal/ctor/benchmark/python/benchmark.scipy.py#L28-L30
replit-archive/jsrepl
36d79b6288ca5d26208e8bade2a168c6ebcb2376
extern/python/closured/lib/python2.7/compiler/pyassem.py
python
PyFlowGraph.getCode
(self)
return self.newCodeObject()
Get a Python code object
Get a Python code object
[ "Get", "a", "Python", "code", "object" ]
def getCode(self):
    """Get a Python code object"""
    assert self.stage == RAW
    self.computeStackDepth()
    self.flattenGraph()
    assert self.stage == FLAT
    self.convertArgs()
    assert self.stage == CONV
    self.makeByteCode()
    assert self.stage == DONE
    return self.newCodeObject()
[ "def", "getCode", "(", "self", ")", ":", "assert", "self", ".", "stage", "==", "RAW", "self", ".", "computeStackDepth", "(", ")", "self", ".", "flattenGraph", "(", ")", "assert", "self", ".", "stage", "==", "FLAT", "self", ".", "convertArgs", "(", ")", "assert", "self", ".", "stage", "==", "CONV", "self", ".", "makeByteCode", "(", ")", "assert", "self", ".", "stage", "==", "DONE", "return", "self", ".", "newCodeObject", "(", ")" ]
https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/closured/lib/python2.7/compiler/pyassem.py#L305-L315
pim-book/programmers-introduction-to-mathematics
5181aa8485c7acddbf52da60dc72eaa61a2f4b4a
hyperbolic_tessellation/geometry.py
python
VerticalLine.reflect
(self, point)
return Point(2 * self.point.x - point.x, point.y)
Reflect a point across this line.
Reflect a point across this line.
[ "Reflect", "a", "point", "across", "this", "line", "." ]
def reflect(self, point):
    """Reflect a point across this line."""
    return Point(2 * self.point.x - point.x, point.y)
[ "def", "reflect", "(", "self", ",", "point", ")", ":", "return", "Point", "(", "2", "*", "self", ".", "point", ".", "x", "-", "point", ".", "x", ",", "point", ".", "y", ")" ]
https://github.com/pim-book/programmers-introduction-to-mathematics/blob/5181aa8485c7acddbf52da60dc72eaa61a2f4b4a/hyperbolic_tessellation/geometry.py#L149-L151
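The reflection above is just x -> 2a - x across the vertical line x = a. A minimal self-contained sketch; this Point namedtuple is a stand-in for the library's own Point type.

from collections import namedtuple

Point = namedtuple('Point', ['x', 'y'])  # stand-in for the library's Point

def reflect_across_vertical(line_x, point):
    # Mirror the x-coordinate across x = line_x; y is unchanged.
    return Point(2 * line_x - point.x, point.y)

print(reflect_across_vertical(3, Point(5, 7)))  # Point(x=1, y=7), since 2*3 - 5 = 1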
ibuler/jumpserver
0aa43c7cabc012cf02f39826fdce80f4b7b7654b
jasset/views.py
python
idc_edit
(request)
IDC edit view
IDC edit view
[ "IDC", "edit", "view" ]
def idc_edit(request):
    """ IDC edit view """
    # The UI strings translate to: 'Edit IDC', 'Asset Management', 'Edit IDC'
    header_title, path1, path2 = u'编辑IDC', u'资产管理', u'编辑IDC'
    idc_id = request.GET.get('id', '')
    idc = get_object(IDC, id=idc_id)
    if request.method == 'POST':
        idc_form = IdcForm(request.POST, instance=idc)
        if idc_form.is_valid():
            idc_form.save()
            return HttpResponseRedirect(reverse('idc_list'))
    else:
        idc_form = IdcForm(instance=idc)
    return my_render('jasset/idc_edit.html', locals(), request)
[ "def", "idc_edit", "(", "request", ")", ":", "header_title", ",", "path1", ",", "path2", "=", "u'编辑IDC', u'", "资", "管理', u'编辑IDC'", "", "", "idc_id", "=", "request", ".", "GET", ".", "get", "(", "'id'", ",", "''", ")", "idc", "=", "get_object", "(", "IDC", ",", "id", "=", "idc_id", ")", "if", "request", ".", "method", "==", "'POST'", ":", "idc_form", "=", "IdcForm", "(", "request", ".", "POST", ",", "instance", "=", "idc", ")", "if", "idc_form", ".", "is_valid", "(", ")", ":", "idc_form", ".", "save", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'idc_list'", ")", ")", "else", ":", "idc_form", "=", "IdcForm", "(", "instance", "=", "idc", ")", "return", "my_render", "(", "'jasset/idc_edit.html'", ",", "locals", "(", ")", ",", "request", ")" ]
https://github.com/ibuler/jumpserver/blob/0aa43c7cabc012cf02f39826fdce80f4b7b7654b/jasset/views.py#L527-L541
anpylar/anpylar
ed80696b25a42ee0e7b845042bbb108326abae69
anpylar/promise.py
python
Promise.resolve
(value)
return Promise(lambda resolve, reject: resolve(value))
This creates a Promise which is immediately resolved with ``value``
This creates a Promise which is immediately resolved with ``value``
[ "This", "creates", "a", "Promise", "which", "is", "immediately", "resolved", "with", "value" ]
def resolve(value):
    '''This creates a Promise which is immediately resolved with ``value``'''
    return Promise(lambda resolve, reject: resolve(value))
[ "def", "resolve", "(", "value", ")", ":", "return", "Promise", "(", "lambda", "resolve", ",", "reject", ":", "resolve", "(", "value", ")", ")" ]
https://github.com/anpylar/anpylar/blob/ed80696b25a42ee0e7b845042bbb108326abae69/anpylar/promise.py#L66-L69
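A toy sketch of the executor contract that Promise.resolve relies on above; this ToyPromise is my own stand-in, not anpylar's implementation.

class ToyPromise:
    """Toy stand-in: runs the executor immediately and records the outcome."""
    def __init__(self, executor):
        self.value = None
        executor(self._resolve, self._reject)

    def _resolve(self, value):
        self.value = ('resolved', value)

    def _reject(self, reason):
        self.value = ('rejected', reason)

def resolve(value):
    # Same shape as Promise.resolve above: resolve immediately with ``value``.
    return ToyPromise(lambda resolve, reject: resolve(value))

print(resolve(42).value)  # ('resolved', 42)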
OpenDSA/OpenDSA
5f29b76a98e347fb60711415b85147732451d3dd
khan-exercises/build/lint_i18n_strings.py
python
_clean_data_if
(match)
return 'data-if=%s%s%s' % (quote, condition, quote)
Clean up entities in data-if attributes. This is done purely to aid in readability. In an attribute it's possible for < > and & to exist un-escaped, so we convert them to that form. This helps make the contents easier to understand. lxml will do the encoding automatically so we actually revert that using this method.
Clean up entities in data-if attributes.
[ "Clean", "up", "entities", "in", "data", "-", "if", "attributes", "." ]
def _clean_data_if(match):
    """Clean up entities in data-if attributes.

    This is done purely to aid in readability. In an attribute it's
    possible for < > and & to exist un-escaped, so we convert them to
    that form. This helps make the contents easier to understand.
    lxml will do the encoding automatically so we actually revert
    that using this method.
    """
    quote = match.group(1)
    condition = match.group(2)

    # Make sure any common entities are cleaned up, to help
    # with readability.
    for entity, replace in _CLEAN_ENTITIES.iteritems():
        condition = condition.replace(entity, replace)

    return 'data-if=%s%s%s' % (quote, condition, quote)
[ "def", "_clean_data_if", "(", "match", ")", ":", "quote", "=", "match", ".", "group", "(", "1", ")", "condition", "=", "match", ".", "group", "(", "2", ")", "# Make sure any common entities are cleaned up, to help", "# with readability.", "for", "entity", ",", "replace", "in", "_CLEAN_ENTITIES", ".", "iteritems", "(", ")", ":", "condition", "=", "condition", ".", "replace", "(", "entity", ",", "replace", ")", "return", "'data-if=%s%s%s'", "%", "(", "quote", ",", "condition", ",", "quote", ")" ]
https://github.com/OpenDSA/OpenDSA/blob/5f29b76a98e347fb60711415b85147732451d3dd/khan-exercises/build/lint_i18n_strings.py#L1834-L1852
Southpaw-TACTIC/TACTIC
ba9b87aef0ee3b3ea51446f25b285ebbca06f62c
src/client/tactic_client_lib/tactic_server_stub.py
python
TacticServerStub.generate_ticket
(self)
return self.server.generate_ticket(self.ticket)
API Function: generate_ticket() Ask the server to generate a ticket explicitly used for your own commands @return: string - representing the transaction ticket
API Function: generate_ticket() Ask the server to generate a ticket explicitly used for your own commands
[ "API", "Function", ":", "generate_ticket", "()", "Ask", "the", "server", "to", "generate", "a", "ticket", "explicity", "used", "for", "your", "own", "commands" ]
def generate_ticket(self):
    '''API Function: generate_ticket()
    Ask the server to generate a ticket explicitly used for your own commands

    @return:
    string - representing the transaction ticket
    '''
    return self.server.generate_ticket(self.ticket)
[ "def", "generate_ticket", "(", "self", ")", ":", "return", "self", ".", "server", ".", "generate_ticket", "(", "self", ".", "ticket", ")" ]
https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/src/client/tactic_client_lib/tactic_server_stub.py#L891-L898
KhronosGroup/Vulkan-Docs
ee155139142a2a71b56238419bf0a6859f7b0a93
scripts/conventions.py
python
ConventionsBase.type_always_valid
(self, typename)
return typename in TYPES_KNOWN_ALWAYS_VALID
Return True if the given type name is always valid (never requires validation). This is for things like integers. Defaults to a reasonable implementation. May override.
Return True if the given type name is always valid (never requires validation).
[ "Return", "True", "if", "the", "given", "type", "name", "is", "always", "valid", "(", "never", "requires", "validation", ")", "." ]
def type_always_valid(self, typename):
    """Return True if the given type name is always valid (never requires
    validation).

    This is for things like integers.

    Defaults to a reasonable implementation.
    May override."""
    return typename in TYPES_KNOWN_ALWAYS_VALID
[ "def", "type_always_valid", "(", "self", ",", "typename", ")", ":", "return", "typename", "in", "TYPES_KNOWN_ALWAYS_VALID" ]
https://github.com/KhronosGroup/Vulkan-Docs/blob/ee155139142a2a71b56238419bf0a6859f7b0a93/scripts/conventions.py#L296-L304
retspen/webvirtmgr
86bb20f2eb5dedf03ee6aa942cabcb39fbc74821
webvirtmgr/utils/secret_key.py
python
generate_or_read_from_file
(key_file='.secret_key', key_length=64)
Multiprocess-safe secret key file generator. Useful to replace the default (and thus unsafe) SECRET_KEY in settings.py upon first start. Safe to use, i.e. when multiple Python interpreters serve the dashboard Django application (e.g. in a mod_wsgi + daemonized environment). Also checks if file permissions are set correctly and throws an exception if not.
Multiprocess-safe secret key file generator.
[ "Multiprocess", "-", "safe", "secret", "key", "file", "generator", "." ]
def generate_or_read_from_file(key_file='.secret_key', key_length=64):
    """Multiprocess-safe secret key file generator.

    Useful to replace the default (and thus unsafe) SECRET_KEY in
    settings.py upon first start. Safe to use, i.e. when multiple Python
    interpreters serve the dashboard Django application (e.g. in a
    mod_wsgi + daemonized environment).

    Also checks if file permissions are set correctly and throws an
    exception if not.
    """
    lock = lockfile.FileLock(key_file)
    # with lock:
    if not lock.is_locked():
        if not os.path.exists(key_file):
            key = generate_key(key_length)
            old_umask = os.umask(0o177)  # Use '0600' file permissions
            with open(key_file, 'w') as f:
                f.write(key)
            os.umask(old_umask)
        else:
            if oct(os.stat(key_file).st_mode & 0o777) != '0600':
                raise FilePermissionError("Insecure key file permissions!")
            with open(key_file, 'r') as f:
                key = f.readline()
    return key
[ "def", "generate_or_read_from_file", "(", "key_file", "=", "'.secret_key'", ",", "key_length", "=", "64", ")", ":", "lock", "=", "lockfile", ".", "FileLock", "(", "key_file", ")", "# with lock:", "if", "not", "lock", ".", "is_locked", "(", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "key_file", ")", ":", "key", "=", "generate_key", "(", "key_length", ")", "old_umask", "=", "os", ".", "umask", "(", "0o177", ")", "# Use '0600' file permissions", "with", "open", "(", "key_file", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "key", ")", "os", ".", "umask", "(", "old_umask", ")", "else", ":", "if", "oct", "(", "os", ".", "stat", "(", "key_file", ")", ".", "st_mode", "&", "0o777", ")", "!=", "'0600'", ":", "raise", "FilePermissionError", "(", "\"Insecure key file permissions!\"", ")", "with", "open", "(", "key_file", ",", "'r'", ")", "as", "f", ":", "key", "=", "f", ".", "readline", "(", ")", "return", "key" ]
https://github.com/retspen/webvirtmgr/blob/86bb20f2eb5dedf03ee6aa942cabcb39fbc74821/webvirtmgr/utils/secret_key.py#L41-L64
replit-archive/jsrepl
36d79b6288ca5d26208e8bade2a168c6ebcb2376
extern/python/closured/lib/python2.7/lib2to3/pytree.py
python
WildcardPattern.optimize
(self)
return self
Optimize certain stacked wildcard patterns.
Optimize certain stacked wildcard patterns.
[ "Optimize", "certain", "stacked", "wildcard", "patterns", "." ]
def optimize(self):
    """Optimize certain stacked wildcard patterns."""
    subpattern = None
    if (self.content is not None and
            len(self.content) == 1 and len(self.content[0]) == 1):
        subpattern = self.content[0][0]
    if self.min == 1 and self.max == 1:
        if self.content is None:
            return NodePattern(name=self.name)
        if subpattern is not None and self.name == subpattern.name:
            return subpattern.optimize()
    if (self.min <= 1 and isinstance(subpattern, WildcardPattern) and
            subpattern.min <= 1 and self.name == subpattern.name):
        return WildcardPattern(subpattern.content,
                               self.min*subpattern.min,
                               self.max*subpattern.max,
                               subpattern.name)
    return self
[ "def", "optimize", "(", "self", ")", ":", "subpattern", "=", "None", "if", "(", "self", ".", "content", "is", "not", "None", "and", "len", "(", "self", ".", "content", ")", "==", "1", "and", "len", "(", "self", ".", "content", "[", "0", "]", ")", "==", "1", ")", ":", "subpattern", "=", "self", ".", "content", "[", "0", "]", "[", "0", "]", "if", "self", ".", "min", "==", "1", "and", "self", ".", "max", "==", "1", ":", "if", "self", ".", "content", "is", "None", ":", "return", "NodePattern", "(", "name", "=", "self", ".", "name", ")", "if", "subpattern", "is", "not", "None", "and", "self", ".", "name", "==", "subpattern", ".", "name", ":", "return", "subpattern", ".", "optimize", "(", ")", "if", "(", "self", ".", "min", "<=", "1", "and", "isinstance", "(", "subpattern", ",", "WildcardPattern", ")", "and", "subpattern", ".", "min", "<=", "1", "and", "self", ".", "name", "==", "subpattern", ".", "name", ")", ":", "return", "WildcardPattern", "(", "subpattern", ".", "content", ",", "self", ".", "min", "*", "subpattern", ".", "min", ",", "self", ".", "max", "*", "subpattern", ".", "max", ",", "subpattern", ".", "name", ")", "return", "self" ]
https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/closured/lib/python2.7/lib2to3/pytree.py#L688-L705
jadijadi/bestoon
46ac14d46daa47f5b9d077881751775ce267e6f0
web/views.py
python
edit_income
(request)
return JsonResponse({ 'status': 'ok', }, encoder=JSONEncoder)
edit an income
edit an income
[ "edit", "an", "income" ]
def edit_income(request):
    """ edit an income """
    this_text = request.POST['text'] if 'text' in request.POST else ""
    this_amount = request.POST['amount'] if 'amount' in request.POST else "0"
    this_pk = request.POST['id'] if 'id' in request.POST else "0"
    this_token = request.POST['token'] if 'token' in request.POST else ""
    this_user = get_object_or_404(User, token__token=this_token)
    this_income = get_object_or_404(Income, pk=this_pk, user=this_user)
    this_income.text = this_text
    this_income.amount = this_amount
    this_income.save()
    return JsonResponse({
        'status': 'ok',
    }, encoder=JSONEncoder)
[ "def", "edit_income", "(", "request", ")", ":", "this_text", "=", "request", ".", "POST", "[", "'text'", "]", "if", "'text'", "in", "request", ".", "POST", "else", "\"\"", "this_amount", "=", "request", ".", "POST", "[", "'amount'", "]", "if", "'amount'", "in", "request", ".", "POST", "else", "\"0\"", "this_pk", "=", "request", ".", "POST", "[", "'id'", "]", "if", "'id'", "in", "request", ".", "POST", "else", "\"0\"", "this_token", "=", "request", ".", "POST", "[", "'token'", "]", "if", "'token'", "in", "request", ".", "POST", "else", "\"\"", "this_user", "=", "get_object_or_404", "(", "User", ",", "token__token", "=", "this_token", ")", "this_income", "=", "get_object_or_404", "(", "Income", ",", "pk", "=", "this_pk", ",", "user", "=", "this_user", ")", "this_income", ".", "text", "=", "this_text", "this_income", ".", "amount", "=", "this_amount", "this_income", ".", "save", "(", ")", "return", "JsonResponse", "(", "{", "'status'", ":", "'ok'", ",", "}", ",", "encoder", "=", "JSONEncoder", ")" ]
https://github.com/jadijadi/bestoon/blob/46ac14d46daa47f5b9d077881751775ce267e6f0/web/views.py#L234-L249
mceSystems/node-jsc
90634f3064fab8e89a85b3942f0cc5054acc86fa
tools/gyp/pylib/gyp/__init__.py
python
NameValueListToDict
(name_value_list)
return result
Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary of the pairs. If a string is simply NAME, then the value in the dictionary is set to True. If VALUE can be converted to an integer, it is.
Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary of the pairs. If a string is simply NAME, then the value in the dictionary is set to True. If VALUE can be converted to an integer, it is.
[ "Takes", "an", "array", "of", "strings", "of", "the", "form", "NAME", "=", "VALUE", "and", "creates", "a", "dictionary", "of", "the", "pairs", ".", "If", "a", "string", "is", "simply", "NAME", "then", "the", "value", "in", "the", "dictionary", "is", "set", "to", "True", ".", "If", "VALUE", "can", "be", "converted", "to", "an", "integer", "it", "is", "." ]
def NameValueListToDict(name_value_list):
  """
  Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
  of the pairs. If a string is simply NAME, then the value in the dictionary
  is set to True. If VALUE can be converted to an integer, it is.
  """
  result = { }
  for item in name_value_list:
    tokens = item.split('=', 1)
    if len(tokens) == 2:
      # If we can make it an int, use that, otherwise, use the string.
      try:
        token_value = int(tokens[1])
      except ValueError:
        token_value = tokens[1]
      # Set the variable to the supplied value.
      result[tokens[0]] = token_value
    else:
      # No value supplied, treat it as a boolean and set it.
      result[tokens[0]] = True
  return result
[ "def", "NameValueListToDict", "(", "name_value_list", ")", ":", "result", "=", "{", "}", "for", "item", "in", "name_value_list", ":", "tokens", "=", "item", ".", "split", "(", "'='", ",", "1", ")", "if", "len", "(", "tokens", ")", "==", "2", ":", "# If we can make it an int, use that, otherwise, use the string.", "try", ":", "token_value", "=", "int", "(", "tokens", "[", "1", "]", ")", "except", "ValueError", ":", "token_value", "=", "tokens", "[", "1", "]", "# Set the variable to the supplied value.", "result", "[", "tokens", "[", "0", "]", "]", "=", "token_value", "else", ":", "# No value supplied, treat it as a boolean and set it.", "result", "[", "tokens", "[", "0", "]", "]", "=", "True", "return", "result" ]
https://github.com/mceSystems/node-jsc/blob/90634f3064fab8e89a85b3942f0cc5054acc86fa/tools/gyp/pylib/gyp/__init__.py#L133-L153
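For reference, a usage sketch of NameValueListToDict; it assumes the gyp package shown above is importable.

import gyp

# 'NAME=VALUE' pairs become dict entries (VALUE int-coerced when possible);
# a bare NAME becomes True.
print(gyp.NameValueListToDict(['OS=linux', 'jobs=4', 'verbose']))
# expected: {'OS': 'linux', 'jobs': 4, 'verbose': True}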
Nexedi/erp5
44df1959c0e21576cf5e9803d602d95efb4b695b
bt5/erp5_web_service/ModuleComponentTemplateItem/portal_components/module.erp5.SFTPConnection.py
python
SFTPConnection.renameFile
(self, old_path, new_path)
Rename a file
Rename a file
[ "Rename", "a", "file" ]
def renameFile(self, old_path, new_path):
    """Rename a file"""
    try:
        self.conn.rename(old_path, new_path)
    except error, msg:
        raise SFTPError('%s while trying to rename "%s" to "%s" on %s.' % \
            (str(msg), old_path, new_path, self.url))
[ "def", "renameFile", "(", "self", ",", "old_path", ",", "new_path", ")", ":", "try", ":", "self", ".", "conn", ".", "rename", "(", "old_path", ",", "new_path", ")", "except", "error", ",", "msg", ":", "raise", "SFTPError", "(", "'%s while trying to rename \"%s\" to \"%s\" on %s.'", "%", "(", "str", "(", "msg", ")", ",", "old_path", ",", "new_path", ",", "self", ".", "url", ")", ")" ]
https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/bt5/erp5_web_service/ModuleComponentTemplateItem/portal_components/module.erp5.SFTPConnection.py#L166-L172
RASSec/A_Scan_Framework
4a46cf14b8c717dc0196071bbfd27e2d9c85bb17
pocscan/plugins/tangscan/tangscan/thirdparty/colorama/ansitowin32.py
python
AnsiToWin32.write_and_convert
(self, text)
Write the given text to our wrapped stream, stripping any ANSI sequences from the text, and optionally converting them into win32 calls.
Write the given text to our wrapped stream, stripping any ANSI sequences from the text, and optionally converting them into win32 calls.
[ "Write", "the", "given", "text", "to", "our", "wrapped", "stream", "stripping", "any", "ANSI", "sequences", "from", "the", "text", "and", "optionally", "converting", "them", "into", "win32", "calls", "." ]
def write_and_convert(self, text):
    '''
    Write the given text to our wrapped stream, stripping any ANSI
    sequences from the text, and optionally converting them into win32
    calls.
    '''
    cursor = 0
    for match in self.ANSI_RE.finditer(text):
        start, end = match.span()
        self.write_plain_text(text, cursor, start)
        self.convert_ansi(*match.groups())
        cursor = end
    self.write_plain_text(text, cursor, len(text))
[ "def", "write_and_convert", "(", "self", ",", "text", ")", ":", "cursor", "=", "0", "for", "match", "in", "self", ".", "ANSI_RE", ".", "finditer", "(", "text", ")", ":", "start", ",", "end", "=", "match", ".", "span", "(", ")", "self", ".", "write_plain_text", "(", "text", ",", "cursor", ",", "start", ")", "self", ".", "convert_ansi", "(", "*", "match", ".", "groups", "(", ")", ")", "cursor", "=", "end", "self", ".", "write_plain_text", "(", "text", ",", "cursor", ",", "len", "(", "text", ")", ")" ]
https://github.com/RASSec/A_Scan_Framework/blob/4a46cf14b8c717dc0196071bbfd27e2d9c85bb17/pocscan/plugins/tangscan/tangscan/thirdparty/colorama/ansitowin32.py#L132-L144
mozilla/ichnaea
63a2bf1ba057c1b90931f6bf0f88c570c21aaf27
ichnaea/util.py
python
utcnow
()
return datetime.utcnow().replace(microsecond=0, tzinfo=UTC)
Return the current time in UTC with a UTC timezone set.
Return the current time in UTC with a UTC timezone set.
[ "Return", "the", "current", "time", "in", "UTC", "with", "a", "UTC", "timezone", "set", "." ]
def utcnow():
    """Return the current time in UTC with a UTC timezone set."""
    return datetime.utcnow().replace(microsecond=0, tzinfo=UTC)
[ "def", "utcnow", "(", ")", ":", "return", "datetime", ".", "utcnow", "(", ")", ".", "replace", "(", "microsecond", "=", "0", ",", "tzinfo", "=", "UTC", ")" ]
https://github.com/mozilla/ichnaea/blob/63a2bf1ba057c1b90931f6bf0f88c570c21aaf27/ichnaea/util.py#L62-L64
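A self-contained equivalent of utcnow above; using the stdlib timezone.utc in place of the module's UTC object is my assumption.

from datetime import datetime, timezone

def utcnow():
    """Current UTC time, truncated to whole seconds, with tzinfo attached."""
    # timezone.utc stands in for ichnaea's UTC constant.
    return datetime.utcnow().replace(microsecond=0, tzinfo=timezone.utc)

print(utcnow().isoformat())  # e.g. 2024-01-01T12:00:00+00:00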
xl7dev/BurpSuite
d1d4bd4981a87f2f4c0c9744ad7c476336c813da
Extender/faraday/plugins/repo/nmap/plugin.py
python
Host.get_ipv4_address
(self)
return ip_address if ip_address else 'unknown'
Expects to find '<address addr="127.0.0.1" addrtype="ipv4"/>' in the node @return ip_address or 'unknown'
Expects to find '<address addr="127.0.0.1" addrtype="ipv4"/>' in the node
[ "Expects", "to", "find", "<address", "addr", "=", "127", ".", "0", ".", "0", ".", "1", "addrtype", "=", "ipv4", "/", ">", "in", "the", "node" ]
def get_ipv4_address(self):
    """
    Expects to find '<address addr="127.0.0.1" addrtype="ipv4"/>' in the node
    @return ip_address or 'unknown'
    """
    ip_address = self.get_attrib_from_subnode("address[@addrtype='ipv4']", 'addr')
    return ip_address if ip_address else 'unknown'
[ "def", "get_ipv4_address", "(", "self", ")", ":", "ip_address", "=", "self", ".", "get_attrib_from_subnode", "(", "\"address[@addrtype='ipv4']\"", ",", "'addr'", ")", "return", "ip_address", "if", "ip_address", "else", "'unknown'" ]
https://github.com/xl7dev/BurpSuite/blob/d1d4bd4981a87f2f4c0c9744ad7c476336c813da/Extender/faraday/plugins/repo/nmap/plugin.py#L177-L184
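The lookup above is a small XPath query over the host node; a self-contained sketch with xml.etree, reusing the snippet from the docstring.

import xml.etree.ElementTree as ET

host = ET.fromstring('<host><address addr="127.0.0.1" addrtype="ipv4"/></host>')
node = host.find("address[@addrtype='ipv4']")  # same predicate the plugin uses
ip_address = node.get('addr') if node is not None else None
print(ip_address if ip_address else 'unknown')  # 127.0.0.1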
scottgchin/delta5_race_timer
fe4e0f46307b05c9384f3124189edc402d812ade
src/delta5server/server.py
python
emit_pilot_data
()
Emits pilot data.
Emits pilot data.
[ "Emits", "pilot", "data", "." ]
def emit_pilot_data():
    '''Emits pilot data.'''
    SOCKET_IO.emit('pilot_data', {
        'callsign': [pilot.callsign for pilot in Pilot.query.all()],
        'name': [pilot.name for pilot in Pilot.query.all()]
    })
[ "def", "emit_pilot_data", "(", ")", ":", "SOCKET_IO", ".", "emit", "(", "'pilot_data'", ",", "{", "'callsign'", ":", "[", "pilot", ".", "callsign", "for", "pilot", "in", "Pilot", ".", "query", ".", "all", "(", ")", "]", ",", "'name'", ":", "[", "pilot", ".", "name", "for", "pilot", "in", "Pilot", ".", "query", ".", "all", "(", ")", "]", "}", ")" ]
https://github.com/scottgchin/delta5_race_timer/blob/fe4e0f46307b05c9384f3124189edc402d812ade/src/delta5server/server.py#L866-L871
replit-archive/jsrepl
36d79b6288ca5d26208e8bade2a168c6ebcb2376
extern/python/closured/lib/python2.7/fractions.py
python
Fraction.__mod__
(a, b)
return a - b * div
a % b
a % b
[ "a", "%", "b" ]
def __mod__(a, b):
    """a % b"""
    div = a // b
    return a - b * div
[ "def", "__mod__", "(", "a", ",", "b", ")", ":", "div", "=", "a", "//", "b", "return", "a", "-", "b", "*", "div" ]
https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/closured/lib/python2.7/fractions.py#L441-L444
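Since a % b is defined above as a - b * (a // b), the stdlib Fraction reproduces it directly:

from fractions import Fraction

a, b = Fraction(7, 3), Fraction(1, 2)
div = a // b            # floor division gives 4
print(a - b * div)      # 1/3
print(a % b)            # 1/3, the same result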
Southpaw-TACTIC/TACTIC
ba9b87aef0ee3b3ea51446f25b285ebbca06f62c
3rd_party/python3/site-packages/cherrypy-18.1.2/cherrypy/lib/sessions.py
python
Session.values
(self)
return self._data.values()
D.values() -> list of D's values.
D.values() -> list of D's values.
[ "D", ".", "values", "()", "-", ">", "list", "of", "D", "s", "values", "." ]
def values(self):
    """D.values() -> list of D's values."""
    if not self.loaded:
        self.load()
    return self._data.values()
[ "def", "values", "(", "self", ")", ":", "if", "not", "self", ".", "loaded", ":", "self", ".", "load", "(", ")", "return", "self", ".", "_data", ".", "values", "(", ")" ]
https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/3rd_party/python3/site-packages/cherrypy-18.1.2/cherrypy/lib/sessions.py#L390-L394
Southpaw-TACTIC/TACTIC
ba9b87aef0ee3b3ea51446f25b285ebbca06f62c
3rd_party/python2/site-packages/cherrypy/_cpcompat.py
python
base64_decode
(n, encoding='ISO-8859-1')
return tonative(decoded, encoding)
Return the native string base64-decoded (as a native string).
Return the native string base64-decoded (as a native string).
[ "Return", "the", "native", "string", "base64", "-", "decoded", "(", "as", "a", "native", "string", ")", "." ]
def base64_decode(n, encoding='ISO-8859-1'):
    """Return the native string base64-decoded (as a native string)."""
    decoded = base64.decodestring(n.encode('ascii'))
    return tonative(decoded, encoding)
[ "def", "base64_decode", "(", "n", ",", "encoding", "=", "'ISO-8859-1'", ")", ":", "decoded", "=", "base64", ".", "decodestring", "(", "n", ".", "encode", "(", "'ascii'", ")", ")", "return", "tonative", "(", "decoded", ",", "encoding", ")" ]
https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/3rd_party/python2/site-packages/cherrypy/_cpcompat.py#L91-L94
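base64.decodestring above is the Python 2 spelling (removed in Python 3.9); a sketch of the modern equivalent, where plain str.decode stands in for tonative.

import base64

def base64_decode(n, encoding='ISO-8859-1'):
    # b64decode replaces the removed decodestring; .decode() stands in for tonative().
    return base64.b64decode(n.encode('ascii')).decode(encoding)

print(base64_decode('aGVsbG8='))  # hello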
almonk/Bind
03e9e98fb8b30a58cb4fc2829f06289fa9958897
public/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py
python
BuildTargetsDict
(data)
return targets
Builds a dict mapping fully-qualified target names to their target dicts. |data| is a dict mapping loaded build files by pathname relative to the current directory. Values in |data| are build file contents. For each |data| value with a "targets" key, the value of the "targets" key is taken as a list containing target dicts. Each target's fully-qualified name is constructed from the pathname of the build file (|data| key) and its "target_name" property. These fully-qualified names are used as the keys in the returned dict. These keys provide access to the target dicts, the dicts in the "targets" lists.
Builds a dict mapping fully-qualified target names to their target dicts.
[ "Builds", "a", "dict", "mapping", "fully", "-", "qualified", "target", "names", "to", "their", "target", "dicts", "." ]
def BuildTargetsDict(data):
  """Builds a dict mapping fully-qualified target names to their target dicts.

  |data| is a dict mapping loaded build files by pathname relative to the
  current directory. Values in |data| are build file contents. For each
  |data| value with a "targets" key, the value of the "targets" key is taken
  as a list containing target dicts. Each target's fully-qualified name is
  constructed from the pathname of the build file (|data| key) and its
  "target_name" property. These fully-qualified names are used as the keys
  in the returned dict. These keys provide access to the target dicts,
  the dicts in the "targets" lists.
  """
  targets = {}
  for build_file in data['target_build_files']:
    for target in data[build_file].get('targets', []):
      target_name = gyp.common.QualifiedTarget(build_file,
                                               target['target_name'],
                                               target['toolset'])
      if target_name in targets:
        raise GypError('Duplicate target definitions for ' + target_name)
      targets[target_name] = target
  return targets
[ "def", "BuildTargetsDict", "(", "data", ")", ":", "targets", "=", "{", "}", "for", "build_file", "in", "data", "[", "'target_build_files'", "]", ":", "for", "target", "in", "data", "[", "build_file", "]", ".", "get", "(", "'targets'", ",", "[", "]", ")", ":", "target_name", "=", "gyp", ".", "common", ".", "QualifiedTarget", "(", "build_file", ",", "target", "[", "'target_name'", "]", ",", "target", "[", "'toolset'", "]", ")", "if", "target_name", "in", "targets", ":", "raise", "GypError", "(", "'Duplicate target definitions for '", "+", "target_name", ")", "targets", "[", "target_name", "]", "=", "target", "return", "targets" ]
https://github.com/almonk/Bind/blob/03e9e98fb8b30a58cb4fc2829f06289fa9958897/public/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py#L1277-L1300
atom-community/ide-python
c046f9c2421713b34baa22648235541c5bb284fe
dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/thread.py
python
Thread.get_stack_trace
(self, depth = 16)
return trace
Tries to get a stack trace for the current function. Only works for functions with standard prologue and epilogue. @type depth: int @param depth: Maximum depth of stack trace. @rtype: tuple of tuple( int, int, str ) @return: Stack trace of the thread as a tuple of ( return address, frame pointer address, module filename ). @raise WindowsError: Raises an exception on error.
Tries to get a stack trace for the current function. Only works for functions with standard prologue and epilogue.
[ "Tries", "to", "get", "a", "stack", "trace", "for", "the", "current", "function", ".", "Only", "works", "for", "functions", "with", "standard", "prologue", "and", "epilogue", "." ]
def get_stack_trace(self, depth = 16):
    """
    Tries to get a stack trace for the current function.
    Only works for functions with standard prologue and epilogue.

    @type  depth: int
    @param depth: Maximum depth of stack trace.

    @rtype:  tuple of tuple( int, int, str )
    @return: Stack trace of the thread as a tuple of
        ( return address, frame pointer address, module filename ).

    @raise WindowsError: Raises an exception on error.
    """
    try:
        trace = self.__get_stack_trace(depth, False)
    except Exception:
        import traceback
        traceback.print_exc()
        trace = ()
    if not trace:
        trace = self.__get_stack_trace_manually(depth, False)
    return trace
[ "def", "get_stack_trace", "(", "self", ",", "depth", "=", "16", ")", ":", "try", ":", "trace", "=", "self", ".", "__get_stack_trace", "(", "depth", ",", "False", ")", "except", "Exception", ":", "import", "traceback", "traceback", ".", "print_exc", "(", ")", "trace", "=", "(", ")", "if", "not", "trace", ":", "trace", "=", "self", ".", "__get_stack_trace_manually", "(", "depth", ",", "False", ")", "return", "trace" ]
https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/thread.py#L1234-L1256
wotermelon/toJump
3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f
lib/mac/systrace/catapult/devil/devil/android/sdk/shared_prefs.py
python
SharedPrefs.__len__
(self)
return len(self.xml)
Get the number of preferences in this collection.
Get the number of preferences in this collection.
[ "Get", "the", "number", "of", "preferences", "in", "this", "collection", "." ]
def __len__(self):
    """Get the number of preferences in this collection."""
    return len(self.xml)
[ "def", "__len__", "(", "self", ")", ":", "return", "len", "(", "self", ".", "xml", ")" ]
https://github.com/wotermelon/toJump/blob/3dcec5cb5d91387d415b805d015ab8d2e6ffcf5f/lib/mac/systrace/catapult/devil/devil/android/sdk/shared_prefs.py#L291-L293
Southpaw-TACTIC/TACTIC
ba9b87aef0ee3b3ea51446f25b285ebbca06f62c
src/pyasm/widget/web_wdg.py
python
DeleteLinkWdg.init
(self)
from pyasm.web import AjaxCmd ajax = AjaxCmd("delete_%s" % self.search_id ) ajax.register_cmd("pyasm.command.DeleteCmd") ajax.set_option("search_type", self.search_type) ajax.set_option("search_id", self.search_id) div = ajax.generate_div() div.add_style("display", "inline") self.add(div)
from pyasm.web import AjaxCmd ajax = AjaxCmd("delete_%s" % self.search_id ) ajax.register_cmd("pyasm.command.DeleteCmd") ajax.set_option("search_type", self.search_type) ajax.set_option("search_id", self.search_id) div = ajax.generate_div() div.add_style("display", "inline") self.add(div)
[ "from", "pyasm", ".", "web", "import", "AjaxCmd", "ajax", "=", "AjaxCmd", "(", "delete_%s", "%", "self", ".", "search_id", ")", "ajax", ".", "register_cmd", "(", "pyasm", ".", "command", ".", "DeleteCmd", ")", "ajax", ".", "set_option", "(", "search_type", "self", ".", "search_type", ")", "ajax", ".", "set_option", "(", "search_id", "self", ".", "search_id", ")", "div", "=", "ajax", ".", "generate_div", "()", "div", ".", "add_style", "(", "display", "inline", ")", "self", ".", "add", "(", "div", ")" ]
def init(self):
    button = IconButtonWdg("Delete", IconWdg.DELETE)
    """
    from pyasm.web import AjaxCmd
    ajax = AjaxCmd("delete_%s" % self.search_id )
    ajax.register_cmd("pyasm.command.DeleteCmd")
    ajax.set_option("search_type", self.search_type)
    ajax.set_option("search_id", self.search_id)
    div = ajax.generate_div()
    div.add_style("display", "inline")
    self.add(div)
    """
    display_name = self.sobject.get_name()
    js_action = "TacticServerCmd.execute_cmd('pyasm.command.DeleteCmd', \
        '', {'search_type': '%s', 'search_id': '%s'}, {});" % (self.search_type, self.search_id)

    # build the search key
    search_key = "%s|%s" % (self.search_type, self.search_id)

    button.add_behavior({'type': 'click_up', 'cbjs_action':
        " if (delete_sobject('%s','%s')==true){ %s}" % (search_key, display_name, js_action)})

    self.add(button)
[ "def", "init", "(", "self", ")", ":", "button", "=", "IconButtonWdg", "(", "\"Delete\"", ",", "IconWdg", ".", "DELETE", ")", "display_name", "=", "self", ".", "sobject", ".", "get_name", "(", ")", "js_action", "=", "\"TacticServerCmd.execute_cmd('pyasm.command.DeleteCmd', \\\n '', {'search_type': '%s', 'search_id': '%s'}, {});\"", "%", "(", "self", ".", "search_type", ",", "self", ".", "search_id", ")", "# build the search key", "search_key", "=", "\"%s|%s\"", "%", "(", "self", ".", "search_type", ",", "self", ".", "search_id", ")", "button", ".", "add_behavior", "(", "{", "'type'", ":", "'click_up'", ",", "'cbjs_action'", ":", "\" if (delete_sobject('%s','%s')==true){ %s}\"", "%", "(", "search_key", ",", "display_name", ",", "js_action", ")", "}", ")", "self", ".", "add", "(", "button", ")" ]
https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/src/pyasm/widget/web_wdg.py#L613-L637
mozilla-b2g/gaia
975a35c0f5010df341e96d6c5ec60217f5347412
distros/spark/apps/customizer/components/js-beautify/python/jsbeautifier/unpackers/myobfuscate.py
python
unpack
(source)
return CAVEAT + polished
Unpacks js code packed with MyObfuscate.com
Unpacks js code packed with MyObfuscate.com
[ "Unpacks", "js", "code", "packed", "with", "MyObfuscate", ".", "com" ]
def unpack(source):
    """Unpacks js code packed with MyObfuscate.com"""
    if not detect(source):
        return source
    payload = unquote(_filter(source))
    match = re.search(r"^var _escape\='<script>(.*)<\/script>'",
                      payload, re.DOTALL)
    polished = match.group(1) if match else source
    return CAVEAT + polished
[ "def", "unpack", "(", "source", ")", ":", "if", "not", "detect", "(", "source", ")", ":", "return", "source", "payload", "=", "unquote", "(", "_filter", "(", "source", ")", ")", "match", "=", "re", ".", "search", "(", "r\"^var _escape\\='<script>(.*)<\\/script>'\"", ",", "payload", ",", "re", ".", "DOTALL", ")", "polished", "=", "match", ".", "group", "(", "1", ")", "if", "match", "else", "source", "return", "CAVEAT", "+", "polished" ]
https://github.com/mozilla-b2g/gaia/blob/975a35c0f5010df341e96d6c5ec60217f5347412/distros/spark/apps/customizer/components/js-beautify/python/jsbeautifier/unpackers/myobfuscate.py#L66-L74
Sefaria/Sefaria-Project
506752f49394fadebae283d525af8276eb2e241e
reader/templatetags/sefaria_tags.py
python
version_link
(v)
return mark_safe(link)
Return an <a> tag linking to the first available text of a particular version.
Return an <a> tag linking to the first available text of a particular version.
[ "Return", "an", "<a", ">", "tag", "linking", "to", "the", "first", "available", "text", "of", "a", "particular", "version", "." ]
def version_link(v):
    """
    Return an <a> tag linking to the first available text of a particular version.
    """
    try:
        section_ref = v.first_section_ref() or v.get_index().nodes.first_leaf().first_section_ref()
    except IndexError:
        try:
            section_ref = v.get_index().nodes.first_leaf().first_section_ref()
        except:
            # Better if we knew how this may fail...
            return mark_safe('<a href="/{}.1/{}/{}">{}</a>'.format(v.title, v.language,
                urllib.parse.quote(v.versionTitle.replace(" ", "_").encode("utf-8")), v.versionTitle))

    link = '<a href="/{}/{}/{}">{}</a>'.format(section_ref.url(), v.language,
        urllib.parse.quote(v.versionTitle.replace(" ", "_").encode("utf-8")), v.versionTitle)
    return mark_safe(link)
[ "def", "version_link", "(", "v", ")", ":", "try", ":", "section_ref", "=", "v", ".", "first_section_ref", "(", ")", "or", "v", ".", "get_index", "(", ")", ".", "nodes", ".", "first_leaf", "(", ")", ".", "first_section_ref", "(", ")", "except", "IndexError", ":", "try", ":", "section_ref", "=", "v", ".", "get_index", "(", ")", ".", "nodes", ".", "first_leaf", "(", ")", ".", "first_section_ref", "(", ")", "except", ":", "# Better if we knew how this may fail...", "return", "mark_safe", "(", "'<a href=\"/{}.1/{}/{}\">{}</a>'", ".", "format", "(", "v", ".", "title", ",", "v", ".", "language", ",", "urllib", ".", "parse", ".", "quote", "(", "v", ".", "versionTitle", ".", "replace", "(", "\" \"", ",", "\"_\"", ")", ".", "encode", "(", "\"utf-8\"", ")", ")", ",", "v", ".", "versionTitle", ")", ")", "link", "=", "'<a href=\"/{}/{}/{}\">{}</a>'", ".", "format", "(", "section_ref", ".", "url", "(", ")", ",", "v", ".", "language", ",", "urllib", ".", "parse", ".", "quote", "(", "v", ".", "versionTitle", ".", "replace", "(", "\" \"", ",", "\"_\"", ")", ".", "encode", "(", "\"utf-8\"", ")", ")", ",", "v", ".", "versionTitle", ")", "return", "mark_safe", "(", "link", ")" ]
https://github.com/Sefaria/Sefaria-Project/blob/506752f49394fadebae283d525af8276eb2e241e/reader/templatetags/sefaria_tags.py#L119-L132
doitintl/zorya
afc70f10125c81af44d0ca35df958f4bf2a6ad63
gcp/gae.py
python
Gae.stop_version
(self, service_id, version_id)
return ( self.app.apps().services().versions().patch(servicesId=service_id, appsId=self.project, versionsId=version_id, updateMask='servingStatus', body={"servingStatus": "STOPPED"}).execute() )
Stop an instance. Args: service_id: The App Engine service id version_id: The App Engine version id Returns: The API response (an Operation resource) from the versions.patch call.
Stop an instance. Args: service_id: The App Engine service id version_id: The App Engine version id
[ "Stop", "an", "instance", ".", "Args", ":", "service_id", ":", "The", "App", "Engine", "service", "id", "version_id", ":", "The", "App", "Engine", "version", "id" ]
def stop_version(self, service_id, version_id): """ Stop an instance. Args: service_id: The App Engine service id version_id: The App Engine version id Returns: The API response (an Operation resource) from the versions.patch call. """ # TODO add requestId return ( self.app.apps().services().versions().patch(servicesId=service_id, appsId=self.project, versionsId=version_id, updateMask='servingStatus', body={"servingStatus": "STOPPED"}).execute() )
[ "def", "stop_version", "(", "self", ",", "service_id", ",", "version_id", ")", ":", "# TODO add requestId", "return", "(", "self", ".", "app", ".", "apps", "(", ")", ".", "services", "(", ")", ".", "versions", "(", ")", ".", "patch", "(", "servicesId", "=", "service_id", ",", "appsId", "=", "self", ".", "project", ",", "versionsId", "=", "version_id", ",", "updateMask", "=", "'servingStatus'", ",", "body", "=", "{", "\"servingStatus\"", ":", "\"STOPPED\"", "}", ")", ".", "execute", "(", ")", ")" ]
https://github.com/doitintl/zorya/blob/afc70f10125c81af44d0ca35df958f4bf2a6ad63/gcp/gae.py#L58-L73
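A hedged usage sketch for stop_version; the Gae constructor arguments are an assumption (the class presumably wraps a googleapiclient App Engine Admin API client for one project), and the service/version ids are placeholders:

    gae = Gae('my-project')  # hypothetical constructor; self.project and self.app must already be set up
    operation = gae.stop_version('default', 'v20240101')
    print(operation)         # the patch call returns an Operation resource that can be polled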
jxcore/jxcore
b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410
tools/gyp/pylib/gyp/generator/android.py
python
AndroidMkWriter.WriteSources
(self, spec, configs, extra_sources)
Write Makefile code for any 'sources' from the gyp input. These are source files necessary to build the current target. We need to handle shared_intermediate directory source files as a special case by copying them to the intermediate directory and treating them as generated sources. Otherwise the Android build rules won't pick them up. Args: spec, configs: input from gyp. extra_sources: Sources generated from Actions or Rules.
Write Makefile code for any 'sources' from the gyp input. These are source files necessary to build the current target. We need to handle shared_intermediate directory source files as a special case by copying them to the intermediate directory and treating them as generated sources. Otherwise the Android build rules won't pick them up.
[ "Write", "Makefile", "code", "for", "any", "sources", "from", "the", "gyp", "input", ".", "These", "are", "source", "files", "necessary", "to", "build", "the", "current", "target", ".", "We", "need", "to", "handle", "shared_intermediate", "directory", "source", "files", "as", "a", "special", "case", "by", "copying", "them", "to", "the", "intermediate", "directory", "and", "treating", "them", "as", "generated", "sources", ".", "Otherwise", "the", "Android", "build", "rules", "won", "t", "pick", "them", "up", "." ]
def WriteSources(self, spec, configs, extra_sources): """Write Makefile code for any 'sources' from the gyp input. These are source files necessary to build the current target. We need to handle shared_intermediate directory source files as a special case by copying them to the intermediate directory and treating them as generated sources. Otherwise the Android build rules won't pick them up. Args: spec, configs: input from gyp. extra_sources: Sources generated from Actions or Rules. """ sources = filter(make.Compilable, spec.get('sources', [])) generated_not_sources = [x for x in extra_sources if not make.Compilable(x)] extra_sources = filter(make.Compilable, extra_sources) # Determine and output the C++ extension used by these sources. # We simply find the first C++ file and use that extension. all_sources = sources + extra_sources local_cpp_extension = '.cpp' for source in all_sources: (root, ext) = os.path.splitext(source) if IsCPPExtension(ext): local_cpp_extension = ext break if local_cpp_extension != '.cpp': self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension) # We need to move any non-generated sources that are coming from the # shared intermediate directory out of LOCAL_SRC_FILES and put them # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files # that don't match our local_cpp_extension, since Android will only # generate Makefile rules for a single LOCAL_CPP_EXTENSION. local_files = [] for source in sources: (root, ext) = os.path.splitext(source) if '$(gyp_shared_intermediate_dir)' in source: extra_sources.append(source) elif '$(gyp_intermediate_dir)' in source: extra_sources.append(source) elif IsCPPExtension(ext) and ext != local_cpp_extension: extra_sources.append(source) else: local_files.append(os.path.normpath(os.path.join(self.path, source))) # For any generated source, if it is coming from the shared intermediate # directory then we add a Make rule to copy them to the local intermediate # directory first. This is because the Android LOCAL_GENERATED_SOURCES # must be in the local module intermediate directory for the compile rules # to work properly. If the file has the wrong C++ extension, then we add # a rule to copy that to intermediates and use the new version. final_generated_sources = [] # If a source file gets copied, we still need to add the original source # directory as header search path, for GCC searches headers in the # directory that contains the source file by default. origin_src_dirs = [] for source in extra_sources: local_file = source if not '$(gyp_intermediate_dir)/' in local_file: basename = os.path.basename(local_file) local_file = '$(gyp_intermediate_dir)/' + basename (root, ext) = os.path.splitext(local_file) if IsCPPExtension(ext) and ext != local_cpp_extension: local_file = root + local_cpp_extension if local_file != source: self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source))) self.WriteLn('\tmkdir -p $(@D); cp $< $@') origin_src_dirs.append(os.path.dirname(source)) final_generated_sources.append(local_file) # We add back in all of the non-compilable stuff to make sure that the # make rules have dependencies on them. final_generated_sources.extend(generated_not_sources) self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES') origin_src_dirs = gyp.common.uniquer(origin_src_dirs) origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs)) self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS') self.WriteList(local_files, 'LOCAL_SRC_FILES') # Write out the flags used to compile the source; this must be done last # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path. self.WriteSourceFlags(spec, configs)
[ "def", "WriteSources", "(", "self", ",", "spec", ",", "configs", ",", "extra_sources", ")", ":", "sources", "=", "filter", "(", "make", ".", "Compilable", ",", "spec", ".", "get", "(", "'sources'", ",", "[", "]", ")", ")", "generated_not_sources", "=", "[", "x", "for", "x", "in", "extra_sources", "if", "not", "make", ".", "Compilable", "(", "x", ")", "]", "extra_sources", "=", "filter", "(", "make", ".", "Compilable", ",", "extra_sources", ")", "# Determine and output the C++ extension used by these sources.", "# We simply find the first C++ file and use that extension.", "all_sources", "=", "sources", "+", "extra_sources", "local_cpp_extension", "=", "'.cpp'", "for", "source", "in", "all_sources", ":", "(", "root", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "source", ")", "if", "IsCPPExtension", "(", "ext", ")", ":", "local_cpp_extension", "=", "ext", "break", "if", "local_cpp_extension", "!=", "'.cpp'", ":", "self", ".", "WriteLn", "(", "'LOCAL_CPP_EXTENSION := %s'", "%", "local_cpp_extension", ")", "# We need to move any non-generated sources that are coming from the", "# shared intermediate directory out of LOCAL_SRC_FILES and put them", "# into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files", "# that don't match our local_cpp_extension, since Android will only", "# generate Makefile rules for a single LOCAL_CPP_EXTENSION.", "local_files", "=", "[", "]", "for", "source", "in", "sources", ":", "(", "root", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "source", ")", "if", "'$(gyp_shared_intermediate_dir)'", "in", "source", ":", "extra_sources", ".", "append", "(", "source", ")", "elif", "'$(gyp_intermediate_dir)'", "in", "source", ":", "extra_sources", ".", "append", "(", "source", ")", "elif", "IsCPPExtension", "(", "ext", ")", "and", "ext", "!=", "local_cpp_extension", ":", "extra_sources", ".", "append", "(", "source", ")", "else", ":", "local_files", ".", "append", "(", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "self", ".", "path", ",", "source", ")", ")", ")", "# For any generated source, if it is coming from the shared intermediate", "# directory then we add a Make rule to copy them to the local intermediate", "# directory first. This is because the Android LOCAL_GENERATED_SOURCES", "# must be in the local module intermediate directory for the compile rules", "# to work properly. 
If the file has the wrong C++ extension, then we add", "# a rule to copy that to intermediates and use the new version.", "final_generated_sources", "=", "[", "]", "# If a source file gets copied, we still need to add the orginal source", "# directory as header search path, for GCC searches headers in the", "# directory that contains the source file by default.", "origin_src_dirs", "=", "[", "]", "for", "source", "in", "extra_sources", ":", "local_file", "=", "source", "if", "not", "'$(gyp_intermediate_dir)/'", "in", "local_file", ":", "basename", "=", "os", ".", "path", ".", "basename", "(", "local_file", ")", "local_file", "=", "'$(gyp_intermediate_dir)/'", "+", "basename", "(", "root", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "local_file", ")", "if", "IsCPPExtension", "(", "ext", ")", "and", "ext", "!=", "local_cpp_extension", ":", "local_file", "=", "root", "+", "local_cpp_extension", "if", "local_file", "!=", "source", ":", "self", ".", "WriteLn", "(", "'%s: %s'", "%", "(", "local_file", ",", "self", ".", "LocalPathify", "(", "source", ")", ")", ")", "self", ".", "WriteLn", "(", "'\\tmkdir -p $(@D); cp $< $@'", ")", "origin_src_dirs", ".", "append", "(", "os", ".", "path", ".", "dirname", "(", "source", ")", ")", "final_generated_sources", ".", "append", "(", "local_file", ")", "# We add back in all of the non-compilable stuff to make sure that the", "# make rules have dependencies on them.", "final_generated_sources", ".", "extend", "(", "generated_not_sources", ")", "self", ".", "WriteList", "(", "final_generated_sources", ",", "'LOCAL_GENERATED_SOURCES'", ")", "origin_src_dirs", "=", "gyp", ".", "common", ".", "uniquer", "(", "origin_src_dirs", ")", "origin_src_dirs", "=", "map", "(", "Sourceify", ",", "map", "(", "self", ".", "LocalPathify", ",", "origin_src_dirs", ")", ")", "self", ".", "WriteList", "(", "origin_src_dirs", ",", "'GYP_COPIED_SOURCE_ORIGIN_DIRS'", ")", "self", ".", "WriteList", "(", "local_files", ",", "'LOCAL_SRC_FILES'", ")", "# Write out the flags used to compile the source; this must be done last", "# so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path.", "self", ".", "WriteSourceFlags", "(", "spec", ",", "configs", ")" ]
https://github.com/jxcore/jxcore/blob/b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410/tools/gyp/pylib/gyp/generator/android.py#L506-L589
Nexedi/erp5
44df1959c0e21576cf5e9803d602d95efb4b695b
product/ERP5/bootstrap/erp5_core/ToolComponentTemplateItem/portal_components/tool.erp5.SessionTool.py
python
DistributedSession._updateStorage
(self)
Update backend storage.
Update backend storage.
[ "Update", "backend", "storage", "." ]
def _updateStorage(self): """ Update backend storage. """ assert self.session_id storage_plugin.set(self.session_id, SESSION_SCOPE, value = self, cache_duration = getattr(self, 'session_duration', DEFAULT_SESSION_DURATION))
[ "def", "_updateStorage", "(", "self", ")", ":", "assert", "self", ".", "session_id", "storage_plugin", ".", "set", "(", "self", ".", "session_id", ",", "SESSION_SCOPE", ",", "value", "=", "self", ",", "cache_duration", "=", "getattr", "(", "self", ",", "'session_duration'", ",", "DEFAULT_SESSION_DURATION", ")", ")" ]
https://github.com/Nexedi/erp5/blob/44df1959c0e21576cf5e9803d602d95efb4b695b/product/ERP5/bootstrap/erp5_core/ToolComponentTemplateItem/portal_components/tool.erp5.SessionTool.py#L147-L153
nodejs/http2
734ad72e3939e62bcff0f686b8ec426b8aaa22e3
deps/v8/third_party/jinja2/utils.py
python
contextfunction
(f)
return f
This decorator can be used to mark a function or method context callable. A context callable is passed the active :class:`Context` as first argument when called from the template. This is useful if a function wants to get access to the context or functions provided on the context object. For example a function that returns a sorted list of template variables the current template exports could look like this:: @contextfunction def get_exported_names(context): return sorted(context.exported_vars)
This decorator can be used to mark a function or method context callable. A context callable is passed the active :class:`Context` as first argument when called from the template. This is useful if a function wants to get access to the context or functions provided on the context object. For example a function that returns a sorted list of template variables the current template exports could look like this::
[ "This", "decorator", "can", "be", "used", "to", "mark", "a", "function", "or", "method", "context", "callable", ".", "A", "context", "callable", "is", "passed", "the", "active", ":", "class", ":", "Context", "as", "first", "argument", "when", "called", "from", "the", "template", ".", "This", "is", "useful", "if", "a", "function", "wants", "to", "get", "access", "to", "the", "context", "or", "functions", "provided", "on", "the", "context", "object", ".", "For", "example", "a", "function", "that", "returns", "a", "sorted", "list", "of", "template", "variables", "the", "current", "template", "exports", "could", "look", "like", "this", "::" ]
def contextfunction(f): """This decorator can be used to mark a function or method context callable. A context callable is passed the active :class:`Context` as first argument when called from the template. This is useful if a function wants to get access to the context or functions provided on the context object. For example a function that returns a sorted list of template variables the current template exports could look like this:: @contextfunction def get_exported_names(context): return sorted(context.exported_vars) """ f.contextfunction = True return f
[ "def", "contextfunction", "(", "f", ")", ":", "f", ".", "contextfunction", "=", "True", "return", "f" ]
https://github.com/nodejs/http2/blob/734ad72e3939e62bcff0f686b8ec426b8aaa22e3/deps/v8/third_party/jinja2/utils.py#L41-L54
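A runnable sketch with a 2.x-era Jinja2 (like the vendored copy above), showing that the decorated function still has to be exposed to templates, e.g. through the environment globals:

    from jinja2 import Environment, contextfunction

    @contextfunction
    def get_exported_names(context):
        return sorted(context.exported_vars)

    env = Environment()
    env.globals['get_exported_names'] = get_exported_names
    print(env.from_string('{% set x = 1 %}{{ get_exported_names() }}').render())  # ['x']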
Southpaw-TACTIC/TACTIC
ba9b87aef0ee3b3ea51446f25b285ebbca06f62c
3rd_party/python3/site-packages/dateutil/rrule.py
python
rruleset.rrule
(self, rrule)
Include the given :py:class:`rrule` instance in the recurrence set generation.
Include the given :py:class:`rrule` instance in the recurrence set generation.
[ "Include", "the", "given", ":", "py", ":", "class", ":", "rrule", "instance", "in", "the", "recurrence", "set", "generation", "." ]
def rrule(self, rrule): """ Include the given :py:class:`rrule` instance in the recurrence set generation. """ self._rrule.append(rrule)
[ "def", "rrule", "(", "self", ",", "rrule", ")", ":", "self", ".", "_rrule", ".", "append", "(", "rrule", ")" ]
https://github.com/Southpaw-TACTIC/TACTIC/blob/ba9b87aef0ee3b3ea51446f25b285ebbca06f62c/3rd_party/python3/site-packages/dateutil/rrule.py#L1356-L1359
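A self-contained example of combining two rules in one set; iteration yields the sorted union, with shared occurrences emitted once:

    from datetime import datetime
    from dateutil.rrule import rrule, rruleset, DAILY, WEEKLY

    rset = rruleset()
    rset.rrule(rrule(DAILY, count=3, dtstart=datetime(2024, 1, 1)))   # Jan 1, 2, 3
    rset.rrule(rrule(WEEKLY, count=2, dtstart=datetime(2024, 1, 1)))  # Jan 1, 8
    print(list(rset))  # four datetimes; the shared Jan 1 occurrence appears once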
mceSystems/node-jsc
90634f3064fab8e89a85b3942f0cc5054acc86fa
tools/gyp/pylib/gyp/msvs_emulation.py
python
MsvsSettings.AdjustMidlIncludeDirs
(self, midl_include_dirs, config)
return [self.ConvertVSMacros(p, config=config) for p in includes]
Updates midl_include_dirs to expand VS specific paths, and adds the system include dirs used for platform SDK and similar.
Updates midl_include_dirs to expand VS specific paths, and adds the system include dirs used for platform SDK and similar.
[ "Updates", "midl_include_dirs", "to", "expand", "VS", "specific", "paths", "and", "adds", "the", "system", "include", "dirs", "used", "for", "platform", "SDK", "and", "similar", "." ]
def AdjustMidlIncludeDirs(self, midl_include_dirs, config): """Updates midl_include_dirs to expand VS specific paths, and adds the system include dirs used for platform SDK and similar.""" config = self._TargetConfig(config) includes = midl_include_dirs + self.msvs_system_include_dirs[config] includes.extend(self._Setting( ('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[])) return [self.ConvertVSMacros(p, config=config) for p in includes]
[ "def", "AdjustMidlIncludeDirs", "(", "self", ",", "midl_include_dirs", ",", "config", ")", ":", "config", "=", "self", ".", "_TargetConfig", "(", "config", ")", "includes", "=", "midl_include_dirs", "+", "self", ".", "msvs_system_include_dirs", "[", "config", "]", "includes", ".", "extend", "(", "self", ".", "_Setting", "(", "(", "'VCMIDLTool'", ",", "'AdditionalIncludeDirectories'", ")", ",", "config", ",", "default", "=", "[", "]", ")", ")", "return", "[", "self", ".", "ConvertVSMacros", "(", "p", ",", "config", "=", "config", ")", "for", "p", "in", "includes", "]" ]
https://github.com/mceSystems/node-jsc/blob/90634f3064fab8e89a85b3942f0cc5054acc86fa/tools/gyp/pylib/gyp/msvs_emulation.py#L349-L356
nodejs/http2
734ad72e3939e62bcff0f686b8ec426b8aaa22e3
tools/gyp/pylib/gyp/common.py
python
WriteOnDiff
(filename)
return Writer()
Write to a file only if the new contents differ. Arguments: filename: name of the file to potentially write to. Returns: A file like object which will write to temporary file and only overwrite the target if it differs (on close).
Write to a file only if the new contents differ.
[ "Write", "to", "a", "file", "only", "if", "the", "new", "contents", "differ", "." ]
def WriteOnDiff(filename): """Write to a file only if the new contents differ. Arguments: filename: name of the file to potentially write to. Returns: A file like object which will write to temporary file and only overwrite the target if it differs (on close). """ class Writer(object): """Wrapper around file which only covers the target if it differs.""" def __init__(self): # Pick temporary file. tmp_fd, self.tmp_path = tempfile.mkstemp( suffix='.tmp', prefix=os.path.split(filename)[1] + '.gyp.', dir=os.path.split(filename)[0]) try: self.tmp_file = os.fdopen(tmp_fd, 'wb') except Exception: # Don't leave turds behind. os.unlink(self.tmp_path) raise def __getattr__(self, attrname): # Delegate everything else to self.tmp_file return getattr(self.tmp_file, attrname) def close(self): try: # Close tmp file. self.tmp_file.close() # Determine if different. same = False try: same = filecmp.cmp(self.tmp_path, filename, False) except OSError, e: if e.errno != errno.ENOENT: raise if same: # The new file is identical to the old one, just get rid of the new # one. os.unlink(self.tmp_path) else: # The new file is different from the old one, or there is no old one. # Rename the new file to the permanent name. # # tempfile.mkstemp uses an overly restrictive mode, resulting in a # file that can only be read by the owner, regardless of the umask. # There's no reason to not respect the umask here, which means that # an extra hoop is required to fetch it and reset the new file's mode. # # No way to get the umask without setting a new one? Set a safe one # and then set it back to the old value. umask = os.umask(077) os.umask(umask) os.chmod(self.tmp_path, 0666 & ~umask) if sys.platform == 'win32' and os.path.exists(filename): # NOTE: on windows (but not cygwin) rename will not replace an # existing file, so it must be preceded with a remove. Sadly there # is no way to make the switch atomic. os.remove(filename) os.rename(self.tmp_path, filename) except Exception: # Don't leave turds behind. os.unlink(self.tmp_path) raise return Writer()
[ "def", "WriteOnDiff", "(", "filename", ")", ":", "class", "Writer", "(", "object", ")", ":", "\"\"\"Wrapper around file which only covers the target if it differs.\"\"\"", "def", "__init__", "(", "self", ")", ":", "# Pick temporary file.", "tmp_fd", ",", "self", ".", "tmp_path", "=", "tempfile", ".", "mkstemp", "(", "suffix", "=", "'.tmp'", ",", "prefix", "=", "os", ".", "path", ".", "split", "(", "filename", ")", "[", "1", "]", "+", "'.gyp.'", ",", "dir", "=", "os", ".", "path", ".", "split", "(", "filename", ")", "[", "0", "]", ")", "try", ":", "self", ".", "tmp_file", "=", "os", ".", "fdopen", "(", "tmp_fd", ",", "'wb'", ")", "except", "Exception", ":", "# Don't leave turds behind.", "os", ".", "unlink", "(", "self", ".", "tmp_path", ")", "raise", "def", "__getattr__", "(", "self", ",", "attrname", ")", ":", "# Delegate everything else to self.tmp_file", "return", "getattr", "(", "self", ".", "tmp_file", ",", "attrname", ")", "def", "close", "(", "self", ")", ":", "try", ":", "# Close tmp file.", "self", ".", "tmp_file", ".", "close", "(", ")", "# Determine if different.", "same", "=", "False", "try", ":", "same", "=", "filecmp", ".", "cmp", "(", "self", ".", "tmp_path", ",", "filename", ",", "False", ")", "except", "OSError", ",", "e", ":", "if", "e", ".", "errno", "!=", "errno", ".", "ENOENT", ":", "raise", "if", "same", ":", "# The new file is identical to the old one, just get rid of the new", "# one.", "os", ".", "unlink", "(", "self", ".", "tmp_path", ")", "else", ":", "# The new file is different from the old one, or there is no old one.", "# Rename the new file to the permanent name.", "#", "# tempfile.mkstemp uses an overly restrictive mode, resulting in a", "# file that can only be read by the owner, regardless of the umask.", "# There's no reason to not respect the umask here, which means that", "# an extra hoop is required to fetch it and reset the new file's mode.", "#", "# No way to get the umask without setting a new one? Set a safe one", "# and then set it back to the old value.", "umask", "=", "os", ".", "umask", "(", "077", ")", "os", ".", "umask", "(", "umask", ")", "os", ".", "chmod", "(", "self", ".", "tmp_path", ",", "0666", "&", "~", "umask", ")", "if", "sys", ".", "platform", "==", "'win32'", "and", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "# NOTE: on windows (but not cygwin) rename will not replace an", "# existing file, so it must be preceded with a remove. Sadly there", "# is no way to make the switch atomic.", "os", ".", "remove", "(", "filename", ")", "os", ".", "rename", "(", "self", ".", "tmp_path", ",", "filename", ")", "except", "Exception", ":", "# Don't leave turds behind.", "os", ".", "unlink", "(", "self", ".", "tmp_path", ")", "raise", "return", "Writer", "(", ")" ]
https://github.com/nodejs/http2/blob/734ad72e3939e62bcff0f686b8ec426b8aaa22e3/tools/gyp/pylib/gyp/common.py#L329-L399
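Typical call pattern, sketched (the target path is made up; note the function itself is Python 2 code):

    out = gyp.common.WriteOnDiff('out/defines.h')  # hypothetical target path
    out.write('#define ENABLE_FOO 1\n')
    out.close()  # renames the temp file over out/defines.h only if the contents changed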
nodejs/node
ac3c33c1646bf46104c15ae035982c06364da9b8
tools/gyp/pylib/gyp/xcodeproj_file.py
python
XCBuildPhase._AddBuildFileToDicts
(self, pbxbuildfile, path=None)
Maintains the _files_by_path and _files_by_xcfilelikeelement dicts. If path is specified, then it is the path that is being added to the phase, and pbxbuildfile must contain either a PBXFileReference directly referencing that path, or it must contain a PBXVariantGroup that itself contains a PBXFileReference referencing the path. If path is not specified, either the PBXFileReference's path or the paths of all children of the PBXVariantGroup are taken as being added to the phase. If the path is already present in the phase, raises an exception. If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile is already present in the phase, referenced by a different PBXBuildFile object, raises an exception. This does not raise an exception when a PBXFileReference or PBXVariantGroup reappears and is referenced by the same PBXBuildFile that has already introduced it, because in the case of PBXVariantGroup objects, they may correspond to multiple paths that are not all added simultaneously. When this situation occurs, the path needs to be added to _files_by_path, but nothing needs to change in _files_by_xcfilelikeelement, and the caller should have avoided adding the PBXBuildFile if it is already present in the list of children.
Maintains the _files_by_path and _files_by_xcfilelikeelement dicts.
[ "Maintains", "the", "_files_by_path", "and", "_files_by_xcfilelikeelement", "dicts", "." ]
def _AddBuildFileToDicts(self, pbxbuildfile, path=None): """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts. If path is specified, then it is the path that is being added to the phase, and pbxbuildfile must contain either a PBXFileReference directly referencing that path, or it must contain a PBXVariantGroup that itself contains a PBXFileReference referencing the path. If path is not specified, either the PBXFileReference's path or the paths of all children of the PBXVariantGroup are taken as being added to the phase. If the path is already present in the phase, raises an exception. If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile is already present in the phase, referenced by a different PBXBuildFile object, raises an exception. This does not raise an exception when a PBXFileReference or PBXVariantGroup reappears and is referenced by the same PBXBuildFile that has already introduced it, because in the case of PBXVariantGroup objects, they may correspond to multiple paths that are not all added simultaneously. When this situation occurs, the path needs to be added to _files_by_path, but nothing needs to change in _files_by_xcfilelikeelement, and the caller should have avoided adding the PBXBuildFile if it is already present in the list of children. """ xcfilelikeelement = pbxbuildfile._properties["fileRef"] paths = [] if path is not None: # It's best when the caller provides the path. if isinstance(xcfilelikeelement, PBXVariantGroup): paths.append(path) else: # If the caller didn't provide a path, there can be either multiple # paths (PBXVariantGroup) or one. if isinstance(xcfilelikeelement, PBXVariantGroup): for variant in xcfilelikeelement._properties["children"]: paths.append(variant.FullPath()) else: paths.append(xcfilelikeelement.FullPath()) # Add the paths first, because if something's going to raise, the # messages provided by _AddPathToDict are more useful owing to its # having access to a real pathname and not just an object's Name(). for a_path in paths: self._AddPathToDict(pbxbuildfile, a_path) # If another PBXBuildFile references this XCFileLikeElement, there's a # problem. if ( xcfilelikeelement in self._files_by_xcfilelikeelement and self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile ): raise ValueError( "Found multiple build files for " + xcfilelikeelement.Name() ) self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile
[ "def", "_AddBuildFileToDicts", "(", "self", ",", "pbxbuildfile", ",", "path", "=", "None", ")", ":", "xcfilelikeelement", "=", "pbxbuildfile", ".", "_properties", "[", "\"fileRef\"", "]", "paths", "=", "[", "]", "if", "path", "is", "not", "None", ":", "# It's best when the caller provides the path.", "if", "isinstance", "(", "xcfilelikeelement", ",", "PBXVariantGroup", ")", ":", "paths", ".", "append", "(", "path", ")", "else", ":", "# If the caller didn't provide a path, there can be either multiple", "# paths (PBXVariantGroup) or one.", "if", "isinstance", "(", "xcfilelikeelement", ",", "PBXVariantGroup", ")", ":", "for", "variant", "in", "xcfilelikeelement", ".", "_properties", "[", "\"children\"", "]", ":", "paths", ".", "append", "(", "variant", ".", "FullPath", "(", ")", ")", "else", ":", "paths", ".", "append", "(", "xcfilelikeelement", ".", "FullPath", "(", ")", ")", "# Add the paths first, because if something's going to raise, the", "# messages provided by _AddPathToDict are more useful owing to its", "# having access to a real pathname and not just an object's Name().", "for", "a_path", "in", "paths", ":", "self", ".", "_AddPathToDict", "(", "pbxbuildfile", ",", "a_path", ")", "# If another PBXBuildFile references this XCFileLikeElement, there's a", "# problem.", "if", "(", "xcfilelikeelement", "in", "self", ".", "_files_by_xcfilelikeelement", "and", "self", ".", "_files_by_xcfilelikeelement", "[", "xcfilelikeelement", "]", "!=", "pbxbuildfile", ")", ":", "raise", "ValueError", "(", "\"Found multiple build files for \"", "+", "xcfilelikeelement", ".", "Name", "(", ")", ")", "self", ".", "_files_by_xcfilelikeelement", "[", "xcfilelikeelement", "]", "=", "pbxbuildfile" ]
https://github.com/nodejs/node/blob/ac3c33c1646bf46104c15ae035982c06364da9b8/tools/gyp/pylib/gyp/xcodeproj_file.py#L1894-L1951
laoqiu/pypress-tornado
f06c5eab6b6adb2580df025f91c66460e1bb3b2e
pypress/helpers.py
python
endtags
(html)
return html
close all open html tags at the end of the string
close all open html tags at the end of the string
[ "close", "all", "open", "html", "tags", "at", "the", "end", "of", "the", "string" ]
def endtags(html): """ close all open html tags at the end of the string """ NON_CLOSING_TAGS = ['AREA', 'BASE', 'BASEFONT', 'BR', 'COL', 'FRAME', 'HR', 'IMG', 'INPUT', 'ISINDEX', 'LINK', 'META', 'PARAM'] opened_tags = re.findall(r"<([a-z]+)[^<>]*>",html) closed_tags = re.findall(r"</([a-z]+)>",html) opened_tags = [i.lower() for i in opened_tags if i.upper() not in NON_CLOSING_TAGS] closed_tags = [i.lower() for i in closed_tags] len_opened = len(opened_tags) if len_opened==len(closed_tags): return html opened_tags.reverse() for tag in opened_tags: if tag in closed_tags: closed_tags.remove(tag) else: html += "</%s>" % tag return html
[ "def", "endtags", "(", "html", ")", ":", "NON_CLOSING_TAGS", "=", "[", "'AREA'", ",", "'BASE'", ",", "'BASEFONT'", ",", "'BR'", ",", "'COL'", ",", "'FRAME'", ",", "'HR'", ",", "'IMG'", ",", "'INPUT'", ",", "'ISINDEX'", ",", "'LINK'", ",", "'META'", ",", "'PARAM'", "]", "opened_tags", "=", "re", ".", "findall", "(", "r\"<([a-z]+)[^<>]*>\"", ",", "html", ")", "closed_tags", "=", "re", ".", "findall", "(", "r\"</([a-z]+)>\"", ",", "html", ")", "opened_tags", "=", "[", "i", ".", "lower", "(", ")", "for", "i", "in", "opened_tags", "if", "i", ".", "upper", "(", ")", "not", "in", "NON_CLOSING_TAGS", "]", "closed_tags", "=", "[", "i", ".", "lower", "(", ")", "for", "i", "in", "closed_tags", "]", "len_opened", "=", "len", "(", "opened_tags", ")", "if", "len_opened", "==", "len", "(", "closed_tags", ")", ":", "return", "html", "opened_tags", ".", "reverse", "(", ")", "for", "tag", "in", "opened_tags", ":", "if", "tag", "in", "closed_tags", ":", "closed_tags", ".", "remove", "(", "tag", ")", "else", ":", "html", "+=", "\"</%s>\"", "%", "tag", "return", "html" ]
https://github.com/laoqiu/pypress-tornado/blob/f06c5eab6b6adb2580df025f91c66460e1bb3b2e/pypress/helpers.py#L148-L173
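A quick illustration of the balancing behaviour; non-closing tags such as <br> are ignored:

    print(endtags('<div><p>hello <br> world'))
    # -> '<div><p>hello <br> world</p></div>'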
algorithmiaio/sample-apps
a5c90698c09c61febcd03d922d47dc437a1f4cdc
Python/tweet-profanity-demo/profanity_analysis.py
python
process_text
()
return stripped_text
Remove emoticons, numbers etc. and returns list of cleaned tweets.
Remove emoticons, numbers etc. and returns list of cleaned tweets.
[ "Remove", "emoticons", "numbers", "etc", ".", "and", "returns", "list", "of", "cleaned", "tweets", "." ]
def process_text(): """Remove emoticons, numbers etc. and returns list of cleaned tweets.""" stripped_text = [ re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)|^rt|http.+?" + sys.argv[1].lower(), '', tweets.lower()).strip() for tweets in read_data() ] return stripped_text
[ "def", "process_text", "(", ")", ":", "stripped_text", "=", "[", "re", ".", "sub", "(", "\"(@[A-Za-z0-9]+)|([^0-9A-Za-z \\t])|(\\w+:\\/\\/\\S+)|^rt|http.+?\"", "+", "sys", ".", "argv", "[", "1", "]", ".", "lower", "(", ")", ",", "''", ",", "tweets", ".", "lower", "(", ")", ")", ".", "strip", "(", ")", "for", "tweets", "in", "read_data", "(", ")", "]", "return", "stripped_text" ]
https://github.com/algorithmiaio/sample-apps/blob/a5c90698c09c61febcd03d922d47dc437a1f4cdc/Python/tweet-profanity-demo/profanity_analysis.py#L38-L45
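The same regex applied to one hand-written tweet, so the effect is visible without the script's read_data() and sys.argv dependencies (the search term is a stand-in):

    import re
    tweet = "RT @user: soooo bad!!! https://t.co/abc #fail"
    term = "fail"  # stands in for sys.argv[1]
    print(re.sub(r"(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)|^rt|http.+?" + term,
                 '', tweet.lower()).strip())  # handle, URL, punctuation and leading 'rt' are stripped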
nodejs/quic
5baab3f3a05548d3b51bea98868412b08766e34d
deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
python
XcodeArchsVariableMapping
(archs, archs_including_64_bit=None)
return mapping
Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable, and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT).
Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable, and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT).
[ "Constructs", "a", "dictionary", "with", "expansion", "for", "$", "(", "ARCHS_STANDARD", ")", "variable", "and", "optionally", "for", "$", "(", "ARCHS_STANDARD_INCLUDING_64_BIT", ")", "." ]
def XcodeArchsVariableMapping(archs, archs_including_64_bit=None): """Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable, and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT).""" mapping = {'$(ARCHS_STANDARD)': archs} if archs_including_64_bit: mapping['$(ARCHS_STANDARD_INCLUDING_64_BIT)'] = archs_including_64_bit return mapping
[ "def", "XcodeArchsVariableMapping", "(", "archs", ",", "archs_including_64_bit", "=", "None", ")", ":", "mapping", "=", "{", "'$(ARCHS_STANDARD)'", ":", "archs", "}", "if", "archs_including_64_bit", ":", "mapping", "[", "'$(ARCHS_STANDARD_INCLUDING_64_BIT)'", "]", "=", "archs_including_64_bit", "return", "mapping" ]
https://github.com/nodejs/quic/blob/5baab3f3a05548d3b51bea98868412b08766e34d/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py#L35-L41
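For example:

    print(XcodeArchsVariableMapping(['armv7'], ['armv7', 'arm64']))
    # {'$(ARCHS_STANDARD)': ['armv7'], '$(ARCHS_STANDARD_INCLUDING_64_BIT)': ['armv7', 'arm64']}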
facebookarchive/nuclide
2a2a0a642d136768b7d2a6d35a652dc5fb77d70a
modules/atom-ide-debugger-python/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/breakpoint.py
python
Breakpoint.get_span
(self)
return ( address, address + size )
@rtype: tuple( int, int ) @return: Starting and ending address of the memory range covered by the breakpoint.
[]
def get_span(self): """ @rtype: tuple( int, int ) @return: Starting and ending address of the memory range covered by the breakpoint. """ address = self.get_address() size = self.get_size() return ( address, address + size )
[ "def", "get_span", "(", "self", ")", ":", "address", "=", "self", ".", "get_address", "(", ")", "size", "=", "self", ".", "get_size", "(", ")", "return", "(", "address", ",", "address", "+", "size", ")" ]
https://github.com/facebookarchive/nuclide/blob/2a2a0a642d136768b7d2a6d35a652dc5fb77d70a/modules/atom-ide-debugger-python/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/breakpoint.py#L265-L274
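Usage sketch, assuming bp is an existing winappdbg Breakpoint instance:

    start, end = bp.get_span()           # bp is a live Breakpoint
    assert end - start == bp.get_size()  # the span is exactly the covered size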
machinalis/telegraphy
eb6d4a054e0b3a5ea3b5864e9fcf13917bedee96
telegraphy/contrib/django_telegraphy/events.py
python
get_gateway_proxy
()
return xmlrpclib.Server(gateway_proxy_url, allow_none=True)
Get an object that allows interacting with the current gateway.
Get an object that allows interacting with the current gateway.
[ "Get", "an", "object", "that", "allows", "interacting", "with", "the", "current", "gateway", "." ]
def get_gateway_proxy(): """ Get an object that allows interacting with the current gateway. """ # TODO: eventually, we need a GatewayProxy class to abstract this interface. gateway_proxy_url = settings.TELEGRAPHY_RPC_PARAMS['url'] return xmlrpclib.Server(gateway_proxy_url, allow_none=True)
[ "def", "get_gateway_proxy", "(", ")", ":", "# TODO: eventually, we need a GatewayProxy class to abstract this interface.", "gateway_proxy_url", "=", "settings", ".", "TELEGRAPHY_RPC_PARAMS", "[", "'url'", "]", "return", "xmlrpclib", ".", "Server", "(", "gateway_proxy_url", ",", "allow_none", "=", "True", ")" ]
https://github.com/machinalis/telegraphy/blob/eb6d4a054e0b3a5ea3b5864e9fcf13917bedee96/telegraphy/contrib/django_telegraphy/events.py#L16-L23
nodejs/quic
5baab3f3a05548d3b51bea98868412b08766e34d
tools/gyp/pylib/gyp/mac_tool.py
python
MacTool._CommandifyName
(self, name_string)
return name_string.title().replace('-', '')
Transforms a tool name like copy-info-plist to CopyInfoPlist
Transforms a tool name like copy-info-plist to CopyInfoPlist
[ "Transforms", "a", "tool", "name", "like", "copy", "-", "info", "-", "plist", "to", "CopyInfoPlist" ]
def _CommandifyName(self, name_string): """Transforms a tool name like copy-info-plist to CopyInfoPlist""" return name_string.title().replace('-', '')
[ "def", "_CommandifyName", "(", "self", ",", "name_string", ")", ":", "return", "name_string", ".", "title", "(", ")", ".", "replace", "(", "'-'", ",", "''", ")" ]
https://github.com/nodejs/quic/blob/5baab3f3a05548d3b51bea98868412b08766e34d/tools/gyp/pylib/gyp/mac_tool.py#L49-L51
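The transformation is plain string manipulation, so it can be checked standalone:

    print('copy-info-plist'.title().replace('-', ''))  # -> 'CopyInfoPlist'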
IonicChina/ioniclub
208d5298939672ef44076bb8a7e8e6df5278e286
node_modules/gulp-sass/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/msvs_emulation.py
python
MsvsSettings._GetLdManifestFlags
(self, config, name, gyp_to_build_path, allow_isolation, build_dir)
return flags, output_name, manifest_files
Returns a 3-tuple: - the set of flags that need to be added to the link to generate a default manifest - the intermediate manifest that the linker will generate that should be used to assert it doesn't add anything to the merged one. - the list of all the manifest files to be merged by the manifest tool and included into the link.
Returns a 3-tuple: - the set of flags that need to be added to the link to generate a default manifest - the intermediate manifest that the linker will generate that should be used to assert it doesn't add anything to the merged one. - the list of all the manifest files to be merged by the manifest tool and included into the link.
[ "Returns", "a", "3", "-", "tuple", ":", "-", "the", "set", "of", "flags", "that", "need", "to", "be", "added", "to", "the", "link", "to", "generate", "a", "default", "manifest", "-", "the", "intermediate", "manifest", "that", "the", "linker", "will", "generate", "that", "should", "be", "used", "to", "assert", "it", "doesn", "t", "add", "anything", "to", "the", "merged", "one", ".", "-", "the", "list", "of", "all", "the", "manifest", "files", "to", "be", "merged", "by", "the", "manifest", "tool", "and", "included", "into", "the", "link", "." ]
def _GetLdManifestFlags(self, config, name, gyp_to_build_path, allow_isolation, build_dir): """Returns a 3-tuple: - the set of flags that need to be added to the link to generate a default manifest - the intermediate manifest that the linker will generate that should be used to assert it doesn't add anything to the merged one. - the list of all the manifest files to be merged by the manifest tool and included into the link.""" generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'), config, default='true') if generate_manifest != 'true': # This means not only that the linker should not generate the intermediate # manifest but also that the manifest tool should do nothing even when # additional manifests are specified. return ['/MANIFEST:NO'], [], [] output_name = name + '.intermediate.manifest' flags = [ '/MANIFEST', '/ManifestFile:' + output_name, ] # Instead of using the MANIFESTUAC flags, we generate a .manifest to # include into the list of manifests. This allows us to avoid the need to # do two passes during linking. The /MANIFEST flag and /ManifestFile are # still used, and the intermediate manifest is used to assert that the # final manifest we get from merging all the additional manifest files # (plus the one we generate here) isn't modified by merging the # intermediate into it. # Always NO, because we generate a manifest file that has what we want. flags.append('/MANIFESTUAC:NO') config = self._TargetConfig(config) enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config, default='true') manifest_files = [] generated_manifest_outer = \ "<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \ "<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \ "</assembly>" if enable_uac == 'true': execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'), config, default='0') execution_level_map = { '0': 'asInvoker', '1': 'highestAvailable', '2': 'requireAdministrator' } ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config, default='false') inner = ''' <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> <security> <requestedPrivileges> <requestedExecutionLevel level='%s' uiAccess='%s' /> </requestedPrivileges> </security> </trustInfo>''' % (execution_level_map[execution_level], ui_access) else: inner = '' generated_manifest_contents = generated_manifest_outer % inner generated_name = name + '.generated.manifest' # Need to join with the build_dir here as we're writing it during # generation time, but we return the un-joined version because the build # will occur in that directory. We only write the file if the contents # have changed so that simply regenerating the project files doesn't # cause a relink. build_dir_generated_name = os.path.join(build_dir, generated_name) gyp.common.EnsureDirExists(build_dir_generated_name) f = gyp.common.WriteOnDiff(build_dir_generated_name) f.write(generated_manifest_contents) f.close() manifest_files = [generated_name] if allow_isolation: flags.append('/ALLOWISOLATION') manifest_files += self._GetAdditionalManifestFiles(config, gyp_to_build_path) return flags, output_name, manifest_files
[ "def", "_GetLdManifestFlags", "(", "self", ",", "config", ",", "name", ",", "gyp_to_build_path", ",", "allow_isolation", ",", "build_dir", ")", ":", "generate_manifest", "=", "self", ".", "_Setting", "(", "(", "'VCLinkerTool'", ",", "'GenerateManifest'", ")", ",", "config", ",", "default", "=", "'true'", ")", "if", "generate_manifest", "!=", "'true'", ":", "# This means not only that the linker should not generate the intermediate", "# manifest but also that the manifest tool should do nothing even when", "# additional manifests are specified.", "return", "[", "'/MANIFEST:NO'", "]", ",", "[", "]", ",", "[", "]", "output_name", "=", "name", "+", "'.intermediate.manifest'", "flags", "=", "[", "'/MANIFEST'", ",", "'/ManifestFile:'", "+", "output_name", ",", "]", "# Instead of using the MANIFESTUAC flags, we generate a .manifest to", "# include into the list of manifests. This allows us to avoid the need to", "# do two passes during linking. The /MANIFEST flag and /ManifestFile are", "# still used, and the intermediate manifest is used to assert that the", "# final manifest we get from merging all the additional manifest files", "# (plus the one we generate here) isn't modified by merging the", "# intermediate into it.", "# Always NO, because we generate a manifest file that has what we want.", "flags", ".", "append", "(", "'/MANIFESTUAC:NO'", ")", "config", "=", "self", ".", "_TargetConfig", "(", "config", ")", "enable_uac", "=", "self", ".", "_Setting", "(", "(", "'VCLinkerTool'", ",", "'EnableUAC'", ")", ",", "config", ",", "default", "=", "'true'", ")", "manifest_files", "=", "[", "]", "generated_manifest_outer", "=", "\"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>\"", "\"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s\"", "\"</assembly>\"", "if", "enable_uac", "==", "'true'", ":", "execution_level", "=", "self", ".", "_Setting", "(", "(", "'VCLinkerTool'", ",", "'UACExecutionLevel'", ")", ",", "config", ",", "default", "=", "'0'", ")", "execution_level_map", "=", "{", "'0'", ":", "'asInvoker'", ",", "'1'", ":", "'highestAvailable'", ",", "'2'", ":", "'requireAdministrator'", "}", "ui_access", "=", "self", ".", "_Setting", "(", "(", "'VCLinkerTool'", ",", "'UACUIAccess'", ")", ",", "config", ",", "default", "=", "'false'", ")", "inner", "=", "'''\n<trustInfo xmlns=\"urn:schemas-microsoft-com:asm.v3\">\n <security>\n <requestedPrivileges>\n <requestedExecutionLevel level='%s' uiAccess='%s' />\n </requestedPrivileges>\n </security>\n</trustInfo>'''", "%", "(", "execution_level_map", "[", "execution_level", "]", ",", "ui_access", ")", "else", ":", "inner", "=", "''", "generated_manifest_contents", "=", "generated_manifest_outer", "%", "inner", "generated_name", "=", "name", "+", "'.generated.manifest'", "# Need to join with the build_dir here as we're writing it during", "# generation time, but we return the un-joined version because the build", "# will occur in that directory. 
We only write the file if the contents", "# have changed so that simply regenerating the project files doesn't", "# cause a relink.", "build_dir_generated_name", "=", "os", ".", "path", ".", "join", "(", "build_dir", ",", "generated_name", ")", "gyp", ".", "common", ".", "EnsureDirExists", "(", "build_dir_generated_name", ")", "f", "=", "gyp", ".", "common", ".", "WriteOnDiff", "(", "build_dir_generated_name", ")", "f", ".", "write", "(", "generated_manifest_contents", ")", "f", ".", "close", "(", ")", "manifest_files", "=", "[", "generated_name", "]", "if", "allow_isolation", ":", "flags", ".", "append", "(", "'/ALLOWISOLATION'", ")", "manifest_files", "+=", "self", ".", "_GetAdditionalManifestFiles", "(", "config", ",", "gyp_to_build_path", ")", "return", "flags", ",", "output_name", ",", "manifest_files" ]
https://github.com/IonicChina/ioniclub/blob/208d5298939672ef44076bb8a7e8e6df5278e286/node_modules/gulp-sass/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/msvs_emulation.py#L546-L631
facebookarchive/nuclide
2a2a0a642d136768b7d2a6d35a652dc5fb77d70a
modules/atom-ide-debugger-python/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_version.py
python
git_pieces_from_vcs
(tag_prefix, root, verbose, run_command=run_command)
return pieces
Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree.
Get version from 'git describe' in the root of the source tree.
[ "Get", "version", "from", "git", "describe", "in", "the", "root", "of", "the", "source", "tree", "." ]
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. """ GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces
[ "def", "git_pieces_from_vcs", "(", "tag_prefix", ",", "root", ",", "verbose", ",", "run_command", "=", "run_command", ")", ":", "GITS", "=", "[", "\"git\"", "]", "if", "sys", ".", "platform", "==", "\"win32\"", ":", "GITS", "=", "[", "\"git.cmd\"", ",", "\"git.exe\"", "]", "out", ",", "rc", "=", "run_command", "(", "GITS", ",", "[", "\"rev-parse\"", ",", "\"--git-dir\"", "]", ",", "cwd", "=", "root", ",", "hide_stderr", "=", "True", ")", "if", "rc", "!=", "0", ":", "if", "verbose", ":", "print", "(", "\"Directory %s not under git control\"", "%", "root", ")", "raise", "NotThisMethod", "(", "\"'git rev-parse --git-dir' returned error\"", ")", "# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]", "# if there isn't one, this yields HEX[-dirty] (no NUM)", "describe_out", ",", "rc", "=", "run_command", "(", "GITS", ",", "[", "\"describe\"", ",", "\"--tags\"", ",", "\"--dirty\"", ",", "\"--always\"", ",", "\"--long\"", ",", "\"--match\"", ",", "\"%s*\"", "%", "tag_prefix", "]", ",", "cwd", "=", "root", ")", "# --long was added in git-1.5.5", "if", "describe_out", "is", "None", ":", "raise", "NotThisMethod", "(", "\"'git describe' failed\"", ")", "describe_out", "=", "describe_out", ".", "strip", "(", ")", "full_out", ",", "rc", "=", "run_command", "(", "GITS", ",", "[", "\"rev-parse\"", ",", "\"HEAD\"", "]", ",", "cwd", "=", "root", ")", "if", "full_out", "is", "None", ":", "raise", "NotThisMethod", "(", "\"'git rev-parse' failed\"", ")", "full_out", "=", "full_out", ".", "strip", "(", ")", "pieces", "=", "{", "}", "pieces", "[", "\"long\"", "]", "=", "full_out", "pieces", "[", "\"short\"", "]", "=", "full_out", "[", ":", "7", "]", "# maybe improved later", "pieces", "[", "\"error\"", "]", "=", "None", "# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]", "# TAG might have hyphens.", "git_describe", "=", "describe_out", "# look for -dirty suffix", "dirty", "=", "git_describe", ".", "endswith", "(", "\"-dirty\"", ")", "pieces", "[", "\"dirty\"", "]", "=", "dirty", "if", "dirty", ":", "git_describe", "=", "git_describe", "[", ":", "git_describe", ".", "rindex", "(", "\"-dirty\"", ")", "]", "# now we have TAG-NUM-gHEX or HEX", "if", "\"-\"", "in", "git_describe", ":", "# TAG-NUM-gHEX", "mo", "=", "re", ".", "search", "(", "r'^(.+)-(\\d+)-g([0-9a-f]+)$'", ",", "git_describe", ")", "if", "not", "mo", ":", "# unparseable. 
Maybe git-describe is misbehaving?", "pieces", "[", "\"error\"", "]", "=", "(", "\"unable to parse git-describe output: '%s'\"", "%", "describe_out", ")", "return", "pieces", "# tag", "full_tag", "=", "mo", ".", "group", "(", "1", ")", "if", "not", "full_tag", ".", "startswith", "(", "tag_prefix", ")", ":", "if", "verbose", ":", "fmt", "=", "\"tag '%s' doesn't start with prefix '%s'\"", "print", "(", "fmt", "%", "(", "full_tag", ",", "tag_prefix", ")", ")", "pieces", "[", "\"error\"", "]", "=", "(", "\"tag '%s' doesn't start with prefix '%s'\"", "%", "(", "full_tag", ",", "tag_prefix", ")", ")", "return", "pieces", "pieces", "[", "\"closest-tag\"", "]", "=", "full_tag", "[", "len", "(", "tag_prefix", ")", ":", "]", "# distance: number of commits since tag", "pieces", "[", "\"distance\"", "]", "=", "int", "(", "mo", ".", "group", "(", "2", ")", ")", "# commit: short hex revision ID", "pieces", "[", "\"short\"", "]", "=", "mo", ".", "group", "(", "3", ")", "else", ":", "# HEX: no tags", "pieces", "[", "\"closest-tag\"", "]", "=", "None", "count_out", ",", "rc", "=", "run_command", "(", "GITS", ",", "[", "\"rev-list\"", ",", "\"HEAD\"", ",", "\"--count\"", "]", ",", "cwd", "=", "root", ")", "pieces", "[", "\"distance\"", "]", "=", "int", "(", "count_out", ")", "# total number of commits", "# commit date: see ISO-8601 comment in git_versions_from_keywords()", "date", "=", "run_command", "(", "GITS", ",", "[", "\"show\"", ",", "\"-s\"", ",", "\"--format=%ci\"", ",", "\"HEAD\"", "]", ",", "cwd", "=", "root", ")", "[", "0", "]", ".", "strip", "(", ")", "pieces", "[", "\"date\"", "]", "=", "date", ".", "strip", "(", ")", ".", "replace", "(", "\" \"", ",", "\"T\"", ",", "1", ")", ".", "replace", "(", "\" \"", ",", "\"\"", ",", "1", ")", "return", "pieces" ]
https://github.com/facebookarchive/nuclide/blob/2a2a0a642d136768b7d2a6d35a652dc5fb77d70a/modules/atom-ide-debugger-python/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_version.py#L219-L307
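Shape of the returned dict, sketched for a hypothetical checkout four commits past tag v1.2.3 (values are illustrative, not real output):

    pieces = git_pieces_from_vcs('v', '.', False)
    # e.g. {'long': '<full sha>', 'short': '<7-char sha>', 'error': None,
    #       'dirty': False, 'closest-tag': '1.2.3', 'distance': 4,
    #       'date': '2018-05-01T12:34:56+0000'}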
nprapps/dailygraphics
c83ca22322d8bc069389daed6f38d5f238a14d9b
fabfile/__init__.py
python
production
()
Run as though on production.
Run as though on production.
[ "Run", "as", "though", "on", "production", "." ]
def production(): """ Run as though on production. """ env.settings = 'production' app_config.configure_targets(env.settings)
[ "def", "production", "(", ")", ":", "env", ".", "settings", "=", "'production'", "app_config", ".", "configure_targets", "(", "env", ".", "settings", ")" ]
https://github.com/nprapps/dailygraphics/blob/c83ca22322d8bc069389daed6f38d5f238a14d9b/fabfile/__init__.py#L38-L43
mozilla/spidernode
aafa9e5273f954f272bb4382fc007af14674b4c2
deps/v8/third_party/jinja2/environment.py
python
Environment.select_template
(self, names, parent=None, globals=None)
Works like :meth:`get_template` but tries a number of templates before it fails. If it cannot find any of the templates, it will raise a :exc:`TemplatesNotFound` exception. .. versionadded:: 2.3 .. versionchanged:: 2.4 If `names` contains a :class:`Template` object it is returned from the function unchanged.
Works like :meth:`get_template` but tries a number of templates before it fails. If it cannot find any of the templates, it will raise a :exc:`TemplatesNotFound` exception.
[ "Works", "like", ":", "meth", ":", "get_template", "but", "tries", "a", "number", "of", "templates", "before", "it", "fails", ".", "If", "it", "cannot", "find", "any", "of", "the", "templates", "it", "will", "raise", "a", ":", "exc", ":", "TemplatesNotFound", "exception", "." ]
def select_template(self, names, parent=None, globals=None): """Works like :meth:`get_template` but tries a number of templates before it fails. If it cannot find any of the templates, it will raise a :exc:`TemplatesNotFound` exception. .. versionadded:: 2.3 .. versionchanged:: 2.4 If `names` contains a :class:`Template` object it is returned from the function unchanged. """ if not names: raise TemplatesNotFound(message=u'Tried to select from an empty list ' u'of templates.') globals = self.make_globals(globals) for name in names: if isinstance(name, Template): return name if parent is not None: name = self.join_path(name, parent) try: return self._load_template(name, globals) except TemplateNotFound: pass raise TemplatesNotFound(names)
[ "def", "select_template", "(", "self", ",", "names", ",", "parent", "=", "None", ",", "globals", "=", "None", ")", ":", "if", "not", "names", ":", "raise", "TemplatesNotFound", "(", "message", "=", "u'Tried to select from an empty list '", "u'of templates.'", ")", "globals", "=", "self", ".", "make_globals", "(", "globals", ")", "for", "name", "in", "names", ":", "if", "isinstance", "(", "name", ",", "Template", ")", ":", "return", "name", "if", "parent", "is", "not", "None", ":", "name", "=", "self", ".", "join_path", "(", "name", ",", "parent", ")", "try", ":", "return", "self", ".", "_load_template", "(", "name", ",", "globals", ")", "except", "TemplateNotFound", ":", "pass", "raise", "TemplatesNotFound", "(", "names", ")" ]
https://github.com/mozilla/spidernode/blob/aafa9e5273f954f272bb4382fc007af14674b4c2/deps/v8/third_party/jinja2/environment.py#L815-L839
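A self-contained demonstration of the fallback order using an in-memory loader:

    from jinja2 import Environment, DictLoader

    env = Environment(loader=DictLoader({'base.html': 'fallback content'}))
    tmpl = env.select_template(['custom.html', 'base.html'])  # custom.html does not exist
    print(tmpl.render())  # -> 'fallback content'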
atom-community/ide-python
c046f9c2421713b34baa22648235541c5bb284fe
dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/_pydevd_bundle/pydevd_extension_api.py
python
_AbstractResolver.get_dictionary
(self, var)
@param var: this is the variable that should have its children gotten. @return: a dictionary where each pair key, value should be shown to the user as children items in the variables view for the given var.
@param var: this is the variable that should have its children gotten.
[ "@param", "var", ":", "this", "is", "the", "variable", "that", "should", "have", "its", "children", "gotten", "." ]
def get_dictionary(self, var): """ @param var: this is the variable that should have its children gotten. @return: a dictionary where each pair key, value should be shown to the user as children items in the variables view for the given var. """ raise NotImplementedError
[ "def", "get_dictionary", "(", "self", ",", "var", ")", ":", "raise", "NotImplementedError" ]
https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/_pydevd_bundle/pydevd_extension_api.py#L40-L47
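A hedged sketch of implementing this interface for a custom type. The base class is stubbed below so the snippet runs standalone; in pydevd you would subclass the real _AbstractResolver and register it so the variables view can expand your objects:

class _AbstractResolver(object):  # stub of the pydevd base class
    def get_dictionary(self, var):
        raise NotImplementedError

class Point(object):  # hypothetical user type
    def __init__(self, x, y):
        self.x, self.y = x, y

class PointResolver(_AbstractResolver):
    def get_dictionary(self, var):
        # Each key/value pair becomes a child item in the variables view.
        return {'x': var.x, 'y': var.y}

print(PointResolver().get_dictionary(Point(3, 4)))  # -> {'x': 3, 'y': 4}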
edf-hpc/slurm-web
6a163117cfad882ddb7f5680335907a09c2c638d
src/slurmweb/restapi/slurmrestapi.py
python
filter_entities
(entity, entitiesList)
return entitiesList
Return the list of entities filtered if privateData is on for the entities.
Return the list of entities filtered if privateData is on for the entities.
[ "Return", "the", "list", "of", "entities", "filtered", "if", "privateData", "is", "on", "for", "the", "entities", "." ]
def filter_entities(entity, entitiesList): """ Return the list of entities filtered if privateData is on for the entities. """ onlyUsersEntities = False if auth_enabled: # If auth_enabled is true, getting the current user becomes # possible because authentification_verify decorator has been # already checked. currentUser = get_current_user() onlyUsersEntities = check_private_data_for_entity( currentUser, entity) if onlyUsersEntities: # if private data is applied and the entities' owner is different from # current user, then the entities will not be added to the list to # show if auth disabled, onlyUsersEntities becomes always False and all # the entities are added to the list to show return dict((k, v) for k, v in entitiesList.items() if ((entity == 'reservations' and currentUser.login in v['users']) or (entity == 'jobs' and currentUser.login == v['login']))) return entitiesList
[ "def", "filter_entities", "(", "entity", ",", "entitiesList", ")", ":", "onlyUsersEntities", "=", "False", "if", "auth_enabled", ":", "# If auth_enabled is true, getting the current user becomes", "# possible because authentification_verify decorator has been", "# already checked.", "currentUser", "=", "get_current_user", "(", ")", "onlyUsersEntities", "=", "check_private_data_for_entity", "(", "currentUser", ",", "entity", ")", "if", "onlyUsersEntities", ":", "# if private data is applied and the entities' owner is different from", "# current user, then the entities will not be added to the list to", "# show if auth disabled, onlyUsersEntities becomes always False and all", "# the entities are added to the list to show", "return", "dict", "(", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "entitiesList", ".", "items", "(", ")", "if", "(", "(", "entity", "==", "'reservations'", "and", "currentUser", ".", "login", "in", "v", "[", "'users'", "]", ")", "or", "(", "entity", "==", "'jobs'", "and", "currentUser", ".", "login", "==", "v", "[", "'login'", "]", ")", ")", ")", "return", "entitiesList" ]
https://github.com/edf-hpc/slurm-web/blob/6a163117cfad882ddb7f5680335907a09c2c638d/src/slurmweb/restapi/slurmrestapi.py#L534-L558
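The core of the function is the owner filter; a self-contained sketch with the auth machinery replaced by plain values (names here are illustrative, not slurm-web code):

login = 'alice'  # stands in for currentUser.login
jobs = {
    1: {'login': 'alice', 'name': 'job-a'},
    2: {'login': 'bob', 'name': 'job-b'},
}
reservations = {
    'r1': {'users': ['alice', 'carol']},
    'r2': {'users': ['bob']},
}

# Same comprehension shape as the function: keep only the caller's entities.
my_jobs = dict((k, v) for k, v in jobs.items() if v['login'] == login)
my_resv = dict((k, v) for k, v in reservations.items() if login in v['users'])
print(sorted(my_jobs), sorted(my_resv))  # -> [1] ['r1']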
jxcore/jxcore
b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410
deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
python
NormjoinPathForceCMakeSource
(base_path, rel_path)
return os.path.join('${CMAKE_SOURCE_DIR}', os.path.normpath(os.path.join(base_path, rel_path)))
Resolves rel_path against base_path and returns the result. If rel_path is an absolute path it is returned unchanged. Otherwise it is resolved against base_path and normalized. If the result is a relative path, it is forced to be relative to the CMakeLists.txt.
Resolves rel_path against base_path and returns the result.
[ "Resolves", "rel_path", "against", "base_path", "and", "returns", "the", "result", "." ]
def NormjoinPathForceCMakeSource(base_path, rel_path): """Resolves rel_path against base_path and returns the result. If rel_path is an absolute path it is returned unchanged. Otherwise it is resolved against base_path and normalized. If the result is a relative path, it is forced to be relative to the CMakeLists.txt. """ if os.path.isabs(rel_path): return rel_path if any([rel_path.startswith(var) for var in FULL_PATH_VARS]): return rel_path # TODO: do we need to check base_path for absolute variables as well? return os.path.join('${CMAKE_SOURCE_DIR}', os.path.normpath(os.path.join(base_path, rel_path)))
[ "def", "NormjoinPathForceCMakeSource", "(", "base_path", ",", "rel_path", ")", ":", "if", "os", ".", "path", ".", "isabs", "(", "rel_path", ")", ":", "return", "rel_path", "if", "any", "(", "[", "rel_path", ".", "startswith", "(", "var", ")", "for", "var", "in", "FULL_PATH_VARS", "]", ")", ":", "return", "rel_path", "# TODO: do we need to check base_path for absolute variables as well?", "return", "os", ".", "path", ".", "join", "(", "'${CMAKE_SOURCE_DIR}'", ",", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "base_path", ",", "rel_path", ")", ")", ")" ]
https://github.com/jxcore/jxcore/blob/b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py#L93-L107
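A standalone copy of the function with FULL_PATH_VARS stubbed (the real constant lives elsewhere in cmake.py), showing the behaviours the docstring describes, on a POSIX system:

import os

FULL_PATH_VARS = ('${CMAKE_SOURCE_DIR}',)  # stub; see cmake.py for the real list

def NormjoinPathForceCMakeSource(base_path, rel_path):
    if os.path.isabs(rel_path):
        return rel_path
    if any([rel_path.startswith(var) for var in FULL_PATH_VARS]):
        return rel_path
    return os.path.join('${CMAKE_SOURCE_DIR}',
                        os.path.normpath(os.path.join(base_path, rel_path)))

print(NormjoinPathForceCMakeSource('a/b', '../c.cc'))    # ${CMAKE_SOURCE_DIR}/a/c.cc
print(NormjoinPathForceCMakeSource('a/b', '/abs/c.cc'))  # /abs/c.cc, unchanged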
atom-community/ide-python
c046f9c2421713b34baa22648235541c5bb284fe
dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/process.py
python
Process.read_pointer
(self, lpBaseAddress)
return self.__read_c_type(lpBaseAddress, '@P', ctypes.c_void_p)
Reads a pointer value from the memory of the process. @see: L{peek_pointer} @type lpBaseAddress: int @param lpBaseAddress: Memory address to begin reading. @rtype: int @return: Pointer value read from the process memory. @raise WindowsError: On error an exception is raised.
Reads a pointer value from the memory of the process.
[ "Reads", "a", "pointer", "value", "from", "the", "memory", "of", "the", "process", "." ]
def read_pointer(self, lpBaseAddress): """ Reads a pointer value from the memory of the process. @see: L{peek_pointer} @type lpBaseAddress: int @param lpBaseAddress: Memory address to begin reading. @rtype: int @return: Pointer value read from the process memory. @raise WindowsError: On error an exception is raised. """ return self.__read_c_type(lpBaseAddress, '@P', ctypes.c_void_p)
[ "def", "read_pointer", "(", "self", ",", "lpBaseAddress", ")", ":", "return", "self", ".", "__read_c_type", "(", "lpBaseAddress", ",", "'@P'", ",", "ctypes", ".", "c_void_p", ")" ]
https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/dist/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/process.py#L1795-L1809
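The '@P' format string passed to the private reader means "native pointer, native size and alignment", which is why it pairs with ctypes.c_void_p; this is plain stdlib behaviour and easy to verify:

import ctypes
import struct

# The struct module and ctypes agree on the platform pointer width.
print(struct.calcsize('@P') == ctypes.sizeof(ctypes.c_void_p))  # -> True
print(struct.calcsize('@P'))  # 8 in a 64-bit process, 4 in a 32-bit one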
OpenDroneMap/WebODM
d7d2cfc4fd2222100130b731b24041e0b5ed6b3a
app/api/tiler.py
python
Export.post
(self, request, pk=None, project_pk=None, asset_type=None)
Export assets (orthophoto, DEMs, etc.) after applying scaling formulas, shading, reprojections
Export assets (orthophoto, DEMs, etc.) after applying scaling formulas, shading, reprojections
[ "Export", "assets", "(", "orthophoto", "DEMs", "etc", ".", ")", "after", "applying", "scaling", "formulas", "shading", "reprojections" ]
def post(self, request, pk=None, project_pk=None, asset_type=None): """ Export assets (orthophoto, DEMs, etc.) after applying scaling formulas, shading, reprojections """ task = self.get_and_check_task(request, pk) formula = request.data.get('formula') bands = request.data.get('bands') rescale = request.data.get('rescale') export_format = request.data.get('format', 'laz' if asset_type == 'georeferenced_model' else 'gtiff') epsg = request.data.get('epsg') color_map = request.data.get('color_map') hillshade = request.data.get('hillshade') if formula == '': formula = None if bands == '': bands = None if rescale == '': rescale = None if epsg == '': epsg = None if color_map == '': color_map = None if hillshade == '': hillshade = None expr = None if asset_type in ['orthophoto', 'dsm', 'dtm'] and not export_format in ['gtiff', 'gtiff-rgb', 'jpg', 'png', 'kmz']: raise exceptions.ValidationError(_("Unsupported format: %(value)s") % {'value': export_format}) if asset_type == 'georeferenced_model' and not export_format in ['laz', 'las', 'ply', 'csv']: raise exceptions.ValidationError(_("Unsupported format: %(value)s") % {'value': export_format}) # Default color map, hillshade if asset_type in ['dsm', 'dtm'] and export_format != 'gtiff': if color_map is None: color_map = 'viridis' if hillshade is None: hillshade = 6 if color_map is not None: try: colormap.get(color_map) except InvalidColorMapName: raise exceptions.ValidationError(_("Not a valid color_map value")) if epsg is not None: try: epsg = int(epsg) except ValueError: raise exceptions.ValidationError(_("Invalid EPSG code: %(value)s") % {'value': epsg}) if (formula and not bands) or (not formula and bands): raise exceptions.ValidationError(_("Both formula and bands parameters are required")) if formula and bands: try: expr, _discard_ = lookup_formula(formula, bands) except ValueError as e: raise exceptions.ValidationError(str(e)) if export_format in ['gtiff-rgb', 'jpg', 'png']: if formula is not None and rescale is None: rescale = "-1,1" if export_format == 'gtiff': rescale = None if rescale is not None: rescale = rescale.replace("%2C", ",") try: rescale = list(map(float, rescale.split(","))) except ValueError: raise exceptions.ValidationError(_("Invalid rescale value: %(value)s") % {'value': rescale}) if hillshade is not None: try: hillshade = float(hillshade) if hillshade < 0: raise Exception("Hillshade must be > 0") except: raise exceptions.ValidationError(_("Invalid hillshade value: %(value)s") % {'value': hillshade}) if asset_type == 'georeferenced_model': url = get_pointcloud_path(task) else: url = get_raster_path(task, asset_type) if not os.path.isfile(url): raise exceptions.NotFound() if epsg is not None and task.epsg is None: raise exceptions.ValidationError(_("Cannot use epsg on non-georeferenced dataset")) # Strip unsafe chars, append suffix extension = extension_for_export_format(export_format) filename = "{}{}.{}".format( get_asset_download_filename(task, asset_type), "-{}".format(formula) if expr is not None else "", extension ) if asset_type in ['orthophoto', 'dsm', 'dtm']: # Shortcut the process if no processing is required if export_format == 'gtiff' and (epsg == task.epsg or epsg is None) and expr is None: return Response({'url': '/api/projects/{}/tasks/{}/download/{}.tif'.format(task.project.id, task.id, asset_type), 'filename': filename}) else: celery_task_id = export_raster.delay(url, epsg=epsg, expression=expr, format=export_format, rescale=rescale, color_map=color_map, hillshade=hillshade, asset_type=asset_type, name=task.name).task_id return Response({'celery_task_id': celery_task_id, 'filename': filename}) elif asset_type == 'georeferenced_model': # Shortcut the process if no processing is required if export_format == 'laz' and (epsg == task.epsg or epsg is None): return Response({'url': '/api/projects/{}/tasks/{}/download/{}.laz'.format(task.project.id, task.id, asset_type), 'filename': filename}) else: celery_task_id = export_pointcloud.delay(url, epsg=epsg, format=export_format).task_id return Response({'celery_task_id': celery_task_id, 'filename': filename})
[ "def", "post", "(", "self", ",", "request", ",", "pk", "=", "None", ",", "project_pk", "=", "None", ",", "asset_type", "=", "None", ")", ":", "task", "=", "self", ".", "get_and_check_task", "(", "request", ",", "pk", ")", "formula", "=", "request", ".", "data", ".", "get", "(", "'formula'", ")", "bands", "=", "request", ".", "data", ".", "get", "(", "'bands'", ")", "rescale", "=", "request", ".", "data", ".", "get", "(", "'rescale'", ")", "export_format", "=", "request", ".", "data", ".", "get", "(", "'format'", ",", "'laz'", "if", "asset_type", "==", "'georeferenced_model'", "else", "'gtiff'", ")", "epsg", "=", "request", ".", "data", ".", "get", "(", "'epsg'", ")", "color_map", "=", "request", ".", "data", ".", "get", "(", "'color_map'", ")", "hillshade", "=", "request", ".", "data", ".", "get", "(", "'hillshade'", ")", "if", "formula", "==", "''", ":", "formula", "=", "None", "if", "bands", "==", "''", ":", "bands", "=", "None", "if", "rescale", "==", "''", ":", "rescale", "=", "None", "if", "epsg", "==", "''", ":", "epsg", "=", "None", "if", "color_map", "==", "''", ":", "color_map", "=", "None", "if", "hillshade", "==", "''", ":", "hillshade", "=", "None", "expr", "=", "None", "if", "asset_type", "in", "[", "'orthophoto'", ",", "'dsm'", ",", "'dtm'", "]", "and", "not", "export_format", "in", "[", "'gtiff'", ",", "'gtiff-rgb'", ",", "'jpg'", ",", "'png'", ",", "'kmz'", "]", ":", "raise", "exceptions", ".", "ValidationError", "(", "_", "(", "\"Unsupported format: %(value)s\"", ")", "%", "{", "'value'", ":", "export_format", "}", ")", "if", "asset_type", "==", "'georeferenced_model'", "and", "not", "export_format", "in", "[", "'laz'", ",", "'las'", ",", "'ply'", ",", "'csv'", "]", ":", "raise", "exceptions", ".", "ValidationError", "(", "_", "(", "\"Unsupported format: %(value)s\"", ")", "%", "{", "'value'", ":", "export_format", "}", ")", "# Default color map, hillshade", "if", "asset_type", "in", "[", "'dsm'", ",", "'dtm'", "]", "and", "export_format", "!=", "'gtiff'", ":", "if", "color_map", "is", "None", ":", "color_map", "=", "'viridis'", "if", "hillshade", "is", "None", ":", "hillshade", "=", "6", "if", "color_map", "is", "not", "None", ":", "try", ":", "colormap", ".", "get", "(", "color_map", ")", "except", "InvalidColorMapName", ":", "raise", "exceptions", ".", "ValidationError", "(", "_", "(", "\"Not a valid color_map value\"", ")", ")", "if", "epsg", "is", "not", "None", ":", "try", ":", "epsg", "=", "int", "(", "epsg", ")", "except", "ValueError", ":", "raise", "exceptions", ".", "ValidationError", "(", "_", "(", "\"Invalid EPSG code: %(value)s\"", ")", "%", "{", "'value'", ":", "epsg", "}", ")", "if", "(", "formula", "and", "not", "bands", ")", "or", "(", "not", "formula", "and", "bands", ")", ":", "raise", "exceptions", ".", "ValidationError", "(", "_", "(", "\"Both formula and bands parameters are required\"", ")", ")", "if", "formula", "and", "bands", ":", "try", ":", "expr", ",", "_discard_", "=", "lookup_formula", "(", "formula", ",", "bands", ")", "except", "ValueError", "as", "e", ":", "raise", "exceptions", ".", "ValidationError", "(", "str", "(", "e", ")", ")", "if", "export_format", "in", "[", "'gtiff-rgb'", ",", "'jpg'", ",", "'png'", "]", ":", "if", "formula", "is", "not", "None", "and", "rescale", "is", "None", ":", "rescale", "=", "\"-1,1\"", "if", "export_format", "==", "'gtiff'", ":", "rescale", "=", "None", "if", "rescale", "is", "not", "None", ":", "rescale", "=", "rescale", ".", "replace", "(", "\"%2C\"", ",", "\",\"", ")", "try", ":", "rescale", "=", 
"list", "(", "map", "(", "float", ",", "rescale", ".", "split", "(", "\",\"", ")", ")", ")", "except", "ValueError", ":", "raise", "exceptions", ".", "ValidationError", "(", "_", "(", "\"Invalid rescale value: %(value)s\"", ")", "%", "{", "'value'", ":", "rescale", "}", ")", "if", "hillshade", "is", "not", "None", ":", "try", ":", "hillshade", "=", "float", "(", "hillshade", ")", "if", "hillshade", "<", "0", ":", "raise", "Exception", "(", "\"Hillshade must be > 0\"", ")", "except", ":", "raise", "exceptions", ".", "ValidationError", "(", "_", "(", "\"Invalid hillshade value: %(value)s\"", ")", "%", "{", "'value'", ":", "hillshade", "}", ")", "if", "asset_type", "==", "'georeferenced_model'", ":", "url", "=", "get_pointcloud_path", "(", "task", ")", "else", ":", "url", "=", "get_raster_path", "(", "task", ",", "asset_type", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "url", ")", ":", "raise", "exceptions", ".", "NotFound", "(", ")", "if", "epsg", "is", "not", "None", "and", "task", ".", "epsg", "is", "None", ":", "raise", "exceptions", ".", "ValidationError", "(", "_", "(", "\"Cannot use epsg on non-georeferenced dataset\"", ")", ")", "# Strip unsafe chars, append suffix", "extension", "=", "extension_for_export_format", "(", "export_format", ")", "filename", "=", "\"{}{}.{}\"", ".", "format", "(", "get_asset_download_filename", "(", "task", ",", "asset_type", ")", ",", "\"-{}\"", ".", "format", "(", "formula", ")", "if", "expr", "is", "not", "None", "else", "\"\"", ",", "extension", ")", "if", "asset_type", "in", "[", "'orthophoto'", ",", "'dsm'", ",", "'dtm'", "]", ":", "# Shortcut the process if no processing is required", "if", "export_format", "==", "'gtiff'", "and", "(", "epsg", "==", "task", ".", "epsg", "or", "epsg", "is", "None", ")", "and", "expr", "is", "None", ":", "return", "Response", "(", "{", "'url'", ":", "'/api/projects/{}/tasks/{}/download/{}.tif'", ".", "format", "(", "task", ".", "project", ".", "id", ",", "task", ".", "id", ",", "asset_type", ")", ",", "'filename'", ":", "filename", "}", ")", "else", ":", "celery_task_id", "=", "export_raster", ".", "delay", "(", "url", ",", "epsg", "=", "epsg", ",", "expression", "=", "expr", ",", "format", "=", "export_format", ",", "rescale", "=", "rescale", ",", "color_map", "=", "color_map", ",", "hillshade", "=", "hillshade", ",", "asset_type", "=", "asset_type", ",", "name", "=", "task", ".", "name", ")", ".", "task_id", "return", "Response", "(", "{", "'celery_task_id'", ":", "celery_task_id", ",", "'filename'", ":", "filename", "}", ")", "elif", "asset_type", "==", "'georeferenced_model'", ":", "# Shortcut the process if no processing is required", "if", "export_format", "==", "'laz'", "and", "(", "epsg", "==", "task", ".", "epsg", "or", "epsg", "is", "None", ")", ":", "return", "Response", "(", "{", "'url'", ":", "'/api/projects/{}/tasks/{}/download/{}.laz'", ".", "format", "(", "task", ".", "project", ".", "id", ",", "task", ".", "id", ",", "asset_type", ")", ",", "'filename'", ":", "filename", "}", ")", "else", ":", "celery_task_id", "=", "export_pointcloud", ".", "delay", "(", "url", ",", "epsg", "=", "epsg", ",", "format", "=", "export_format", ")", ".", "task_id", "return", "Response", "(", "{", "'celery_task_id'", ":", "celery_task_id", ",", "'filename'", ":", "filename", "}", ")" ]
https://github.com/OpenDroneMap/WebODM/blob/d7d2cfc4fd2222100130b731b24041e0b5ed6b3a/app/api/tiler.py#L473-L592
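The handler normalizes empty-string request values to None one field at a time; a compact equivalent over a whole payload looks like this (a sketch, not WebODM code):

payload = {'formula': '', 'epsg': '4326', 'hillshade': None}
normalized = {k: (None if v == '' else v) for k, v in payload.items()}
print(normalized)  # -> {'formula': None, 'epsg': '4326', 'hillshade': None}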
hotosm/tasking-manager
1a7b02c6ccd431029a96d709d4d786c83cb37f5e
backend/services/grid/split_service.py
python
SplitService.split_task
(split_task_dto: SplitTaskDTO)
return task_dtos
Replaces a task square with 4 smaller tasks at the next OSM tile grid zoom level Validates that task is: - locked for mapping by current user :param split_task_dto: :return: new tasks in a DTO
Replaces a task square with 4 smaller tasks at the next OSM tile grid zoom level Validates that task is: - locked for mapping by current user :param split_task_dto: :return: new tasks in a DTO
[ "Replaces", "a", "task", "square", "with", "4", "smaller", "tasks", "at", "the", "next", "OSM", "tile", "grid", "zoom", "level", "Validates", "that", "task", "is", ":", "-", "locked", "for", "mapping", "by", "current", "user", ":", "param", "split_task_dto", ":", ":", "return", ":", "new", "tasks", "in", "a", "DTO" ]
def split_task(split_task_dto: SplitTaskDTO) -> TaskDTOs: """ Replaces a task square with 4 smaller tasks at the next OSM tile grid zoom level Validates that task is: - locked for mapping by current user :param split_task_dto: :return: new tasks in a DTO """ # get the task to be split original_task = Task.get(split_task_dto.task_id, split_task_dto.project_id) if original_task is None: raise NotFound() original_geometry = shape.to_shape(original_task.geometry) # Fetch the task geometry in meters original_task_area_m = db.engine.execute( ST_Area(ST_GeogFromWKB(original_task.geometry)) ).scalar() if ( original_task.zoom and original_task.zoom >= 18 ) or original_task_area_m < 25000: raise SplitServiceError("Task is too small to be split") # check its locked for mapping by the current user if TaskStatus(original_task.task_status) != TaskStatus.LOCKED_FOR_MAPPING: raise SplitServiceError("Status must be LOCKED_FOR_MAPPING to split") if original_task.locked_by != split_task_dto.user_id: raise SplitServiceError("Attempting to split a task owned by another user") # create new geometries from the task geometry try: new_tasks_geojson = SplitService._create_split_tasks( original_task.x, original_task.y, original_task.zoom, original_task ) except Exception as e: raise SplitServiceError(f"Error splitting task{str(e)}") # create new tasks from the new geojson i = Task.get_max_task_id_for_project(split_task_dto.project_id) new_tasks = [] new_tasks_dto = [] for new_task_geojson in new_tasks_geojson: # Sanity check: ensure the new task geometry intersects the original task geometry new_geometry = shapely_shape(new_task_geojson.geometry) if not new_geometry.intersects(original_geometry): raise InvalidGeoJson("New split task does not intersect original task") # insert new tasks into database i = i + 1 new_task = Task.from_geojson_feature(i, new_task_geojson) new_task.project_id = split_task_dto.project_id new_task.task_status = TaskStatus.READY.value new_task.create() new_task.task_history.extend(original_task.copy_task_history()) if new_task.task_history: new_task.clear_task_lock() # since we just copied the lock new_task.set_task_history( TaskAction.STATE_CHANGE, split_task_dto.user_id, None, TaskStatus.SPLIT ) new_task.set_task_history( TaskAction.STATE_CHANGE, split_task_dto.user_id, None, TaskStatus.READY ) new_task.task_status = TaskStatus.READY.value new_tasks.append(new_task) new_task.update() new_tasks_dto.append( new_task.as_dto_with_instructions(split_task_dto.preferred_locale) ) # delete original task from the database try: original_task.delete() except Exception: db.session.rollback() # Ensure the new tasks are cleaned up for new_task in new_tasks: new_task.delete() db.session.commit() raise # update project task counts project = Project.query.get(split_task_dto.project_id) project.total_tasks = project.tasks.count() # update bad imagery because we may have split a bad imagery tile project.tasks_bad_imagery = project.tasks.filter( Task.task_status == TaskStatus.BADIMAGERY.value ).count() project.save() # return the new tasks in a DTO task_dtos = TaskDTOs() task_dtos.tasks = new_tasks_dto return task_dtos
[ "def", "split_task", "(", "split_task_dto", ":", "SplitTaskDTO", ")", "->", "TaskDTOs", ":", "# get the task to be split", "original_task", "=", "Task", ".", "get", "(", "split_task_dto", ".", "task_id", ",", "split_task_dto", ".", "project_id", ")", "if", "original_task", "is", "None", ":", "raise", "NotFound", "(", ")", "original_geometry", "=", "shape", ".", "to_shape", "(", "original_task", ".", "geometry", ")", "# Fetch the task geometry in meters", "original_task_area_m", "=", "db", ".", "engine", ".", "execute", "(", "ST_Area", "(", "ST_GeogFromWKB", "(", "original_task", ".", "geometry", ")", ")", ")", ".", "scalar", "(", ")", "if", "(", "original_task", ".", "zoom", "and", "original_task", ".", "zoom", ">=", "18", ")", "or", "original_task_area_m", "<", "25000", ":", "raise", "SplitServiceError", "(", "\"Task is too small to be split\"", ")", "# check its locked for mapping by the current user", "if", "TaskStatus", "(", "original_task", ".", "task_status", ")", "!=", "TaskStatus", ".", "LOCKED_FOR_MAPPING", ":", "raise", "SplitServiceError", "(", "\"Status must be LOCKED_FOR_MAPPING to split\"", ")", "if", "original_task", ".", "locked_by", "!=", "split_task_dto", ".", "user_id", ":", "raise", "SplitServiceError", "(", "\"Attempting to split a task owned by another user\"", ")", "# create new geometries from the task geometry", "try", ":", "new_tasks_geojson", "=", "SplitService", ".", "_create_split_tasks", "(", "original_task", ".", "x", ",", "original_task", ".", "y", ",", "original_task", ".", "zoom", ",", "original_task", ")", "except", "Exception", "as", "e", ":", "raise", "SplitServiceError", "(", "f\"Error splitting task{str(e)}\"", ")", "# create new tasks from the new geojson", "i", "=", "Task", ".", "get_max_task_id_for_project", "(", "split_task_dto", ".", "project_id", ")", "new_tasks", "=", "[", "]", "new_tasks_dto", "=", "[", "]", "for", "new_task_geojson", "in", "new_tasks_geojson", ":", "# Sanity check: ensure the new task geometry intersects the original task geometry", "new_geometry", "=", "shapely_shape", "(", "new_task_geojson", ".", "geometry", ")", "if", "not", "new_geometry", ".", "intersects", "(", "original_geometry", ")", ":", "raise", "InvalidGeoJson", "(", "\"New split task does not intersect original task\"", ")", "# insert new tasks into database", "i", "=", "i", "+", "1", "new_task", "=", "Task", ".", "from_geojson_feature", "(", "i", ",", "new_task_geojson", ")", "new_task", ".", "project_id", "=", "split_task_dto", ".", "project_id", "new_task", ".", "task_status", "=", "TaskStatus", ".", "READY", ".", "value", "new_task", ".", "create", "(", ")", "new_task", ".", "task_history", ".", "extend", "(", "original_task", ".", "copy_task_history", "(", ")", ")", "if", "new_task", ".", "task_history", ":", "new_task", ".", "clear_task_lock", "(", ")", "# since we just copied the lock", "new_task", ".", "set_task_history", "(", "TaskAction", ".", "STATE_CHANGE", ",", "split_task_dto", ".", "user_id", ",", "None", ",", "TaskStatus", ".", "SPLIT", ")", "new_task", ".", "set_task_history", "(", "TaskAction", ".", "STATE_CHANGE", ",", "split_task_dto", ".", "user_id", ",", "None", ",", "TaskStatus", ".", "READY", ")", "new_task", ".", "task_status", "=", "TaskStatus", ".", "READY", ".", "value", "new_tasks", ".", "append", "(", "new_task", ")", "new_task", ".", "update", "(", ")", "new_tasks_dto", ".", "append", "(", "new_task", ".", "as_dto_with_instructions", "(", "split_task_dto", ".", "preferred_locale", ")", ")", "# delete original task from the 
database", "try", ":", "original_task", ".", "delete", "(", ")", "except", "Exception", ":", "db", ".", "session", ".", "rollback", "(", ")", "# Ensure the new tasks are cleaned up", "for", "new_task", "in", "new_tasks", ":", "new_task", ".", "delete", "(", ")", "db", ".", "session", ".", "commit", "(", ")", "raise", "# update project task counts", "project", "=", "Project", ".", "query", ".", "get", "(", "split_task_dto", ".", "project_id", ")", "project", ".", "total_tasks", "=", "project", ".", "tasks", ".", "count", "(", ")", "# update bad imagery because we may have split a bad imagery tile", "project", ".", "tasks_bad_imagery", "=", "project", ".", "tasks", ".", "filter", "(", "Task", ".", "task_status", "==", "TaskStatus", ".", "BADIMAGERY", ".", "value", ")", ".", "count", "(", ")", "project", ".", "save", "(", ")", "# return the new tasks in a DTO", "task_dtos", "=", "TaskDTOs", "(", ")", "task_dtos", ".", "tasks", "=", "new_tasks_dto", "return", "task_dtos" ]
https://github.com/hotosm/tasking-manager/blob/1a7b02c6ccd431029a96d709d4d786c83cb37f5e/backend/services/grid/split_service.py#L159-L254
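The record replaces one task square with four at the next zoom level. For tasks aligned to the OSM slippy-map grid, the children of tile (x, y, z) follow the standard quadtree rule; the real _create_split_tasks helper is not shown in this record, so the sketch below is illustrative only:

def child_tiles(x, y, zoom):
    """Children of an OSM tile at the next zoom level."""
    return [(2 * x + dx, 2 * y + dy, zoom + 1) for dy in (0, 1) for dx in (0, 1)]

print(child_tiles(4, 7, 12))
# -> [(8, 14, 13), (9, 14, 13), (8, 15, 13), (9, 15, 13)]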
replit-archive/jsrepl
36d79b6288ca5d26208e8bade2a168c6ebcb2376
extern/python/closured/lib/python2.7/rfc822.py
python
AddrlistClass.__init__
(self, field)
Initialize a new instance. `field' is an unparsed address header field, containing one or more addresses.
Initialize a new instance.
[ "Initialize", "a", "new", "instance", "." ]
def __init__(self, field): """Initialize a new instance. `field' is an unparsed address header field, containing one or more addresses. """ self.specials = '()<>@,:;.\"[]' self.pos = 0 self.LWS = ' \t' self.CR = '\r\n' self.atomends = self.specials + self.LWS + self.CR # Note that RFC 2822 now specifies `.' as obs-phrase, meaning that it # is obsolete syntax. RFC 2822 requires that we recognize obsolete # syntax, so allow dots in phrases. self.phraseends = self.atomends.replace('.', '') self.field = field self.commentlist = []
[ "def", "__init__", "(", "self", ",", "field", ")", ":", "self", ".", "specials", "=", "'()<>@,:;.\\\"[]'", "self", ".", "pos", "=", "0", "self", ".", "LWS", "=", "' \\t'", "self", ".", "CR", "=", "'\\r\\n'", "self", ".", "atomends", "=", "self", ".", "specials", "+", "self", ".", "LWS", "+", "self", ".", "CR", "# Note that RFC 2822 now specifies `.' as obs-phrase, meaning that it", "# is obsolete syntax. RFC 2822 requires that we recognize obsolete", "# syntax, so allow dots in phrases.", "self", ".", "phraseends", "=", "self", ".", "atomends", ".", "replace", "(", "'.'", ",", "''", ")", "self", ".", "field", "=", "field", "self", ".", "commentlist", "=", "[", "]" ]
https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/closured/lib/python2.7/rfc822.py#L508-L524
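rfc822 is a Python 2-only module; its address parsing survives in email.utils on Python 3. Equivalent usage of the successor API (addresses invented for illustration):

from email.utils import getaddresses

pairs = getaddresses(['Alice <alice@example.com>, bob@example.org (Bob)'])
print(pairs)
# -> [('Alice', 'alice@example.com'), ('Bob', 'bob@example.org')]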
atom-community/ide-python
c046f9c2421713b34baa22648235541c5bb284fe
lib/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/window.py
python
Window.screen_to_client
(self, x, y)
return tuple( win32.ScreenToClient( self.get_handle(), (x, y) ) )
Translates window screen coordinates to client coordinates. @note: This is a simplified interface to some of the functionality of the L{win32.Point} class. @see: L{win32.Point.screen_to_client} @type x: int @param x: Horizontal coordinate. @type y: int @param y: Vertical coordinate. @rtype: tuple( int, int ) @return: Translated coordinates in a tuple (x, y). @raise WindowsError: An error occurred while processing this request.
Translates window screen coordinates to client coordinates.
[ "Translates", "window", "screen", "coordinates", "to", "client", "coordinates", "." ]
def screen_to_client(self, x, y): """ Translates window screen coordinates to client coordinates. @note: This is a simplified interface to some of the functionality of the L{win32.Point} class. @see: L{win32.Point.screen_to_client} @type x: int @param x: Horizontal coordinate. @type y: int @param y: Vertical coordinate. @rtype: tuple( int, int ) @return: Translated coordinates in a tuple (x, y). @raise WindowsError: An error occurred while processing this request. """ return tuple( win32.ScreenToClient( self.get_handle(), (x, y) ) )
[ "def", "screen_to_client", "(", "self", ",", "x", ",", "y", ")", ":", "return", "tuple", "(", "win32", ".", "ScreenToClient", "(", "self", ".", "get_handle", "(", ")", ",", "(", "x", ",", "y", ")", ")", ")" ]
https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/lib/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/pydevd_attach_to_process/winappdbg/window.py#L402-L421
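Conceptually, screen-to-client translation subtracts the window client area's top-left corner (expressed in screen coordinates) from the point; WinAppDbg delegates to the Win32 ScreenToClient call that performs this. A pure-Python sketch of the arithmetic, with an assumed client origin:

def screen_to_client(point, client_origin):
    """Translate a screen point into window-client coordinates."""
    (sx, sy), (cx, cy) = point, client_origin
    return (sx - cx, sy - cy)

print(screen_to_client((1000, 600), (800, 500)))  # -> (200, 100)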
jxcore/jxcore
b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410
tools/gyp/pylib/gyp/common.py
python
ExceptionAppend
(e, msg)
Append a message to the given exception's message.
Append a message to the given exception's message.
[ "Append", "a", "message", "to", "the", "given", "exception", "s", "message", "." ]
def ExceptionAppend(e, msg): """Append a message to the given exception's message.""" if not e.args: e.args = (msg,) elif len(e.args) == 1: e.args = (str(e.args[0]) + ' ' + msg,) else: e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
[ "def", "ExceptionAppend", "(", "e", ",", "msg", ")", ":", "if", "not", "e", ".", "args", ":", "e", ".", "args", "=", "(", "msg", ",", ")", "elif", "len", "(", "e", ".", "args", ")", "==", "1", ":", "e", ".", "args", "=", "(", "str", "(", "e", ".", "args", "[", "0", "]", ")", "+", "' '", "+", "msg", ",", ")", "else", ":", "e", ".", "args", "=", "(", "str", "(", "e", ".", "args", "[", "0", "]", ")", "+", "' '", "+", "msg", ",", ")", "+", "e", ".", "args", "[", "1", ":", "]" ]
https://github.com/jxcore/jxcore/blob/b05f1f2d2c9d62c813c7d84f3013dbbf30b6e410/tools/gyp/pylib/gyp/common.py#L37-L44
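A runnable demonstration of the helper (copied verbatim so the snippet stands alone); the added context ends up in the exception's first argument and survives a re-raise:

def ExceptionAppend(e, msg):
    """Append a message to the given exception's message."""
    if not e.args:
        e.args = (msg,)
    elif len(e.args) == 1:
        e.args = (str(e.args[0]) + ' ' + msg,)
    else:
        e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]

try:
    raise ValueError('bad build file')
except ValueError as e:
    ExceptionAppend(e, 'while loading example.gyp')  # example.gyp is invented
    print(e)  # -> bad build file while loading example.gyp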
replit-archive/jsrepl
36d79b6288ca5d26208e8bade2a168c6ebcb2376
extern/python/unclosured/lib/python2.7/pydoc.py
python
TextDoc.docdata
(self, object, name=None, mod=None, cl=None)
return self._docdescriptor(name, object, mod)
Produce text documentation for a data descriptor.
Produce text documentation for a data descriptor.
[ "Produce", "text", "documentation", "for", "a", "data", "descriptor", "." ]
def docdata(self, object, name=None, mod=None, cl=None): """Produce text documentation for a data descriptor.""" return self._docdescriptor(name, object, mod)
[ "def", "docdata", "(", "self", ",", "object", ",", "name", "=", "None", ",", "mod", "=", "None", ",", "cl", "=", "None", ")", ":", "return", "self", ".", "_docdescriptor", "(", "name", ",", "object", ",", "mod", ")" ]
https://github.com/replit-archive/jsrepl/blob/36d79b6288ca5d26208e8bade2a168c6ebcb2376/extern/python/unclosured/lib/python2.7/pydoc.py#L1296-L1298
TeamvisionCorp/TeamVision
aa2a57469e430ff50cce21174d8f280efa0a83a7
distribute/0.0.4/build_shell/teamvision/teamvision/home/pagefactory/home_autotask_pageworker.py
python
HomeAutoTaskPageWorker.__init__
(self)
Constructor
Constructor
[ "Constructor" ]
def __init__(self): ''' Constructor ''' self.pagemodel=HomeAutoTestingTaskLeftNavBar self.sub_sidebar_model=HomeAutoTaskSubNavBar
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "pagemodel", "=", "HomeAutoTestingTaskLeftNavBar", "self", ".", "sub_sidebar_model", "=", "HomeAutoTaskSubNavBar" ]
https://github.com/TeamvisionCorp/TeamVision/blob/aa2a57469e430ff50cce21174d8f280efa0a83a7/distribute/0.0.4/build_shell/teamvision/teamvision/home/pagefactory/home_autotask_pageworker.py#L17-L22
atom-community/ide-python
c046f9c2421713b34baa22648235541c5bb284fe
lib/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/_pydevd_bundle/pydevd_trace_dispatch_regular.py
python
ThreadTracer.__call__
(self, frame, event, arg)
This is the callback used when we enter some context in the debugger. We also decorate the thread we are in with info about the debugging. The attributes added are: pydev_state pydev_step_stop pydev_step_cmd pydev_notify_kill :param PyDB py_db: This is the global debugger (this method should actually be added as a method to it).
This is the callback used when we enter some context in the debugger.
[ "This", "is", "the", "callback", "used", "when", "we", "enter", "some", "context", "in", "the", "debugger", "." ]
def __call__(self, frame, event, arg): ''' This is the callback used when we enter some context in the debugger. We also decorate the thread we are in with info about the debugging. The attributes added are: pydev_state pydev_step_stop pydev_step_cmd pydev_notify_kill :param PyDB py_db: This is the global debugger (this method should actually be added as a method to it). ''' # IFDEF CYTHON # cdef str filename; # cdef str base; # cdef int pydev_step_cmd; # cdef tuple frame_cache_key; # cdef dict cache_skips; # cdef bint is_stepping; # cdef tuple abs_path_real_path_and_base; # cdef PyDBAdditionalThreadInfo additional_info; # ENDIF # print('ENTER: trace_dispatch', frame.f_code.co_filename, frame.f_lineno, event, frame.f_code.co_name) py_db, t, additional_info, cache_skips, frame_skips_cache = self._args pydev_step_cmd = additional_info.pydev_step_cmd is_stepping = pydev_step_cmd != -1 try: if py_db._finish_debugging_session: if not py_db._termination_event_set: # that was not working very well because jython gave some socket errors try: if py_db.output_checker is None: kill_all_pydev_threads() except: traceback.print_exc() py_db._termination_event_set = True return None # if thread is not alive, cancel trace_dispatch processing if not is_thread_alive(t): py_db.notify_thread_not_alive(get_thread_id(t)) return None # suspend tracing if py_db.thread_analyser is not None: py_db.thread_analyser.log_event(frame) if py_db.asyncio_analyser is not None: py_db.asyncio_analyser.log_event(frame) # Note: it's important that the context name is also given because we may hit something once # in the global context and another in the local context. frame_cache_key = (frame.f_code.co_firstlineno, frame.f_code.co_name, frame.f_code.co_filename) if not is_stepping and frame_cache_key in cache_skips: # print('skipped: trace_dispatch (cache hit)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name) return None try: # Make fast path faster! abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] except: abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) filename = abs_path_real_path_and_base[1] file_type = get_file_type(abs_path_real_path_and_base[-1]) #we don't want to debug threading or anything related to pydevd if file_type is not None: if file_type == 1: # inlining LIB_FILE = 1 if not py_db.in_project_scope(filename): # print('skipped: trace_dispatch (not in scope)', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type) cache_skips[frame_cache_key] = 1 return None else: # print('skipped: trace_dispatch', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type) cache_skips[frame_cache_key] = 1 return None if is_stepping: if py_db.is_filter_enabled and py_db.is_ignored_by_filters(filename): # ignore files matching stepping filters return None if py_db.is_filter_libraries and not py_db.in_project_scope(filename): # ignore library files while stepping return None # print('trace_dispatch', base, frame.f_lineno, event, frame.f_code.co_name, file_type) if additional_info.is_tracing: return None # we don't want to trace code invoked from pydevd_frame.trace_dispatch # Just create PyDBFrame directly (removed support for Python versions < 2.5, which required keeping a weak # reference to the frame). ret = PyDBFrame( ( py_db, filename, additional_info, t, frame_skips_cache, frame_cache_key, ) ).trace_dispatch(frame, event, arg) if ret is None: cache_skips[frame_cache_key] = 1 return None # IFDEF CYTHON # return SafeCallWrapper(ret) # ELSE return ret # ENDIF except SystemExit: return None except Exception: if py_db._finish_debugging_session: return None # Don't log errors when we're shutting down. # Log it try: if traceback is not None: # This can actually happen during the interpreter shutdown in Python 2.7 traceback.print_exc() except: # Error logging? We're really in the interpreter shutdown... # (https://github.com/fabioz/PyDev.Debugger/issues/8) pass return None
[ "def", "__call__", "(", "self", ",", "frame", ",", "event", ",", "arg", ")", ":", "# IFDEF CYTHON", "# cdef str filename;", "# cdef str base;", "# cdef int pydev_step_cmd;", "# cdef tuple frame_cache_key;", "# cdef dict cache_skips;", "# cdef bint is_stepping;", "# cdef tuple abs_path_real_path_and_base;", "# cdef PyDBAdditionalThreadInfo additional_info;", "# ENDIF", "# print('ENTER: trace_dispatch', frame.f_code.co_filename, frame.f_lineno, event, frame.f_code.co_name)", "py_db", ",", "t", ",", "additional_info", ",", "cache_skips", ",", "frame_skips_cache", "=", "self", ".", "_args", "pydev_step_cmd", "=", "additional_info", ".", "pydev_step_cmd", "is_stepping", "=", "pydev_step_cmd", "!=", "-", "1", "try", ":", "if", "py_db", ".", "_finish_debugging_session", ":", "if", "not", "py_db", ".", "_termination_event_set", ":", "# that was not working very well because jython gave some socket errors", "try", ":", "if", "py_db", ".", "output_checker", "is", "None", ":", "kill_all_pydev_threads", "(", ")", "except", ":", "traceback", ".", "print_exc", "(", ")", "py_db", ".", "_termination_event_set", "=", "True", "return", "None", "# if thread is not alive, cancel trace_dispatch processing", "if", "not", "is_thread_alive", "(", "t", ")", ":", "py_db", ".", "notify_thread_not_alive", "(", "get_thread_id", "(", "t", ")", ")", "return", "None", "# suspend tracing", "if", "py_db", ".", "thread_analyser", "is", "not", "None", ":", "py_db", ".", "thread_analyser", ".", "log_event", "(", "frame", ")", "if", "py_db", ".", "asyncio_analyser", "is", "not", "None", ":", "py_db", ".", "asyncio_analyser", ".", "log_event", "(", "frame", ")", "# Note: it's important that the context name is also given because we may hit something once", "# in the global context and another in the local context.", "frame_cache_key", "=", "(", "frame", ".", "f_code", ".", "co_firstlineno", ",", "frame", ".", "f_code", ".", "co_name", ",", "frame", ".", "f_code", ".", "co_filename", ")", "if", "not", "is_stepping", "and", "frame_cache_key", "in", "cache_skips", ":", "# print('skipped: trace_dispatch (cache hit)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name)", "return", "None", "try", ":", "# Make fast path faster!", "abs_path_real_path_and_base", "=", "NORM_PATHS_AND_BASE_CONTAINER", "[", "frame", ".", "f_code", ".", "co_filename", "]", "except", ":", "abs_path_real_path_and_base", "=", "get_abs_path_real_path_and_base_from_frame", "(", "frame", ")", "filename", "=", "abs_path_real_path_and_base", "[", "1", "]", "file_type", "=", "get_file_type", "(", "abs_path_real_path_and_base", "[", "-", "1", "]", ")", "#we don't want to debug threading or anything related to pydevd", "if", "file_type", "is", "not", "None", ":", "if", "file_type", "==", "1", ":", "# inlining LIB_FILE = 1", "if", "not", "py_db", ".", "in_project_scope", "(", "filename", ")", ":", "# print('skipped: trace_dispatch (not in scope)', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type)", "cache_skips", "[", "frame_cache_key", "]", "=", "1", "return", "None", "else", ":", "# print('skipped: trace_dispatch', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type)", "cache_skips", "[", "frame_cache_key", "]", "=", "1", "return", "None", "if", "is_stepping", ":", "if", "py_db", ".", "is_filter_enabled", "and", "py_db", ".", "is_ignored_by_filters", "(", "filename", ")", ":", "# ignore files matching stepping filters", "return", "None", "if", "py_db", ".", "is_filter_libraries", 
"and", "not", "py_db", ".", "in_project_scope", "(", "filename", ")", ":", "# ignore library files while stepping", "return", "None", "# print('trace_dispatch', base, frame.f_lineno, event, frame.f_code.co_name, file_type)", "if", "additional_info", ".", "is_tracing", ":", "return", "None", "# we don't wan't to trace code invoked from pydevd_frame.trace_dispatch", "# Just create PyDBFrame directly (removed support for Python versions < 2.5, which required keeping a weak", "# reference to the frame).", "ret", "=", "PyDBFrame", "(", "(", "py_db", ",", "filename", ",", "additional_info", ",", "t", ",", "frame_skips_cache", ",", "frame_cache_key", ",", ")", ")", ".", "trace_dispatch", "(", "frame", ",", "event", ",", "arg", ")", "if", "ret", "is", "None", ":", "cache_skips", "[", "frame_cache_key", "]", "=", "1", "return", "None", "# IFDEF CYTHON", "# return SafeCallWrapper(ret)", "# ELSE", "return", "ret", "# ENDIF", "except", "SystemExit", ":", "return", "None", "except", "Exception", ":", "if", "py_db", ".", "_finish_debugging_session", ":", "return", "None", "# Don't log errors when we're shutting down.", "# Log it", "try", ":", "if", "traceback", "is", "not", "None", ":", "# This can actually happen during the interpreter shutdown in Python 2.7", "traceback", ".", "print_exc", "(", ")", "except", ":", "# Error logging? We're really in the interpreter shutdown...", "# (https://github.com/fabioz/PyDev.Debugger/issues/8)", "pass", "return", "None" ]
https://github.com/atom-community/ide-python/blob/c046f9c2421713b34baa22648235541c5bb284fe/lib/debugger/VendorLib/vs-py-debugger/pythonFiles/experimental/ptvsd/ptvsd/_vendored/pydevd/_pydevd_bundle/pydevd_trace_dispatch_regular.py#L219-L342
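ThreadTracer implements the (frame, event, arg) protocol that CPython's sys.settrace expects: returning None from a 'call' event disables local tracing for that frame, which is the same mechanism the cache_skips fast path above relies on. A minimal standalone illustration of the protocol (function names invented):

import sys

def tracer(frame, event, arg):
    if event == 'call':
        if frame.f_code.co_name != 'interesting':
            return None  # skip this frame entirely, like a cache_skips hit
        return tracer    # keep receiving 'line' events for this frame
    if event == 'line':
        print('line', frame.f_lineno, 'in', frame.f_code.co_name)
    return tracer

def boring():
    return 1

def interesting():
    x = 1
    return x

sys.settrace(tracer)
boring()       # no line events printed
interesting()  # line events printed
sys.settrace(None)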