id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
sequencelengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
1,214
def is_corpus(obj): try: if ('Corpus' in obj.__class__.__name__): return (True, obj) except: pass try: if (hasattr(obj, 'next') or hasattr(obj, '__next__')): doc1 = next(obj) obj = itertools.chain([doc1], obj) else: doc1 = next(iter(obj)) if (len(doc1) == 0): return (True, obj) (id1, val1) = next(iter(doc1)) (id1, val1) = (int(id1), float(val1)) except Exception: return (False, obj) return (True, obj)
[ "def", "is_corpus", "(", "obj", ")", ":", "try", ":", "if", "(", "'Corpus'", "in", "obj", ".", "__class__", ".", "__name__", ")", ":", "return", "(", "True", ",", "obj", ")", "except", ":", "pass", "try", ":", "if", "(", "hasattr", "(", "obj", ",", "'next'", ")", "or", "hasattr", "(", "obj", ",", "'__next__'", ")", ")", ":", "doc1", "=", "next", "(", "obj", ")", "obj", "=", "itertools", ".", "chain", "(", "[", "doc1", "]", ",", "obj", ")", "else", ":", "doc1", "=", "next", "(", "iter", "(", "obj", ")", ")", "if", "(", "len", "(", "doc1", ")", "==", "0", ")", ":", "return", "(", "True", ",", "obj", ")", "(", "id1", ",", "val1", ")", "=", "next", "(", "iter", "(", "doc1", ")", ")", "(", "id1", ",", "val1", ")", "=", "(", "int", "(", "id1", ")", ",", "float", "(", "val1", ")", ")", "except", "Exception", ":", "return", "(", "False", ",", "obj", ")", "return", "(", "True", ",", "obj", ")" ]
check whether obj is a corpus .
train
false
1,215
@contextfunction def changes_set_list(context, changesets): request = context['request'] response_format = 'html' if ('response_format' in context): response_format = context['response_format'] return Markup(render_to_string('changes/tags/changeset_list', {'changesets': changesets}, context_instance=RequestContext(request), response_format=response_format))
[ "@", "contextfunction", "def", "changes_set_list", "(", "context", ",", "changesets", ")", ":", "request", "=", "context", "[", "'request'", "]", "response_format", "=", "'html'", "if", "(", "'response_format'", "in", "context", ")", ":", "response_format", "=", "context", "[", "'response_format'", "]", "return", "Markup", "(", "render_to_string", "(", "'changes/tags/changeset_list'", ",", "{", "'changesets'", ":", "changesets", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")", ")" ]
print a list of changesets .
train
false
1,216
def factors(n): for p in genprimes(): while ((n != 1) and ((n % p) == 0)): (yield p) n /= p if (n == 1): break if ((p * p) > n): (yield n) break
[ "def", "factors", "(", "n", ")", ":", "for", "p", "in", "genprimes", "(", ")", ":", "while", "(", "(", "n", "!=", "1", ")", "and", "(", "(", "n", "%", "p", ")", "==", "0", ")", ")", ":", "(", "yield", "p", ")", "n", "/=", "p", "if", "(", "n", "==", "1", ")", ":", "break", "if", "(", "(", "p", "*", "p", ")", ">", "n", ")", ":", "(", "yield", "n", ")", "break" ]
yields the prime factors of the integer n .
train
false
1,219
def findSynchronizeBits(parser, start, max_size): address0 = parser.absolute_address end = (start + max_size) size = 0 while (start < end): length = parser.stream.searchBytesLength('\xff', False, start, end) if (length is None): return None size += length start += (length * 8) try: frame = createOrphanField(parser, (start - address0), Frame, 'frame') valid = frame.isValid() except HACHOIR_ERRORS: valid = False if valid: return size start += 8 size += 1 return None
[ "def", "findSynchronizeBits", "(", "parser", ",", "start", ",", "max_size", ")", ":", "address0", "=", "parser", ".", "absolute_address", "end", "=", "(", "start", "+", "max_size", ")", "size", "=", "0", "while", "(", "start", "<", "end", ")", ":", "length", "=", "parser", ".", "stream", ".", "searchBytesLength", "(", "'\\xff'", ",", "False", ",", "start", ",", "end", ")", "if", "(", "length", "is", "None", ")", ":", "return", "None", "size", "+=", "length", "start", "+=", "(", "length", "*", "8", ")", "try", ":", "frame", "=", "createOrphanField", "(", "parser", ",", "(", "start", "-", "address0", ")", ",", "Frame", ",", "'frame'", ")", "valid", "=", "frame", ".", "isValid", "(", ")", "except", "HACHOIR_ERRORS", ":", "valid", "=", "False", "if", "valid", ":", "return", "size", "start", "+=", "8", "size", "+=", "1", "return", "None" ]
find synchronisation bits returns none on error .
train
false
1,220
@membership_required def topic_create(request, slug, template_name='groups/topics/topic_form.html'): group = get_object_or_404(Group, slug=slug) if (request.method == 'POST'): form = GroupTopicForm(request.POST) if form.is_valid(): topic = form.save(commit=False) topic.user = request.user topic.group = group topic.save() return redirect(request, topic) else: form = GroupTopicForm() return render(request, template_name, {'form': form, 'group': group})
[ "@", "membership_required", "def", "topic_create", "(", "request", ",", "slug", ",", "template_name", "=", "'groups/topics/topic_form.html'", ")", ":", "group", "=", "get_object_or_404", "(", "Group", ",", "slug", "=", "slug", ")", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "form", "=", "GroupTopicForm", "(", "request", ".", "POST", ")", "if", "form", ".", "is_valid", "(", ")", ":", "topic", "=", "form", ".", "save", "(", "commit", "=", "False", ")", "topic", ".", "user", "=", "request", ".", "user", "topic", ".", "group", "=", "group", "topic", ".", "save", "(", ")", "return", "redirect", "(", "request", ",", "topic", ")", "else", ":", "form", "=", "GroupTopicForm", "(", ")", "return", "render", "(", "request", ",", "template_name", ",", "{", "'form'", ":", "form", ",", "'group'", ":", "group", "}", ")" ]
create a topic .
train
false
1,221
@connect_on_app_finalize def add_chord_task(app): from celery import group, chord as _chord from celery.canvas import maybe_signature @app.task(name=u'celery.chord', bind=True, ignore_result=False, shared=False, lazy=False) def chord(self, header, body, partial_args=(), interval=None, countdown=1, max_retries=None, eager=False, **kwargs): app = self.app tasks = (header.tasks if isinstance(header, group) else header) header = group([maybe_signature(s, app=app) for s in tasks], app=self.app) body = maybe_signature(body, app=app) ch = _chord(header, body) return ch.run(header, body, partial_args, app, interval, countdown, max_retries, **kwargs) return chord
[ "@", "connect_on_app_finalize", "def", "add_chord_task", "(", "app", ")", ":", "from", "celery", "import", "group", ",", "chord", "as", "_chord", "from", "celery", ".", "canvas", "import", "maybe_signature", "@", "app", ".", "task", "(", "name", "=", "u'celery.chord'", ",", "bind", "=", "True", ",", "ignore_result", "=", "False", ",", "shared", "=", "False", ",", "lazy", "=", "False", ")", "def", "chord", "(", "self", ",", "header", ",", "body", ",", "partial_args", "=", "(", ")", ",", "interval", "=", "None", ",", "countdown", "=", "1", ",", "max_retries", "=", "None", ",", "eager", "=", "False", ",", "**", "kwargs", ")", ":", "app", "=", "self", ".", "app", "tasks", "=", "(", "header", ".", "tasks", "if", "isinstance", "(", "header", ",", "group", ")", "else", "header", ")", "header", "=", "group", "(", "[", "maybe_signature", "(", "s", ",", "app", "=", "app", ")", "for", "s", "in", "tasks", "]", ",", "app", "=", "self", ".", "app", ")", "body", "=", "maybe_signature", "(", "body", ",", "app", "=", "app", ")", "ch", "=", "_chord", "(", "header", ",", "body", ")", "return", "ch", ".", "run", "(", "header", ",", "body", ",", "partial_args", ",", "app", ",", "interval", ",", "countdown", ",", "max_retries", ",", "**", "kwargs", ")", "return", "chord" ]
no longer used .
train
false
1,222
def buildProofTransform(inputProfile, outputProfile, proofProfile, inMode, outMode, renderingIntent=INTENT_PERCEPTUAL, proofRenderingIntent=INTENT_ABSOLUTE_COLORIMETRIC, flags=FLAGS['SOFTPROOFING']): if ((not isinstance(renderingIntent, int)) or (not (0 <= renderingIntent <= 3))): raise PyCMSError('renderingIntent must be an integer between 0 and 3') if ((not isinstance(flags, int)) or (not (0 <= flags <= _MAX_FLAG))): raise PyCMSError(('flags must be an integer between 0 and %s' + _MAX_FLAG)) try: if (not isinstance(inputProfile, ImageCmsProfile)): inputProfile = ImageCmsProfile(inputProfile) if (not isinstance(outputProfile, ImageCmsProfile)): outputProfile = ImageCmsProfile(outputProfile) if (not isinstance(proofProfile, ImageCmsProfile)): proofProfile = ImageCmsProfile(proofProfile) return ImageCmsTransform(inputProfile, outputProfile, inMode, outMode, renderingIntent, proofProfile, proofRenderingIntent, flags) except (IOError, TypeError, ValueError) as v: raise PyCMSError(v)
[ "def", "buildProofTransform", "(", "inputProfile", ",", "outputProfile", ",", "proofProfile", ",", "inMode", ",", "outMode", ",", "renderingIntent", "=", "INTENT_PERCEPTUAL", ",", "proofRenderingIntent", "=", "INTENT_ABSOLUTE_COLORIMETRIC", ",", "flags", "=", "FLAGS", "[", "'SOFTPROOFING'", "]", ")", ":", "if", "(", "(", "not", "isinstance", "(", "renderingIntent", ",", "int", ")", ")", "or", "(", "not", "(", "0", "<=", "renderingIntent", "<=", "3", ")", ")", ")", ":", "raise", "PyCMSError", "(", "'renderingIntent must be an integer between 0 and 3'", ")", "if", "(", "(", "not", "isinstance", "(", "flags", ",", "int", ")", ")", "or", "(", "not", "(", "0", "<=", "flags", "<=", "_MAX_FLAG", ")", ")", ")", ":", "raise", "PyCMSError", "(", "(", "'flags must be an integer between 0 and %s'", "+", "_MAX_FLAG", ")", ")", "try", ":", "if", "(", "not", "isinstance", "(", "inputProfile", ",", "ImageCmsProfile", ")", ")", ":", "inputProfile", "=", "ImageCmsProfile", "(", "inputProfile", ")", "if", "(", "not", "isinstance", "(", "outputProfile", ",", "ImageCmsProfile", ")", ")", ":", "outputProfile", "=", "ImageCmsProfile", "(", "outputProfile", ")", "if", "(", "not", "isinstance", "(", "proofProfile", ",", "ImageCmsProfile", ")", ")", ":", "proofProfile", "=", "ImageCmsProfile", "(", "proofProfile", ")", "return", "ImageCmsTransform", "(", "inputProfile", ",", "outputProfile", ",", "inMode", ",", "outMode", ",", "renderingIntent", ",", "proofProfile", ",", "proofRenderingIntent", ",", "flags", ")", "except", "(", "IOError", ",", "TypeError", ",", "ValueError", ")", "as", "v", ":", "raise", "PyCMSError", "(", "v", ")" ]
builds an icc transform mapping from the inputprofile to the outputprofile .
train
false
1,223
def cert_time_to_seconds(cert_time): import time return time.mktime(time.strptime(cert_time, '%b %d %H:%M:%S %Y GMT'))
[ "def", "cert_time_to_seconds", "(", "cert_time", ")", ":", "import", "time", "return", "time", ".", "mktime", "(", "time", ".", "strptime", "(", "cert_time", ",", "'%b %d %H:%M:%S %Y GMT'", ")", ")" ]
return the time in seconds since the epoch .
train
false
1,225
def strip_null(arg, null=None): if (null is None): null = NULL if (type(arg) is types.ListType): return [i for i in arg if (i not in null)] elif (type(arg) is types.TupleType): return tuple([i for i in arg if (i not in null)]) elif (type(arg) is type(set())): return arg.difference(set(null)) elif (type(arg) is types.DictType): return {key: value for (key, value) in arg.items() if (value not in null)} return arg
[ "def", "strip_null", "(", "arg", ",", "null", "=", "None", ")", ":", "if", "(", "null", "is", "None", ")", ":", "null", "=", "NULL", "if", "(", "type", "(", "arg", ")", "is", "types", ".", "ListType", ")", ":", "return", "[", "i", "for", "i", "in", "arg", "if", "(", "i", "not", "in", "null", ")", "]", "elif", "(", "type", "(", "arg", ")", "is", "types", ".", "TupleType", ")", ":", "return", "tuple", "(", "[", "i", "for", "i", "in", "arg", "if", "(", "i", "not", "in", "null", ")", "]", ")", "elif", "(", "type", "(", "arg", ")", "is", "type", "(", "set", "(", ")", ")", ")", ":", "return", "arg", ".", "difference", "(", "set", "(", "null", ")", ")", "elif", "(", "type", "(", "arg", ")", "is", "types", ".", "DictType", ")", ":", "return", "{", "key", ":", "value", "for", "(", "key", ",", "value", ")", "in", "arg", ".", "items", "(", ")", "if", "(", "value", "not", "in", "null", ")", "}", "return", "arg" ]
strip list .
train
false
1,226
def test_write_fill_values(): out = StringIO() ascii.write(dat, out, Writer=ascii.FixedWidth, fill_values=('a', 'N/A')) assert_equal_splitlines(out.getvalue(), '| Col1 | Col2 | Col3 | Col4 |\n| 1.2 | "hello" | 1 | N/A |\n| 2.4 | \'s worlds | 2 | 2 |\n')
[ "def", "test_write_fill_values", "(", ")", ":", "out", "=", "StringIO", "(", ")", "ascii", ".", "write", "(", "dat", ",", "out", ",", "Writer", "=", "ascii", ".", "FixedWidth", ",", "fill_values", "=", "(", "'a'", ",", "'N/A'", ")", ")", "assert_equal_splitlines", "(", "out", ".", "getvalue", "(", ")", ",", "'| Col1 | Col2 | Col3 | Col4 |\\n| 1.2 | \"hello\" | 1 | N/A |\\n| 2.4 | \\'s worlds | 2 | 2 |\\n'", ")" ]
write a table as a normal fixed width table .
train
false
1,227
def rs_log(p, x, prec): if rs_is_puiseux(p, x): return rs_puiseux(rs_log, p, x, prec) R = p.ring if (p == 1): return R.zero if _has_constant_term(p, x): const = 0 zm = R.zero_monom c = p[zm] if (c == 1): pass else: c_expr = c.as_expr() if (R.domain is EX): const = log(c_expr) elif isinstance(c, PolyElement): try: const = R(log(c_expr)) except ValueError: raise DomainError("The given series can't be expanded in this domain.") else: try: const = R(log(c)) except ValueError: raise DomainError("The given series can't be expanded in this domain.") dlog = p.diff(x) dlog = rs_mul(dlog, _series_inversion1(p, x, prec), x, (prec - 1)) return (rs_integrate(dlog, x) + const) else: raise NotImplementedError
[ "def", "rs_log", "(", "p", ",", "x", ",", "prec", ")", ":", "if", "rs_is_puiseux", "(", "p", ",", "x", ")", ":", "return", "rs_puiseux", "(", "rs_log", ",", "p", ",", "x", ",", "prec", ")", "R", "=", "p", ".", "ring", "if", "(", "p", "==", "1", ")", ":", "return", "R", ".", "zero", "if", "_has_constant_term", "(", "p", ",", "x", ")", ":", "const", "=", "0", "zm", "=", "R", ".", "zero_monom", "c", "=", "p", "[", "zm", "]", "if", "(", "c", "==", "1", ")", ":", "pass", "else", ":", "c_expr", "=", "c", ".", "as_expr", "(", ")", "if", "(", "R", ".", "domain", "is", "EX", ")", ":", "const", "=", "log", "(", "c_expr", ")", "elif", "isinstance", "(", "c", ",", "PolyElement", ")", ":", "try", ":", "const", "=", "R", "(", "log", "(", "c_expr", ")", ")", "except", "ValueError", ":", "raise", "DomainError", "(", "\"The given series can't be expanded in this domain.\"", ")", "else", ":", "try", ":", "const", "=", "R", "(", "log", "(", "c", ")", ")", "except", "ValueError", ":", "raise", "DomainError", "(", "\"The given series can't be expanded in this domain.\"", ")", "dlog", "=", "p", ".", "diff", "(", "x", ")", "dlog", "=", "rs_mul", "(", "dlog", ",", "_series_inversion1", "(", "p", ",", "x", ",", "prec", ")", ",", "x", ",", "(", "prec", "-", "1", ")", ")", "return", "(", "rs_integrate", "(", "dlog", ",", "x", ")", "+", "const", ")", "else", ":", "raise", "NotImplementedError" ]
the logarithm of p modulo o .
train
false
1,228
def attrs(**kwds): def decorate(f): for k in kwds: setattr(f, k, kwds[k]) return f return decorate
[ "def", "attrs", "(", "**", "kwds", ")", ":", "def", "decorate", "(", "f", ")", ":", "for", "k", "in", "kwds", ":", "setattr", "(", "f", ",", "k", ",", "kwds", "[", "k", "]", ")", "return", "f", "return", "decorate" ]
helper function that returns a dict of xml attributes .
train
false
1,229
def get_topic_name(prefix, table, operation): return ('%s-%s-%s' % (prefix, table, operation))
[ "def", "get_topic_name", "(", "prefix", ",", "table", ",", "operation", ")", ":", "return", "(", "'%s-%s-%s'", "%", "(", "prefix", ",", "table", ",", "operation", ")", ")" ]
create a topic name .
train
false
1,230
@no_emulator @with_device def reboot_bootloader(): log.info(('Rebooting %s to bootloader' % context.device)) with AdbClient() as c: c.reboot_bootloader()
[ "@", "no_emulator", "@", "with_device", "def", "reboot_bootloader", "(", ")", ":", "log", ".", "info", "(", "(", "'Rebooting %s to bootloader'", "%", "context", ".", "device", ")", ")", "with", "AdbClient", "(", ")", "as", "c", ":", "c", ".", "reboot_bootloader", "(", ")" ]
reboots the device to the bootloader .
train
false
1,231
@after.each_scenario def screenshot_on_error(scenario): if scenario.failed: try: output_dir = '{}/log'.format(settings.TEST_ROOT) image_name = '{}/{}.png'.format(output_dir, scenario.name.replace(' ', '_')) world.browser.driver.save_screenshot(image_name) except WebDriverException: LOGGER.error('Could not capture a screenshot')
[ "@", "after", ".", "each_scenario", "def", "screenshot_on_error", "(", "scenario", ")", ":", "if", "scenario", ".", "failed", ":", "try", ":", "output_dir", "=", "'{}/log'", ".", "format", "(", "settings", ".", "TEST_ROOT", ")", "image_name", "=", "'{}/{}.png'", ".", "format", "(", "output_dir", ",", "scenario", ".", "name", ".", "replace", "(", "' '", ",", "'_'", ")", ")", "world", ".", "browser", ".", "driver", ".", "save_screenshot", "(", "image_name", ")", "except", "WebDriverException", ":", "LOGGER", ".", "error", "(", "'Could not capture a screenshot'", ")" ]
save a screenshot to help with debugging .
train
false
1,232
def textDiff(a, b): out = [] (a, b) = (html2list(a), html2list(b)) s = difflib.SequenceMatcher(None, a, b) for e in s.get_opcodes(): if (e[0] == 'replace'): out.append((((('<del>' + ''.join(a[e[1]:e[2]])) + '</del><ins>') + ''.join(b[e[3]:e[4]])) + '</ins>')) elif (e[0] == 'delete'): out.append((('<del >' + ''.join(a[e[1]:e[2]])) + '</del>')) elif (e[0] == 'insert'): out.append((('<ins >' + ''.join(b[e[3]:e[4]])) + '</ins>')) elif (e[0] == 'equal'): out.append(''.join(b[e[3]:e[4]])) else: raise (("Um, something's broken. I didn't expect a '" + `e[0]`) + "'.") return ''.join(out)
[ "def", "textDiff", "(", "a", ",", "b", ")", ":", "out", "=", "[", "]", "(", "a", ",", "b", ")", "=", "(", "html2list", "(", "a", ")", ",", "html2list", "(", "b", ")", ")", "s", "=", "difflib", ".", "SequenceMatcher", "(", "None", ",", "a", ",", "b", ")", "for", "e", "in", "s", ".", "get_opcodes", "(", ")", ":", "if", "(", "e", "[", "0", "]", "==", "'replace'", ")", ":", "out", ".", "append", "(", "(", "(", "(", "(", "'<del>'", "+", "''", ".", "join", "(", "a", "[", "e", "[", "1", "]", ":", "e", "[", "2", "]", "]", ")", ")", "+", "'</del><ins>'", ")", "+", "''", ".", "join", "(", "b", "[", "e", "[", "3", "]", ":", "e", "[", "4", "]", "]", ")", ")", "+", "'</ins>'", ")", ")", "elif", "(", "e", "[", "0", "]", "==", "'delete'", ")", ":", "out", ".", "append", "(", "(", "(", "'<del >'", "+", "''", ".", "join", "(", "a", "[", "e", "[", "1", "]", ":", "e", "[", "2", "]", "]", ")", ")", "+", "'</del>'", ")", ")", "elif", "(", "e", "[", "0", "]", "==", "'insert'", ")", ":", "out", ".", "append", "(", "(", "(", "'<ins >'", "+", "''", ".", "join", "(", "b", "[", "e", "[", "3", "]", ":", "e", "[", "4", "]", "]", ")", ")", "+", "'</ins>'", ")", ")", "elif", "(", "e", "[", "0", "]", "==", "'equal'", ")", ":", "out", ".", "append", "(", "''", ".", "join", "(", "b", "[", "e", "[", "3", "]", ":", "e", "[", "4", "]", "]", ")", ")", "else", ":", "raise", "(", "(", "\"Um, something's broken. I didn't expect a '\"", "+", " ", "`", "e", "[", "0", "]", "`", ")", "+", "\"'.\"", ")", "return", "''", ".", "join", "(", "out", ")" ]
takes in strings a and b and returns a human-readable html diff .
train
false
1,233
def dmp_expand(polys, u, K): if (not polys): return dmp_one(u, K) f = polys[0] for g in polys[1:]: f = dmp_mul(f, g, u, K) return f
[ "def", "dmp_expand", "(", "polys", ",", "u", ",", "K", ")", ":", "if", "(", "not", "polys", ")", ":", "return", "dmp_one", "(", "u", ",", "K", ")", "f", "=", "polys", "[", "0", "]", "for", "g", "in", "polys", "[", "1", ":", "]", ":", "f", "=", "dmp_mul", "(", "f", ",", "g", ",", "u", ",", "K", ")", "return", "f" ]
multiply together several polynomials in k[x] .
train
false
1,234
def test_accept(): formatter = hug.output_format.accept({'application/json': hug.output_format.json, 'text/plain': hug.output_format.text}) class FakeRequest(object, ): accept = 'application/json' request = FakeRequest() response = FakeRequest() converted = hug.input_format.json(formatter(BytesIO(hug.output_format.json({'name': 'name'})), request, response)) assert (converted == {'name': 'name'}) request.accept = 'text/plain' assert (formatter('hi', request, response) == 'hi') request.accept = 'application/json, text/plain; q=0.5' assert (formatter('hi', request, response) == '"hi"') request.accept = 'text/plain; q=0.5, application/json' assert (formatter('hi', request, response) == '"hi"') request.accept = 'application/json;q=0.4,text/plain; q=0.5' assert (formatter('hi', request, response) == 'hi') request.accept = '*' assert (formatter('hi', request, response) in ['"hi"', 'hi']) request.accept = 'undefined; always' with pytest.raises(hug.HTTPNotAcceptable): formatter('hi', request, response) formatter = hug.output_format.accept({'application/json': hug.output_format.json, 'text/plain': hug.output_format.text}, hug.output_format.json) assert (formatter('hi', request, response) == '"hi"')
[ "def", "test_accept", "(", ")", ":", "formatter", "=", "hug", ".", "output_format", ".", "accept", "(", "{", "'application/json'", ":", "hug", ".", "output_format", ".", "json", ",", "'text/plain'", ":", "hug", ".", "output_format", ".", "text", "}", ")", "class", "FakeRequest", "(", "object", ",", ")", ":", "accept", "=", "'application/json'", "request", "=", "FakeRequest", "(", ")", "response", "=", "FakeRequest", "(", ")", "converted", "=", "hug", ".", "input_format", ".", "json", "(", "formatter", "(", "BytesIO", "(", "hug", ".", "output_format", ".", "json", "(", "{", "'name'", ":", "'name'", "}", ")", ")", ",", "request", ",", "response", ")", ")", "assert", "(", "converted", "==", "{", "'name'", ":", "'name'", "}", ")", "request", ".", "accept", "=", "'text/plain'", "assert", "(", "formatter", "(", "'hi'", ",", "request", ",", "response", ")", "==", "'hi'", ")", "request", ".", "accept", "=", "'application/json, text/plain; q=0.5'", "assert", "(", "formatter", "(", "'hi'", ",", "request", ",", "response", ")", "==", "'\"hi\"'", ")", "request", ".", "accept", "=", "'text/plain; q=0.5, application/json'", "assert", "(", "formatter", "(", "'hi'", ",", "request", ",", "response", ")", "==", "'\"hi\"'", ")", "request", ".", "accept", "=", "'application/json;q=0.4,text/plain; q=0.5'", "assert", "(", "formatter", "(", "'hi'", ",", "request", ",", "response", ")", "==", "'hi'", ")", "request", ".", "accept", "=", "'*'", "assert", "(", "formatter", "(", "'hi'", ",", "request", ",", "response", ")", "in", "[", "'\"hi\"'", ",", "'hi'", "]", ")", "request", ".", "accept", "=", "'undefined; always'", "with", "pytest", ".", "raises", "(", "hug", ".", "HTTPNotAcceptable", ")", ":", "formatter", "(", "'hi'", ",", "request", ",", "response", ")", "formatter", "=", "hug", ".", "output_format", ".", "accept", "(", "{", "'application/json'", ":", "hug", ".", "output_format", ".", "json", ",", "'text/plain'", ":", "hug", ".", "output_format", ".", "text", "}", ",", "hug", ".", "output_format", ".", "json", ")", "assert", "(", "formatter", "(", "'hi'", ",", "request", ",", "response", ")", "==", "'\"hi\"'", ")" ]
tests to ensure the accept type wrapper works as expected .
train
false
1,235
def ISNOTNULL(x): return (x != None)
[ "def", "ISNOTNULL", "(", "x", ")", ":", "return", "(", "x", "!=", "None", ")" ]
emulate sqlobjects isnotnull .
train
false
1,236
def new_get_event_ts(ts_field): return (lambda event: lookup_es_key(event[0], ts_field))
[ "def", "new_get_event_ts", "(", "ts_field", ")", ":", "return", "(", "lambda", "event", ":", "lookup_es_key", "(", "event", "[", "0", "]", ",", "ts_field", ")", ")" ]
constructs a lambda that may be called to extract the timestamp field from a given event .
train
false
1,237
@with_open_mode('rt') @with_sizes('medium') def read_lines(f): f.seek(0) for line in f: pass
[ "@", "with_open_mode", "(", "'rt'", ")", "@", "with_sizes", "(", "'medium'", ")", "def", "read_lines", "(", "f", ")", ":", "f", ".", "seek", "(", "0", ")", "for", "line", "in", "f", ":", "pass" ]
read one line at a time .
train
false
1,238
def bitlist_to_string(data): result = [] pos = 0 c = 0 while (pos < len(data)): c += (data[pos] << (7 - (pos % 8))) if ((pos % 8) == 7): result.append(c) c = 0 pos += 1 return ''.join([chr(c) for c in result])
[ "def", "bitlist_to_string", "(", "data", ")", ":", "result", "=", "[", "]", "pos", "=", "0", "c", "=", "0", "while", "(", "pos", "<", "len", "(", "data", ")", ")", ":", "c", "+=", "(", "data", "[", "pos", "]", "<<", "(", "7", "-", "(", "pos", "%", "8", ")", ")", ")", "if", "(", "(", "pos", "%", "8", ")", "==", "7", ")", ":", "result", ".", "append", "(", "c", ")", "c", "=", "0", "pos", "+=", "1", "return", "''", ".", "join", "(", "[", "chr", "(", "c", ")", "for", "c", "in", "result", "]", ")" ]
transform from bit list to ascii string .
train
false
1,239
def _database_to_isoformat(datetimeobj): return (datetimeobj.strftime('%Y-%m-%dT%H:%M:%S.%f')[:(-3)] + 'Z')
[ "def", "_database_to_isoformat", "(", "datetimeobj", ")", ":", "return", "(", "datetimeobj", ".", "strftime", "(", "'%Y-%m-%dT%H:%M:%S.%f'", ")", "[", ":", "(", "-", "3", ")", "]", "+", "'Z'", ")" ]
return a xs:datetime parsable string from datatime .
train
false
1,240
def register_finder(importer_type, distribution_finder): _distribution_finders[importer_type] = distribution_finder
[ "def", "register_finder", "(", "importer_type", ",", "distribution_finder", ")", ":", "_distribution_finders", "[", "importer_type", "]", "=", "distribution_finder" ]
register distribution_finder to find distributions in sys .
train
false
1,241
def cache_local_file(path): if (not os.path.exists(path)): return '' path_cached = is_cached(path) if path_cached: path_hash = hash_file(path) path_cached_hash = hash_file(path_cached) if (path_hash['hsum'] == path_cached_hash['hsum']): return path_cached return _client().cache_local_file(path)
[ "def", "cache_local_file", "(", "path", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ")", ":", "return", "''", "path_cached", "=", "is_cached", "(", "path", ")", "if", "path_cached", ":", "path_hash", "=", "hash_file", "(", "path", ")", "path_cached_hash", "=", "hash_file", "(", "path_cached", ")", "if", "(", "path_hash", "[", "'hsum'", "]", "==", "path_cached_hash", "[", "'hsum'", "]", ")", ":", "return", "path_cached", "return", "_client", "(", ")", ".", "cache_local_file", "(", "path", ")" ]
cache a local file on the minion in the localfiles cache cli example: .
train
true
1,242
def eval_import(s): if (':' not in s): return simple_import(s) (module_name, expr) = s.split(':', 1) module = import_module(module_name) obj = eval(expr, module.__dict__) return obj
[ "def", "eval_import", "(", "s", ")", ":", "if", "(", "':'", "not", "in", "s", ")", ":", "return", "simple_import", "(", "s", ")", "(", "module_name", ",", "expr", ")", "=", "s", ".", "split", "(", "':'", ",", "1", ")", "module", "=", "import_module", "(", "module_name", ")", "obj", "=", "eval", "(", "expr", ",", "module", ".", "__dict__", ")", "return", "obj" ]
import a module .
train
false
1,243
def reverse_tag_iter(block): end = len(block) while True: pgt = block.rfind('>', 0, end) if (pgt == (-1)): break plt = block.rfind('<', 0, pgt) if (plt == (-1)): break (yield block[plt:(pgt + 1)]) end = plt
[ "def", "reverse_tag_iter", "(", "block", ")", ":", "end", "=", "len", "(", "block", ")", "while", "True", ":", "pgt", "=", "block", ".", "rfind", "(", "'>'", ",", "0", ",", "end", ")", "if", "(", "pgt", "==", "(", "-", "1", ")", ")", ":", "break", "plt", "=", "block", ".", "rfind", "(", "'<'", ",", "0", ",", "pgt", ")", "if", "(", "plt", "==", "(", "-", "1", ")", ")", ":", "break", "(", "yield", "block", "[", "plt", ":", "(", "pgt", "+", "1", ")", "]", ")", "end", "=", "plt" ]
iterate over all tags in block in reverse order .
train
false
1,245
def getversion(fname): for line in open(fname): if line.startswith('__version__'): return eval(line[13:]) raise NameError('Missing __version__ in decorator.py')
[ "def", "getversion", "(", "fname", ")", ":", "for", "line", "in", "open", "(", "fname", ")", ":", "if", "line", ".", "startswith", "(", "'__version__'", ")", ":", "return", "eval", "(", "line", "[", "13", ":", "]", ")", "raise", "NameError", "(", "'Missing __version__ in decorator.py'", ")" ]
get the __version__ reading the file: works both in python 2 .
train
false
1,246
def deepcopy_return_value_method_decorator(fn): def decorator(*args, **kwargs): return copy.deepcopy(fn(*args, **kwargs)) return decorator
[ "def", "deepcopy_return_value_method_decorator", "(", "fn", ")", ":", "def", "decorator", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "copy", ".", "deepcopy", "(", "fn", "(", "*", "args", ",", "**", "kwargs", ")", ")", "return", "decorator" ]
returns a deepcopy of the returned value of the wrapped function .
train
false
1,247
def dmp_zero_p(f, u): while u: if (len(f) != 1): return False f = f[0] u -= 1 return (not f)
[ "def", "dmp_zero_p", "(", "f", ",", "u", ")", ":", "while", "u", ":", "if", "(", "len", "(", "f", ")", "!=", "1", ")", ":", "return", "False", "f", "=", "f", "[", "0", "]", "u", "-=", "1", "return", "(", "not", "f", ")" ]
return true if f is zero in k[x] .
train
false
1,249
def generate_client_id(): client_id_generator = oauth2_settings.CLIENT_ID_GENERATOR_CLASS() return client_id_generator.hash()
[ "def", "generate_client_id", "(", ")", ":", "client_id_generator", "=", "oauth2_settings", ".", "CLIENT_ID_GENERATOR_CLASS", "(", ")", "return", "client_id_generator", ".", "hash", "(", ")" ]
generates an oauth client_id oauth 2 specify the format of client_id in URL#appendix-a .
train
false
1,250
def lm(x, y, alpha=ALPHA): x_is_date = _isdate(x.iloc[0]) if x_is_date: x = np.array([i.toordinal() for i in x]) X = sm.add_constant(x) fit = sm.OLS(y, X).fit() (prstd, iv_l, iv_u) = wls_prediction_std(fit) (_, summary_values, summary_names) = summary_table(fit, alpha=alpha) df = pd.DataFrame(summary_values, columns=map(_snakify, summary_names)) fittedvalues = df[u'predicted_value'].values predict_mean_ci_low = df[u'mean_ci_95%_low'].values predict_mean_ci_upp = df[u'mean_ci_95%_upp'].values predict_ci_low = df[u'predict_ci_95%_low'].values predict_ci_upp = df[u'predict_ci_95%_upp'].values if x_is_date: x = [Timestamp.fromordinal(int(i)) for i in x] return (x, fittedvalues, predict_mean_ci_low, predict_mean_ci_upp)
[ "def", "lm", "(", "x", ",", "y", ",", "alpha", "=", "ALPHA", ")", ":", "x_is_date", "=", "_isdate", "(", "x", ".", "iloc", "[", "0", "]", ")", "if", "x_is_date", ":", "x", "=", "np", ".", "array", "(", "[", "i", ".", "toordinal", "(", ")", "for", "i", "in", "x", "]", ")", "X", "=", "sm", ".", "add_constant", "(", "x", ")", "fit", "=", "sm", ".", "OLS", "(", "y", ",", "X", ")", ".", "fit", "(", ")", "(", "prstd", ",", "iv_l", ",", "iv_u", ")", "=", "wls_prediction_std", "(", "fit", ")", "(", "_", ",", "summary_values", ",", "summary_names", ")", "=", "summary_table", "(", "fit", ",", "alpha", "=", "alpha", ")", "df", "=", "pd", ".", "DataFrame", "(", "summary_values", ",", "columns", "=", "map", "(", "_snakify", ",", "summary_names", ")", ")", "fittedvalues", "=", "df", "[", "u'predicted_value'", "]", ".", "values", "predict_mean_ci_low", "=", "df", "[", "u'mean_ci_95%_low'", "]", ".", "values", "predict_mean_ci_upp", "=", "df", "[", "u'mean_ci_95%_upp'", "]", ".", "values", "predict_ci_low", "=", "df", "[", "u'predict_ci_95%_low'", "]", ".", "values", "predict_ci_upp", "=", "df", "[", "u'predict_ci_95%_upp'", "]", ".", "values", "if", "x_is_date", ":", "x", "=", "[", "Timestamp", ".", "fromordinal", "(", "int", "(", "i", ")", ")", "for", "i", "in", "x", "]", "return", "(", "x", ",", "fittedvalues", ",", "predict_mean_ci_low", ",", "predict_mean_ci_upp", ")" ]
fits an ols from statsmodels .
train
false
1,251
def plot_evoked(evoked, picks=None, exclude='bads', unit=True, show=True, ylim=None, xlim='tight', proj=False, hline=None, units=None, scalings=None, titles=None, axes=None, gfp=False, window_title=None, spatial_colors=False, zorder='unsorted', selectable=True): return _plot_evoked(evoked=evoked, picks=picks, exclude=exclude, unit=unit, show=show, ylim=ylim, proj=proj, xlim=xlim, hline=hline, units=units, scalings=scalings, titles=titles, axes=axes, plot_type='butterfly', gfp=gfp, window_title=window_title, spatial_colors=spatial_colors, zorder=zorder, selectable=selectable)
[ "def", "plot_evoked", "(", "evoked", ",", "picks", "=", "None", ",", "exclude", "=", "'bads'", ",", "unit", "=", "True", ",", "show", "=", "True", ",", "ylim", "=", "None", ",", "xlim", "=", "'tight'", ",", "proj", "=", "False", ",", "hline", "=", "None", ",", "units", "=", "None", ",", "scalings", "=", "None", ",", "titles", "=", "None", ",", "axes", "=", "None", ",", "gfp", "=", "False", ",", "window_title", "=", "None", ",", "spatial_colors", "=", "False", ",", "zorder", "=", "'unsorted'", ",", "selectable", "=", "True", ")", ":", "return", "_plot_evoked", "(", "evoked", "=", "evoked", ",", "picks", "=", "picks", ",", "exclude", "=", "exclude", ",", "unit", "=", "unit", ",", "show", "=", "show", ",", "ylim", "=", "ylim", ",", "proj", "=", "proj", ",", "xlim", "=", "xlim", ",", "hline", "=", "hline", ",", "units", "=", "units", ",", "scalings", "=", "scalings", ",", "titles", "=", "titles", ",", "axes", "=", "axes", ",", "plot_type", "=", "'butterfly'", ",", "gfp", "=", "gfp", ",", "window_title", "=", "window_title", ",", "spatial_colors", "=", "spatial_colors", ",", "zorder", "=", "zorder", ",", "selectable", "=", "selectable", ")" ]
plot evoked data using butteryfly plots .
train
false
1,252
def CreateHunt(flow_name=None, flow_args=None, hunt_runner_args=None, context=None): if (not flow_name): raise ValueError("flow_name can't be empty") request = api_pb2.ApiCreateHuntArgs(flow_name=flow_name) if flow_args: request.flow_args.value = flow_args.SerializeToString() request.flow_args.type_url = utils.GetTypeUrl(flow_args) if hunt_runner_args: request.hunt_runner_args.CopyFrom(hunt_runner_args) data = context.SendRequest('CreateHunt', request) return Hunt(data=data, context=context)
[ "def", "CreateHunt", "(", "flow_name", "=", "None", ",", "flow_args", "=", "None", ",", "hunt_runner_args", "=", "None", ",", "context", "=", "None", ")", ":", "if", "(", "not", "flow_name", ")", ":", "raise", "ValueError", "(", "\"flow_name can't be empty\"", ")", "request", "=", "api_pb2", ".", "ApiCreateHuntArgs", "(", "flow_name", "=", "flow_name", ")", "if", "flow_args", ":", "request", ".", "flow_args", ".", "value", "=", "flow_args", ".", "SerializeToString", "(", ")", "request", ".", "flow_args", ".", "type_url", "=", "utils", ".", "GetTypeUrl", "(", "flow_args", ")", "if", "hunt_runner_args", ":", "request", ".", "hunt_runner_args", ".", "CopyFrom", "(", "hunt_runner_args", ")", "data", "=", "context", ".", "SendRequest", "(", "'CreateHunt'", ",", "request", ")", "return", "Hunt", "(", "data", "=", "data", ",", "context", "=", "context", ")" ]
creates a new hunt .
train
true
1,253
def case(): def prep(r): if (r.method == 'update'): person_id = r.table.person_id person_id.writable = False person_id.comment = None else: dtable = s3db.disease_disease diseases = db((dtable.deleted == False)).select(dtable.id, limitby=(0, 2)) if (len(diseases) == 1): field = r.table.disease_id field.default = diseases.first().id field.writable = False if ((r.component_name == 'contact') or (r.component_name == 'exposure')): field = r.component.table.tracing_id field.readable = field.writable = False if r.interactive: field = r.table.person_id field.requires = IS_ADD_PERSON_WIDGET2() field.widget = S3AddPersonWidget2(controller='pr') return True s3.prep = prep def postp(r, output): if (isinstance(output, dict) and ('buttons' in output)): buttons = output['buttons'] if (('list_btn' in buttons) and ('summary_btn' in buttons)): buttons['list_btn'] = buttons['summary_btn'] return output s3.postp = postp return s3_rest_controller(rheader=s3db.disease_rheader)
[ "def", "case", "(", ")", ":", "def", "prep", "(", "r", ")", ":", "if", "(", "r", ".", "method", "==", "'update'", ")", ":", "person_id", "=", "r", ".", "table", ".", "person_id", "person_id", ".", "writable", "=", "False", "person_id", ".", "comment", "=", "None", "else", ":", "dtable", "=", "s3db", ".", "disease_disease", "diseases", "=", "db", "(", "(", "dtable", ".", "deleted", "==", "False", ")", ")", ".", "select", "(", "dtable", ".", "id", ",", "limitby", "=", "(", "0", ",", "2", ")", ")", "if", "(", "len", "(", "diseases", ")", "==", "1", ")", ":", "field", "=", "r", ".", "table", ".", "disease_id", "field", ".", "default", "=", "diseases", ".", "first", "(", ")", ".", "id", "field", ".", "writable", "=", "False", "if", "(", "(", "r", ".", "component_name", "==", "'contact'", ")", "or", "(", "r", ".", "component_name", "==", "'exposure'", ")", ")", ":", "field", "=", "r", ".", "component", ".", "table", ".", "tracing_id", "field", ".", "readable", "=", "field", ".", "writable", "=", "False", "if", "r", ".", "interactive", ":", "field", "=", "r", ".", "table", ".", "person_id", "field", ".", "requires", "=", "IS_ADD_PERSON_WIDGET2", "(", ")", "field", ".", "widget", "=", "S3AddPersonWidget2", "(", "controller", "=", "'pr'", ")", "return", "True", "s3", ".", "prep", "=", "prep", "def", "postp", "(", "r", ",", "output", ")", ":", "if", "(", "isinstance", "(", "output", ",", "dict", ")", "and", "(", "'buttons'", "in", "output", ")", ")", ":", "buttons", "=", "output", "[", "'buttons'", "]", "if", "(", "(", "'list_btn'", "in", "buttons", ")", "and", "(", "'summary_btn'", "in", "buttons", ")", ")", ":", "buttons", "[", "'list_btn'", "]", "=", "buttons", "[", "'summary_btn'", "]", "return", "output", "s3", ".", "postp", "=", "postp", "return", "s3_rest_controller", "(", "rheader", "=", "s3db", ".", "disease_rheader", ")" ]
cases: restful crud controller .
train
false
1,254
def test_distance_comparison(): a = Distance((15 * u.kpc)) b = Distance((15 * u.kpc)) assert (a == b) c = Distance((1.0 * u.Mpc)) assert (a < c)
[ "def", "test_distance_comparison", "(", ")", ":", "a", "=", "Distance", "(", "(", "15", "*", "u", ".", "kpc", ")", ")", "b", "=", "Distance", "(", "(", "15", "*", "u", ".", "kpc", ")", ")", "assert", "(", "a", "==", "b", ")", "c", "=", "Distance", "(", "(", "1.0", "*", "u", ".", "Mpc", ")", ")", "assert", "(", "a", "<", "c", ")" ]
ensure comparisons of distances work .
train
false
1,255
def _by_version_descending(names): def _by_version(name): '\n Parse each component of the filename\n ' (name, ext) = os.path.splitext(name) parts = itertools.chain(name.split('-'), [ext]) return [packaging.version.parse(part) for part in parts] return sorted(names, key=_by_version, reverse=True)
[ "def", "_by_version_descending", "(", "names", ")", ":", "def", "_by_version", "(", "name", ")", ":", "(", "name", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "name", ")", "parts", "=", "itertools", ".", "chain", "(", "name", ".", "split", "(", "'-'", ")", ",", "[", "ext", "]", ")", "return", "[", "packaging", ".", "version", ".", "parse", "(", "part", ")", "for", "part", "in", "parts", "]", "return", "sorted", "(", "names", ",", "key", "=", "_by_version", ",", "reverse", "=", "True", ")" ]
given a list of filenames .
train
true
1,256
def wc_mapper(document): for word in tokenize(document): (yield (word, 1))
[ "def", "wc_mapper", "(", "document", ")", ":", "for", "word", "in", "tokenize", "(", "document", ")", ":", "(", "yield", "(", "word", ",", "1", ")", ")" ]
for each word in the document .
train
false
1,259
def _is_valid_slug(slug): VALID_SLUG_RE = re.compile('^[a-z0-9\\-]+$') return VALID_SLUG_RE.match(slug)
[ "def", "_is_valid_slug", "(", "slug", ")", ":", "VALID_SLUG_RE", "=", "re", ".", "compile", "(", "'^[a-z0-9\\\\-]+$'", ")", "return", "VALID_SLUG_RE", ".", "match", "(", "slug", ")" ]
returns true if slug is valid .
train
false
1,260
def _parse_queue_list(list_output): queues = dict(((q.split('/')[(-1)], q) for q in list_output['stdout'])) return queues
[ "def", "_parse_queue_list", "(", "list_output", ")", ":", "queues", "=", "dict", "(", "(", "(", "q", ".", "split", "(", "'/'", ")", "[", "(", "-", "1", ")", "]", ",", "q", ")", "for", "q", "in", "list_output", "[", "'stdout'", "]", ")", ")", "return", "queues" ]
parse the queue to get a dict of name -> url .
train
true
1,261
@login_required def get_favorites(req): return render_to_response('favorite/favorite_list.html', RequestContext(req, {'favorites': models.Favorite.objects.favorites_for_user(req.user)}))
[ "@", "login_required", "def", "get_favorites", "(", "req", ")", ":", "return", "render_to_response", "(", "'favorite/favorite_list.html'", ",", "RequestContext", "(", "req", ",", "{", "'favorites'", ":", "models", ".", "Favorite", ".", "objects", ".", "favorites_for_user", "(", "req", ".", "user", ")", "}", ")", ")" ]
display the request users favorites .
train
false
1,264
def case_appointment_type(): return s3_rest_controller()
[ "def", "case_appointment_type", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
appointment type: restful crud controller .
train
false
1,265
def capped_log10_likelihood(actual, predicted): actual = np.array(actual) predicted = np.array(predicted) predicted[(predicted < 0.01)] = 0.01 predicted[(predicted > 0.99)] = 0.99 score = (- ((actual * np.log10(predicted)) + ((1 - actual) * np.log10((1 - predicted))))) if (type(score) == np.ndarray): score[np.isnan(score)] = 0 elif np.isnan(score): score = 0 return score
[ "def", "capped_log10_likelihood", "(", "actual", ",", "predicted", ")", ":", "actual", "=", "np", ".", "array", "(", "actual", ")", "predicted", "=", "np", ".", "array", "(", "predicted", ")", "predicted", "[", "(", "predicted", "<", "0.01", ")", "]", "=", "0.01", "predicted", "[", "(", "predicted", ">", "0.99", ")", "]", "=", "0.99", "score", "=", "(", "-", "(", "(", "actual", "*", "np", ".", "log10", "(", "predicted", ")", ")", "+", "(", "(", "1", "-", "actual", ")", "*", "np", ".", "log10", "(", "(", "1", "-", "predicted", ")", ")", ")", ")", ")", "if", "(", "type", "(", "score", ")", "==", "np", ".", "ndarray", ")", ":", "score", "[", "np", ".", "isnan", "(", "score", ")", "]", "=", "0", "elif", "np", ".", "isnan", "(", "score", ")", ":", "score", "=", "0", "return", "score" ]
computes the capped log10 likelihood .
train
false
1,266
def infer_time_unit(time_seconds_arr): if (len(time_seconds_arr) == 0): return u'hours' max_time_seconds = max(time_seconds_arr) if (max_time_seconds <= (60 * 2)): return u'seconds' elif (max_time_seconds <= ((60 * 60) * 2)): return u'minutes' elif (max_time_seconds <= (((24 * 60) * 60) * 2)): return u'hours' else: return u'days'
[ "def", "infer_time_unit", "(", "time_seconds_arr", ")", ":", "if", "(", "len", "(", "time_seconds_arr", ")", "==", "0", ")", ":", "return", "u'hours'", "max_time_seconds", "=", "max", "(", "time_seconds_arr", ")", "if", "(", "max_time_seconds", "<=", "(", "60", "*", "2", ")", ")", ":", "return", "u'seconds'", "elif", "(", "max_time_seconds", "<=", "(", "(", "60", "*", "60", ")", "*", "2", ")", ")", ":", "return", "u'minutes'", "elif", "(", "max_time_seconds", "<=", "(", "(", "(", "24", "*", "60", ")", "*", "60", ")", "*", "2", ")", ")", ":", "return", "u'hours'", "else", ":", "return", "u'days'" ]
determine the most appropriate time unit for an array of time durations specified in seconds .
train
false
1,267
def unhash_question(question): if (len(question) < 40): raise ValueError(u'Invalid data') hexsha = question[:40] timestamp = question[40:50] try: question = b64decode(question[50:]).decode(u'utf-8') except (TypeError, UnicodeError): raise ValueError(u'Invalid encoding') if (hexsha != checksum_question(question, timestamp)): raise ValueError(u'Tampered question!') return (question, int(timestamp, 16))
[ "def", "unhash_question", "(", "question", ")", ":", "if", "(", "len", "(", "question", ")", "<", "40", ")", ":", "raise", "ValueError", "(", "u'Invalid data'", ")", "hexsha", "=", "question", "[", ":", "40", "]", "timestamp", "=", "question", "[", "40", ":", "50", "]", "try", ":", "question", "=", "b64decode", "(", "question", "[", "50", ":", "]", ")", ".", "decode", "(", "u'utf-8'", ")", "except", "(", "TypeError", ",", "UnicodeError", ")", ":", "raise", "ValueError", "(", "u'Invalid encoding'", ")", "if", "(", "hexsha", "!=", "checksum_question", "(", "question", ",", "timestamp", ")", ")", ":", "raise", "ValueError", "(", "u'Tampered question!'", ")", "return", "(", "question", ",", "int", "(", "timestamp", ",", "16", ")", ")" ]
unhashes question .
train
false
1,268
def _deg(f): k = f.ring.ngens degf = ((0,) * (k - 1)) for monom in f.itermonoms(): if (monom[:(-1)] > degf): degf = monom[:(-1)] return degf
[ "def", "_deg", "(", "f", ")", ":", "k", "=", "f", ".", "ring", ".", "ngens", "degf", "=", "(", "(", "0", ",", ")", "*", "(", "k", "-", "1", ")", ")", "for", "monom", "in", "f", ".", "itermonoms", "(", ")", ":", "if", "(", "monom", "[", ":", "(", "-", "1", ")", "]", ">", "degf", ")", ":", "degf", "=", "monom", "[", ":", "(", "-", "1", ")", "]", "return", "degf" ]
compute the degree of a multivariate polynomial f in k[x_0 .
train
false
1,269
def make_password(password, salt=None, hasher='default'): if (not password): return UNUSABLE_PASSWORD hasher = get_hasher(hasher) password = smart_str(password) if (not salt): salt = hasher.salt() salt = smart_str(salt) return hasher.encode(password, salt)
[ "def", "make_password", "(", "password", ",", "salt", "=", "None", ",", "hasher", "=", "'default'", ")", ":", "if", "(", "not", "password", ")", ":", "return", "UNUSABLE_PASSWORD", "hasher", "=", "get_hasher", "(", "hasher", ")", "password", "=", "smart_str", "(", "password", ")", "if", "(", "not", "salt", ")", ":", "salt", "=", "hasher", ".", "salt", "(", ")", "salt", "=", "smart_str", "(", "salt", ")", "return", "hasher", ".", "encode", "(", "password", ",", "salt", ")" ]
turn a plain-text password into a hash for database storage same as encode() but generates a new random salt .
train
false
1,270
def cast_keys(o, cast=str, encoding='utf-8'): if isinstance(o, (dict, Storage)): if isinstance(o, dict): newobj = dict() else: newobj = Storage() for (k, v) in o.items(): if ((cast == str) and isinstance(k, unicodeT)): key = k.encode(encoding) else: key = cast(k) newobj[key] = cast_keys(v, cast=cast, encoding=encoding) elif isinstance(o, (tuple, set, list)): newobj = [] for item in o: newobj.append(cast_keys(item, cast=cast, encoding=encoding)) if isinstance(o, tuple): newobj = tuple(newobj) elif isinstance(o, set): newobj = set(newobj) else: newobj = o return newobj
[ "def", "cast_keys", "(", "o", ",", "cast", "=", "str", ",", "encoding", "=", "'utf-8'", ")", ":", "if", "isinstance", "(", "o", ",", "(", "dict", ",", "Storage", ")", ")", ":", "if", "isinstance", "(", "o", ",", "dict", ")", ":", "newobj", "=", "dict", "(", ")", "else", ":", "newobj", "=", "Storage", "(", ")", "for", "(", "k", ",", "v", ")", "in", "o", ".", "items", "(", ")", ":", "if", "(", "(", "cast", "==", "str", ")", "and", "isinstance", "(", "k", ",", "unicodeT", ")", ")", ":", "key", "=", "k", ".", "encode", "(", "encoding", ")", "else", ":", "key", "=", "cast", "(", "k", ")", "newobj", "[", "key", "]", "=", "cast_keys", "(", "v", ",", "cast", "=", "cast", ",", "encoding", "=", "encoding", ")", "elif", "isinstance", "(", "o", ",", "(", "tuple", ",", "set", ",", "list", ")", ")", ":", "newobj", "=", "[", "]", "for", "item", "in", "o", ":", "newobj", ".", "append", "(", "cast_keys", "(", "item", ",", "cast", "=", "cast", ",", "encoding", "=", "encoding", ")", ")", "if", "isinstance", "(", "o", ",", "tuple", ")", ":", "newobj", "=", "tuple", "(", "newobj", ")", "elif", "isinstance", "(", "o", ",", "set", ")", ":", "newobj", "=", "set", "(", "newobj", ")", "else", ":", "newobj", "=", "o", "return", "newobj" ]
builds a new object with <cast> type keys .
train
false
1,272
def expand_ip_block(block): addresses = [] ip_r = block.base_ip.split('.') last_quad = int(ip_r[3]) address_root = ('%s.%s.%s.' % (ip_r[0], ip_r[1], ip_r[2])) for i in range(int(block.size)): addresses.append((address_root + str((last_quad + i)))) return addresses
[ "def", "expand_ip_block", "(", "block", ")", ":", "addresses", "=", "[", "]", "ip_r", "=", "block", ".", "base_ip", ".", "split", "(", "'.'", ")", "last_quad", "=", "int", "(", "ip_r", "[", "3", "]", ")", "address_root", "=", "(", "'%s.%s.%s.'", "%", "(", "ip_r", "[", "0", "]", ",", "ip_r", "[", "1", "]", ",", "ip_r", "[", "2", "]", ")", ")", "for", "i", "in", "range", "(", "int", "(", "block", ".", "size", ")", ")", ":", "addresses", ".", "append", "(", "(", "address_root", "+", "str", "(", "(", "last_quad", "+", "i", ")", ")", ")", ")", "return", "addresses" ]
expand public ip block to show all addresses .
train
false
1,273
def rax_required_together(): return [['api_key', 'username']]
[ "def", "rax_required_together", "(", ")", ":", "return", "[", "[", "'api_key'", ",", "'username'", "]", "]" ]
return the default list used for the required_together argument to ansiblemodule .
train
false
1,275
def moon(): return load('moon.png')
[ "def", "moon", "(", ")", ":", "return", "load", "(", "'moon.png'", ")" ]
surface of the moon .
train
false
1,276
@register.tag def annotate_form_field(parser, token): args = token.split_contents() if (len(args) < 2): raise template.TemplateSyntaxError('annotate_form_field tag requires a form field to be passed') return FormFieldNode(args[1])
[ "@", "register", ".", "tag", "def", "annotate_form_field", "(", "parser", ",", "token", ")", ":", "args", "=", "token", ".", "split_contents", "(", ")", "if", "(", "len", "(", "args", ")", "<", "2", ")", ":", "raise", "template", ".", "TemplateSyntaxError", "(", "'annotate_form_field tag requires a form field to be passed'", ")", "return", "FormFieldNode", "(", "args", "[", "1", "]", ")" ]
set an attribute on a form field with the widget type this means templates can use the widget type to render things differently if they want to .
train
false
1,278
def calc_gc_content(sequence): d = {} for nt in ['A', 'T', 'G', 'C']: d[nt] = (sequence.count(nt) + sequence.count(nt.lower())) gc = (d.get('G', 0) + d.get('C', 0)) if (gc == 0): return 0 return ((gc * 1.0) / ((d['A'] + d['T']) + gc))
[ "def", "calc_gc_content", "(", "sequence", ")", ":", "d", "=", "{", "}", "for", "nt", "in", "[", "'A'", ",", "'T'", ",", "'G'", ",", "'C'", "]", ":", "d", "[", "nt", "]", "=", "(", "sequence", ".", "count", "(", "nt", ")", "+", "sequence", ".", "count", "(", "nt", ".", "lower", "(", ")", ")", ")", "gc", "=", "(", "d", ".", "get", "(", "'G'", ",", "0", ")", "+", "d", ".", "get", "(", "'C'", ",", "0", ")", ")", "if", "(", "gc", "==", "0", ")", ":", "return", "0", "return", "(", "(", "gc", "*", "1.0", ")", "/", "(", "(", "d", "[", "'A'", "]", "+", "d", "[", "'T'", "]", ")", "+", "gc", ")", ")" ]
returns the % g+c content in a passed sequence .
train
false
1,279
def load_vi_open_in_editor_bindings(): registry = Registry() navigation_mode = (ViMode() & ViNavigationMode()) registry.add_binding(u'v')(get_by_name(u'edit-and-execute-command')) return registry
[ "def", "load_vi_open_in_editor_bindings", "(", ")", ":", "registry", "=", "Registry", "(", ")", "navigation_mode", "=", "(", "ViMode", "(", ")", "&", "ViNavigationMode", "(", ")", ")", "registry", ".", "add_binding", "(", "u'v'", ")", "(", "get_by_name", "(", "u'edit-and-execute-command'", ")", ")", "return", "registry" ]
pressing v in navigation mode will open the buffer in an external editor .
train
false
1,280
def delete_identity_pools(IdentityPoolName, IdentityPoolId=None, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: ids = _find_identity_pool_ids(IdentityPoolName, IdentityPoolId, conn) count = 0 if ids: for pool_id in ids: conn.delete_identity_pool(IdentityPoolId=pool_id) count += 1 return {'deleted': True, 'count': count} else: return {'deleted': False, 'count': count} except ClientError as e: return {'deleted': False, 'error': salt.utils.boto3.get_error(e)}
[ "def", "delete_identity_pools", "(", "IdentityPoolName", ",", "IdentityPoolId", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "ids", "=", "_find_identity_pool_ids", "(", "IdentityPoolName", ",", "IdentityPoolId", ",", "conn", ")", "count", "=", "0", "if", "ids", ":", "for", "pool_id", "in", "ids", ":", "conn", ".", "delete_identity_pool", "(", "IdentityPoolId", "=", "pool_id", ")", "count", "+=", "1", "return", "{", "'deleted'", ":", "True", ",", "'count'", ":", "count", "}", "else", ":", "return", "{", "'deleted'", ":", "False", ",", "'count'", ":", "count", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'deleted'", ":", "False", ",", "'error'", ":", "salt", ".", "utils", ".", "boto3", ".", "get_error", "(", "e", ")", "}" ]
given an identity pool name .
train
false
1,281
def TagByteSize(field_number): return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))
[ "def", "TagByteSize", "(", "field_number", ")", ":", "return", "_VarUInt64ByteSizeNoTag", "(", "PackTag", "(", "field_number", ",", "0", ")", ")" ]
returns the bytes required to serialize a tag with this field number .
train
false
1,282
def short_float_fmt(x): return u'{0:f}'.format(x).rstrip(u'0').rstrip(u'.')
[ "def", "short_float_fmt", "(", "x", ")", ":", "return", "u'{0:f}'", ".", "format", "(", "x", ")", ".", "rstrip", "(", "u'0'", ")", ".", "rstrip", "(", "u'.'", ")" ]
create a short string representation of a float .
train
false
1,284
def get_content_type(filename): return (mimetypes.guess_type(filename)[0] or 'application/octet-stream')
[ "def", "get_content_type", "(", "filename", ")", ":", "return", "(", "mimetypes", ".", "guess_type", "(", "filename", ")", "[", "0", "]", "or", "'application/octet-stream'", ")" ]
returns the full content type string with charset for a mimetype .
train
false
1,285
def teardown_test_show_dir(): if os.path.exists(SHOW_DIR): shutil.rmtree(SHOW_DIR)
[ "def", "teardown_test_show_dir", "(", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "SHOW_DIR", ")", ":", "shutil", ".", "rmtree", "(", "SHOW_DIR", ")" ]
remove the test show .
train
false
1,287
def _strip_value(value, lookup='exact'): if (lookup == 'in'): stripped_value = [_strip_object(el) for el in value] else: stripped_value = _strip_object(value) return stripped_value
[ "def", "_strip_value", "(", "value", ",", "lookup", "=", "'exact'", ")", ":", "if", "(", "lookup", "==", "'in'", ")", ":", "stripped_value", "=", "[", "_strip_object", "(", "el", ")", "for", "el", "in", "value", "]", "else", ":", "stripped_value", "=", "_strip_object", "(", "value", ")", "return", "stripped_value" ]
helper function to remove the branch and version information from the given value .
train
true
1,288
def _get_gl_version(_lib): try: return _lib.glGetString(7938).decode('utf-8') except Exception: return 'unknown'
[ "def", "_get_gl_version", "(", "_lib", ")", ":", "try", ":", "return", "_lib", ".", "glGetString", "(", "7938", ")", ".", "decode", "(", "'utf-8'", ")", "except", "Exception", ":", "return", "'unknown'" ]
helper to get the gl version string .
train
false
1,289
def clean_up_joinables(names): for name in names: if os.path.exists(name): logging.debug('Deleting %s', name) try: os.remove(name) except: pass name1 = (name + '.1') if os.path.exists(name1): logging.debug('Deleting %s', name1) try: os.remove(name1) except: pass
[ "def", "clean_up_joinables", "(", "names", ")", ":", "for", "name", "in", "names", ":", "if", "os", ".", "path", ".", "exists", "(", "name", ")", ":", "logging", ".", "debug", "(", "'Deleting %s'", ",", "name", ")", "try", ":", "os", ".", "remove", "(", "name", ")", "except", ":", "pass", "name1", "=", "(", "name", "+", "'.1'", ")", "if", "os", ".", "path", ".", "exists", "(", "name1", ")", ":", "logging", ".", "debug", "(", "'Deleting %s'", ",", "name1", ")", "try", ":", "os", ".", "remove", "(", "name1", ")", "except", ":", "pass" ]
remove joinable files and their .
train
false
1,290
def OBV(ds1, volumeDs, count): data1 = value_ds_to_numpy(ds1, count) if (data1 is None): return None data2 = value_ds_to_numpy(volumeDs, count) if (data2 is None): return None return talib.OBV(data1, data2)
[ "def", "OBV", "(", "ds1", ",", "volumeDs", ",", "count", ")", ":", "data1", "=", "value_ds_to_numpy", "(", "ds1", ",", "count", ")", "if", "(", "data1", "is", "None", ")", ":", "return", "None", "data2", "=", "value_ds_to_numpy", "(", "volumeDs", ",", "count", ")", "if", "(", "data2", "is", "None", ")", ":", "return", "None", "return", "talib", ".", "OBV", "(", "data1", ",", "data2", ")" ]
on balance volume .
train
false
1,291
def get_manager(cls): db_user = boto.config.get('DB', 'db_user', None) db_passwd = boto.config.get('DB', 'db_passwd', None) db_type = boto.config.get('DB', 'db_type', 'SimpleDB') db_name = boto.config.get('DB', 'db_name', None) db_table = boto.config.get('DB', 'db_table', None) db_host = boto.config.get('DB', 'db_host', 'sdb.amazonaws.com') db_port = boto.config.getint('DB', 'db_port', 443) enable_ssl = boto.config.getbool('DB', 'enable_ssl', True) sql_dir = boto.config.get('DB', 'sql_dir', None) debug = boto.config.getint('DB', 'debug', 0) module_name = cls.__module__.replace('.', '_') db_section = ((('DB_' + module_name) + '_') + cls.__name__) if (not boto.config.has_section(db_section)): db_section = ('DB_' + cls.__name__) if boto.config.has_section(db_section): db_user = boto.config.get(db_section, 'db_user', db_user) db_passwd = boto.config.get(db_section, 'db_passwd', db_passwd) db_type = boto.config.get(db_section, 'db_type', db_type) db_name = boto.config.get(db_section, 'db_name', db_name) db_table = boto.config.get(db_section, 'db_table', db_table) db_host = boto.config.get(db_section, 'db_host', db_host) db_port = boto.config.getint(db_section, 'db_port', db_port) enable_ssl = boto.config.getint(db_section, 'enable_ssl', enable_ssl) debug = boto.config.getint(db_section, 'debug', debug) elif (hasattr(cls, '_db_name') and (cls._db_name is not None)): db_name = cls._db_name elif hasattr(cls.__bases__[0], '_manager'): return cls.__bases__[0]._manager if (db_type == 'SimpleDB'): from boto.sdb.db.manager.sdbmanager import SDBManager return SDBManager(cls, db_name, db_user, db_passwd, db_host, db_port, db_table, sql_dir, enable_ssl) elif (db_type == 'XML'): from boto.sdb.db.manager.xmlmanager import XMLManager return XMLManager(cls, db_name, db_user, db_passwd, db_host, db_port, db_table, sql_dir, enable_ssl) else: raise ValueError(('Unknown db_type: %s' % db_type))
[ "def", "get_manager", "(", "cls", ")", ":", "db_user", "=", "boto", ".", "config", ".", "get", "(", "'DB'", ",", "'db_user'", ",", "None", ")", "db_passwd", "=", "boto", ".", "config", ".", "get", "(", "'DB'", ",", "'db_passwd'", ",", "None", ")", "db_type", "=", "boto", ".", "config", ".", "get", "(", "'DB'", ",", "'db_type'", ",", "'SimpleDB'", ")", "db_name", "=", "boto", ".", "config", ".", "get", "(", "'DB'", ",", "'db_name'", ",", "None", ")", "db_table", "=", "boto", ".", "config", ".", "get", "(", "'DB'", ",", "'db_table'", ",", "None", ")", "db_host", "=", "boto", ".", "config", ".", "get", "(", "'DB'", ",", "'db_host'", ",", "'sdb.amazonaws.com'", ")", "db_port", "=", "boto", ".", "config", ".", "getint", "(", "'DB'", ",", "'db_port'", ",", "443", ")", "enable_ssl", "=", "boto", ".", "config", ".", "getbool", "(", "'DB'", ",", "'enable_ssl'", ",", "True", ")", "sql_dir", "=", "boto", ".", "config", ".", "get", "(", "'DB'", ",", "'sql_dir'", ",", "None", ")", "debug", "=", "boto", ".", "config", ".", "getint", "(", "'DB'", ",", "'debug'", ",", "0", ")", "module_name", "=", "cls", ".", "__module__", ".", "replace", "(", "'.'", ",", "'_'", ")", "db_section", "=", "(", "(", "(", "'DB_'", "+", "module_name", ")", "+", "'_'", ")", "+", "cls", ".", "__name__", ")", "if", "(", "not", "boto", ".", "config", ".", "has_section", "(", "db_section", ")", ")", ":", "db_section", "=", "(", "'DB_'", "+", "cls", ".", "__name__", ")", "if", "boto", ".", "config", ".", "has_section", "(", "db_section", ")", ":", "db_user", "=", "boto", ".", "config", ".", "get", "(", "db_section", ",", "'db_user'", ",", "db_user", ")", "db_passwd", "=", "boto", ".", "config", ".", "get", "(", "db_section", ",", "'db_passwd'", ",", "db_passwd", ")", "db_type", "=", "boto", ".", "config", ".", "get", "(", "db_section", ",", "'db_type'", ",", "db_type", ")", "db_name", "=", "boto", ".", "config", ".", "get", "(", "db_section", ",", "'db_name'", ",", "db_name", ")", "db_table", "=", "boto", ".", "config", ".", "get", "(", "db_section", ",", "'db_table'", ",", "db_table", ")", "db_host", "=", "boto", ".", "config", ".", "get", "(", "db_section", ",", "'db_host'", ",", "db_host", ")", "db_port", "=", "boto", ".", "config", ".", "getint", "(", "db_section", ",", "'db_port'", ",", "db_port", ")", "enable_ssl", "=", "boto", ".", "config", ".", "getint", "(", "db_section", ",", "'enable_ssl'", ",", "enable_ssl", ")", "debug", "=", "boto", ".", "config", ".", "getint", "(", "db_section", ",", "'debug'", ",", "debug", ")", "elif", "(", "hasattr", "(", "cls", ",", "'_db_name'", ")", "and", "(", "cls", ".", "_db_name", "is", "not", "None", ")", ")", ":", "db_name", "=", "cls", ".", "_db_name", "elif", "hasattr", "(", "cls", ".", "__bases__", "[", "0", "]", ",", "'_manager'", ")", ":", "return", "cls", ".", "__bases__", "[", "0", "]", ".", "_manager", "if", "(", "db_type", "==", "'SimpleDB'", ")", ":", "from", "boto", ".", "sdb", ".", "db", ".", "manager", ".", "sdbmanager", "import", "SDBManager", "return", "SDBManager", "(", "cls", ",", "db_name", ",", "db_user", ",", "db_passwd", ",", "db_host", ",", "db_port", ",", "db_table", ",", "sql_dir", ",", "enable_ssl", ")", "elif", "(", "db_type", "==", "'XML'", ")", ":", "from", "boto", ".", "sdb", ".", "db", ".", "manager", ".", "xmlmanager", "import", "XMLManager", "return", "XMLManager", "(", "cls", ",", "db_name", ",", "db_user", ",", "db_passwd", ",", "db_host", ",", "db_port", ",", "db_table", ",", "sql_dir", ",", "enable_ssl", ")", "else", ":", "raise", "ValueError", "(", "(", "'Unknown db_type: %s'", "%", "db_type", ")", ")" ]
get pyrabbit manager .
train
true
1,293
def _blockdevicevolume_from_blockdevice_id(blockdevice_id, size, attached_to=None): dataset_id = UUID(blockdevice_id[6:]) return BlockDeviceVolume(size=size, attached_to=attached_to, dataset_id=dataset_id, blockdevice_id=blockdevice_id)
[ "def", "_blockdevicevolume_from_blockdevice_id", "(", "blockdevice_id", ",", "size", ",", "attached_to", "=", "None", ")", ":", "dataset_id", "=", "UUID", "(", "blockdevice_id", "[", "6", ":", "]", ")", "return", "BlockDeviceVolume", "(", "size", "=", "size", ",", "attached_to", "=", "attached_to", ",", "dataset_id", "=", "dataset_id", ",", "blockdevice_id", "=", "blockdevice_id", ")" ]
create a new blockdevicevolume with a dataset_id derived from the given blockdevice_id .
train
false
1,294
@register.filter def flag_url(obj, slug): content_type = ContentType.objects.get_for_model(obj) return reverse('flag', kwargs={'slug': slug, 'app_label': content_type.app_label, 'model': content_type.model, 'object_id': obj.pk})
[ "@", "register", ".", "filter", "def", "flag_url", "(", "obj", ",", "slug", ")", ":", "content_type", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "obj", ")", "return", "reverse", "(", "'flag'", ",", "kwargs", "=", "{", "'slug'", ":", "slug", ",", "'app_label'", ":", "content_type", ".", "app_label", ",", "'model'", ":", "content_type", ".", "model", ",", "'object_id'", ":", "obj", ".", "pk", "}", ")" ]
returns a url used to flag an object .
train
false
1,295
def alias_create(indices, alias, hosts=None, body=None, profile=None): es = _get_instance(hosts, profile) try: result = es.indices.put_alias(index=indices, name=alias, body=body) return True except elasticsearch.exceptions.NotFoundError: return None return None
[ "def", "alias_create", "(", "indices", ",", "alias", ",", "hosts", "=", "None", ",", "body", "=", "None", ",", "profile", "=", "None", ")", ":", "es", "=", "_get_instance", "(", "hosts", ",", "profile", ")", "try", ":", "result", "=", "es", ".", "indices", ".", "put_alias", "(", "index", "=", "indices", ",", "name", "=", "alias", ",", "body", "=", "body", ")", "return", "True", "except", "elasticsearch", ".", "exceptions", ".", "NotFoundError", ":", "return", "None", "return", "None" ]
create an alias for a specific index/indices cli example:: salt myminion elasticsearch .
train
false
1,300
@register.inclusion_tag('inclusion.html', takes_context=True) def inclusion_no_params_with_context(context): return {'result': ('inclusion_no_params_with_context - Expected result (context value: %s)' % context['value'])}
[ "@", "register", ".", "inclusion_tag", "(", "'inclusion.html'", ",", "takes_context", "=", "True", ")", "def", "inclusion_no_params_with_context", "(", "context", ")", ":", "return", "{", "'result'", ":", "(", "'inclusion_no_params_with_context - Expected result (context value: %s)'", "%", "context", "[", "'value'", "]", ")", "}" ]
expected inclusion_no_params_with_context __doc__ .
train
false
1,301
def unique_labels(*ys): if (not ys): raise ValueError('No argument has been passed.') ys_types = set((type_of_target(x) for x in ys)) if (ys_types == set(['binary', 'multiclass'])): ys_types = set(['multiclass']) if (len(ys_types) > 1): raise ValueError(('Mix type of y not allowed, got types %s' % ys_types)) label_type = ys_types.pop() if ((label_type == 'multilabel-indicator') and (len(set((check_array(y, ['csr', 'csc', 'coo']).shape[1] for y in ys))) > 1)): raise ValueError('Multi-label binary indicator input with different numbers of labels') _unique_labels = _FN_UNIQUE_LABELS.get(label_type, None) if (not _unique_labels): raise ValueError(('Unknown label type: %s' % repr(ys))) ys_labels = set(chain.from_iterable((_unique_labels(y) for y in ys))) if (len(set((isinstance(label, string_types) for label in ys_labels))) > 1): raise ValueError('Mix of label input types (string and number)') return np.array(sorted(ys_labels))
[ "def", "unique_labels", "(", "*", "ys", ")", ":", "if", "(", "not", "ys", ")", ":", "raise", "ValueError", "(", "'No argument has been passed.'", ")", "ys_types", "=", "set", "(", "(", "type_of_target", "(", "x", ")", "for", "x", "in", "ys", ")", ")", "if", "(", "ys_types", "==", "set", "(", "[", "'binary'", ",", "'multiclass'", "]", ")", ")", ":", "ys_types", "=", "set", "(", "[", "'multiclass'", "]", ")", "if", "(", "len", "(", "ys_types", ")", ">", "1", ")", ":", "raise", "ValueError", "(", "(", "'Mix type of y not allowed, got types %s'", "%", "ys_types", ")", ")", "label_type", "=", "ys_types", ".", "pop", "(", ")", "if", "(", "(", "label_type", "==", "'multilabel-indicator'", ")", "and", "(", "len", "(", "set", "(", "(", "check_array", "(", "y", ",", "[", "'csr'", ",", "'csc'", ",", "'coo'", "]", ")", ".", "shape", "[", "1", "]", "for", "y", "in", "ys", ")", ")", ")", ">", "1", ")", ")", ":", "raise", "ValueError", "(", "'Multi-label binary indicator input with different numbers of labels'", ")", "_unique_labels", "=", "_FN_UNIQUE_LABELS", ".", "get", "(", "label_type", ",", "None", ")", "if", "(", "not", "_unique_labels", ")", ":", "raise", "ValueError", "(", "(", "'Unknown label type: %s'", "%", "repr", "(", "ys", ")", ")", ")", "ys_labels", "=", "set", "(", "chain", ".", "from_iterable", "(", "(", "_unique_labels", "(", "y", ")", "for", "y", "in", "ys", ")", ")", ")", "if", "(", "len", "(", "set", "(", "(", "isinstance", "(", "label", ",", "string_types", ")", "for", "label", "in", "ys_labels", ")", ")", ")", ">", "1", ")", ":", "raise", "ValueError", "(", "'Mix of label input types (string and number)'", ")", "return", "np", ".", "array", "(", "sorted", "(", "ys_labels", ")", ")" ]
extract an ordered integer array of unique labels this implementation ignores any occurrence of nans .
train
false
1,303
def target_internal_dependencies(target): for dep in target.dependencies: if isinstance(dep, Jarable): (yield dep) else: for childdep in target_internal_dependencies(dep): (yield childdep)
[ "def", "target_internal_dependencies", "(", "target", ")", ":", "for", "dep", "in", "target", ".", "dependencies", ":", "if", "isinstance", "(", "dep", ",", "Jarable", ")", ":", "(", "yield", "dep", ")", "else", ":", "for", "childdep", "in", "target_internal_dependencies", "(", "dep", ")", ":", "(", "yield", "childdep", ")" ]
returns internal jarable dependencies that were "directly" declared .
train
true
1,304
def _FixedSizer(value_size): def SpecificSizer(field_number, is_repeated, is_packed): tag_size = _TagSize(field_number) if is_packed: local_VarintSize = _VarintSize def PackedFieldSize(value): result = (len(value) * value_size) return ((result + local_VarintSize(result)) + tag_size) return PackedFieldSize elif is_repeated: element_size = (value_size + tag_size) def RepeatedFieldSize(value): return (len(value) * element_size) return RepeatedFieldSize else: field_size = (value_size + tag_size) def FieldSize(value): return field_size return FieldSize return SpecificSizer
[ "def", "_FixedSizer", "(", "value_size", ")", ":", "def", "SpecificSizer", "(", "field_number", ",", "is_repeated", ",", "is_packed", ")", ":", "tag_size", "=", "_TagSize", "(", "field_number", ")", "if", "is_packed", ":", "local_VarintSize", "=", "_VarintSize", "def", "PackedFieldSize", "(", "value", ")", ":", "result", "=", "(", "len", "(", "value", ")", "*", "value_size", ")", "return", "(", "(", "result", "+", "local_VarintSize", "(", "result", ")", ")", "+", "tag_size", ")", "return", "PackedFieldSize", "elif", "is_repeated", ":", "element_size", "=", "(", "value_size", "+", "tag_size", ")", "def", "RepeatedFieldSize", "(", "value", ")", ":", "return", "(", "len", "(", "value", ")", "*", "element_size", ")", "return", "RepeatedFieldSize", "else", ":", "field_size", "=", "(", "value_size", "+", "tag_size", ")", "def", "FieldSize", "(", "value", ")", ":", "return", "field_size", "return", "FieldSize", "return", "SpecificSizer" ]
like _simplesizer except for a fixed-size field .
train
true
1,305
def feature_permission(url, option, msg, yes_action, no_action, abort_on): config_val = config.get(*option) if (config_val == 'ask'): if url.isValid(): text = 'Allow the website at <b>{}</b> to {}?'.format(html.escape(url.toDisplayString()), msg) else: text = 'Allow the website to {}?'.format(msg) return message.confirm_async(yes_action=yes_action, no_action=no_action, cancel_action=no_action, abort_on=abort_on, title='Permission request', text=text) elif config_val: yes_action() return None else: no_action() return None
[ "def", "feature_permission", "(", "url", ",", "option", ",", "msg", ",", "yes_action", ",", "no_action", ",", "abort_on", ")", ":", "config_val", "=", "config", ".", "get", "(", "*", "option", ")", "if", "(", "config_val", "==", "'ask'", ")", ":", "if", "url", ".", "isValid", "(", ")", ":", "text", "=", "'Allow the website at <b>{}</b> to {}?'", ".", "format", "(", "html", ".", "escape", "(", "url", ".", "toDisplayString", "(", ")", ")", ",", "msg", ")", "else", ":", "text", "=", "'Allow the website to {}?'", ".", "format", "(", "msg", ")", "return", "message", ".", "confirm_async", "(", "yes_action", "=", "yes_action", ",", "no_action", "=", "no_action", ",", "cancel_action", "=", "no_action", ",", "abort_on", "=", "abort_on", ",", "title", "=", "'Permission request'", ",", "text", "=", "text", ")", "elif", "config_val", ":", "yes_action", "(", ")", "return", "None", "else", ":", "no_action", "(", ")", "return", "None" ]
handle a feature permission request .
train
false
1,306
def _cherry_pick_args(func, args, dargs): if (func.func_code.co_flags & 4): p_args = args else: p_args = () if (func.func_code.co_flags & 8): p_dargs = dargs else: p_dargs = {} for param in _get_nonstar_args(func): if (param in dargs): p_dargs[param] = dargs[param] return (p_args, p_dargs)
[ "def", "_cherry_pick_args", "(", "func", ",", "args", ",", "dargs", ")", ":", "if", "(", "func", ".", "func_code", ".", "co_flags", "&", "4", ")", ":", "p_args", "=", "args", "else", ":", "p_args", "=", "(", ")", "if", "(", "func", ".", "func_code", ".", "co_flags", "&", "8", ")", ":", "p_dargs", "=", "dargs", "else", ":", "p_dargs", "=", "{", "}", "for", "param", "in", "_get_nonstar_args", "(", "func", ")", ":", "if", "(", "param", "in", "dargs", ")", ":", "p_dargs", "[", "param", "]", "=", "dargs", "[", "param", "]", "return", "(", "p_args", ",", "p_dargs", ")" ]
sanitize positional and keyword arguments before calling a function .
train
false
1,307
def disable_signing(**kwargs): return botocore.UNSIGNED
[ "def", "disable_signing", "(", "**", "kwargs", ")", ":", "return", "botocore", ".", "UNSIGNED" ]
this handler disables request signing by setting the signer name to a special sentinel value .
train
false
1,308
def get_aparc_aseg(files): for name in files: if ('aparc+aseg' in name): return name raise ValueError('aparc+aseg.mgz not found')
[ "def", "get_aparc_aseg", "(", "files", ")", ":", "for", "name", "in", "files", ":", "if", "(", "'aparc+aseg'", "in", "name", ")", ":", "return", "name", "raise", "ValueError", "(", "'aparc+aseg.mgz not found'", ")" ]
return the aparc+aseg .
train
false
1,309
def rebuild_all_translation_files(): for lang in get_all_languages(): for app in frappe.get_all_apps(): write_translations_file(app, lang)
[ "def", "rebuild_all_translation_files", "(", ")", ":", "for", "lang", "in", "get_all_languages", "(", ")", ":", "for", "app", "in", "frappe", ".", "get_all_apps", "(", ")", ":", "write_translations_file", "(", "app", ",", "lang", ")" ]
rebuild all translation files: [app]/translations/[lang] .
train
false
1,310
def get_valid_student_with_email(identifier): user = email = None try: user = get_student_from_identifier(identifier) except User.DoesNotExist: email = identifier else: email = user.email try: validate_email(email) except ValidationError: raise CCXUserValidationException('Could not find a user with name or email "{0}" '.format(identifier)) return (email, user)
[ "def", "get_valid_student_with_email", "(", "identifier", ")", ":", "user", "=", "email", "=", "None", "try", ":", "user", "=", "get_student_from_identifier", "(", "identifier", ")", "except", "User", ".", "DoesNotExist", ":", "email", "=", "identifier", "else", ":", "email", "=", "user", ".", "email", "try", ":", "validate_email", "(", "email", ")", "except", "ValidationError", ":", "raise", "CCXUserValidationException", "(", "'Could not find a user with name or email \"{0}\" '", ".", "format", "(", "identifier", ")", ")", "return", "(", "email", ",", "user", ")" ]
helper function to get an user email from an identifier and validate it .
train
false
1,313
def _copy_dist_from_dir(link_path, location): if os.path.isdir(location): rmtree(location) setup_py = 'setup.py' sdist_args = [sys.executable] sdist_args.append('-c') sdist_args.append((SETUPTOOLS_SHIM % setup_py)) sdist_args.append('sdist') sdist_args += ['--dist-dir', location] logger.info('Running setup.py sdist for %s', link_path) with indent_log(): call_subprocess(sdist_args, cwd=link_path, show_stdout=False) sdist = os.path.join(location, os.listdir(location)[0]) logger.info('Unpacking sdist %s into %s', sdist, location) unpack_file(sdist, location, content_type=None, link=None)
[ "def", "_copy_dist_from_dir", "(", "link_path", ",", "location", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "location", ")", ":", "rmtree", "(", "location", ")", "setup_py", "=", "'setup.py'", "sdist_args", "=", "[", "sys", ".", "executable", "]", "sdist_args", ".", "append", "(", "'-c'", ")", "sdist_args", ".", "append", "(", "(", "SETUPTOOLS_SHIM", "%", "setup_py", ")", ")", "sdist_args", ".", "append", "(", "'sdist'", ")", "sdist_args", "+=", "[", "'--dist-dir'", ",", "location", "]", "logger", ".", "info", "(", "'Running setup.py sdist for %s'", ",", "link_path", ")", "with", "indent_log", "(", ")", ":", "call_subprocess", "(", "sdist_args", ",", "cwd", "=", "link_path", ",", "show_stdout", "=", "False", ")", "sdist", "=", "os", ".", "path", ".", "join", "(", "location", ",", "os", ".", "listdir", "(", "location", ")", "[", "0", "]", ")", "logger", ".", "info", "(", "'Unpacking sdist %s into %s'", ",", "sdist", ",", "location", ")", "unpack_file", "(", "sdist", ",", "location", ",", "content_type", "=", "None", ",", "link", "=", "None", ")" ]
copy distribution files in link_path to location .
train
true
1,314
def _should_retry_response(resp_status, content): if (resp_status >= 500): return True if (resp_status == _TOO_MANY_REQUESTS): return True if (resp_status == six.moves.http_client.FORBIDDEN): if (not content): return False try: data = json.loads(content.decode('utf-8')) reason = data['error']['errors'][0]['reason'] except (UnicodeDecodeError, ValueError, KeyError): LOGGER.warning('Invalid JSON content from response: %s', content) return False LOGGER.warning('Encountered 403 Forbidden with reason "%s"', reason) if (reason in ('userRateLimitExceeded', 'rateLimitExceeded')): return True return False
[ "def", "_should_retry_response", "(", "resp_status", ",", "content", ")", ":", "if", "(", "resp_status", ">=", "500", ")", ":", "return", "True", "if", "(", "resp_status", "==", "_TOO_MANY_REQUESTS", ")", ":", "return", "True", "if", "(", "resp_status", "==", "six", ".", "moves", ".", "http_client", ".", "FORBIDDEN", ")", ":", "if", "(", "not", "content", ")", ":", "return", "False", "try", ":", "data", "=", "json", ".", "loads", "(", "content", ".", "decode", "(", "'utf-8'", ")", ")", "reason", "=", "data", "[", "'error'", "]", "[", "'errors'", "]", "[", "0", "]", "[", "'reason'", "]", "except", "(", "UnicodeDecodeError", ",", "ValueError", ",", "KeyError", ")", ":", "LOGGER", ".", "warning", "(", "'Invalid JSON content from response: %s'", ",", "content", ")", "return", "False", "LOGGER", ".", "warning", "(", "'Encountered 403 Forbidden with reason \"%s\"'", ",", "reason", ")", "if", "(", "reason", "in", "(", "'userRateLimitExceeded'", ",", "'rateLimitExceeded'", ")", ")", ":", "return", "True", "return", "False" ]
determines whether a response should be retried .
train
false
1,315
def GetLocalUser(): return (pwd.getpwuid(os.getuid())[0] or os.getlogin())
[ "def", "GetLocalUser", "(", ")", ":", "return", "(", "pwd", ".", "getpwuid", "(", "os", ".", "getuid", "(", ")", ")", "[", "0", "]", "or", "os", ".", "getlogin", "(", ")", ")" ]
return the local user running the program .
train
false
1,316
def LocalPathToCanonicalPath(path): return utils.NormalizePath(path)
[ "def", "LocalPathToCanonicalPath", "(", "path", ")", ":", "return", "utils", ".", "NormalizePath", "(", "path", ")" ]
osx uses a normal path .
train
false
1,317
def parse_with_objects(code, var, **kwargs): deps = {} for (key, value) in kwargs.items(): if isinstance(value, _compat.integer_types): value = str(value) if _compat.PY3: if (value is None): value = str(value) if ((not isinstance(value, _compat.string_types)) and (not isinstance(value, CodeBlock))): new_var = var(value) deps[new_var] = value kwargs[key] = new_var (block, var) = parse_code(code, var, **kwargs) for (key, dep) in _compat.iteritems(deps): block.add_dependency(key, dep) return (block, var)
[ "def", "parse_with_objects", "(", "code", ",", "var", ",", "**", "kwargs", ")", ":", "deps", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "kwargs", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "_compat", ".", "integer_types", ")", ":", "value", "=", "str", "(", "value", ")", "if", "_compat", ".", "PY3", ":", "if", "(", "value", "is", "None", ")", ":", "value", "=", "str", "(", "value", ")", "if", "(", "(", "not", "isinstance", "(", "value", ",", "_compat", ".", "string_types", ")", ")", "and", "(", "not", "isinstance", "(", "value", ",", "CodeBlock", ")", ")", ")", ":", "new_var", "=", "var", "(", "value", ")", "deps", "[", "new_var", "]", "=", "value", "kwargs", "[", "key", "]", "=", "new_var", "(", "block", ",", "var", ")", "=", "parse_code", "(", "code", ",", "var", ",", "**", "kwargs", ")", "for", "(", "key", ",", "dep", ")", "in", "_compat", ".", "iteritems", "(", "deps", ")", ":", "block", ".", "add_dependency", "(", "key", ",", "dep", ")", "return", "(", "block", ",", "var", ")" ]
parse code and include non string/codeblock kwargs as dependencies .
train
true
1,319
def hash_160_to_address(h160, addrtype=0): if ((h160 is None) or (len(h160) is not 20)): return None vh160 = (chr(addrtype) + h160) h = Hash(vh160) addr = (vh160 + h[0:4]) return b58encode(addr)
[ "def", "hash_160_to_address", "(", "h160", ",", "addrtype", "=", "0", ")", ":", "if", "(", "(", "h160", "is", "None", ")", "or", "(", "len", "(", "h160", ")", "is", "not", "20", ")", ")", ":", "return", "None", "vh160", "=", "(", "chr", "(", "addrtype", ")", "+", "h160", ")", "h", "=", "Hash", "(", "vh160", ")", "addr", "=", "(", "vh160", "+", "h", "[", "0", ":", "4", "]", ")", "return", "b58encode", "(", "addr", ")" ]
checks if the provided hash is actually 160bits or 20 bytes long and returns the address .
train
false
1,321
def local_bitwidth(): return (struct.calcsize('P') * 8)
[ "def", "local_bitwidth", "(", ")", ":", "return", "(", "struct", ".", "calcsize", "(", "'P'", ")", "*", "8", ")" ]
return 32 for 32bit arch .
train
false
1,323
def put_group_policy(group_name, policy_name, policy_json, region=None, key=None, keyid=None, profile=None): group = get_group(group_name, region=region, key=key, keyid=keyid, profile=profile) if (not group): log.error('Group {0} does not exist'.format(group_name)) return False conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: if (not isinstance(policy_json, six.string_types)): policy_json = json.dumps(policy_json) created = conn.put_group_policy(group_name, policy_name, policy_json) if created: log.info('Created policy for group {0}.'.format(group_name)) return True msg = 'Could not create policy for group {0}' log.error(msg.format(group_name)) except boto.exception.BotoServerError as e: log.debug(e) msg = 'Failed to create policy for group {0}' log.error(msg.format(group_name)) return False
[ "def", "put_group_policy", "(", "group_name", ",", "policy_name", ",", "policy_json", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "group", "=", "get_group", "(", "group_name", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "(", "not", "group", ")", ":", "log", ".", "error", "(", "'Group {0} does not exist'", ".", "format", "(", "group_name", ")", ")", "return", "False", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "try", ":", "if", "(", "not", "isinstance", "(", "policy_json", ",", "six", ".", "string_types", ")", ")", ":", "policy_json", "=", "json", ".", "dumps", "(", "policy_json", ")", "created", "=", "conn", ".", "put_group_policy", "(", "group_name", ",", "policy_name", ",", "policy_json", ")", "if", "created", ":", "log", ".", "info", "(", "'Created policy for group {0}.'", ".", "format", "(", "group_name", ")", ")", "return", "True", "msg", "=", "'Could not create policy for group {0}'", "log", ".", "error", "(", "msg", ".", "format", "(", "group_name", ")", ")", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "e", ":", "log", ".", "debug", "(", "e", ")", "msg", "=", "'Failed to create policy for group {0}'", "log", ".", "error", "(", "msg", ".", "format", "(", "group_name", ")", ")", "return", "False" ]
adds or updates the specified policy document for the specified group .
train
true
1,324
def process_multipart(entity): ib = u'' if (u'boundary' in entity.content_type.params): ib = entity.content_type.params['boundary'].strip(u'"') if (not re.match(u'^[ -~]{0,200}[!-~]$', ib)): raise ValueError((u'Invalid boundary in multipart form: %r' % (ib,))) ib = (u'--' + ib).encode('ascii') while True: b = entity.readline() if (not b): return b = b.strip() if (b == ib): break while True: part = entity.part_class.from_fp(entity.fp, ib) entity.parts.append(part) part.process() if part.fp.done: break
[ "def", "process_multipart", "(", "entity", ")", ":", "ib", "=", "u''", "if", "(", "u'boundary'", "in", "entity", ".", "content_type", ".", "params", ")", ":", "ib", "=", "entity", ".", "content_type", ".", "params", "[", "'boundary'", "]", ".", "strip", "(", "u'\"'", ")", "if", "(", "not", "re", ".", "match", "(", "u'^[ -~]{0,200}[!-~]$'", ",", "ib", ")", ")", ":", "raise", "ValueError", "(", "(", "u'Invalid boundary in multipart form: %r'", "%", "(", "ib", ",", ")", ")", ")", "ib", "=", "(", "u'--'", "+", "ib", ")", ".", "encode", "(", "'ascii'", ")", "while", "True", ":", "b", "=", "entity", ".", "readline", "(", ")", "if", "(", "not", "b", ")", ":", "return", "b", "=", "b", ".", "strip", "(", ")", "if", "(", "b", "==", "ib", ")", ":", "break", "while", "True", ":", "part", "=", "entity", ".", "part_class", ".", "from_fp", "(", "entity", ".", "fp", ",", "ib", ")", "entity", ".", "parts", ".", "append", "(", "part", ")", "part", ".", "process", "(", ")", "if", "part", ".", "fp", ".", "done", ":", "break" ]
read all multipart parts into entity .
train
false
1,325
def load_package_dependencies(package, load_recursive=False): global _loaded_packages _init() if VERBOSE: print('Load dependencies for package', package) if (not load_recursive): manifest_file = roslib.manifest.manifest_file(package, True) m = roslib.manifest.parse_file(manifest_file) depends = [d.package for d in m.depends] else: depends = rospkg.RosPack().get_depends(package, implicit=True) msgs = [] failures = [] for d in depends: if VERBOSE: print('Load dependency', d) if ((d in _loaded_packages) or (d == package)): continue _loaded_packages.append(d) (specs, failed) = get_pkg_msg_specs(d) msgs.extend(specs) failures.extend(failed) for (key, spec) in msgs: register(key, spec)
[ "def", "load_package_dependencies", "(", "package", ",", "load_recursive", "=", "False", ")", ":", "global", "_loaded_packages", "_init", "(", ")", "if", "VERBOSE", ":", "print", "(", "'Load dependencies for package'", ",", "package", ")", "if", "(", "not", "load_recursive", ")", ":", "manifest_file", "=", "roslib", ".", "manifest", ".", "manifest_file", "(", "package", ",", "True", ")", "m", "=", "roslib", ".", "manifest", ".", "parse_file", "(", "manifest_file", ")", "depends", "=", "[", "d", ".", "package", "for", "d", "in", "m", ".", "depends", "]", "else", ":", "depends", "=", "rospkg", ".", "RosPack", "(", ")", ".", "get_depends", "(", "package", ",", "implicit", "=", "True", ")", "msgs", "=", "[", "]", "failures", "=", "[", "]", "for", "d", "in", "depends", ":", "if", "VERBOSE", ":", "print", "(", "'Load dependency'", ",", "d", ")", "if", "(", "(", "d", "in", "_loaded_packages", ")", "or", "(", "d", "==", "package", ")", ")", ":", "continue", "_loaded_packages", ".", "append", "(", "d", ")", "(", "specs", ",", "failed", ")", "=", "get_pkg_msg_specs", "(", "d", ")", "msgs", ".", "extend", "(", "specs", ")", "failures", ".", "extend", "(", "failed", ")", "for", "(", "key", ",", "spec", ")", "in", "msgs", ":", "register", "(", "key", ",", "spec", ")" ]
register all messages that the specified package depends on .
train
false
1,326
def DEFINE_list(name, default, help): CONFIG.AddOption(type_info.List(name=name, default=default, description=help, validator=type_info.String()))
[ "def", "DEFINE_list", "(", "name", ",", "default", ",", "help", ")", ":", "CONFIG", ".", "AddOption", "(", "type_info", ".", "List", "(", "name", "=", "name", ",", "default", "=", "default", ",", "description", "=", "help", ",", "validator", "=", "type_info", ".", "String", "(", ")", ")", ")" ]
registers a flag whose value is a comma-separated list of strings .
train
false
1,328
def _get_route_variables(match, default_kwargs=None): kwargs = (default_kwargs or {}) kwargs.update(match.groupdict()) if kwargs: args = tuple((value[1] for value in sorted(((int(key[2:(-2)]), kwargs.pop(key)) for key in kwargs.keys() if (key.startswith('__') and key.endswith('__')))))) else: args = () return (args, kwargs)
[ "def", "_get_route_variables", "(", "match", ",", "default_kwargs", "=", "None", ")", ":", "kwargs", "=", "(", "default_kwargs", "or", "{", "}", ")", "kwargs", ".", "update", "(", "match", ".", "groupdict", "(", ")", ")", "if", "kwargs", ":", "args", "=", "tuple", "(", "(", "value", "[", "1", "]", "for", "value", "in", "sorted", "(", "(", "(", "int", "(", "key", "[", "2", ":", "(", "-", "2", ")", "]", ")", ",", "kwargs", ".", "pop", "(", "key", ")", ")", "for", "key", "in", "kwargs", ".", "keys", "(", ")", "if", "(", "key", ".", "startswith", "(", "'__'", ")", "and", "key", ".", "endswith", "(", "'__'", ")", ")", ")", ")", ")", ")", "else", ":", "args", "=", "(", ")", "return", "(", "args", ",", "kwargs", ")" ]
returns for a route match .
train
false
1,329
def sortedEpisodes(m, season=None): episodes = [] seasons = season if (season is None): seasons = sortedSeasons(m) elif (not isinstance(season, (tuple, list))): seasons = [season] for s in seasons: eps_indx = m.get('episodes', {}).get(s, {}).keys() eps_indx.sort() for e in eps_indx: episodes.append(m['episodes'][s][e]) return episodes
[ "def", "sortedEpisodes", "(", "m", ",", "season", "=", "None", ")", ":", "episodes", "=", "[", "]", "seasons", "=", "season", "if", "(", "season", "is", "None", ")", ":", "seasons", "=", "sortedSeasons", "(", "m", ")", "elif", "(", "not", "isinstance", "(", "season", ",", "(", "tuple", ",", "list", ")", ")", ")", ":", "seasons", "=", "[", "season", "]", "for", "s", "in", "seasons", ":", "eps_indx", "=", "m", ".", "get", "(", "'episodes'", ",", "{", "}", ")", ".", "get", "(", "s", ",", "{", "}", ")", ".", "keys", "(", ")", "eps_indx", ".", "sort", "(", ")", "for", "e", "in", "eps_indx", ":", "episodes", ".", "append", "(", "m", "[", "'episodes'", "]", "[", "s", "]", "[", "e", "]", ")", "return", "episodes" ]
return a sorted list of episodes of the given series .
train
false
1,330
def get_timestamp(d): if (isinstance(d, datetime.date) and (not isinstance(d, datetime.datetime))): d = datetime.datetime.combine(d, datetime.time(0, 0, 0, 0)) msec = str(d.microsecond).rjust(6).replace(' ', '0') return float(('%s.%s' % (calendar.timegm(d.utctimetuple()), msec)))
[ "def", "get_timestamp", "(", "d", ")", ":", "if", "(", "isinstance", "(", "d", ",", "datetime", ".", "date", ")", "and", "(", "not", "isinstance", "(", "d", ",", "datetime", ".", "datetime", ")", ")", ")", ":", "d", "=", "datetime", ".", "datetime", ".", "combine", "(", "d", ",", "datetime", ".", "time", "(", "0", ",", "0", ",", "0", ",", "0", ")", ")", "msec", "=", "str", "(", "d", ".", "microsecond", ")", ".", "rjust", "(", "6", ")", ".", "replace", "(", "' '", ",", "'0'", ")", "return", "float", "(", "(", "'%s.%s'", "%", "(", "calendar", ".", "timegm", "(", "d", ".", "utctimetuple", "(", ")", ")", ",", "msec", ")", ")", ")" ]
returns a utc timestamp for a c{datetime .
train
true
1,332
def group_to_gid(group): func_name = '{0}.group_to_gid'.format(__virtualname__) if (__opts__.get('fun', '') == func_name): log.info('The function {0} should not be used on Windows systems; see function docs for details.'.format(func_name)) return _user_to_uid(group)
[ "def", "group_to_gid", "(", "group", ")", ":", "func_name", "=", "'{0}.group_to_gid'", ".", "format", "(", "__virtualname__", ")", "if", "(", "__opts__", ".", "get", "(", "'fun'", ",", "''", ")", "==", "func_name", ")", ":", "log", ".", "info", "(", "'The function {0} should not be used on Windows systems; see function docs for details.'", ".", "format", "(", "func_name", ")", ")", "return", "_user_to_uid", "(", "group", ")" ]
convert the group to the gid on this system group group to convert to its gid cli example: .
train
false
1,333
@register.filter def friends(user): try: return Relationship.objects.get_friends_for_user(user) except AttributeError: return []
[ "@", "register", ".", "filter", "def", "friends", "(", "user", ")", ":", "try", ":", "return", "Relationship", ".", "objects", ".", "get_friends_for_user", "(", "user", ")", "except", "AttributeError", ":", "return", "[", "]" ]
returns people user is following sans people blocking user .
train
false
1,334
def parse_url(url): scheme = None auth = None host = None port = None path = None fragment = None query = None if ('://' in url): (scheme, url) = url.split('://', 1) (url, path_, delim) = split_first(url, ['/', '?', '#']) if delim: path = (delim + path_) if ('@' in url): (auth, url) = url.split('@', 1) if (url and (url[0] == '[')): (host, url) = url.split(']', 1) host += ']' if (':' in url): (_host, port) = url.split(':', 1) if (not host): host = _host if (not port.isdigit()): raise LocationParseError(('Failed to parse: %s' % url)) port = int(port) elif ((not host) and url): host = url if (not path): return Url(scheme, auth, host, port, path, query, fragment) if ('#' in path): (path, fragment) = path.split('#', 1) if ('?' in path): (path, query) = path.split('?', 1) return Url(scheme, auth, host, port, path, query, fragment)
[ "def", "parse_url", "(", "url", ")", ":", "scheme", "=", "None", "auth", "=", "None", "host", "=", "None", "port", "=", "None", "path", "=", "None", "fragment", "=", "None", "query", "=", "None", "if", "(", "'://'", "in", "url", ")", ":", "(", "scheme", ",", "url", ")", "=", "url", ".", "split", "(", "'://'", ",", "1", ")", "(", "url", ",", "path_", ",", "delim", ")", "=", "split_first", "(", "url", ",", "[", "'/'", ",", "'?'", ",", "'#'", "]", ")", "if", "delim", ":", "path", "=", "(", "delim", "+", "path_", ")", "if", "(", "'@'", "in", "url", ")", ":", "(", "auth", ",", "url", ")", "=", "url", ".", "split", "(", "'@'", ",", "1", ")", "if", "(", "url", "and", "(", "url", "[", "0", "]", "==", "'['", ")", ")", ":", "(", "host", ",", "url", ")", "=", "url", ".", "split", "(", "']'", ",", "1", ")", "host", "+=", "']'", "if", "(", "':'", "in", "url", ")", ":", "(", "_host", ",", "port", ")", "=", "url", ".", "split", "(", "':'", ",", "1", ")", "if", "(", "not", "host", ")", ":", "host", "=", "_host", "if", "(", "not", "port", ".", "isdigit", "(", ")", ")", ":", "raise", "LocationParseError", "(", "(", "'Failed to parse: %s'", "%", "url", ")", ")", "port", "=", "int", "(", "port", ")", "elif", "(", "(", "not", "host", ")", "and", "url", ")", ":", "host", "=", "url", "if", "(", "not", "path", ")", ":", "return", "Url", "(", "scheme", ",", "auth", ",", "host", ",", "port", ",", "path", ",", "query", ",", "fragment", ")", "if", "(", "'#'", "in", "path", ")", ":", "(", "path", ",", "fragment", ")", "=", "path", ".", "split", "(", "'#'", ",", "1", ")", "if", "(", "'?'", "in", "path", ")", ":", "(", "path", ",", "query", ")", "=", "path", ".", "split", "(", "'?'", ",", "1", ")", "return", "Url", "(", "scheme", ",", "auth", ",", "host", ",", "port", ",", "path", ",", "query", ",", "fragment", ")" ]
parse url into mapping of components .
train
true
1,335
@task def send_purchase_receipt(contrib_id, **kw): contrib = Contribution.objects.get(pk=contrib_id) with contrib.user.activate_lang(): addon = contrib.addon version = (addon.current_version or addon.latest_version) subject = _('Receipt for {0}').format(contrib.addon.name) data = {'app_name': addon.name, 'developer_name': (version.developer_name if version else ''), 'price': contrib.get_amount_locale(get_locale_from_lang(contrib.source_locale)), 'date': jingo.helpers.datetime(contrib.created.date()), 'purchaser_email': contrib.user.email, 'transaction_id': contrib.uuid, 'purchases_url': absolutify('/purchases'), 'support_url': addon.support_url, 'terms_of_service_url': absolutify('/terms-of-use')} log.info(('Sending email about purchase: %s' % contrib_id)) text_template = 'purchase/receipt.ltxt' html_template = 'purchase/receipt.html' send_html_mail_jinja(subject, html_template, text_template, data, recipient_list=[contrib.user.email])
[ "@", "task", "def", "send_purchase_receipt", "(", "contrib_id", ",", "**", "kw", ")", ":", "contrib", "=", "Contribution", ".", "objects", ".", "get", "(", "pk", "=", "contrib_id", ")", "with", "contrib", ".", "user", ".", "activate_lang", "(", ")", ":", "addon", "=", "contrib", ".", "addon", "version", "=", "(", "addon", ".", "current_version", "or", "addon", ".", "latest_version", ")", "subject", "=", "_", "(", "'Receipt for {0}'", ")", ".", "format", "(", "contrib", ".", "addon", ".", "name", ")", "data", "=", "{", "'app_name'", ":", "addon", ".", "name", ",", "'developer_name'", ":", "(", "version", ".", "developer_name", "if", "version", "else", "''", ")", ",", "'price'", ":", "contrib", ".", "get_amount_locale", "(", "get_locale_from_lang", "(", "contrib", ".", "source_locale", ")", ")", ",", "'date'", ":", "jingo", ".", "helpers", ".", "datetime", "(", "contrib", ".", "created", ".", "date", "(", ")", ")", ",", "'purchaser_email'", ":", "contrib", ".", "user", ".", "email", ",", "'transaction_id'", ":", "contrib", ".", "uuid", ",", "'purchases_url'", ":", "absolutify", "(", "'/purchases'", ")", ",", "'support_url'", ":", "addon", ".", "support_url", ",", "'terms_of_service_url'", ":", "absolutify", "(", "'/terms-of-use'", ")", "}", "log", ".", "info", "(", "(", "'Sending email about purchase: %s'", "%", "contrib_id", ")", ")", "text_template", "=", "'purchase/receipt.ltxt'", "html_template", "=", "'purchase/receipt.html'", "send_html_mail_jinja", "(", "subject", ",", "html_template", ",", "text_template", ",", "data", ",", "recipient_list", "=", "[", "contrib", ".", "user", ".", "email", "]", ")" ]
sends an email to the purchaser of the app .
train
false
1,336
def _PERM_OP(a, b, n, m): t = (((a >> n) ^ b) & m) b = (b ^ t) a = (a ^ (t << n)) return (a, b)
[ "def", "_PERM_OP", "(", "a", ",", "b", ",", "n", ",", "m", ")", ":", "t", "=", "(", "(", "(", "a", ">>", "n", ")", "^", "b", ")", "&", "m", ")", "b", "=", "(", "b", "^", "t", ")", "a", "=", "(", "a", "^", "(", "t", "<<", "n", ")", ")", "return", "(", "a", ",", "b", ")" ]
cleverer bit manipulation .
train
false
1,337
def okayAt63(level, pos): return ((level.blockAt(pos[0], 63, pos[2]) != 0) or (level.blockAt(pos[0], 64, pos[2]) != 0))
[ "def", "okayAt63", "(", "level", ",", "pos", ")", ":", "return", "(", "(", "level", ".", "blockAt", "(", "pos", "[", "0", "]", ",", "63", ",", "pos", "[", "2", "]", ")", "!=", "0", ")", "or", "(", "level", ".", "blockAt", "(", "pos", "[", "0", "]", ",", "64", ",", "pos", "[", "2", "]", ")", "!=", "0", ")", ")" ]
blocks 63 or 64 must be occupied .
train
false
1,338
def member(): return s3_rest_controller()
[ "def", "member", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
require a member user .
train
false
1,339
def is_python(text, filename='<string>'): try: compile(text, filename, 'exec') except (SyntaxError, TypeError): return False else: return True
[ "def", "is_python", "(", "text", ",", "filename", "=", "'<string>'", ")", ":", "try", ":", "compile", "(", "text", ",", "filename", ",", "'exec'", ")", "except", "(", "SyntaxError", ",", "TypeError", ")", ":", "return", "False", "else", ":", "return", "True" ]
is this string a valid python script? .
train
true
1,340
def get_container_view(service_instance, obj_type, container=None): if (not container): container = service_instance.content.rootFolder view_ref = service_instance.content.viewManager.CreateContainerView(container=container, type=obj_type, recursive=True) return view_ref
[ "def", "get_container_view", "(", "service_instance", ",", "obj_type", ",", "container", "=", "None", ")", ":", "if", "(", "not", "container", ")", ":", "container", "=", "service_instance", ".", "content", ".", "rootFolder", "view_ref", "=", "service_instance", ".", "content", ".", "viewManager", ".", "CreateContainerView", "(", "container", "=", "container", ",", "type", "=", "obj_type", ",", "recursive", "=", "True", ")", "return", "view_ref" ]
get a vsphere container view reference to all objects of type obj_type it is up to the caller to take care of destroying the view when no longer needed .
train
false