Columns:
id_within_dataset: int64 (values 1 to 55.5k)
snippet: string (lengths 19 to 14.2k)
tokens: sequence (lengths 6 to 1.63k)
nl: string (lengths 6 to 352)
split_within_dataset: string (1 class)
is_duplicated: bool (2 classes)
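For orientation, a minimal sketch of loading a dataset with this schema via the Hugging Face datasets library; the path "user/code-docstring-corpus" is a placeholder, not the real identifier:

from datasets import load_dataset

# hypothetical dataset path; substitute the real identifier
ds = load_dataset("user/code-docstring-corpus", split="train")
row = ds[0]
print(row["id_within_dataset"], row["split_within_dataset"], row["is_duplicated"])
print(row["nl"])            # natural-language description of the snippet
print(len(row["tokens"]))   # number of tokens in the tokenized snippet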
1,737
def is_sqrt(expr): return (expr.is_Pow and expr.exp.is_Rational and (abs(expr.exp) is S.Half))
[ "def", "is_sqrt", "(", "expr", ")", ":", "return", "(", "expr", ".", "is_Pow", "and", "expr", ".", "exp", ".", "is_Rational", "and", "(", "abs", "(", "expr", ".", "exp", ")", "is", "S", ".", "Half", ")", ")" ]
return true if expr is a sqrt .
train
false
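As a rough sketch of how the tokens field relates to the snippet field: space-joining the tokens approximately reconstructs the flattened snippet, modulo whitespace around punctuation (token list abridged here):

tokens = ["def", "is_sqrt", "(", "expr", ")", ":", "return", "(", "expr", ".", "is_Pow", ")"]
print(" ".join(tokens))  # "def is_sqrt ( expr ) : return ( expr . is_Pow )"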
1,738
def create_textfile_with_contents(filename, contents, encoding='utf-8'): ensure_directory_exists(os.path.dirname(filename)) if os.path.exists(filename): os.remove(filename) outstream = codecs.open(filename, 'w', encoding) outstream.write(contents) if (contents and (not contents.endswith('\n'))): outstream.write('\n') outstream.flush() outstream.close() assert os.path.exists(filename), ('ENSURE file exists: %s' % filename)
[ "def", "create_textfile_with_contents", "(", "filename", ",", "contents", ",", "encoding", "=", "'utf-8'", ")", ":", "ensure_directory_exists", "(", "os", ".", "path", ".", "dirname", "(", "filename", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "os", ".", "remove", "(", "filename", ")", "outstream", "=", "codecs", ".", "open", "(", "filename", ",", "'w'", ",", "encoding", ")", "outstream", ".", "write", "(", "contents", ")", "if", "(", "contents", "and", "(", "not", "contents", ".", "endswith", "(", "'\\n'", ")", ")", ")", ":", "outstream", ".", "write", "(", "'\\n'", ")", "outstream", ".", "flush", "(", ")", "outstream", ".", "close", "(", ")", "assert", "os", ".", "path", ".", "exists", "(", "filename", ")", ",", "(", "'ENSURE file exists: %s'", "%", "filename", ")" ]
creates a textual file with the provided contents in the workdir .
train
true
1,739
def _fixed_cbLogin(self, xxx_todo_changeme): (interface, avatar, logout) = xxx_todo_changeme if (not IJellyable.providedBy(avatar)): avatar = pb.AsReferenceable(avatar, 'perspective') puid = avatar.processUniqueID() logout = [logout] def maybeLogout(): if (not logout): return fn = logout[0] del logout[0] fn() self.broker._localCleanup[puid] = maybeLogout self.broker.notifyOnDisconnect(maybeLogout) return avatar
[ "def", "_fixed_cbLogin", "(", "self", ",", "xxx_todo_changeme", ")", ":", "(", "interface", ",", "avatar", ",", "logout", ")", "=", "xxx_todo_changeme", "if", "(", "not", "IJellyable", ".", "providedBy", "(", "avatar", ")", ")", ":", "avatar", "=", "pb", ".", "AsReferenceable", "(", "avatar", ",", "'perspective'", ")", "puid", "=", "avatar", ".", "processUniqueID", "(", ")", "logout", "=", "[", "logout", "]", "def", "maybeLogout", "(", ")", ":", "if", "(", "not", "logout", ")", ":", "return", "fn", "=", "logout", "[", "0", "]", "del", "logout", "[", "0", "]", "fn", "(", ")", "self", ".", "broker", ".", "_localCleanup", "[", "puid", "]", "=", "maybeLogout", "self", ".", "broker", ".", "notifyOnDisconnect", "(", "maybeLogout", ")", "return", "avatar" ]
ensure that the avatar to be returned to the client is jellyable and set up disconnection notification to call the realm's logout object .
train
false
1,740
def getSupportedKeyExchanges(): from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import ec from twisted.conch.ssh.keys import _curveTable backend = default_backend() kexAlgorithms = _kexAlgorithms.copy() for keyAlgorithm in list(kexAlgorithms): if keyAlgorithm.startswith('ecdh'): keyAlgorithmDsa = keyAlgorithm.replace('ecdh', 'ecdsa') supported = backend.elliptic_curve_exchange_algorithm_supported(ec.ECDH(), _curveTable[keyAlgorithmDsa]) if (not supported): kexAlgorithms.pop(keyAlgorithm) return sorted(kexAlgorithms, key=(lambda kexAlgorithm: kexAlgorithms[kexAlgorithm].preference))
[ "def", "getSupportedKeyExchanges", "(", ")", ":", "from", "cryptography", ".", "hazmat", ".", "backends", "import", "default_backend", "from", "cryptography", ".", "hazmat", ".", "primitives", ".", "asymmetric", "import", "ec", "from", "twisted", ".", "conch", ".", "ssh", ".", "keys", "import", "_curveTable", "backend", "=", "default_backend", "(", ")", "kexAlgorithms", "=", "_kexAlgorithms", ".", "copy", "(", ")", "for", "keyAlgorithm", "in", "list", "(", "kexAlgorithms", ")", ":", "if", "keyAlgorithm", ".", "startswith", "(", "'ecdh'", ")", ":", "keyAlgorithmDsa", "=", "keyAlgorithm", ".", "replace", "(", "'ecdh'", ",", "'ecdsa'", ")", "supported", "=", "backend", ".", "elliptic_curve_exchange_algorithm_supported", "(", "ec", ".", "ECDH", "(", ")", ",", "_curveTable", "[", "keyAlgorithmDsa", "]", ")", "if", "(", "not", "supported", ")", ":", "kexAlgorithms", ".", "pop", "(", "keyAlgorithm", ")", "return", "sorted", "(", "kexAlgorithms", ",", "key", "=", "(", "lambda", "kexAlgorithm", ":", "kexAlgorithms", "[", "kexAlgorithm", "]", ".", "preference", ")", ")" ]
get a list of supported key exchange algorithm names in order of preference .
train
false
1,741
def _format_range_unified(start, stop): beginning = (start + 1) length = (stop - start) if (length == 1): return '{}'.format(beginning) if (not length): beginning -= 1 return '{},{}'.format(beginning, length)
[ "def", "_format_range_unified", "(", "start", ",", "stop", ")", ":", "beginning", "=", "(", "start", "+", "1", ")", "length", "=", "(", "stop", "-", "start", ")", "if", "(", "length", "==", "1", ")", ":", "return", "'{}'", ".", "format", "(", "beginning", ")", "if", "(", "not", "length", ")", ":", "beginning", "-=", "1", "return", "'{},{}'", ".", "format", "(", "beginning", ",", "length", ")" ]
convert range to the "ed" format .
train
true
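Since the record above is self-contained, a short usage sketch, with the function body restated on proper lines for readability:

def _format_range_unified(start, stop):
    # convert a half-open range to the "ed" format used by unified diffs
    beginning = start + 1
    length = stop - start
    if length == 1:
        return '{}'.format(beginning)
    if not length:
        beginning -= 1
    return '{},{}'.format(beginning, length)

print(_format_range_unified(0, 1))  # -> 1
print(_format_range_unified(0, 3))  # -> 1,3
print(_format_range_unified(2, 2))  # -> 2,0  (empty range)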
1,742
def handle_register(request, text_template=None, html_template=None, subject=None, email_data=None, *args, **kwargs): if (request.method == 'POST'): form = RegisterForm(request.POST) if form.is_valid(): form = try_send_email_with_form(RegistrationProfile.objects.create_inactive_user, form, 'email', form.cleaned_data['username'], form.cleaned_data['password'], form.cleaned_data['email'], locale=request.LANGUAGE_CODE, text_template=text_template, html_template=html_template, subject=subject, email_data=email_data, volunteer_interest=form.cleaned_data['interested'], *args, **kwargs) if (not form.is_valid()): User.objects.filter(email=form.instance.email).delete() else: statsd.incr('user.register') return form return RegisterForm()
[ "def", "handle_register", "(", "request", ",", "text_template", "=", "None", ",", "html_template", "=", "None", ",", "subject", "=", "None", ",", "email_data", "=", "None", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "form", "=", "RegisterForm", "(", "request", ".", "POST", ")", "if", "form", ".", "is_valid", "(", ")", ":", "form", "=", "try_send_email_with_form", "(", "RegistrationProfile", ".", "objects", ".", "create_inactive_user", ",", "form", ",", "'email'", ",", "form", ".", "cleaned_data", "[", "'username'", "]", ",", "form", ".", "cleaned_data", "[", "'password'", "]", ",", "form", ".", "cleaned_data", "[", "'email'", "]", ",", "locale", "=", "request", ".", "LANGUAGE_CODE", ",", "text_template", "=", "text_template", ",", "html_template", "=", "html_template", ",", "subject", "=", "subject", ",", "email_data", "=", "email_data", ",", "volunteer_interest", "=", "form", ".", "cleaned_data", "[", "'interested'", "]", ",", "*", "args", ",", "**", "kwargs", ")", "if", "(", "not", "form", ".", "is_valid", "(", ")", ")", ":", "User", ".", "objects", ".", "filter", "(", "email", "=", "form", ".", "instance", ".", "email", ")", ".", "delete", "(", ")", "else", ":", "statsd", ".", "incr", "(", "'user.register'", ")", "return", "form", "return", "RegisterForm", "(", ")" ]
handler to help with registration .
train
false
1,744
@handle_response_format @treeio_login_required @module_admin_required() def pagefolder_add(request, response_format='html'): if request.POST: form = PageFolderForm(request.POST) if form.is_valid(): folder = form.save() return HttpResponseRedirect(reverse('core_admin_pagefolder_view', args=[folder.id])) else: form = PageFolderForm() return render_to_response('core/administration/pagefolder_add', {'form': form}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "@", "module_admin_required", "(", ")", "def", "pagefolder_add", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "if", "request", ".", "POST", ":", "form", "=", "PageFolderForm", "(", "request", ".", "POST", ")", "if", "form", ".", "is_valid", "(", ")", ":", "folder", "=", "form", ".", "save", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'core_admin_pagefolder_view'", ",", "args", "=", "[", "folder", ".", "id", "]", ")", ")", "else", ":", "form", "=", "PageFolderForm", "(", ")", "return", "render_to_response", "(", "'core/administration/pagefolder_add'", ",", "{", "'form'", ":", "form", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
add a folder for static pages .
train
false
1,745
def check_fixed_len_bcs_dups(header, mapping_data, errors): header_field_to_check = 'BarcodeSequence' try: check_ix = header.index(header_field_to_check) except ValueError: return errors barcodes = [] correction = 1 for curr_data in mapping_data: barcodes.append(upper(curr_data[check_ix])) dups = duplicates_indices(barcodes) for curr_dup in dups: for curr_loc in dups[curr_dup]: errors.append(('Duplicate barcode %s found. DCTB %d,%d' % (curr_dup, (curr_loc + correction), check_ix))) return errors
[ "def", "check_fixed_len_bcs_dups", "(", "header", ",", "mapping_data", ",", "errors", ")", ":", "header_field_to_check", "=", "'BarcodeSequence'", "try", ":", "check_ix", "=", "header", ".", "index", "(", "header_field_to_check", ")", "except", "ValueError", ":", "return", "errors", "barcodes", "=", "[", "]", "correction", "=", "1", "for", "curr_data", "in", "mapping_data", ":", "barcodes", ".", "append", "(", "upper", "(", "curr_data", "[", "check_ix", "]", ")", ")", "dups", "=", "duplicates_indices", "(", "barcodes", ")", "for", "curr_dup", "in", "dups", ":", "for", "curr_loc", "in", "dups", "[", "curr_dup", "]", ":", "errors", ".", "append", "(", "(", "'Duplicate barcode %s found. DCTB %d,%d'", "%", "(", "curr_dup", ",", "(", "curr_loc", "+", "correction", ")", ",", "check_ix", ")", ")", ")", "return", "errors" ]
checks barcodes of same length for duplicates .
train
false
1,746
def all_index_generator(k=10): all_make_index_funcs = [makeIntIndex, makeFloatIndex, makeStringIndex, makeUnicodeIndex, makeDateIndex, makePeriodIndex, makeTimedeltaIndex, makeBoolIndex, makeCategoricalIndex] for make_index_func in all_make_index_funcs: (yield make_index_func(k=k))
[ "def", "all_index_generator", "(", "k", "=", "10", ")", ":", "all_make_index_funcs", "=", "[", "makeIntIndex", ",", "makeFloatIndex", ",", "makeStringIndex", ",", "makeUnicodeIndex", ",", "makeDateIndex", ",", "makePeriodIndex", ",", "makeTimedeltaIndex", ",", "makeBoolIndex", ",", "makeCategoricalIndex", "]", "for", "make_index_func", "in", "all_make_index_funcs", ":", "(", "yield", "make_index_func", "(", "k", "=", "k", ")", ")" ]
generator which can be iterated over to get instances of all the various index classes .
train
false
1,748
def structured(inputSchema, outputSchema, schema_store=None, ignore_body=False): if (schema_store is None): schema_store = {} inputValidator = getValidator(inputSchema, schema_store) outputValidator = getValidator(outputSchema, schema_store) def deco(original): @wraps(original) @_remote_logging @_logging @_serialize(outputValidator) def loadAndDispatch(self, request, **routeArguments): if ((request.method in ('GET', 'DELETE')) or ignore_body): objects = {} else: body = request.content.read() try: objects = loads(body) except ValueError: raise DECODING_ERROR errors = [] for error in inputValidator.iter_errors(objects): errors.append(error.message) if errors: raise InvalidRequestJSON(errors=errors, schema=inputSchema) objects.update(routeArguments) return maybeDeferred(original, self, **objects) loadAndDispatch.inputSchema = inputSchema loadAndDispatch.outputSchema = outputSchema return loadAndDispatch return deco
[ "def", "structured", "(", "inputSchema", ",", "outputSchema", ",", "schema_store", "=", "None", ",", "ignore_body", "=", "False", ")", ":", "if", "(", "schema_store", "is", "None", ")", ":", "schema_store", "=", "{", "}", "inputValidator", "=", "getValidator", "(", "inputSchema", ",", "schema_store", ")", "outputValidator", "=", "getValidator", "(", "outputSchema", ",", "schema_store", ")", "def", "deco", "(", "original", ")", ":", "@", "wraps", "(", "original", ")", "@", "_remote_logging", "@", "_logging", "@", "_serialize", "(", "outputValidator", ")", "def", "loadAndDispatch", "(", "self", ",", "request", ",", "**", "routeArguments", ")", ":", "if", "(", "(", "request", ".", "method", "in", "(", "'GET'", ",", "'DELETE'", ")", ")", "or", "ignore_body", ")", ":", "objects", "=", "{", "}", "else", ":", "body", "=", "request", ".", "content", ".", "read", "(", ")", "try", ":", "objects", "=", "loads", "(", "body", ")", "except", "ValueError", ":", "raise", "DECODING_ERROR", "errors", "=", "[", "]", "for", "error", "in", "inputValidator", ".", "iter_errors", "(", "objects", ")", ":", "errors", ".", "append", "(", "error", ".", "message", ")", "if", "errors", ":", "raise", "InvalidRequestJSON", "(", "errors", "=", "errors", ",", "schema", "=", "inputSchema", ")", "objects", ".", "update", "(", "routeArguments", ")", "return", "maybeDeferred", "(", "original", ",", "self", ",", "**", "objects", ")", "loadAndDispatch", ".", "inputSchema", "=", "inputSchema", "loadAndDispatch", ".", "outputSchema", "=", "outputSchema", "return", "loadAndDispatch", "return", "deco" ]
decorate a klein-style endpoint method so that the request body is automatically decoded and the response body is automatically encoded .
train
false
1,749
def interleave(inter, f, seq): seq = iter(seq) try: f(seq.next()) except StopIteration: pass else: for x in seq: inter() f(x)
[ "def", "interleave", "(", "inter", ",", "f", ",", "seq", ")", ":", "seq", "=", "iter", "(", "seq", ")", "try", ":", "f", "(", "seq", ".", "next", "(", ")", ")", "except", "StopIteration", ":", "pass", "else", ":", "for", "x", "in", "seq", ":", "inter", "(", ")", "f", "(", "x", ")" ]
call f on each item in seq, calling inter() in between .
train
true
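The snippet above calls seq.next(), which is Python 2 only; a Python 3 sketch of the same helper:

def interleave(inter, f, seq):
    seq = iter(seq)
    try:
        f(next(seq))  # next() builtin replaces the Python 2 .next() method
    except StopIteration:
        pass
    else:
        for x in seq:
            inter()
            f(x)

interleave(lambda: print(', ', end=''), lambda x: print(x, end=''), [1, 2, 3])
# prints: 1, 2, 3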
1,750
def compress(body, compress_level): import zlib (yield '\x1f\x8b') (yield '\x08') (yield '\x00') (yield struct.pack('<L', (int(time.time()) & 4294967295L))) (yield '\x02') (yield '\xff') crc = zlib.crc32('') size = 0 zobj = zlib.compressobj(compress_level, zlib.DEFLATED, (- zlib.MAX_WBITS), zlib.DEF_MEM_LEVEL, 0) for line in body: size += len(line) crc = zlib.crc32(line, crc) (yield zobj.compress(line)) (yield zobj.flush()) (yield struct.pack('<L', (crc & 4294967295L))) (yield struct.pack('<L', (size & 4294967295L)))
[ "def", "compress", "(", "body", ",", "compress_level", ")", ":", "import", "zlib", "(", "yield", "'\\x1f\\x8b'", ")", "(", "yield", "'\\x08'", ")", "(", "yield", "'\\x00'", ")", "(", "yield", "struct", ".", "pack", "(", "'<L'", ",", "(", "int", "(", "time", ".", "time", "(", ")", ")", "&", "4294967295", "L", ")", ")", ")", "(", "yield", "'\\x02'", ")", "(", "yield", "'\\xff'", ")", "crc", "=", "zlib", ".", "crc32", "(", "''", ")", "size", "=", "0", "zobj", "=", "zlib", ".", "compressobj", "(", "compress_level", ",", "zlib", ".", "DEFLATED", ",", "(", "-", "zlib", ".", "MAX_WBITS", ")", ",", "zlib", ".", "DEF_MEM_LEVEL", ",", "0", ")", "for", "line", "in", "body", ":", "size", "+=", "len", "(", "line", ")", "crc", "=", "zlib", ".", "crc32", "(", "line", ",", "crc", ")", "(", "yield", "zobj", ".", "compress", "(", "line", ")", ")", "(", "yield", "zobj", ".", "flush", "(", ")", ")", "(", "yield", "struct", ".", "pack", "(", "'<L'", ",", "(", "crc", "&", "4294967295", "L", ")", ")", ")", "(", "yield", "struct", ".", "pack", "(", "'<L'", ",", "(", "size", "&", "4294967295", "L", ")", ")", ")" ]
compress a block of data .
train
false
1,751
@require_GET def contributors(request): product = _get_product(request) category = _get_category(request) return render_readouts(request, CONTRIBUTOR_READOUTS, 'contributors.html', locale=settings.WIKI_DEFAULT_LANGUAGE, product=product, extra_data={'overview_rows': kb_overview_rows(locale=request.LANGUAGE_CODE, product=product, mode=smart_int(request.GET.get('mode'), None), max=smart_int(request.GET.get('max'), 10), category=category), 'overview_modes': PERIODS, 'category': category, 'categories': CATEGORIES})
[ "@", "require_GET", "def", "contributors", "(", "request", ")", ":", "product", "=", "_get_product", "(", "request", ")", "category", "=", "_get_category", "(", "request", ")", "return", "render_readouts", "(", "request", ",", "CONTRIBUTOR_READOUTS", ",", "'contributors.html'", ",", "locale", "=", "settings", ".", "WIKI_DEFAULT_LANGUAGE", ",", "product", "=", "product", ",", "extra_data", "=", "{", "'overview_rows'", ":", "kb_overview_rows", "(", "locale", "=", "request", ".", "LANGUAGE_CODE", ",", "product", "=", "product", ",", "mode", "=", "smart_int", "(", "request", ".", "GET", ".", "get", "(", "'mode'", ")", ",", "None", ")", ",", "max", "=", "smart_int", "(", "request", ".", "GET", ".", "get", "(", "'max'", ")", ",", "10", ")", ",", "category", "=", "category", ")", ",", "'overview_modes'", ":", "PERIODS", ",", "'category'", ":", "category", ",", "'categories'", ":", "CATEGORIES", "}", ")" ]
render aggregate data about the articles in the default locale .
train
false
1,752
def qt5_qml_plugins_datas(directory): datas = [] qmldir = qt5_qml_dir() qt5_qml_plugin_dir = os.path.join(qmldir, directory) files = [] for (root, _dirs, _files) in os.walk(qt5_qml_plugin_dir): files.extend(misc.files_in_dir(root, ['qmldir', '*.qmltypes'])) for f in files: relpath = os.path.relpath(f, qmldir) (instdir, file) = os.path.split(relpath) instdir = os.path.join('qml', instdir) logger.debug(('qt5_qml_plugins_datas installing %s in %s' % (f, instdir))) datas.append((f, instdir)) return datas
[ "def", "qt5_qml_plugins_datas", "(", "directory", ")", ":", "datas", "=", "[", "]", "qmldir", "=", "qt5_qml_dir", "(", ")", "qt5_qml_plugin_dir", "=", "os", ".", "path", ".", "join", "(", "qmldir", ",", "directory", ")", "files", "=", "[", "]", "for", "(", "root", ",", "_dirs", ",", "_files", ")", "in", "os", ".", "walk", "(", "qt5_qml_plugin_dir", ")", ":", "files", ".", "extend", "(", "misc", ".", "files_in_dir", "(", "root", ",", "[", "'qmldir'", ",", "'*.qmltypes'", "]", ")", ")", "for", "f", "in", "files", ":", "relpath", "=", "os", ".", "path", ".", "relpath", "(", "f", ",", "qmldir", ")", "(", "instdir", ",", "file", ")", "=", "os", ".", "path", ".", "split", "(", "relpath", ")", "instdir", "=", "os", ".", "path", ".", "join", "(", "'qml'", ",", "instdir", ")", "logger", ".", "debug", "(", "(", "'qt5_qml_plugins_datas installing %s in %s'", "%", "(", "f", ",", "instdir", ")", ")", ")", "datas", ".", "append", "(", "(", "f", ",", "instdir", ")", ")", "return", "datas" ]
return list of data files for a qml plugin directory .
train
false
1,753
def readpipe(argv, preexec_fn=None, shell=False): p = subprocess.Popen(argv, stdout=subprocess.PIPE, preexec_fn=preexec_fn, shell=shell) (out, err) = p.communicate() if (p.returncode != 0): raise Exception(('subprocess %r failed with status %d' % (' '.join(argv), p.returncode))) return out
[ "def", "readpipe", "(", "argv", ",", "preexec_fn", "=", "None", ",", "shell", "=", "False", ")", ":", "p", "=", "subprocess", ".", "Popen", "(", "argv", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "preexec_fn", "=", "preexec_fn", ",", "shell", "=", "shell", ")", "(", "out", ",", "err", ")", "=", "p", ".", "communicate", "(", ")", "if", "(", "p", ".", "returncode", "!=", "0", ")", ":", "raise", "Exception", "(", "(", "'subprocess %r failed with status %d'", "%", "(", "' '", ".", "join", "(", "argv", ")", ",", "p", ".", "returncode", ")", ")", ")", "return", "out" ]
run a subprocess and return its output .
train
false
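For comparison, a hedged modern rewrite of the helper above using subprocess.run (Python 3.5+); the echo call assumes a POSIX system:

import subprocess

def readpipe(argv, shell=False):
    # check=True raises CalledProcessError on a nonzero exit status,
    # replacing the manual returncode check in the record above
    result = subprocess.run(argv, stdout=subprocess.PIPE, shell=shell, check=True)
    return result.stdout

print(readpipe(['echo', 'hello']))  # b'hello\n'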
1,756
def _make_update_dict(update): return {'id': update['id'], 'date': update['date'], 'content': update['content']}
[ "def", "_make_update_dict", "(", "update", ")", ":", "return", "{", "'id'", ":", "update", "[", "'id'", "]", ",", "'date'", ":", "update", "[", "'date'", "]", ",", "'content'", ":", "update", "[", "'content'", "]", "}" ]
return course update item as a dictionary with required keys .
train
false
1,757
@intercept_errors(UserAPIInternalError, ignore_errors=[UserAPIRequestError]) def update_email_opt_in(user, org, opt_in): (preference, _) = UserOrgTag.objects.get_or_create(user=user, org=org, key='email-optin') try: user_profile = UserProfile.objects.get(user=user) except ObjectDoesNotExist: raise UserNotFound() if user_profile.requires_parental_consent(age_limit=getattr(settings, 'EMAIL_OPTIN_MINIMUM_AGE', 13), default_requires_consent=False): opt_in = False preference.value = str(opt_in) try: preference.save() if (hasattr(settings, 'LMS_SEGMENT_KEY') and settings.LMS_SEGMENT_KEY): _track_update_email_opt_in(user.id, org, opt_in) except IntegrityError as err: log.warn(u'Could not update organization wide preference due to IntegrityError: {}'.format(err.message))
[ "@", "intercept_errors", "(", "UserAPIInternalError", ",", "ignore_errors", "=", "[", "UserAPIRequestError", "]", ")", "def", "update_email_opt_in", "(", "user", ",", "org", ",", "opt_in", ")", ":", "(", "preference", ",", "_", ")", "=", "UserOrgTag", ".", "objects", ".", "get_or_create", "(", "user", "=", "user", ",", "org", "=", "org", ",", "key", "=", "'email-optin'", ")", "try", ":", "user_profile", "=", "UserProfile", ".", "objects", ".", "get", "(", "user", "=", "user", ")", "except", "ObjectDoesNotExist", ":", "raise", "UserNotFound", "(", ")", "if", "user_profile", ".", "requires_parental_consent", "(", "age_limit", "=", "getattr", "(", "settings", ",", "'EMAIL_OPTIN_MINIMUM_AGE'", ",", "13", ")", ",", "default_requires_consent", "=", "False", ")", ":", "opt_in", "=", "False", "preference", ".", "value", "=", "str", "(", "opt_in", ")", "try", ":", "preference", ".", "save", "(", ")", "if", "(", "hasattr", "(", "settings", ",", "'LMS_SEGMENT_KEY'", ")", "and", "settings", ".", "LMS_SEGMENT_KEY", ")", ":", "_track_update_email_opt_in", "(", "user", ".", "id", ",", "org", ",", "opt_in", ")", "except", "IntegrityError", "as", "err", ":", "log", ".", "warn", "(", "u'Could not update organization wide preference due to IntegrityError: {}'", ".", "format", "(", "err", ".", "message", ")", ")" ]
updates a user's preference for receiving org-wide emails .
train
false
1,758
def load_backend(build_configuration, backend_package): backend_module = (backend_package + u'.register') try: module = importlib.import_module(backend_module) except ImportError as e: traceback.print_exc() raise BackendConfigurationError(u'Failed to load the {backend} backend: {error}'.format(backend=backend_module, error=e)) def invoke_entrypoint(name): entrypoint = getattr(module, name, (lambda : None)) try: return entrypoint() except TypeError as e: traceback.print_exc() raise BackendConfigurationError(u'Entrypoint {entrypoint} in {backend} must be a zero-arg callable: {error}'.format(entrypoint=name, backend=backend_module, error=e)) build_file_aliases = invoke_entrypoint(u'build_file_aliases') if build_file_aliases: build_configuration.register_aliases(build_file_aliases) subsystems = invoke_entrypoint(u'global_subsystems') if subsystems: build_configuration.register_subsystems(subsystems) invoke_entrypoint(u'register_goals')
[ "def", "load_backend", "(", "build_configuration", ",", "backend_package", ")", ":", "backend_module", "=", "(", "backend_package", "+", "u'.register'", ")", "try", ":", "module", "=", "importlib", ".", "import_module", "(", "backend_module", ")", "except", "ImportError", "as", "e", ":", "traceback", ".", "print_exc", "(", ")", "raise", "BackendConfigurationError", "(", "u'Failed to load the {backend} backend: {error}'", ".", "format", "(", "backend", "=", "backend_module", ",", "error", "=", "e", ")", ")", "def", "invoke_entrypoint", "(", "name", ")", ":", "entrypoint", "=", "getattr", "(", "module", ",", "name", ",", "(", "lambda", ":", "None", ")", ")", "try", ":", "return", "entrypoint", "(", ")", "except", "TypeError", "as", "e", ":", "traceback", ".", "print_exc", "(", ")", "raise", "BackendConfigurationError", "(", "u'Entrypoint {entrypoint} in {backend} must be a zero-arg callable: {error}'", ".", "format", "(", "entrypoint", "=", "name", ",", "backend", "=", "backend_module", ",", "error", "=", "e", ")", ")", "build_file_aliases", "=", "invoke_entrypoint", "(", "u'build_file_aliases'", ")", "if", "build_file_aliases", ":", "build_configuration", ".", "register_aliases", "(", "build_file_aliases", ")", "subsystems", "=", "invoke_entrypoint", "(", "u'global_subsystems'", ")", "if", "subsystems", ":", "build_configuration", ".", "register_subsystems", "(", "subsystems", ")", "invoke_entrypoint", "(", "u'register_goals'", ")" ]
loads a backend package and registers its build file aliases, global subsystems, and goals with the build configuration .
train
false
1,759
def get_umc_admin_objects(): import univention.admin return univention.admin.objects
[ "def", "get_umc_admin_objects", "(", ")", ":", "import", "univention", ".", "admin", "return", "univention", ".", "admin", ".", "objects" ]
convenience accessor for getting univention.admin.objects .
train
false
1,760
def obtain_serial_number(show_ver): match = re.search('Processor board ID (.+)', show_ver) if match: return match.group(1).strip() else: return None
[ "def", "obtain_serial_number", "(", "show_ver", ")", ":", "match", "=", "re", ".", "search", "(", "'Processor board ID (.+)'", ",", "show_ver", ")", "if", "match", ":", "return", "match", ".", "group", "(", "1", ")", ".", "strip", "(", ")", "else", ":", "return", "None" ]
return the serial number parsed from cisco ios "show version" output; example string: "processor board id ftx1000008x" .
train
false
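A quick self-contained check of the parser above against a made-up "show version" fragment:

import re

def obtain_serial_number(show_ver):
    match = re.search('Processor board ID (.+)', show_ver)
    if match:
        return match.group(1).strip()
    return None

sample = 'cisco 3845 ...\nProcessor board ID FTX1000008X\n...'
print(obtain_serial_number(sample))  # FTX1000008X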
1,763
def convert_serializable(records): for r in records: r.msg = r.getMessage() r.args = ()
[ "def", "convert_serializable", "(", "records", ")", ":", "for", "r", "in", "records", ":", "r", ".", "msg", "=", "r", ".", "getMessage", "(", ")", "r", ".", "args", "=", "(", ")" ]
convert logrecords to a serializable form .
train
false
1,764
def check_valid_naming(pattern=None, multi=None, anime_type=None): if (pattern is None): pattern = sickbeard.NAMING_PATTERN if (anime_type is None): anime_type = sickbeard.NAMING_ANIME logger.log(((u'Checking whether the pattern ' + pattern) + u' is valid for a single episode'), logger.DEBUG) valid = validate_name(pattern, None, anime_type) if (multi is not None): logger.log(((u'Checking whether the pattern ' + pattern) + u' is valid for a multi episode'), logger.DEBUG) valid = (valid and validate_name(pattern, multi, anime_type)) return valid
[ "def", "check_valid_naming", "(", "pattern", "=", "None", ",", "multi", "=", "None", ",", "anime_type", "=", "None", ")", ":", "if", "(", "pattern", "is", "None", ")", ":", "pattern", "=", "sickbeard", ".", "NAMING_PATTERN", "if", "(", "anime_type", "is", "None", ")", ":", "anime_type", "=", "sickbeard", ".", "NAMING_ANIME", "logger", ".", "log", "(", "(", "(", "u'Checking whether the pattern '", "+", "pattern", ")", "+", "u' is valid for a single episode'", ")", ",", "logger", ".", "DEBUG", ")", "valid", "=", "validate_name", "(", "pattern", ",", "None", ",", "anime_type", ")", "if", "(", "multi", "is", "not", "None", ")", ":", "logger", ".", "log", "(", "(", "(", "u'Checking whether the pattern '", "+", "pattern", ")", "+", "u' is valid for a multi episode'", ")", ",", "logger", ".", "DEBUG", ")", "valid", "=", "(", "valid", "and", "validate_name", "(", "pattern", ",", "multi", ",", "anime_type", ")", ")", "return", "valid" ]
checks if the name can be parsed back to its original form for both single and multi episodes .
train
false
1,765
def volume_attach(name, server_name, device='/dev/xvdb', profile=None, timeout=300): conn = _auth(profile) return conn.volume_attach(name, server_name, device, timeout)
[ "def", "volume_attach", "(", "name", ",", "server_name", ",", "device", "=", "'/dev/xvdb'", ",", "profile", "=", "None", ",", "timeout", "=", "300", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "volume_attach", "(", "name", ",", "server_name", ",", "device", ",", "timeout", ")" ]
attach a volume .
train
true
1,766
@not_implemented_for('undirected') def out_degree_centrality(G): centrality = {} s = (1.0 / (len(G) - 1.0)) centrality = {n: (d * s) for (n, d) in G.out_degree()} return centrality
[ "@", "not_implemented_for", "(", "'undirected'", ")", "def", "out_degree_centrality", "(", "G", ")", ":", "centrality", "=", "{", "}", "s", "=", "(", "1.0", "/", "(", "len", "(", "G", ")", "-", "1.0", ")", ")", "centrality", "=", "{", "n", ":", "(", "d", "*", "s", ")", "for", "(", "n", ",", "d", ")", "in", "G", ".", "out_degree", "(", ")", "}", "return", "centrality" ]
compute the out-degree centrality for nodes .
train
false
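The record above mirrors networkx's out_degree_centrality; a short usage sketch, assuming networkx is installed:

import networkx as nx

G = nx.DiGraph([(0, 1), (0, 2), (1, 2)])
# each out-degree is scaled by 1 / (n - 1)
print(nx.out_degree_centrality(G))  # {0: 1.0, 1: 0.5, 2: 0.0}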
1,767
def set_sharing_strategy(new_strategy): global _sharing_strategy assert (new_strategy in _all_sharing_strategies) _sharing_strategy = new_strategy
[ "def", "set_sharing_strategy", "(", "new_strategy", ")", ":", "global", "_sharing_strategy", "assert", "(", "new_strategy", "in", "_all_sharing_strategies", ")", "_sharing_strategy", "=", "new_strategy" ]
sets the strategy for sharing cpu tensors .
train
false
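The record above matches torch.multiprocessing's strategy setter; a usage sketch, assuming pytorch is installed:

import torch.multiprocessing as mp

print(mp.get_all_sharing_strategies())  # e.g. {'file_descriptor', 'file_system'}
mp.set_sharing_strategy('file_system')  # must be one of the strategies above
print(mp.get_sharing_strategy())        # 'file_system'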
1,769
def get_dup_labels_perc(fasta_labels): fasta_labels_count = float(len(fasta_labels)) fasta_labels_derep = float(len(set(fasta_labels))) perc_dup = ('%1.3f' % ((fasta_labels_count - fasta_labels_derep) / fasta_labels_count)) label_counts = defaultdict(int) for curr_label in fasta_labels: label_counts[curr_label] += 1 labels_from_dups = [] for label in label_counts: if (label_counts[label] > 1): labels_from_dups.append(label) return (perc_dup, labels_from_dups)
[ "def", "get_dup_labels_perc", "(", "fasta_labels", ")", ":", "fasta_labels_count", "=", "float", "(", "len", "(", "fasta_labels", ")", ")", "fasta_labels_derep", "=", "float", "(", "len", "(", "set", "(", "fasta_labels", ")", ")", ")", "perc_dup", "=", "(", "'%1.3f'", "%", "(", "(", "fasta_labels_count", "-", "fasta_labels_derep", ")", "/", "fasta_labels_count", ")", ")", "label_counts", "=", "defaultdict", "(", "int", ")", "for", "curr_label", "in", "fasta_labels", ":", "label_counts", "[", "curr_label", "]", "+=", "1", "labels_from_dups", "=", "[", "]", "for", "label", "in", "label_counts", ":", "if", "(", "label_counts", "[", "label", "]", ">", "1", ")", ":", "labels_from_dups", ".", "append", "(", "label", ")", "return", "(", "perc_dup", ",", "labels_from_dups", ")" ]
calculates the percentage of sequences with duplicate labels; fasta_labels is a list of fasta labels .
train
false
1,770
def _write_batch_lmdb(db, batch, image_count): try: with db.begin(write=True) as lmdb_txn: for (i, datum) in enumerate(batch): key = ('%08d_%d' % ((image_count + i), datum.label)) lmdb_txn.put(key, datum.SerializeToString()) except lmdb.MapFullError: curr_limit = db.info()['map_size'] new_limit = (curr_limit * 2) try: db.set_mapsize(new_limit) except AttributeError as e: version = tuple((int(x) for x in lmdb.__version__.split('.'))) if (version < (0, 87)): raise Error(('py-lmdb is out of date (%s vs 0.87)' % lmdb.__version__)) else: raise e _write_batch_lmdb(db, batch, image_count)
[ "def", "_write_batch_lmdb", "(", "db", ",", "batch", ",", "image_count", ")", ":", "try", ":", "with", "db", ".", "begin", "(", "write", "=", "True", ")", "as", "lmdb_txn", ":", "for", "(", "i", ",", "datum", ")", "in", "enumerate", "(", "batch", ")", ":", "key", "=", "(", "'%08d_%d'", "%", "(", "(", "image_count", "+", "i", ")", ",", "datum", ".", "label", ")", ")", "lmdb_txn", ".", "put", "(", "key", ",", "datum", ".", "SerializeToString", "(", ")", ")", "except", "lmdb", ".", "MapFullError", ":", "curr_limit", "=", "db", ".", "info", "(", ")", "[", "'map_size'", "]", "new_limit", "=", "(", "curr_limit", "*", "2", ")", "try", ":", "db", ".", "set_mapsize", "(", "new_limit", ")", "except", "AttributeError", "as", "e", ":", "version", "=", "tuple", "(", "(", "int", "(", "x", ")", "for", "x", "in", "lmdb", ".", "__version__", ".", "split", "(", "'.'", ")", ")", ")", "if", "(", "version", "<", "(", "0", ",", "87", ")", ")", ":", "raise", "Error", "(", "(", "'py-lmdb is out of date (%s vs 0.87)'", "%", "lmdb", ".", "__version__", ")", ")", "else", ":", "raise", "e", "_write_batch_lmdb", "(", "db", ",", "batch", ",", "image_count", ")" ]
write a batch to an lmdb database .
train
false
1,772
def tag_list(context, data_dict): model = context['model'] vocab_id_or_name = data_dict.get('vocabulary_id') query = (data_dict.get('query') or data_dict.get('q')) if query: query = query.strip() all_fields = data_dict.get('all_fields', None) _check_access('tag_list', context, data_dict) if query: (tags, count) = _tag_search(context, data_dict) else: tags = model.Tag.all(vocab_id_or_name) if tags: if all_fields: tag_list = model_dictize.tag_list_dictize(tags, context) else: tag_list = [tag.name for tag in tags] else: tag_list = [] return tag_list
[ "def", "tag_list", "(", "context", ",", "data_dict", ")", ":", "model", "=", "context", "[", "'model'", "]", "vocab_id_or_name", "=", "data_dict", ".", "get", "(", "'vocabulary_id'", ")", "query", "=", "(", "data_dict", ".", "get", "(", "'query'", ")", "or", "data_dict", ".", "get", "(", "'q'", ")", ")", "if", "query", ":", "query", "=", "query", ".", "strip", "(", ")", "all_fields", "=", "data_dict", ".", "get", "(", "'all_fields'", ",", "None", ")", "_check_access", "(", "'tag_list'", ",", "context", ",", "data_dict", ")", "if", "query", ":", "(", "tags", ",", "count", ")", "=", "_tag_search", "(", "context", ",", "data_dict", ")", "else", ":", "tags", "=", "model", ".", "Tag", ".", "all", "(", "vocab_id_or_name", ")", "if", "tags", ":", "if", "all_fields", ":", "tag_list", "=", "model_dictize", ".", "tag_list_dictize", "(", "tags", ",", "context", ")", "else", ":", "tag_list", "=", "[", "tag", ".", "name", "for", "tag", "in", "tags", "]", "else", ":", "tag_list", "=", "[", "]", "return", "tag_list" ]
return a list of tags .
train
false
1,773
def get_trending(obj, region=None): return _property_value_by_region(obj, region=region, property='trending')
[ "def", "get_trending", "(", "obj", ",", "region", "=", "None", ")", ":", "return", "_property_value_by_region", "(", "obj", ",", "region", "=", "region", ",", "property", "=", "'trending'", ")" ]
returns trending value for the given obj to use in elasticsearch .
train
false
1,775
def getElementsPath(subName=''): return getJoinedPath(getGeometryUtilitiesPath('evaluate_elements'), subName)
[ "def", "getElementsPath", "(", "subName", "=", "''", ")", ":", "return", "getJoinedPath", "(", "getGeometryUtilitiesPath", "(", "'evaluate_elements'", ")", ",", "subName", ")" ]
get the evaluate_elements directory path .
train
false
1,776
def rewrite_links(root, link_repl_func, resolve_base_href=False): from cssutils import replaceUrls, log, CSSParser log.setLevel(logging.WARN) log.raiseExceptions = False if resolve_base_href: resolve_base_href(root) for (el, attrib, link, pos) in iterlinks(root, find_links_in_css=False): new_link = link_repl_func(link.strip()) if (new_link == link): continue if (new_link is None): if (attrib is None): el.text = '' else: del el.attrib[attrib] continue if (attrib is None): new = ((el.text[:pos] + new_link) + el.text[(pos + len(link)):]) el.text = new else: cur = el.attrib[attrib] if ((not pos) and (len(cur) == len(link))): el.attrib[attrib] = new_link else: new = ((cur[:pos] + new_link) + cur[(pos + len(link)):]) el.attrib[attrib] = new parser = CSSParser(raiseExceptions=False, log=_css_logger, fetcher=(lambda x: (None, None))) for el in root.iter(etree.Element): try: tag = el.tag except UnicodeDecodeError: continue if ((tag == XHTML('style')) and el.text and ((_css_url_re.search(el.text) is not None) or ('@import' in el.text))): stylesheet = parser.parseString(el.text, validate=False) replaceUrls(stylesheet, link_repl_func) repl = stylesheet.cssText if isbytestring(repl): repl = repl.decode('utf-8') el.text = (('\n' + repl) + '\n') if ('style' in el.attrib): text = el.attrib['style'] if (_css_url_re.search(text) is not None): try: stext = parser.parseStyle(text, validate=False) except: continue replaceUrls(stext, link_repl_func) repl = stext.cssText.replace('\n', ' ').replace('\r', ' ') if isbytestring(repl): repl = repl.decode('utf-8') el.attrib['style'] = repl
[ "def", "rewrite_links", "(", "root", ",", "link_repl_func", ",", "resolve_base_href", "=", "False", ")", ":", "from", "cssutils", "import", "replaceUrls", ",", "log", ",", "CSSParser", "log", ".", "setLevel", "(", "logging", ".", "WARN", ")", "log", ".", "raiseExceptions", "=", "False", "if", "resolve_base_href", ":", "resolve_base_href", "(", "root", ")", "for", "(", "el", ",", "attrib", ",", "link", ",", "pos", ")", "in", "iterlinks", "(", "root", ",", "find_links_in_css", "=", "False", ")", ":", "new_link", "=", "link_repl_func", "(", "link", ".", "strip", "(", ")", ")", "if", "(", "new_link", "==", "link", ")", ":", "continue", "if", "(", "new_link", "is", "None", ")", ":", "if", "(", "attrib", "is", "None", ")", ":", "el", ".", "text", "=", "''", "else", ":", "del", "el", ".", "attrib", "[", "attrib", "]", "continue", "if", "(", "attrib", "is", "None", ")", ":", "new", "=", "(", "(", "el", ".", "text", "[", ":", "pos", "]", "+", "new_link", ")", "+", "el", ".", "text", "[", "(", "pos", "+", "len", "(", "link", ")", ")", ":", "]", ")", "el", ".", "text", "=", "new", "else", ":", "cur", "=", "el", ".", "attrib", "[", "attrib", "]", "if", "(", "(", "not", "pos", ")", "and", "(", "len", "(", "cur", ")", "==", "len", "(", "link", ")", ")", ")", ":", "el", ".", "attrib", "[", "attrib", "]", "=", "new_link", "else", ":", "new", "=", "(", "(", "cur", "[", ":", "pos", "]", "+", "new_link", ")", "+", "cur", "[", "(", "pos", "+", "len", "(", "link", ")", ")", ":", "]", ")", "el", ".", "attrib", "[", "attrib", "]", "=", "new", "parser", "=", "CSSParser", "(", "raiseExceptions", "=", "False", ",", "log", "=", "_css_logger", ",", "fetcher", "=", "(", "lambda", "x", ":", "(", "None", ",", "None", ")", ")", ")", "for", "el", "in", "root", ".", "iter", "(", "etree", ".", "Element", ")", ":", "try", ":", "tag", "=", "el", ".", "tag", "except", "UnicodeDecodeError", ":", "continue", "if", "(", "(", "tag", "==", "XHTML", "(", "'style'", ")", ")", "and", "el", ".", "text", "and", "(", "(", "_css_url_re", ".", "search", "(", "el", ".", "text", ")", "is", "not", "None", ")", "or", "(", "'@import'", "in", "el", ".", "text", ")", ")", ")", ":", "stylesheet", "=", "parser", ".", "parseString", "(", "el", ".", "text", ",", "validate", "=", "False", ")", "replaceUrls", "(", "stylesheet", ",", "link_repl_func", ")", "repl", "=", "stylesheet", ".", "cssText", "if", "isbytestring", "(", "repl", ")", ":", "repl", "=", "repl", ".", "decode", "(", "'utf-8'", ")", "el", ".", "text", "=", "(", "(", "'\\n'", "+", "repl", ")", "+", "'\\n'", ")", "if", "(", "'style'", "in", "el", ".", "attrib", ")", ":", "text", "=", "el", ".", "attrib", "[", "'style'", "]", "if", "(", "_css_url_re", ".", "search", "(", "text", ")", "is", "not", "None", ")", ":", "try", ":", "stext", "=", "parser", ".", "parseStyle", "(", "text", ",", "validate", "=", "False", ")", "except", ":", "continue", "replaceUrls", "(", "stext", ",", "link_repl_func", ")", "repl", "=", "stext", ".", "cssText", ".", "replace", "(", "'\\n'", ",", "' '", ")", ".", "replace", "(", "'\\r'", ",", "' '", ")", "if", "isbytestring", "(", "repl", ")", ":", "repl", "=", "repl", ".", "decode", "(", "'utf-8'", ")", "el", ".", "attrib", "[", "'style'", "]", "=", "repl" ]
thin wrapper around lxml's rewrite_links() that prevents extra html markup from being produced when there's no root tag present in the input html .
train
false
1,777
def version_requirement(version): return (version >= REQUIRED_VERSION)
[ "def", "version_requirement", "(", "version", ")", ":", "return", "(", "version", ">=", "REQUIRED_VERSION", ")" ]
test for minimum required version .
train
false
1,778
def to_one_hot(y, nb_class, dtype=None): ret = theano.tensor.zeros((y.shape[0], nb_class), dtype=dtype) ret = theano.tensor.set_subtensor(ret[(theano.tensor.arange(y.shape[0]), y)], 1) return ret
[ "def", "to_one_hot", "(", "y", ",", "nb_class", ",", "dtype", "=", "None", ")", ":", "ret", "=", "theano", ".", "tensor", ".", "zeros", "(", "(", "y", ".", "shape", "[", "0", "]", ",", "nb_class", ")", ",", "dtype", "=", "dtype", ")", "ret", "=", "theano", ".", "tensor", ".", "set_subtensor", "(", "ret", "[", "(", "theano", ".", "tensor", ".", "arange", "(", "y", ".", "shape", "[", "0", "]", ")", ",", "y", ")", "]", ",", "1", ")", "return", "ret" ]
return a matrix where each row corresponds to the one hot encoding of each element in y .
train
false
1,780
def get_object_jsonschema(question, required_fields, is_reviewer): object_jsonschema = {'type': 'object', 'additionalProperties': False, 'properties': {}} required = [] properties = question.get('properties') if properties: for property in properties: if (property.get('required', False) and required_fields): required.append(property['id']) values = extract_question_values(property, required_fields, is_reviewer) object_jsonschema['properties'][property['id']] = {'type': 'object', 'additionalProperties': False, 'properties': values} if required_fields: object_jsonschema['properties'][property['id']]['required'] = ['value'] if (required_fields and is_required(question)): object_jsonschema['required'] = required return object_jsonschema
[ "def", "get_object_jsonschema", "(", "question", ",", "required_fields", ",", "is_reviewer", ")", ":", "object_jsonschema", "=", "{", "'type'", ":", "'object'", ",", "'additionalProperties'", ":", "False", ",", "'properties'", ":", "{", "}", "}", "required", "=", "[", "]", "properties", "=", "question", ".", "get", "(", "'properties'", ")", "if", "properties", ":", "for", "property", "in", "properties", ":", "if", "(", "property", ".", "get", "(", "'required'", ",", "False", ")", "and", "required_fields", ")", ":", "required", ".", "append", "(", "property", "[", "'id'", "]", ")", "values", "=", "extract_question_values", "(", "property", ",", "required_fields", ",", "is_reviewer", ")", "object_jsonschema", "[", "'properties'", "]", "[", "property", "[", "'id'", "]", "]", "=", "{", "'type'", ":", "'object'", ",", "'additionalProperties'", ":", "False", ",", "'properties'", ":", "values", "}", "if", "required_fields", ":", "object_jsonschema", "[", "'properties'", "]", "[", "property", "[", "'id'", "]", "]", "[", "'required'", "]", "=", "[", "'value'", "]", "if", "(", "required_fields", "and", "is_required", "(", "question", ")", ")", ":", "object_jsonschema", "[", "'required'", "]", "=", "required", "return", "object_jsonschema" ]
returns jsonschema for nested objects within schema .
train
false
1,781
def save_traceback(app): import platform exc = traceback.format_exc() (fd, path) = tempfile.mkstemp('.log', 'sphinx-err-') os.write(fd, (_DEBUG_HEADER % (sphinx.__version__, platform.python_version(), docutils.__version__, docutils.__version_details__, jinja2.__version__)).encode('utf-8')) if (app is not None): for (extname, extmod) in app._extensions.iteritems(): os.write(fd, ('# %s from %s\n' % (extname, getattr(extmod, '__file__', 'unknown'))).encode('utf-8')) os.write(fd, exc.encode('utf-8')) os.close(fd) return path
[ "def", "save_traceback", "(", "app", ")", ":", "import", "platform", "exc", "=", "traceback", ".", "format_exc", "(", ")", "(", "fd", ",", "path", ")", "=", "tempfile", ".", "mkstemp", "(", "'.log'", ",", "'sphinx-err-'", ")", "os", ".", "write", "(", "fd", ",", "(", "_DEBUG_HEADER", "%", "(", "sphinx", ".", "__version__", ",", "platform", ".", "python_version", "(", ")", ",", "docutils", ".", "__version__", ",", "docutils", ".", "__version_details__", ",", "jinja2", ".", "__version__", ")", ")", ".", "encode", "(", "'utf-8'", ")", ")", "if", "(", "app", "is", "not", "None", ")", ":", "for", "(", "extname", ",", "extmod", ")", "in", "app", ".", "_extensions", ".", "iteritems", "(", ")", ":", "os", ".", "write", "(", "fd", ",", "(", "'# %s from %s\\n'", "%", "(", "extname", ",", "getattr", "(", "extmod", ",", "'__file__'", ",", "'unknown'", ")", ")", ")", ".", "encode", "(", "'utf-8'", ")", ")", "os", ".", "write", "(", "fd", ",", "exc", ".", "encode", "(", "'utf-8'", ")", ")", "os", ".", "close", "(", "fd", ")", "return", "path" ]
save the current exception's traceback in a temporary file .
train
false
1,782
def vn_free_ar(call=None, kwargs=None): if (call != 'function'): raise SaltCloudSystemExit('The vn_free_ar function must be called with -f or --function.') if (kwargs is None): kwargs = {} vn_id = kwargs.get('vn_id', None) vn_name = kwargs.get('vn_name', None) ar_id = kwargs.get('ar_id', None) if (ar_id is None): raise SaltCloudSystemExit("The vn_free_ar function requires an 'rn_id' to be provided.") if vn_id: if vn_name: log.warning("Both the 'vn_id' and 'vn_name' arguments were provided. 'vn_id' will take precedence.") elif vn_name: vn_id = get_vn_id(kwargs={'name': vn_name}) else: raise SaltCloudSystemExit("The vn_free_ar function requires a 'vn_id' or a 'vn_name' to be provided.") (server, user, password) = _get_xml_rpc() auth = ':'.join([user, password]) response = server.one.vn.free_ar(auth, int(vn_id), int(ar_id)) data = {'action': 'vn.free_ar', 'ar_freed': response[0], 'resource_id': response[1], 'error_code': response[2]} return data
[ "def", "vn_free_ar", "(", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The vn_free_ar function must be called with -f or --function.'", ")", "if", "(", "kwargs", "is", "None", ")", ":", "kwargs", "=", "{", "}", "vn_id", "=", "kwargs", ".", "get", "(", "'vn_id'", ",", "None", ")", "vn_name", "=", "kwargs", ".", "get", "(", "'vn_name'", ",", "None", ")", "ar_id", "=", "kwargs", ".", "get", "(", "'ar_id'", ",", "None", ")", "if", "(", "ar_id", "is", "None", ")", ":", "raise", "SaltCloudSystemExit", "(", "\"The vn_free_ar function requires an 'rn_id' to be provided.\"", ")", "if", "vn_id", ":", "if", "vn_name", ":", "log", ".", "warning", "(", "\"Both the 'vn_id' and 'vn_name' arguments were provided. 'vn_id' will take precedence.\"", ")", "elif", "vn_name", ":", "vn_id", "=", "get_vn_id", "(", "kwargs", "=", "{", "'name'", ":", "vn_name", "}", ")", "else", ":", "raise", "SaltCloudSystemExit", "(", "\"The vn_free_ar function requires a 'vn_id' or a 'vn_name' to be provided.\"", ")", "(", "server", ",", "user", ",", "password", ")", "=", "_get_xml_rpc", "(", ")", "auth", "=", "':'", ".", "join", "(", "[", "user", ",", "password", "]", ")", "response", "=", "server", ".", "one", ".", "vn", ".", "free_ar", "(", "auth", ",", "int", "(", "vn_id", ")", ",", "int", "(", "ar_id", ")", ")", "data", "=", "{", "'action'", ":", "'vn.free_ar'", ",", "'ar_freed'", ":", "response", "[", "0", "]", ",", "'resource_id'", ":", "response", "[", "1", "]", ",", "'error_code'", ":", "response", "[", "2", "]", "}", "return", "data" ]
frees a reserved address range from a virtual network .
train
true
1,785
def unpickleStringI(val, sek): x = _cStringIO(val) x.seek(sek) return x
[ "def", "unpickleStringI", "(", "val", ",", "sek", ")", ":", "x", "=", "_cStringIO", "(", "val", ")", "x", ".", "seek", "(", "sek", ")", "return", "x" ]
convert the output of L{pickleStringI} into an appropriate type for the current python version .
train
false
1,786
def CallSetAllowedModule(name, desired): if (USING_SDK and (name == 'django')): sys.path[:] = [dirname for dirname in sys.path if (not dirname.startswith(os.path.join(PYTHON_LIB, 'lib', 'django')))] if (desired in ('0.96', '1.2', '1.3')): sys.path.insert(1, os.path.join(PYTHON_LIB, 'lib', ('django-' + desired))) SetAllowedModule(name)
[ "def", "CallSetAllowedModule", "(", "name", ",", "desired", ")", ":", "if", "(", "USING_SDK", "and", "(", "name", "==", "'django'", ")", ")", ":", "sys", ".", "path", "[", ":", "]", "=", "[", "dirname", "for", "dirname", "in", "sys", ".", "path", "if", "(", "not", "dirname", ".", "startswith", "(", "os", ".", "path", ".", "join", "(", "PYTHON_LIB", ",", "'lib'", ",", "'django'", ")", ")", ")", "]", "if", "(", "desired", "in", "(", "'0.96'", ",", "'1.2'", ",", "'1.3'", ")", ")", ":", "sys", ".", "path", ".", "insert", "(", "1", ",", "os", ".", "path", ".", "join", "(", "PYTHON_LIB", ",", "'lib'", ",", "(", "'django-'", "+", "desired", ")", ")", ")", "SetAllowedModule", "(", "name", ")" ]
helper to call setallowedmodule .
train
false
1,788
def _assure_identity(fnc): def _wrapped(*args, **kwargs): if (identity is None): _create_identity() return fnc(*args, **kwargs) return _wrapped
[ "def", "_assure_identity", "(", "fnc", ")", ":", "def", "_wrapped", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "identity", "is", "None", ")", ":", "_create_identity", "(", ")", "return", "fnc", "(", "*", "args", ",", "**", "kwargs", ")", "return", "_wrapped" ]
ensures that the identity attribute is not none .
train
true
1,789
def add_error_patch(result): return make_instancemethod(TextTestResult.addError, result)
[ "def", "add_error_patch", "(", "result", ")", ":", "return", "make_instancemethod", "(", "TextTestResult", ".", "addError", ",", "result", ")" ]
create a new adderror method to patch into a result instance that recognizes the errorclasses attribute and deals with errorclasses correctly .
train
false
1,791
def setup_fog(): glEnable(GL_FOG) glFogfv(GL_FOG_COLOR, (GLfloat * 4)(0.5, 0.69, 1.0, 1)) glHint(GL_FOG_HINT, GL_DONT_CARE) glFogi(GL_FOG_MODE, GL_LINEAR) glFogf(GL_FOG_START, 20.0) glFogf(GL_FOG_END, 60.0)
[ "def", "setup_fog", "(", ")", ":", "glEnable", "(", "GL_FOG", ")", "glFogfv", "(", "GL_FOG_COLOR", ",", "(", "GLfloat", "*", "4", ")", "(", "0.5", ",", "0.69", ",", "1.0", ",", "1", ")", ")", "glHint", "(", "GL_FOG_HINT", ",", "GL_DONT_CARE", ")", "glFogi", "(", "GL_FOG_MODE", ",", "GL_LINEAR", ")", "glFogf", "(", "GL_FOG_START", ",", "20.0", ")", "glFogf", "(", "GL_FOG_END", ",", "60.0", ")" ]
configure the opengl fog properties .
train
false
1,792
def _log_backport(ovo, target_version): if (target_version and (target_version != ovo.VERSION)): LOG.debug('Backporting %(obj_name)s from version %(src_vers)s to version %(dst_vers)s', {'obj_name': ovo.obj_name(), 'src_vers': ovo.VERSION, 'dst_vers': target_version})
[ "def", "_log_backport", "(", "ovo", ",", "target_version", ")", ":", "if", "(", "target_version", "and", "(", "target_version", "!=", "ovo", ".", "VERSION", ")", ")", ":", "LOG", ".", "debug", "(", "'Backporting %(obj_name)s from version %(src_vers)s to version %(dst_vers)s'", ",", "{", "'obj_name'", ":", "ovo", ".", "obj_name", "(", ")", ",", "'src_vers'", ":", "ovo", ".", "VERSION", ",", "'dst_vers'", ":", "target_version", "}", ")" ]
log backported versioned objects .
train
false
1,793
def _fwd_bem_linear_collocation_solution(m): for surf in m['surfs']: complete_surface_info(surf, copy=False, verbose=False) logger.info('Computing the linear collocation solution...') logger.info(' Matrix coefficients...') coeff = _fwd_bem_lin_pot_coeff(m['surfs']) m['nsol'] = len(coeff) logger.info(' Inverting the coefficient matrix...') nps = [surf['np'] for surf in m['surfs']] m['solution'] = _fwd_bem_multi_solution(coeff, m['gamma'], nps) if (len(m['surfs']) == 3): ip_mult = (m['sigma'][1] / m['sigma'][2]) if (ip_mult <= FIFF.FWD_BEM_IP_APPROACH_LIMIT): logger.info('IP approach required...') logger.info(' Matrix coefficients (homog)...') coeff = _fwd_bem_lin_pot_coeff([m['surfs'][(-1)]]) logger.info(' Inverting the coefficient matrix (homog)...') ip_solution = _fwd_bem_homog_solution(coeff, [m['surfs'][(-1)]['np']]) logger.info(' Modify the original solution to incorporate IP approach...') _fwd_bem_ip_modify_solution(m['solution'], ip_solution, ip_mult, nps) m['bem_method'] = FIFF.FWD_BEM_LINEAR_COLL logger.info('Solution ready.')
[ "def", "_fwd_bem_linear_collocation_solution", "(", "m", ")", ":", "for", "surf", "in", "m", "[", "'surfs'", "]", ":", "complete_surface_info", "(", "surf", ",", "copy", "=", "False", ",", "verbose", "=", "False", ")", "logger", ".", "info", "(", "'Computing the linear collocation solution...'", ")", "logger", ".", "info", "(", "' Matrix coefficients...'", ")", "coeff", "=", "_fwd_bem_lin_pot_coeff", "(", "m", "[", "'surfs'", "]", ")", "m", "[", "'nsol'", "]", "=", "len", "(", "coeff", ")", "logger", ".", "info", "(", "' Inverting the coefficient matrix...'", ")", "nps", "=", "[", "surf", "[", "'np'", "]", "for", "surf", "in", "m", "[", "'surfs'", "]", "]", "m", "[", "'solution'", "]", "=", "_fwd_bem_multi_solution", "(", "coeff", ",", "m", "[", "'gamma'", "]", ",", "nps", ")", "if", "(", "len", "(", "m", "[", "'surfs'", "]", ")", "==", "3", ")", ":", "ip_mult", "=", "(", "m", "[", "'sigma'", "]", "[", "1", "]", "/", "m", "[", "'sigma'", "]", "[", "2", "]", ")", "if", "(", "ip_mult", "<=", "FIFF", ".", "FWD_BEM_IP_APPROACH_LIMIT", ")", ":", "logger", ".", "info", "(", "'IP approach required...'", ")", "logger", ".", "info", "(", "' Matrix coefficients (homog)...'", ")", "coeff", "=", "_fwd_bem_lin_pot_coeff", "(", "[", "m", "[", "'surfs'", "]", "[", "(", "-", "1", ")", "]", "]", ")", "logger", ".", "info", "(", "' Inverting the coefficient matrix (homog)...'", ")", "ip_solution", "=", "_fwd_bem_homog_solution", "(", "coeff", ",", "[", "m", "[", "'surfs'", "]", "[", "(", "-", "1", ")", "]", "[", "'np'", "]", "]", ")", "logger", ".", "info", "(", "' Modify the original solution to incorporate IP approach...'", ")", "_fwd_bem_ip_modify_solution", "(", "m", "[", "'solution'", "]", ",", "ip_solution", ",", "ip_mult", ",", "nps", ")", "m", "[", "'bem_method'", "]", "=", "FIFF", ".", "FWD_BEM_LINEAR_COLL", "logger", ".", "info", "(", "'Solution ready.'", ")" ]
compute the linear collocation potential solution .
train
false
1,794
def polynomial_multiply_mod(m1, m2, polymod, p): prod = (((len(m1) + len(m2)) - 1) * [0]) for i in range(len(m1)): for j in range(len(m2)): prod[(i + j)] = ((prod[(i + j)] + (m1[i] * m2[j])) % p) return polynomial_reduce_mod(prod, polymod, p)
[ "def", "polynomial_multiply_mod", "(", "m1", ",", "m2", ",", "polymod", ",", "p", ")", ":", "prod", "=", "(", "(", "(", "len", "(", "m1", ")", "+", "len", "(", "m2", ")", ")", "-", "1", ")", "*", "[", "0", "]", ")", "for", "i", "in", "range", "(", "len", "(", "m1", ")", ")", ":", "for", "j", "in", "range", "(", "len", "(", "m2", ")", ")", ":", "prod", "[", "(", "i", "+", "j", ")", "]", "=", "(", "(", "prod", "[", "(", "i", "+", "j", ")", "]", "+", "(", "m1", "[", "i", "]", "*", "m2", "[", "j", "]", ")", ")", "%", "p", ")", "return", "polynomial_reduce_mod", "(", "prod", ",", "polymod", ",", "p", ")" ]
polynomial multiplication modulo a polynomial over ints mod p .
train
true
1,796
def buildSlicedNetwork(): N = FeedForwardNetwork('sliced') a = LinearLayer(2, name='a') b = LinearLayer(2, name='b') N.addInputModule(a) N.addOutputModule(b) N.addConnection(FullConnection(a, b, inSliceTo=1, outSliceFrom=1)) N.addConnection(FullConnection(a, b, inSliceFrom=1, outSliceTo=1)) N.sortModules() return N
[ "def", "buildSlicedNetwork", "(", ")", ":", "N", "=", "FeedForwardNetwork", "(", "'sliced'", ")", "a", "=", "LinearLayer", "(", "2", ",", "name", "=", "'a'", ")", "b", "=", "LinearLayer", "(", "2", ",", "name", "=", "'b'", ")", "N", ".", "addInputModule", "(", "a", ")", "N", ".", "addOutputModule", "(", "b", ")", "N", ".", "addConnection", "(", "FullConnection", "(", "a", ",", "b", ",", "inSliceTo", "=", "1", ",", "outSliceFrom", "=", "1", ")", ")", "N", ".", "addConnection", "(", "FullConnection", "(", "a", ",", "b", ",", "inSliceFrom", "=", "1", ",", "outSliceTo", "=", "1", ")", ")", "N", ".", "sortModules", "(", ")", "return", "N" ]
build a network with sliced connections .
train
false
1,797
def lookupZone(name, timeout=None): return getResolver().lookupZone(name, timeout)
[ "def", "lookupZone", "(", "name", ",", "timeout", "=", "None", ")", ":", "return", "getResolver", "(", ")", ".", "lookupZone", "(", "name", ",", "timeout", ")" ]
perform an axfr record lookup .
train
false
1,798
def _find_function(name, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) for funcs in salt.utils.boto3.paged_call(conn.list_functions): for func in funcs['Functions']: if (func['FunctionName'] == name): return func return None
[ "def", "_find_function", "(", "name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "for", "funcs", "in", "salt", ".", "utils", ".", "boto3", ".", "paged_call", "(", "conn", ".", "list_functions", ")", ":", "for", "func", "in", "funcs", "[", "'Functions'", "]", ":", "if", "(", "func", "[", "'FunctionName'", "]", "==", "name", ")", ":", "return", "func", "return", "None" ]
given function name, find and return the matching lambda function or none .
train
false
1,800
@blueprint.route('/jobs/<job_id>', methods=['GET']) def show_job(job_id): job = scheduler.get_job(job_id) if (job is None): raise werkzeug.exceptions.NotFound('Job not found') if isinstance(job, dataset.DatasetJob): return flask.redirect(flask.url_for('digits.dataset.views.show', job_id=job_id)) if isinstance(job, model.ModelJob): return flask.redirect(flask.url_for('digits.model.views.show', job_id=job_id)) if isinstance(job, pretrained_model.PretrainedModelJob): return flask.redirect(flask.url_for('digits.pretrained_model.views.show', job_id=job_id)) else: raise werkzeug.exceptions.BadRequest('Invalid job type')
[ "@", "blueprint", ".", "route", "(", "'/jobs/<job_id>'", ",", "methods", "=", "[", "'GET'", "]", ")", "def", "show_job", "(", "job_id", ")", ":", "job", "=", "scheduler", ".", "get_job", "(", "job_id", ")", "if", "(", "job", "is", "None", ")", ":", "raise", "werkzeug", ".", "exceptions", ".", "NotFound", "(", "'Job not found'", ")", "if", "isinstance", "(", "job", ",", "dataset", ".", "DatasetJob", ")", ":", "return", "flask", ".", "redirect", "(", "flask", ".", "url_for", "(", "'digits.dataset.views.show'", ",", "job_id", "=", "job_id", ")", ")", "if", "isinstance", "(", "job", ",", "model", ".", "ModelJob", ")", ":", "return", "flask", ".", "redirect", "(", "flask", ".", "url_for", "(", "'digits.model.views.show'", ",", "job_id", "=", "job_id", ")", ")", "if", "isinstance", "(", "job", ",", "pretrained_model", ".", "PretrainedModelJob", ")", ":", "return", "flask", ".", "redirect", "(", "flask", ".", "url_for", "(", "'digits.pretrained_model.views.show'", ",", "job_id", "=", "job_id", ")", ")", "else", ":", "raise", "werkzeug", ".", "exceptions", ".", "BadRequest", "(", "'Invalid job type'", ")" ]
redirects to the appropriate /datasets/ or /models/ page .
train
false
1,802
def _setup_selinux_for_keys(fs, sshdir): if (not fs.has_file(os.path.join('etc', 'selinux'))): return rclocal = os.path.join('etc', 'rc.local') rc_d = os.path.join('etc', 'rc.d') if ((not fs.has_file(rclocal)) and fs.has_file(rc_d)): rclocal = os.path.join(rc_d, 'rc.local') restorecon = ['\n', '# Added by Nova to ensure injected ssh keys have the right context\n', ('restorecon -RF %s 2>/dev/null || :\n' % sshdir)] if (not fs.has_file(rclocal)): restorecon.insert(0, '#!/bin/sh') _inject_file_into_fs(fs, rclocal, ''.join(restorecon), append=True) fs.set_permissions(rclocal, 448)
[ "def", "_setup_selinux_for_keys", "(", "fs", ",", "sshdir", ")", ":", "if", "(", "not", "fs", ".", "has_file", "(", "os", ".", "path", ".", "join", "(", "'etc'", ",", "'selinux'", ")", ")", ")", ":", "return", "rclocal", "=", "os", ".", "path", ".", "join", "(", "'etc'", ",", "'rc.local'", ")", "rc_d", "=", "os", ".", "path", ".", "join", "(", "'etc'", ",", "'rc.d'", ")", "if", "(", "(", "not", "fs", ".", "has_file", "(", "rclocal", ")", ")", "and", "fs", ".", "has_file", "(", "rc_d", ")", ")", ":", "rclocal", "=", "os", ".", "path", ".", "join", "(", "rc_d", ",", "'rc.local'", ")", "restorecon", "=", "[", "'\\n'", ",", "'# Added by Nova to ensure injected ssh keys have the right context\\n'", ",", "(", "'restorecon -RF %s 2>/dev/null || :\\n'", "%", "sshdir", ")", "]", "if", "(", "not", "fs", ".", "has_file", "(", "rclocal", ")", ")", ":", "restorecon", ".", "insert", "(", "0", ",", "'#!/bin/sh'", ")", "_inject_file_into_fs", "(", "fs", ",", "rclocal", ",", "''", ".", "join", "(", "restorecon", ")", ",", "append", "=", "True", ")", "fs", ".", "set_permissions", "(", "rclocal", ",", "448", ")" ]
get selinux guests to ensure correct context on injected keys .
train
false
1,804
def firebase_delete(path): (response, content) = _get_http().request(path, method='DELETE')
[ "def", "firebase_delete", "(", "path", ")", ":", "(", "response", ",", "content", ")", "=", "_get_http", "(", ")", ".", "request", "(", "path", ",", "method", "=", "'DELETE'", ")" ]
removes the data at a particular path .
train
false
1,806
def create_image_metadata(data): disk_format = data['disk_format'] if (disk_format in ('ami', 'aki', 'ari')): container_format = disk_format elif (disk_format == 'docker'): disk_format = 'raw' container_format = 'docker' else: container_format = 'bare' meta = {'protected': data['protected'], 'disk_format': disk_format, 'container_format': container_format, 'min_disk': (data['minimum_disk'] or 0), 'min_ram': (data['minimum_ram'] or 0), 'name': data['name']} is_public = data.get('is_public', data.get('public', False)) properties = {} if data.get('description'): properties['description'] = data['description'] if data.get('kernel'): properties['kernel_id'] = data['kernel'] if data.get('ramdisk'): properties['ramdisk_id'] = data['ramdisk'] if data.get('architecture'): properties['architecture'] = data['architecture'] if (api.glance.VERSIONS.active < 2): meta.update({'is_public': is_public, 'properties': properties}) else: meta['visibility'] = ('public' if is_public else 'private') meta.update(properties) return meta
[ "def", "create_image_metadata", "(", "data", ")", ":", "disk_format", "=", "data", "[", "'disk_format'", "]", "if", "(", "disk_format", "in", "(", "'ami'", ",", "'aki'", ",", "'ari'", ")", ")", ":", "container_format", "=", "disk_format", "elif", "(", "disk_format", "==", "'docker'", ")", ":", "disk_format", "=", "'raw'", "container_format", "=", "'docker'", "else", ":", "container_format", "=", "'bare'", "meta", "=", "{", "'protected'", ":", "data", "[", "'protected'", "]", ",", "'disk_format'", ":", "disk_format", ",", "'container_format'", ":", "container_format", ",", "'min_disk'", ":", "(", "data", "[", "'minimum_disk'", "]", "or", "0", ")", ",", "'min_ram'", ":", "(", "data", "[", "'minimum_ram'", "]", "or", "0", ")", ",", "'name'", ":", "data", "[", "'name'", "]", "}", "is_public", "=", "data", ".", "get", "(", "'is_public'", ",", "data", ".", "get", "(", "'public'", ",", "False", ")", ")", "properties", "=", "{", "}", "if", "data", ".", "get", "(", "'description'", ")", ":", "properties", "[", "'description'", "]", "=", "data", "[", "'description'", "]", "if", "data", ".", "get", "(", "'kernel'", ")", ":", "properties", "[", "'kernel_id'", "]", "=", "data", "[", "'kernel'", "]", "if", "data", ".", "get", "(", "'ramdisk'", ")", ":", "properties", "[", "'ramdisk_id'", "]", "=", "data", "[", "'ramdisk'", "]", "if", "data", ".", "get", "(", "'architecture'", ")", ":", "properties", "[", "'architecture'", "]", "=", "data", "[", "'architecture'", "]", "if", "(", "api", ".", "glance", ".", "VERSIONS", ".", "active", "<", "2", ")", ":", "meta", ".", "update", "(", "{", "'is_public'", ":", "is_public", ",", "'properties'", ":", "properties", "}", ")", "else", ":", "meta", "[", "'visibility'", "]", "=", "(", "'public'", "if", "is_public", "else", "'private'", ")", "meta", ".", "update", "(", "properties", ")", "return", "meta" ]
use the given dict of image form data to generate the metadata used for creating the image in glance .
train
false
1,807
def _get_recurse_set(recurse): if (not recurse): return set() if (not isinstance(recurse, list)): raise TypeError('"recurse" must be formed as a list of strings') try: recurse_set = set(recurse) except TypeError: recurse_set = None if ((recurse_set is None) or (not (set(_RECURSE_TYPES) >= recurse_set))): raise ValueError('Types for "recurse" limited to {0}.'.format(', '.join(('"{0}"'.format(rtype) for rtype in _RECURSE_TYPES)))) if (('ignore_files' in recurse_set) and ('ignore_dirs' in recurse_set)): raise ValueError('Must not specify "recurse" options "ignore_files" and "ignore_dirs" at the same time.') return recurse_set
[ "def", "_get_recurse_set", "(", "recurse", ")", ":", "if", "(", "not", "recurse", ")", ":", "return", "set", "(", ")", "if", "(", "not", "isinstance", "(", "recurse", ",", "list", ")", ")", ":", "raise", "TypeError", "(", "'\"recurse\" must be formed as a list of strings'", ")", "try", ":", "recurse_set", "=", "set", "(", "recurse", ")", "except", "TypeError", ":", "recurse_set", "=", "None", "if", "(", "(", "recurse_set", "is", "None", ")", "or", "(", "not", "(", "set", "(", "_RECURSE_TYPES", ")", ">=", "recurse_set", ")", ")", ")", ":", "raise", "ValueError", "(", "'Types for \"recurse\" limited to {0}.'", ".", "format", "(", "', '", ".", "join", "(", "(", "'\"{0}\"'", ".", "format", "(", "rtype", ")", "for", "rtype", "in", "_RECURSE_TYPES", ")", ")", ")", ")", "if", "(", "(", "'ignore_files'", "in", "recurse_set", ")", "and", "(", "'ignore_dirs'", "in", "recurse_set", ")", ")", ":", "raise", "ValueError", "(", "'Must not specify \"recurse\" options \"ignore_files\" and \"ignore_dirs\" at the same time.'", ")", "return", "recurse_set" ]
convert *recurse* definition to a set of strings .
train
true
1,808
def get_service_module_name(service_model): name = service_model.metadata.get('serviceAbbreviation', service_model.metadata.get('serviceFullName', service_model.service_name)) name = name.replace('Amazon', '') name = name.replace('AWS', '') name = re.sub('\\W+', '', name) return name
[ "def", "get_service_module_name", "(", "service_model", ")", ":", "name", "=", "service_model", ".", "metadata", ".", "get", "(", "'serviceAbbreviation'", ",", "service_model", ".", "metadata", ".", "get", "(", "'serviceFullName'", ",", "service_model", ".", "service_name", ")", ")", "name", "=", "name", ".", "replace", "(", "'Amazon'", ",", "''", ")", "name", "=", "name", ".", "replace", "(", "'AWS'", ",", "''", ")", "name", "=", "re", ".", "sub", "(", "'\\\\W+'", ",", "''", ",", "name", ")", "return", "name" ]
returns the module name for a service . this is the value used in both the documentation and client class name .
train
false
1,810
def s_one_set(topics): s_one_set = [] for top_words in topics: s_one_set_t = [] for w_prime in top_words: s_one_set_t.append((w_prime, top_words)) s_one_set.append(s_one_set_t) return s_one_set
[ "def", "s_one_set", "(", "topics", ")", ":", "s_one_set", "=", "[", "]", "for", "top_words", "in", "topics", ":", "s_one_set_t", "=", "[", "]", "for", "w_prime", "in", "top_words", ":", "s_one_set_t", ".", "append", "(", "(", "w_prime", ",", "top_words", ")", ")", "s_one_set", ".", "append", "(", "s_one_set_t", ")", "return", "s_one_set" ]
this function performs s_one_set segmentation on a list of topics .
train
false
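Illustrative run, assuming the record's s_one_set is in scope; plain word lists stand in for gensim's token ids:

```python
topics = [['human', 'computer', 'system'], ['graph', 'trees']]
segmented = s_one_set(topics)
# every top word is paired with its whole topic, one sublist per topic
print(segmented[0][0])    # ('human', ['human', 'computer', 'system'])
print(len(segmented[1]))  # 2
```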
1,811
def reversed_arguments(func): def wrapped(*args): return func(*reversed(args)) return wrapped
[ "def", "reversed_arguments", "(", "func", ")", ":", "def", "wrapped", "(", "*", "args", ")", ":", "return", "func", "(", "*", "reversed", "(", "args", ")", ")", "return", "wrapped" ]
return a function with reversed argument order .
train
false
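A two-line demonstration (the wrapper is restated for self-containment):

```python
import operator

def reversed_arguments(func):
    def wrapped(*args):
        return func(*reversed(args))
    return wrapped

rdiv = reversed_arguments(operator.truediv)
print(rdiv(2, 10))  # 5.0, i.e. 10 / 2 with the arguments reversed
```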
1,812
@require_http_methods('GET') @login_required @expect_json def xblock_container_handler(request, usage_key_string): usage_key = usage_key_with_run(usage_key_string) if (not has_studio_read_access(request.user, usage_key.course_key)): raise PermissionDenied() response_format = request.GET.get('format', 'html') if ((response_format == 'json') or ('application/json' in request.META.get('HTTP_ACCEPT', 'application/json'))): with modulestore().bulk_operations(usage_key.course_key): response = _get_module_info(_get_xblock(usage_key, request.user), include_ancestor_info=True, include_publishing_info=True) return JsonResponse(response) else: return Http404
[ "@", "require_http_methods", "(", "'GET'", ")", "@", "login_required", "@", "expect_json", "def", "xblock_container_handler", "(", "request", ",", "usage_key_string", ")", ":", "usage_key", "=", "usage_key_with_run", "(", "usage_key_string", ")", "if", "(", "not", "has_studio_read_access", "(", "request", ".", "user", ",", "usage_key", ".", "course_key", ")", ")", ":", "raise", "PermissionDenied", "(", ")", "response_format", "=", "request", ".", "GET", ".", "get", "(", "'format'", ",", "'html'", ")", "if", "(", "(", "response_format", "==", "'json'", ")", "or", "(", "'application/json'", "in", "request", ".", "META", ".", "get", "(", "'HTTP_ACCEPT'", ",", "'application/json'", ")", ")", ")", ":", "with", "modulestore", "(", ")", ".", "bulk_operations", "(", "usage_key", ".", "course_key", ")", ":", "response", "=", "_get_module_info", "(", "_get_xblock", "(", "usage_key", ",", "request", ".", "user", ")", ",", "include_ancestor_info", "=", "True", ",", "include_publishing_info", "=", "True", ")", "return", "JsonResponse", "(", "response", ")", "else", ":", "return", "Http404" ]
the restful handler for requests for xblock information about the block and its children .
train
false
1,813
def elink(**keywds): cgi = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi' variables = {} variables.update(keywds) return _open(cgi, variables)
[ "def", "elink", "(", "**", "keywds", ")", ":", "cgi", "=", "'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi'", "variables", "=", "{", "}", "variables", ".", "update", "(", "keywds", ")", "return", "_open", "(", "cgi", ",", "variables", ")" ]
elink checks for linked external articles and returns a handle .
train
false
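In Biopython this is Bio.Entrez.elink; a typical call (the PubMed id is only an example, and network access is required):

```python
from Bio import Entrez

Entrez.email = "you@example.org"  # NCBI asks callers to identify themselves
handle = Entrez.elink(dbfrom="pubmed", db="pubmed", id="19304878")
record = Entrez.read(handle)      # parse the linked-article records
handle.close()
```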
1,816
def assertRaises(_exception, _callable=None, *args, **kwargs): manager = _AssertRaisesContextmanager(exception=_exception) if (_callable is not None): with manager: _callable(*args, **kwargs) else: return manager
[ "def", "assertRaises", "(", "_exception", ",", "_callable", "=", "None", ",", "*", "args", ",", "**", "kwargs", ")", ":", "manager", "=", "_AssertRaisesContextmanager", "(", "exception", "=", "_exception", ")", "if", "(", "_callable", "is", "not", "None", ")", ":", "with", "manager", ":", "_callable", "(", "*", "args", ",", "**", "kwargs", ")", "else", ":", "return", "manager" ]
assertraises that is usable as a context manager or in a with statement . exceptions that dont match the given exception type fall through .
train
false
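Both calling conventions, assuming this assertRaises helper is importable:

```python
# as a context manager
with assertRaises(ValueError):
    int('not a number')

# or called directly with the callable and its arguments
assertRaises(ZeroDivisionError, lambda x: 1 // x, 0)
```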
1,818
def renew_hook(config, domains, lineage_path): if config.renew_hook: if (not config.dry_run): os.environ['RENEWED_DOMAINS'] = ' '.join(domains) os.environ['RENEWED_LINEAGE'] = lineage_path logger.info('Running renew-hook command: %s', config.renew_hook) _run_hook(config.renew_hook) else: logger.warning('Dry run: skipping renewal hook command: %s', config.renew_hook)
[ "def", "renew_hook", "(", "config", ",", "domains", ",", "lineage_path", ")", ":", "if", "config", ".", "renew_hook", ":", "if", "(", "not", "config", ".", "dry_run", ")", ":", "os", ".", "environ", "[", "'RENEWED_DOMAINS'", "]", "=", "' '", ".", "join", "(", "domains", ")", "os", ".", "environ", "[", "'RENEWED_LINEAGE'", "]", "=", "lineage_path", "logger", ".", "info", "(", "'Running renew-hook command: %s'", ",", "config", ".", "renew_hook", ")", "_run_hook", "(", "config", ".", "renew_hook", ")", "else", ":", "logger", ".", "warning", "(", "'Dry run: skipping renewal hook command: %s'", ",", "config", ".", "renew_hook", ")" ]
run post-renewal hook if defined .
train
false
1,820
def example_data(infile='functional.nii'): filepath = os.path.abspath(__file__) basedir = os.path.dirname(filepath) outfile = os.path.join(basedir, 'data', infile) if (not os.path.exists(outfile)): raise IOError(('%s empty data file does NOT exist' % outfile)) return outfile
[ "def", "example_data", "(", "infile", "=", "'functional.nii'", ")", ":", "filepath", "=", "os", ".", "path", ".", "abspath", "(", "__file__", ")", "basedir", "=", "os", ".", "path", ".", "dirname", "(", "filepath", ")", "outfile", "=", "os", ".", "path", ".", "join", "(", "basedir", ",", "'data'", ",", "infile", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "outfile", ")", ")", ":", "raise", "IOError", "(", "(", "'%s empty data file does NOT exist'", "%", "outfile", ")", ")", "return", "outfile" ]
returns path to empty example data files for doc tests . it will raise an exception if filename is not in the directory .
train
false
1,821
@gen.engine def ListClientLogUsers(logs_store, callback): user_dir_re = re.compile('^([0-9]+)/$') (subdirs, _) = (yield gen.Task(store_utils.ListFilesAndDirs, logs_store, '/')) filtered = [] for s in subdirs: res = user_dir_re.match(s) if (res is not None): filtered.append(res.group(1)) callback(sorted(filtered))
[ "@", "gen", ".", "engine", "def", "ListClientLogUsers", "(", "logs_store", ",", "callback", ")", ":", "user_dir_re", "=", "re", ".", "compile", "(", "'^([0-9]+)/$'", ")", "(", "subdirs", ",", "_", ")", "=", "(", "yield", "gen", ".", "Task", "(", "store_utils", ".", "ListFilesAndDirs", ",", "logs_store", ",", "'/'", ")", ")", "filtered", "=", "[", "]", "for", "s", "in", "subdirs", ":", "res", "=", "user_dir_re", ".", "match", "(", "s", ")", "if", "(", "res", "is", "not", "None", ")", ":", "filtered", ".", "append", "(", "res", ".", "group", "(", "1", ")", ")", "callback", "(", "sorted", "(", "filtered", ")", ")" ]
return the list of all users with data in the client log repository .
train
false
1,822
def durable_hash(item, ephemeral_paths): durable_item = deepcopy(item) for path in ephemeral_paths: try: dpath.util.delete(durable_item, path, separator='$') except PathNotFound: pass return hash_config(durable_item)
[ "def", "durable_hash", "(", "item", ",", "ephemeral_paths", ")", ":", "durable_item", "=", "deepcopy", "(", "item", ")", "for", "path", "in", "ephemeral_paths", ":", "try", ":", "dpath", ".", "util", ".", "delete", "(", "durable_item", ",", "path", ",", "separator", "=", "'$'", ")", "except", "PathNotFound", ":", "pass", "return", "hash_config", "(", "durable_item", ")" ]
remove all ephemeral paths from the item and return the hash of the new structure .
train
false
1,823
def registerDateHandler(func): _date_handlers.insert(0, func)
[ "def", "registerDateHandler", "(", "func", ")", ":", "_date_handlers", ".", "insert", "(", "0", ",", "func", ")" ]
register a date handler function .
train
false
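Classic feedparser exposes this at the top level; a hedged sketch registering a handler for bare YYYYMMDD dates (the handler name is illustrative):

```python
import time
import feedparser

def parse_compact_date(s):
    # hypothetical handler for bare YYYYMMDD dates; return struct_time or None
    try:
        return time.strptime(s, '%Y%m%d')
    except ValueError:
        return None

feedparser.registerDateHandler(parse_compact_date)  # tried before built-ins
```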
1,824
@bdd.when(bdd.parsers.re('I press the keys? "(?P<keys>[^"]*)"')) def press_keys(quteproc, keys): quteproc.press_keys(keys)
[ "@", "bdd", ".", "when", "(", "bdd", ".", "parsers", ".", "re", "(", "'I press the keys? \"(?P<keys>[^\"]*)\"'", ")", ")", "def", "press_keys", "(", "quteproc", ",", "keys", ")", ":", "quteproc", ".", "press_keys", "(", "keys", ")" ]
send the given fake keys to qutebrowser .
train
false
1,827
def _process_tools_arg(plot, tools): tool_objs = [] tool_map = {} temp_tool_str = '' repeated_tools = [] if isinstance(tools, (list, tuple)): for tool in tools: if isinstance(tool, Tool): tool_objs.append(tool) elif isinstance(tool, string_types): temp_tool_str += (tool + ',') else: raise ValueError('tool should be a string or an instance of Tool class') tools = temp_tool_str for tool in re.split('\\s*,\\s*', tools.strip()): if (tool == ''): continue tool_obj = _tool_from_string(tool) tool_objs.append(tool_obj) tool_map[tool] = tool_obj for (typename, group) in itertools.groupby(sorted([tool.__class__.__name__ for tool in tool_objs])): if (len(list(group)) > 1): repeated_tools.append(typename) if repeated_tools: warnings.warn(('%s are being repeated' % ','.join(repeated_tools))) return (tool_objs, tool_map)
[ "def", "_process_tools_arg", "(", "plot", ",", "tools", ")", ":", "tool_objs", "=", "[", "]", "tool_map", "=", "{", "}", "temp_tool_str", "=", "''", "repeated_tools", "=", "[", "]", "if", "isinstance", "(", "tools", ",", "(", "list", ",", "tuple", ")", ")", ":", "for", "tool", "in", "tools", ":", "if", "isinstance", "(", "tool", ",", "Tool", ")", ":", "tool_objs", ".", "append", "(", "tool", ")", "elif", "isinstance", "(", "tool", ",", "string_types", ")", ":", "temp_tool_str", "+=", "(", "tool", "+", "','", ")", "else", ":", "raise", "ValueError", "(", "'tool should be a string or an instance of Tool class'", ")", "tools", "=", "temp_tool_str", "for", "tool", "in", "re", ".", "split", "(", "'\\\\s*,\\\\s*'", ",", "tools", ".", "strip", "(", ")", ")", ":", "if", "(", "tool", "==", "''", ")", ":", "continue", "tool_obj", "=", "_tool_from_string", "(", "tool", ")", "tool_objs", ".", "append", "(", "tool_obj", ")", "tool_map", "[", "tool", "]", "=", "tool_obj", "for", "(", "typename", ",", "group", ")", "in", "itertools", ".", "groupby", "(", "sorted", "(", "[", "tool", ".", "__class__", ".", "__name__", "for", "tool", "in", "tool_objs", "]", ")", ")", ":", "if", "(", "len", "(", "list", "(", "group", ")", ")", ">", "1", ")", ":", "repeated_tools", ".", "append", "(", "typename", ")", "if", "repeated_tools", ":", "warnings", ".", "warn", "(", "(", "'%s are being repeated'", "%", "','", ".", "join", "(", "repeated_tools", ")", ")", ")", "return", "(", "tool_objs", ",", "tool_map", ")" ]
adds tools to the plot object . args : plot ( instance of a plot object ) and tools ( list of tool types or string listing the tool names ) .
train
true
1,829
@ioflo.base.deeding.deedify('SaltRaetEventReturnFork', ioinits={'opts': '.salt.opts', 'proc_mgr': '.salt.usr.proc_mgr'}) def event_return_fork(self): self.proc_mgr.value.add_process(salt.utils.event.EventReturn, args=(self.opts.value,))
[ "@", "ioflo", ".", "base", ".", "deeding", ".", "deedify", "(", "'SaltRaetEventReturnFork'", ",", "ioinits", "=", "{", "'opts'", ":", "'.salt.opts'", ",", "'proc_mgr'", ":", "'.salt.usr.proc_mgr'", "}", ")", "def", "event_return_fork", "(", "self", ")", ":", "self", ".", "proc_mgr", ".", "value", ".", "add_process", "(", "salt", ".", "utils", ".", "event", ".", "EventReturn", ",", "args", "=", "(", "self", ".", "opts", ".", "value", ",", ")", ")" ]
add a reactor object to the process manager .
train
false
1,830
def matchSetStrengthNoNoise(match_set, target_set, noise): sum = 0.0 for t in target_set: sum += max((matchStrengthNoNoise(m, t, noise) for m in match_set)) return ((sum / len(target_set)),)
[ "def", "matchSetStrengthNoNoise", "(", "match_set", ",", "target_set", ",", "noise", ")", ":", "sum", "=", "0.0", "for", "t", "in", "target_set", ":", "sum", "+=", "max", "(", "(", "matchStrengthNoNoise", "(", "m", ",", "t", ",", "noise", ")", "for", "m", "in", "match_set", ")", ")", "return", "(", "(", "sum", "/", "len", "(", "target_set", ")", ")", ",", ")" ]
compute the match strength of a set of strings on the target set of strings .
train
false
1,831
@queued_task @app.task @transaction() def unsubscribe_mailchimp_async(list_name, user_id, username=None, send_goodbye=True): unsubscribe_mailchimp(list_name=list_name, user_id=user_id, username=username, send_goodbye=send_goodbye)
[ "@", "queued_task", "@", "app", ".", "task", "@", "transaction", "(", ")", "def", "unsubscribe_mailchimp_async", "(", "list_name", ",", "user_id", ",", "username", "=", "None", ",", "send_goodbye", "=", "True", ")", ":", "unsubscribe_mailchimp", "(", "list_name", "=", "list_name", ",", "user_id", "=", "user_id", ",", "username", "=", "username", ",", "send_goodbye", "=", "send_goodbye", ")" ]
same args as unsubscribe_mailchimp .
train
false
1,832
def get_breadcrumbs(url, request=None): from rest_framework.reverse import preserve_builtin_query_params from rest_framework.views import APIView def breadcrumbs_recursive(url, breadcrumbs_list, prefix, seen): u'\n Add tuples of (name, url) to the breadcrumbs list,\n progressively chomping off parts of the url.\n ' try: (view, unused_args, unused_kwargs) = resolve(url) except Exception: pass else: cls = getattr(view, u'cls', None) if ((cls is not None) and issubclass(cls, APIView)): if ((not seen) or (seen[(-1)] != view)): c = cls() c.suffix = getattr(view, u'suffix', None) name = c.get_view_name() insert_url = preserve_builtin_query_params((prefix + url), request) breadcrumbs_list.insert(0, (name, insert_url)) seen.append(view) if (url == u''): return breadcrumbs_list elif url.endswith(u'/'): url = url.rstrip(u'/') return breadcrumbs_recursive(url, breadcrumbs_list, prefix, seen) url = url[:(url.rfind(u'/') + 1)] return breadcrumbs_recursive(url, breadcrumbs_list, prefix, seen) prefix = get_script_prefix().rstrip(u'/') url = url[len(prefix):] return breadcrumbs_recursive(url, [], prefix, [])
[ "def", "get_breadcrumbs", "(", "url", ",", "request", "=", "None", ")", ":", "from", "rest_framework", ".", "reverse", "import", "preserve_builtin_query_params", "from", "rest_framework", ".", "views", "import", "APIView", "def", "breadcrumbs_recursive", "(", "url", ",", "breadcrumbs_list", ",", "prefix", ",", "seen", ")", ":", "try", ":", "(", "view", ",", "unused_args", ",", "unused_kwargs", ")", "=", "resolve", "(", "url", ")", "except", "Exception", ":", "pass", "else", ":", "cls", "=", "getattr", "(", "view", ",", "u'cls'", ",", "None", ")", "if", "(", "(", "cls", "is", "not", "None", ")", "and", "issubclass", "(", "cls", ",", "APIView", ")", ")", ":", "if", "(", "(", "not", "seen", ")", "or", "(", "seen", "[", "(", "-", "1", ")", "]", "!=", "view", ")", ")", ":", "c", "=", "cls", "(", ")", "c", ".", "suffix", "=", "getattr", "(", "view", ",", "u'suffix'", ",", "None", ")", "name", "=", "c", ".", "get_view_name", "(", ")", "insert_url", "=", "preserve_builtin_query_params", "(", "(", "prefix", "+", "url", ")", ",", "request", ")", "breadcrumbs_list", ".", "insert", "(", "0", ",", "(", "name", ",", "insert_url", ")", ")", "seen", ".", "append", "(", "view", ")", "if", "(", "url", "==", "u''", ")", ":", "return", "breadcrumbs_list", "elif", "url", ".", "endswith", "(", "u'/'", ")", ":", "url", "=", "url", ".", "rstrip", "(", "u'/'", ")", "return", "breadcrumbs_recursive", "(", "url", ",", "breadcrumbs_list", ",", "prefix", ",", "seen", ")", "url", "=", "url", "[", ":", "(", "url", ".", "rfind", "(", "u'/'", ")", "+", "1", ")", "]", "return", "breadcrumbs_recursive", "(", "url", ",", "breadcrumbs_list", ",", "prefix", ",", "seen", ")", "prefix", "=", "get_script_prefix", "(", ")", ".", "rstrip", "(", "u'/'", ")", "url", "=", "url", "[", "len", "(", "prefix", ")", ":", "]", "return", "breadcrumbs_recursive", "(", "url", ",", "[", "]", ",", "prefix", ",", "[", "]", ")" ]
get breadcrumbs .
train
false
1,834
def _asfarray(x): if (hasattr(x, 'dtype') and (x.dtype.char in numpy.typecodes['AllFloat'])): if (x.dtype == numpy.half): return numpy.asarray(x, dtype=numpy.float32) return numpy.asarray(x, dtype=x.dtype) else: ret = numpy.asarray(x) if (ret.dtype == numpy.half): return numpy.asarray(ret, dtype=numpy.float32) elif (ret.dtype.char not in numpy.typecodes['AllFloat']): return numpy.asfarray(x) return ret
[ "def", "_asfarray", "(", "x", ")", ":", "if", "(", "hasattr", "(", "x", ",", "'dtype'", ")", "and", "(", "x", ".", "dtype", ".", "char", "in", "numpy", ".", "typecodes", "[", "'AllFloat'", "]", ")", ")", ":", "if", "(", "x", ".", "dtype", "==", "numpy", ".", "half", ")", ":", "return", "numpy", ".", "asarray", "(", "x", ",", "dtype", "=", "numpy", ".", "float32", ")", "return", "numpy", ".", "asarray", "(", "x", ",", "dtype", "=", "x", ".", "dtype", ")", "else", ":", "ret", "=", "numpy", ".", "asarray", "(", "x", ")", "if", "(", "ret", ".", "dtype", "==", "numpy", ".", "half", ")", ":", "return", "numpy", ".", "asarray", "(", "ret", ",", "dtype", "=", "numpy", ".", "float32", ")", "elif", "(", "ret", ".", "dtype", ".", "char", "not", "in", "numpy", ".", "typecodes", "[", "'AllFloat'", "]", ")", ":", "return", "numpy", ".", "asfarray", "(", "x", ")", "return", "ret" ]
like numpy asfarray .
train
false
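Assuming the record's _asfarray (a private scipy.fftpack helper) is in scope; note numpy.asfarray was removed in NumPy 2.0, so this targets older NumPy:

```python
import numpy

print(_asfarray(numpy.ones(3, dtype=numpy.half)).dtype)     # float32, upcast
print(_asfarray(numpy.ones(3, dtype=numpy.float32)).dtype)  # float32, kept
print(_asfarray([1, 2, 3]).dtype)                           # float64
```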
1,835
def general_context(request): return {'game_name': GAME_NAME, 'game_slogan': GAME_SLOGAN, 'evennia_userapps': PLAYER_RELATED, 'evennia_entityapps': GAME_ENTITIES, 'evennia_setupapps': GAME_SETUP, 'evennia_connectapps': CONNECTIONS, 'evennia_websiteapps': WEBSITE, 'webclient_enabled': WEBCLIENT_ENABLED, 'websocket_enabled': WEBSOCKET_CLIENT_ENABLED, 'websocket_port': WEBSOCKET_PORT, 'websocket_url': WEBSOCKET_URL}
[ "def", "general_context", "(", "request", ")", ":", "return", "{", "'game_name'", ":", "GAME_NAME", ",", "'game_slogan'", ":", "GAME_SLOGAN", ",", "'evennia_userapps'", ":", "PLAYER_RELATED", ",", "'evennia_entityapps'", ":", "GAME_ENTITIES", ",", "'evennia_setupapps'", ":", "GAME_SETUP", ",", "'evennia_connectapps'", ":", "CONNECTIONS", ",", "'evennia_websiteapps'", ":", "WEBSITE", ",", "'webclient_enabled'", ":", "WEBCLIENT_ENABLED", ",", "'websocket_enabled'", ":", "WEBSOCKET_CLIENT_ENABLED", ",", "'websocket_port'", ":", "WEBSOCKET_PORT", ",", "'websocket_url'", ":", "WEBSOCKET_URL", "}" ]
returns common evennia-related context stuff .
train
false
1,836
def lowercase_value(value): if isinstance(value, six.string_types): result = value.lower() elif isinstance(value, (list, tuple)): result = [str(item).lower() for item in value] elif isinstance(value, dict): result = {} for (key, value) in six.iteritems(value): result[key.lower()] = str(value).lower() else: result = value return result
[ "def", "lowercase_value", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", ":", "result", "=", "value", ".", "lower", "(", ")", "elif", "isinstance", "(", "value", ",", "(", "list", ",", "tuple", ")", ")", ":", "result", "=", "[", "str", "(", "item", ")", ".", "lower", "(", ")", "for", "item", "in", "value", "]", "elif", "isinstance", "(", "value", ",", "dict", ")", ":", "result", "=", "{", "}", "for", "(", "key", ",", "value", ")", "in", "six", ".", "iteritems", "(", "value", ")", ":", "result", "[", "key", ".", "lower", "(", ")", "]", "=", "str", "(", "value", ")", ".", "lower", "(", ")", "else", ":", "result", "=", "value", "return", "result" ]
lowercase the provided value .
train
false
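Expected behaviour, assuming the record's lowercase_value and its six dependency are available:

```python
print(lowercase_value('HeLLo'))         # 'hello'
print(lowercase_value(['A', 1]))        # ['a', '1'], items are stringified
print(lowercase_value({'Key': 'VAL'}))  # {'key': 'val'}
print(lowercase_value(42))              # 42, other types pass through
```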
1,838
def connect_awslambda(aws_access_key_id=None, aws_secret_access_key=None, **kwargs): from boto.awslambda.layer1 import AWSLambdaConnection return AWSLambdaConnection(aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, **kwargs)
[ "def", "connect_awslambda", "(", "aws_access_key_id", "=", "None", ",", "aws_secret_access_key", "=", "None", ",", "**", "kwargs", ")", ":", "from", "boto", ".", "awslambda", ".", "layer1", "import", "AWSLambdaConnection", "return", "AWSLambdaConnection", "(", "aws_access_key_id", "=", "aws_access_key_id", ",", "aws_secret_access_key", "=", "aws_secret_access_key", ",", "**", "kwargs", ")" ]
connect to aws lambda .
train
false
1,841
def update_activity_first_published_msec(activity_type, activity_id, first_published_msec): activity_rights = _get_activity_rights(activity_type, activity_id) commit_cmds = [{'cmd': CMD_UPDATE_FIRST_PUBLISHED_MSEC, 'old_first_published_msec': activity_rights.first_published_msec, 'new_first_published_msec': first_published_msec}] activity_rights.first_published_msec = first_published_msec _save_activity_rights(feconf.SYSTEM_COMMITTER_ID, activity_rights, activity_type, 'set first published time in msec', commit_cmds)
[ "def", "update_activity_first_published_msec", "(", "activity_type", ",", "activity_id", ",", "first_published_msec", ")", ":", "activity_rights", "=", "_get_activity_rights", "(", "activity_type", ",", "activity_id", ")", "commit_cmds", "=", "[", "{", "'cmd'", ":", "CMD_UPDATE_FIRST_PUBLISHED_MSEC", ",", "'old_first_published_msec'", ":", "activity_rights", ".", "first_published_msec", ",", "'new_first_published_msec'", ":", "first_published_msec", "}", "]", "activity_rights", ".", "first_published_msec", "=", "first_published_msec", "_save_activity_rights", "(", "feconf", ".", "SYSTEM_COMMITTER_ID", ",", "activity_rights", ",", "activity_type", ",", "'set first published time in msec'", ",", "commit_cmds", ")" ]
updates the first_published_msec field for an activity .
train
false
1,842
def identify_hasher(encoded): if (((len(encoded) == 32) and (u'$' not in encoded)) or ((len(encoded) == 37) and encoded.startswith(u'md5$$'))): algorithm = u'unsalted_md5' elif ((len(encoded) == 46) and encoded.startswith(u'sha1$$')): algorithm = u'unsalted_sha1' else: algorithm = encoded.split(u'$', 1)[0] return get_hasher(algorithm)
[ "def", "identify_hasher", "(", "encoded", ")", ":", "if", "(", "(", "(", "len", "(", "encoded", ")", "==", "32", ")", "and", "(", "u'$'", "not", "in", "encoded", ")", ")", "or", "(", "(", "len", "(", "encoded", ")", "==", "37", ")", "and", "encoded", ".", "startswith", "(", "u'md5$$'", ")", ")", ")", ":", "algorithm", "=", "u'unsalted_md5'", "elif", "(", "(", "len", "(", "encoded", ")", "==", "46", ")", "and", "encoded", ".", "startswith", "(", "u'sha1$$'", ")", ")", ":", "algorithm", "=", "u'unsalted_sha1'", "else", ":", "algorithm", "=", "encoded", ".", "split", "(", "u'$'", ",", "1", ")", "[", "0", "]", "return", "get_hasher", "(", "algorithm", ")" ]
returns an instance of a loaded password hasher .
train
false
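Against Django's public API (a configured settings module is required for make_password):

```python
from django.contrib.auth.hashers import make_password, identify_hasher

encoded = make_password('s3cret')
print(identify_hasher(encoded).algorithm)  # e.g. 'pbkdf2_sha256'
```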
1,843
def configure_codelab_igor_jenkins(): YamlBindings.update_yml_source('/opt/spinnaker/config/spinnaker-local.yml', {'jenkins': {'defaultMaster': {'name': 'CodelabJenkins', 'baseUrl': 'http://localhost:9090', 'username': 'admin', 'password': 'admin'}}, 'igor': {'enabled': 'true'}})
[ "def", "configure_codelab_igor_jenkins", "(", ")", ":", "YamlBindings", ".", "update_yml_source", "(", "'/opt/spinnaker/config/spinnaker-local.yml'", ",", "{", "'jenkins'", ":", "{", "'defaultMaster'", ":", "{", "'name'", ":", "'CodelabJenkins'", ",", "'baseUrl'", ":", "'http://localhost:9090'", ",", "'username'", ":", "'admin'", ",", "'password'", ":", "'admin'", "}", "}", ",", "'igor'", ":", "{", "'enabled'", ":", "'true'", "}", "}", ")" ]
configures igor to be enabled and to point to the codelab jenkins instance .
train
false
1,844
def simulate_user_history(session, user): total_sessions = random.randrange(10) previous_user_session = None for _ in range(total_sessions): user_session = simulate_user_session(session, user, previous_user_session) previous_user_session = user_session
[ "def", "simulate_user_history", "(", "session", ",", "user", ")", ":", "total_sessions", "=", "random", ".", "randrange", "(", "10", ")", "previous_user_session", "=", "None", "for", "_", "in", "range", "(", "total_sessions", ")", ":", "user_session", "=", "simulate_user_session", "(", "session", ",", "user", ",", "previous_user_session", ")", "previous_user_session", "=", "user_session" ]
simulates the entire history of activity for a single user .
train
false
1,845
def conv_output_length(input_length, filter_size, border_mode, stride, dilation=1): if (input_length is None): return None assert (border_mode in {'same', 'valid', 'full'}) dilated_filter_size = (filter_size + ((filter_size - 1) * (dilation - 1))) if (border_mode == 'same'): output_length = input_length elif (border_mode == 'valid'): output_length = ((input_length - dilated_filter_size) + 1) elif (border_mode == 'full'): output_length = ((input_length + dilated_filter_size) - 1) return (((output_length + stride) - 1) // stride)
[ "def", "conv_output_length", "(", "input_length", ",", "filter_size", ",", "border_mode", ",", "stride", ",", "dilation", "=", "1", ")", ":", "if", "(", "input_length", "is", "None", ")", ":", "return", "None", "assert", "(", "border_mode", "in", "{", "'same'", ",", "'valid'", ",", "'full'", "}", ")", "dilated_filter_size", "=", "(", "filter_size", "+", "(", "(", "filter_size", "-", "1", ")", "*", "(", "dilation", "-", "1", ")", ")", ")", "if", "(", "border_mode", "==", "'same'", ")", ":", "output_length", "=", "input_length", "elif", "(", "border_mode", "==", "'valid'", ")", ":", "output_length", "=", "(", "(", "input_length", "-", "dilated_filter_size", ")", "+", "1", ")", "elif", "(", "border_mode", "==", "'full'", ")", ":", "output_length", "=", "(", "(", "input_length", "+", "dilated_filter_size", ")", "-", "1", ")", "return", "(", "(", "(", "output_length", "+", "stride", ")", "-", "1", ")", "//", "stride", ")" ]
helper function to compute the output size of a convolution operation . this function computes the length along a single axis .
train
true
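The formula is easy to sanity-check with small numbers; this sketch assumes conv_output_length from the record above is in scope:

```python
print(conv_output_length(28, 5, 'valid', 1))              # 24 = 28 - 5 + 1
print(conv_output_length(28, 5, 'same', 2))               # 14 = ceil(28 / 2)
# dilation 2 makes a size-3 filter cover 5 inputs: 28 - 5 + 1 = 24
print(conv_output_length(28, 3, 'valid', 1, dilation=2))  # 24
```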
1,846
def CreateCGate(name, latexname=None): if (not latexname): latexname = name onequbitgate = CreateOneQubitGate(name, latexname) def ControlledGate(ctrls, target): return CGate(tuple(ctrls), onequbitgate(target)) return ControlledGate
[ "def", "CreateCGate", "(", "name", ",", "latexname", "=", "None", ")", ":", "if", "(", "not", "latexname", ")", ":", "latexname", "=", "name", "onequbitgate", "=", "CreateOneQubitGate", "(", "name", ",", "latexname", ")", "def", "ControlledGate", "(", "ctrls", ",", "target", ")", ":", "return", "CGate", "(", "tuple", "(", "ctrls", ")", ",", "onequbitgate", "(", "target", ")", ")", "return", "ControlledGate" ]
use a lexical closure to make a controlled gate .
train
false
1,847
def get_materialized_checkpoints(checkpoints, colnames, lower_dt, odo_kwargs): if (checkpoints is not None): ts = checkpoints[TS_FIELD_NAME] checkpoints_ts = odo(ts[(ts <= lower_dt)].max(), pd.Timestamp, **odo_kwargs) if pd.isnull(checkpoints_ts): materialized_checkpoints = pd.DataFrame(columns=colnames) lower = None else: materialized_checkpoints = odo(checkpoints[(ts == checkpoints_ts)][colnames], pd.DataFrame, **odo_kwargs) lower = checkpoints_ts else: materialized_checkpoints = pd.DataFrame(columns=colnames) lower = None return (lower, materialized_checkpoints)
[ "def", "get_materialized_checkpoints", "(", "checkpoints", ",", "colnames", ",", "lower_dt", ",", "odo_kwargs", ")", ":", "if", "(", "checkpoints", "is", "not", "None", ")", ":", "ts", "=", "checkpoints", "[", "TS_FIELD_NAME", "]", "checkpoints_ts", "=", "odo", "(", "ts", "[", "(", "ts", "<=", "lower_dt", ")", "]", ".", "max", "(", ")", ",", "pd", ".", "Timestamp", ",", "**", "odo_kwargs", ")", "if", "pd", ".", "isnull", "(", "checkpoints_ts", ")", ":", "materialized_checkpoints", "=", "pd", ".", "DataFrame", "(", "columns", "=", "colnames", ")", "lower", "=", "None", "else", ":", "materialized_checkpoints", "=", "odo", "(", "checkpoints", "[", "(", "ts", "==", "checkpoints_ts", ")", "]", "[", "colnames", "]", ",", "pd", ".", "DataFrame", ",", "**", "odo_kwargs", ")", "lower", "=", "checkpoints_ts", "else", ":", "materialized_checkpoints", "=", "pd", ".", "DataFrame", "(", "columns", "=", "colnames", ")", "lower", "=", "None", "return", "(", "lower", ",", "materialized_checkpoints", ")" ]
computes a lower bound and a dataframe of materialized checkpoints .
train
true
1,848
def convert(t, convstr=True): for (i, op) in enumerate(t.op): context.clear_status() if (op in RoundModes): t.cop.append(op) t.pop.append(op) elif (((not t.contextfunc) and (i == 0)) or (convstr and isinstance(op, str))): try: c = C.Decimal(op) cex = None except (TypeError, ValueError, OverflowError) as e: c = None cex = e.__class__ try: p = RestrictedDecimal(op) pex = None except (TypeError, ValueError, OverflowError) as e: p = None pex = e.__class__ t.cop.append(c) t.cex.append(cex) t.pop.append(p) t.pex.append(pex) if (cex is pex): if ((str(c) != str(p)) or (not context.assert_eq_status())): raise_error(t) if (cex and pex): return 0 else: raise_error(t) elif isinstance(op, Context): t.context = op t.cop.append(op.c) t.pop.append(op.p) else: t.cop.append(op) t.pop.append(op) return 1
[ "def", "convert", "(", "t", ",", "convstr", "=", "True", ")", ":", "for", "(", "i", ",", "op", ")", "in", "enumerate", "(", "t", ".", "op", ")", ":", "context", ".", "clear_status", "(", ")", "if", "(", "op", "in", "RoundModes", ")", ":", "t", ".", "cop", ".", "append", "(", "op", ")", "t", ".", "pop", ".", "append", "(", "op", ")", "elif", "(", "(", "(", "not", "t", ".", "contextfunc", ")", "and", "(", "i", "==", "0", ")", ")", "or", "(", "convstr", "and", "isinstance", "(", "op", ",", "str", ")", ")", ")", ":", "try", ":", "c", "=", "C", ".", "Decimal", "(", "op", ")", "cex", "=", "None", "except", "(", "TypeError", ",", "ValueError", ",", "OverflowError", ")", "as", "e", ":", "c", "=", "None", "cex", "=", "e", ".", "__class__", "try", ":", "p", "=", "RestrictedDecimal", "(", "op", ")", "pex", "=", "None", "except", "(", "TypeError", ",", "ValueError", ",", "OverflowError", ")", "as", "e", ":", "p", "=", "None", "pex", "=", "e", ".", "__class__", "t", ".", "cop", ".", "append", "(", "c", ")", "t", ".", "cex", ".", "append", "(", "cex", ")", "t", ".", "pop", ".", "append", "(", "p", ")", "t", ".", "pex", ".", "append", "(", "pex", ")", "if", "(", "cex", "is", "pex", ")", ":", "if", "(", "(", "str", "(", "c", ")", "!=", "str", "(", "p", ")", ")", "or", "(", "not", "context", ".", "assert_eq_status", "(", ")", ")", ")", ":", "raise_error", "(", "t", ")", "if", "(", "cex", "and", "pex", ")", ":", "return", "0", "else", ":", "raise_error", "(", "t", ")", "elif", "isinstance", "(", "op", ",", "Context", ")", ":", "t", ".", "context", "=", "op", "t", ".", "cop", ".", "append", "(", "op", ".", "c", ")", "t", ".", "pop", ".", "append", "(", "op", ".", "p", ")", "else", ":", "t", ".", "cop", ".", "append", "(", "op", ")", "t", ".", "pop", ".", "append", "(", "op", ")", "return", "1" ]
convert the input to a properly typed numpy value according to the current casting policy .
train
false
1,850
def skipWithClientIf(condition, reason): def client_pass(*args, **kwargs): pass def skipdec(obj): retval = unittest.skip(reason)(obj) if (not isinstance(obj, type)): retval.client_skip = (lambda f: client_pass) return retval def noskipdec(obj): if (not (isinstance(obj, type) or hasattr(obj, 'client_skip'))): obj.client_skip = (lambda f: f) return obj return (skipdec if condition else noskipdec)
[ "def", "skipWithClientIf", "(", "condition", ",", "reason", ")", ":", "def", "client_pass", "(", "*", "args", ",", "**", "kwargs", ")", ":", "pass", "def", "skipdec", "(", "obj", ")", ":", "retval", "=", "unittest", ".", "skip", "(", "reason", ")", "(", "obj", ")", "if", "(", "not", "isinstance", "(", "obj", ",", "type", ")", ")", ":", "retval", ".", "client_skip", "=", "(", "lambda", "f", ":", "client_pass", ")", "return", "retval", "def", "noskipdec", "(", "obj", ")", ":", "if", "(", "not", "(", "isinstance", "(", "obj", ",", "type", ")", "or", "hasattr", "(", "obj", ",", "'client_skip'", ")", ")", ")", ":", "obj", ".", "client_skip", "=", "(", "lambda", "f", ":", "f", ")", "return", "obj", "return", "(", "skipdec", "if", "condition", "else", "noskipdec", ")" ]
skip decorated test if condition is true .
train
false
1,852
def remove_series(name, forget=False): downloaded_releases = [] with Session() as session: series = session.query(Series).filter((Series.name == name)).all() if series: for s in series: if forget: for episode in s.episodes: for release in episode.downloaded_releases: downloaded_releases.append(release.title) session.delete(s) session.commit() log.debug(u'Removed series %s from database.', name) else: raise ValueError((u'Unknown series %s' % name)) for downloaded_release in downloaded_releases: fire_event(u'forget', downloaded_release)
[ "def", "remove_series", "(", "name", ",", "forget", "=", "False", ")", ":", "downloaded_releases", "=", "[", "]", "with", "Session", "(", ")", "as", "session", ":", "series", "=", "session", ".", "query", "(", "Series", ")", ".", "filter", "(", "(", "Series", ".", "name", "==", "name", ")", ")", ".", "all", "(", ")", "if", "series", ":", "for", "s", "in", "series", ":", "if", "forget", ":", "for", "episode", "in", "s", ".", "episodes", ":", "for", "release", "in", "episode", ".", "downloaded_releases", ":", "downloaded_releases", ".", "append", "(", "release", ".", "title", ")", "session", ".", "delete", "(", "s", ")", "session", ".", "commit", "(", ")", "log", ".", "debug", "(", "u'Removed series %s from database.'", ",", "name", ")", "else", ":", "raise", "ValueError", "(", "(", "u'Unknown series %s'", "%", "name", ")", ")", "for", "downloaded_release", "in", "downloaded_releases", ":", "fire_event", "(", "u'forget'", ",", "downloaded_release", ")" ]
remove a whole series name from database .
train
false
1,854
def ipaddress_from_string(ip_address_string): parts = ip_address_string.rsplit('%', 1) ip_address_string = parts[0] return IPAddress(ip_address_string)
[ "def", "ipaddress_from_string", "(", "ip_address_string", ")", ":", "parts", "=", "ip_address_string", ".", "rsplit", "(", "'%'", ",", "1", ")", "ip_address_string", "=", "parts", "[", "0", "]", "return", "IPAddress", "(", "ip_address_string", ")" ]
parse an ipv4 or ipv6 address string and return an ipaddress instance .
train
false
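A self-contained sketch; the IPAddress used by the record is assumed to behave like ipaddress.ip_address (the project's actual binding may differ):

```python
from ipaddress import ip_address as IPAddress  # assumed binding

def ipaddress_from_string(s):
    return IPAddress(s.rsplit('%', 1)[0])

print(ipaddress_from_string('fe80::1%eth0'))  # fe80::1, scope id dropped
print(ipaddress_from_string('192.0.2.10'))    # 192.0.2.10
```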
1,855
def deploy_member_filters(status=False): T = current.T widgets = [S3TextFilter(['person_id$first_name', 'person_id$middle_name', 'person_id$last_name'], label=T('Name')), S3OptionsFilter('organisation_id', filter=True, hidden=True), S3OptionsFilter('credential.job_title_id', label=T('Sector'), hidden=True)] settings = current.deployment_settings if settings.get_hrm_teams(): widgets.append(S3OptionsFilter('group_membership.group_id', label=T('Teams'), hidden=True)) if settings.get_org_regions(): if settings.get_org_regions_hierarchical(): widgets.insert(1, S3HierarchyFilter('organisation_id$region_id', lookup='org_region', hidden=True, none=T('No Region'))) else: widgets.insert(1, S3OptionsFilter('organisation_id$region_id', widget='multiselect', filter=True)) if status: widgets.insert(1, S3OptionsFilter('application.active', cols=2, default=True, label=T('Status'), options={'True': T('active'), 'False': T('inactive')})) return widgets
[ "def", "deploy_member_filters", "(", "status", "=", "False", ")", ":", "T", "=", "current", ".", "T", "widgets", "=", "[", "S3TextFilter", "(", "[", "'person_id$first_name'", ",", "'person_id$middle_name'", ",", "'person_id$last_name'", "]", ",", "label", "=", "T", "(", "'Name'", ")", ")", ",", "S3OptionsFilter", "(", "'organisation_id'", ",", "filter", "=", "True", ",", "hidden", "=", "True", ")", ",", "S3OptionsFilter", "(", "'credential.job_title_id'", ",", "label", "=", "T", "(", "'Sector'", ")", ",", "hidden", "=", "True", ")", "]", "settings", "=", "current", ".", "deployment_settings", "if", "settings", ".", "get_hrm_teams", "(", ")", ":", "widgets", ".", "append", "(", "S3OptionsFilter", "(", "'group_membership.group_id'", ",", "label", "=", "T", "(", "'Teams'", ")", ",", "hidden", "=", "True", ")", ")", "if", "settings", ".", "get_org_regions", "(", ")", ":", "if", "settings", ".", "get_org_regions_hierarchical", "(", ")", ":", "widgets", ".", "insert", "(", "1", ",", "S3HierarchyFilter", "(", "'organisation_id$region_id'", ",", "lookup", "=", "'org_region'", ",", "hidden", "=", "True", ",", "none", "=", "T", "(", "'No Region'", ")", ")", ")", "else", ":", "widgets", ".", "insert", "(", "1", ",", "S3OptionsFilter", "(", "'organisation_id$region_id'", ",", "widget", "=", "'multiselect'", ",", "filter", "=", "True", ")", ")", "if", "status", ":", "widgets", ".", "insert", "(", "1", ",", "S3OptionsFilter", "(", "'application.active'", ",", "cols", "=", "2", ",", "default", "=", "True", ",", "label", "=", "T", "(", "'Status'", ")", ",", "options", "=", "{", "'True'", ":", "T", "(", "'active'", ")", ",", "'False'", ":", "T", "(", "'inactive'", ")", "}", ")", ")", "return", "widgets" ]
filter widgets for members .
train
false
1,856
@treeio_login_required def widget_index(request, response_format='html'): tickets = Object.filter_by_request(request, Ticket.objects.filter(status__hidden=False)) context = _get_default_context(request) context.update({'tickets': tickets}) return render_to_response('services/widgets/index', context, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "def", "widget_index", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "tickets", "=", "Object", ".", "filter_by_request", "(", "request", ",", "Ticket", ".", "objects", ".", "filter", "(", "status__hidden", "=", "False", ")", ")", "context", "=", "_get_default_context", "(", "request", ")", "context", ".", "update", "(", "{", "'tickets'", ":", "tickets", "}", ")", "return", "render_to_response", "(", "'services/widgets/index'", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
all active tickets .
train
false
1,858
def _search_coding_line(txt): coding_pattern = u'coding[:=]\\s*([-\\w.]+)' pat_coding = re.search(coding_pattern, txt) if (pat_coding and (pat_coding.groups()[0] != u'None')): return pat_coding.groups()[0] return None
[ "def", "_search_coding_line", "(", "txt", ")", ":", "coding_pattern", "=", "u'coding[:=]\\\\s*([-\\\\w.]+)'", "pat_coding", "=", "re", ".", "search", "(", "coding_pattern", ",", "txt", ")", "if", "(", "pat_coding", "and", "(", "pat_coding", ".", "groups", "(", ")", "[", "0", "]", "!=", "u'None'", ")", ")", ":", "return", "pat_coding", ".", "groups", "(", ")", "[", "0", "]", "return", "None" ]
search a pattern like this: # -*- coding: utf-8 -*- .
train
false
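Assuming the record's _search_coding_line is in scope:

```python
print(_search_coding_line('# -*- coding: utf-8 -*-'))  # 'utf-8'
print(_search_coding_line('#!/usr/bin/env python'))    # None, no coding tag
```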
1,860
def req_factory_factory(url='', user=None, post=False, data=None, **kwargs): req = RequestFactory() if post: req = req.post(url, (data or {})) else: req = req.get(url, (data or {})) if user: req.user = UserProfile.objects.get(id=user.id) req.groups = user.groups.all() else: req.user = AnonymousUser() req.check_ownership = partial(check_ownership, req) req.REGION = kwargs.pop('region', mkt.regions.REGIONS_CHOICES[0][1]) req.API_VERSION = 2 for key in kwargs: setattr(req, key, kwargs[key]) return req
[ "def", "req_factory_factory", "(", "url", "=", "''", ",", "user", "=", "None", ",", "post", "=", "False", ",", "data", "=", "None", ",", "**", "kwargs", ")", ":", "req", "=", "RequestFactory", "(", ")", "if", "post", ":", "req", "=", "req", ".", "post", "(", "url", ",", "(", "data", "or", "{", "}", ")", ")", "else", ":", "req", "=", "req", ".", "get", "(", "url", ",", "(", "data", "or", "{", "}", ")", ")", "if", "user", ":", "req", ".", "user", "=", "UserProfile", ".", "objects", ".", "get", "(", "id", "=", "user", ".", "id", ")", "req", ".", "groups", "=", "user", ".", "groups", ".", "all", "(", ")", "else", ":", "req", ".", "user", "=", "AnonymousUser", "(", ")", "req", ".", "check_ownership", "=", "partial", "(", "check_ownership", ",", "req", ")", "req", ".", "REGION", "=", "kwargs", ".", "pop", "(", "'region'", ",", "mkt", ".", "regions", ".", "REGIONS_CHOICES", "[", "0", "]", "[", "1", "]", ")", "req", ".", "API_VERSION", "=", "2", "for", "key", "in", "kwargs", ":", "setattr", "(", "req", ",", "key", ",", "kwargs", "[", "key", "]", ")", "return", "req" ]
creates a request factory .
train
false
1,862
def add_problem_to_course(course, problem_type, extra_meta=None): assert (problem_type in PROBLEM_DICT) factory_dict = PROBLEM_DICT[problem_type] problem_xml = factory_dict['factory'].build_xml(**factory_dict['kwargs']) metadata = ({'rerandomize': 'always'} if ('metadata' not in factory_dict) else factory_dict['metadata']) if extra_meta: metadata = dict(metadata, **extra_meta) category_name = 'problem' return world.ItemFactory.create(parent_location=section_location(course), category=category_name, display_name=str(problem_type), data=problem_xml, metadata=metadata)
[ "def", "add_problem_to_course", "(", "course", ",", "problem_type", ",", "extra_meta", "=", "None", ")", ":", "assert", "(", "problem_type", "in", "PROBLEM_DICT", ")", "factory_dict", "=", "PROBLEM_DICT", "[", "problem_type", "]", "problem_xml", "=", "factory_dict", "[", "'factory'", "]", ".", "build_xml", "(", "**", "factory_dict", "[", "'kwargs'", "]", ")", "metadata", "=", "(", "{", "'rerandomize'", ":", "'always'", "}", "if", "(", "'metadata'", "not", "in", "factory_dict", ")", "else", "factory_dict", "[", "'metadata'", "]", ")", "if", "extra_meta", ":", "metadata", "=", "dict", "(", "metadata", ",", "**", "extra_meta", ")", "category_name", "=", "'problem'", "return", "world", ".", "ItemFactory", ".", "create", "(", "parent_location", "=", "section_location", "(", "course", ")", ",", "category", "=", "category_name", ",", "display_name", "=", "str", "(", "problem_type", ")", ",", "data", "=", "problem_xml", ",", "metadata", "=", "metadata", ")" ]
add a problem to the course we have created using factories .
train
false
1,865
def slstm(c_prev1, c_prev2, x1, x2): return SLSTM()(c_prev1, c_prev2, x1, x2)
[ "def", "slstm", "(", "c_prev1", ",", "c_prev2", ",", "x1", ",", "x2", ")", ":", "return", "SLSTM", "(", ")", "(", "c_prev1", ",", "c_prev2", ",", "x1", ",", "x2", ")" ]
s-lstm units as an activation function .
train
false
1,866
def reduceby(key, binop, seq, init=no_default): is_no_default = (init == no_default) if ((not is_no_default) and (not callable(init))): _init = init init = (lambda : _init) if (not callable(key)): key = getter(key) d = {} for item in seq: k = key(item) if (k not in d): if is_no_default: d[k] = item continue else: d[k] = init() d[k] = binop(d[k], item) return d
[ "def", "reduceby", "(", "key", ",", "binop", ",", "seq", ",", "init", "=", "no_default", ")", ":", "is_no_default", "=", "(", "init", "==", "no_default", ")", "if", "(", "(", "not", "is_no_default", ")", "and", "(", "not", "callable", "(", "init", ")", ")", ")", ":", "_init", "=", "init", "init", "=", "(", "lambda", ":", "_init", ")", "if", "(", "not", "callable", "(", "key", ")", ")", ":", "key", "=", "getter", "(", "key", ")", "d", "=", "{", "}", "for", "item", "in", "seq", ":", "k", "=", "key", "(", "item", ")", "if", "(", "k", "not", "in", "d", ")", ":", "if", "is_no_default", ":", "d", "[", "k", "]", "=", "item", "continue", "else", ":", "d", "[", "k", "]", "=", "init", "(", ")", "d", "[", "k", "]", "=", "binop", "(", "d", "[", "k", "]", ",", "item", ")", "return", "d" ]
perform a simultaneous groupby and reduction .
train
false
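The snippet matches toolz.reduceby, so the public API can be used directly:

```python
from toolz import reduceby

# group integers by parity and sum each group in one pass
print(reduceby(lambda x: x % 2 == 0, lambda acc, x: acc + x,
               [1, 2, 3, 4, 5], 0))
# {False: 9, True: 6}
```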
1,867
def getRightStripMinusSplit(lineString): oldLineStringLength = (-1) while (oldLineStringLength < len(lineString)): oldLineStringLength = len(lineString) lineString = lineString.replace('- ', '-') return lineString.split()
[ "def", "getRightStripMinusSplit", "(", "lineString", ")", ":", "oldLineStringLength", "=", "(", "-", "1", ")", "while", "(", "oldLineStringLength", "<", "len", "(", "lineString", ")", ")", ":", "oldLineStringLength", "=", "len", "(", "lineString", ")", "lineString", "=", "lineString", ".", "replace", "(", "'- '", ",", "'-'", ")", "return", "lineString", ".", "split", "(", ")" ]
get string with spaces after the minus sign stripped .
train
false
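A small check, assuming the record's getRightStripMinusSplit is in scope (it appears to come from skeinforge's gcode utilities); a dangling minus is glued to the following number before splitting:

```python
print(getRightStripMinusSplit('G1 X- 5 Y- 2'))  # ['G1', 'X-5', 'Y-2']
```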
1,868
def _gl_score(estimators, scoring, X, y): n_iter = X.shape[(-1)] n_est = len(estimators) for (ii, est) in enumerate(estimators): for jj in range(X.shape[(-1)]): if (scoring is not None): _score = scoring(est, X[..., jj], y) else: _score = est.score(X[..., jj], y) if ((ii == 0) & (jj == 0)): if isinstance(_score, np.ndarray): dtype = _score.dtype shape = np.r_[(n_est, n_iter, _score.shape)] else: dtype = type(_score) shape = [n_est, n_iter] score = np.zeros(shape, dtype) score[ii, jj, ...] = _score return score
[ "def", "_gl_score", "(", "estimators", ",", "scoring", ",", "X", ",", "y", ")", ":", "n_iter", "=", "X", ".", "shape", "[", "(", "-", "1", ")", "]", "n_est", "=", "len", "(", "estimators", ")", "for", "(", "ii", ",", "est", ")", "in", "enumerate", "(", "estimators", ")", ":", "for", "jj", "in", "range", "(", "X", ".", "shape", "[", "(", "-", "1", ")", "]", ")", ":", "if", "(", "scoring", "is", "not", "None", ")", ":", "_score", "=", "scoring", "(", "est", ",", "X", "[", "...", ",", "jj", "]", ",", "y", ")", "else", ":", "_score", "=", "est", ".", "score", "(", "X", "[", "...", ",", "jj", "]", ",", "y", ")", "if", "(", "(", "ii", "==", "0", ")", "&", "(", "jj", "==", "0", ")", ")", ":", "if", "isinstance", "(", "_score", ",", "np", ".", "ndarray", ")", ":", "dtype", "=", "_score", ".", "dtype", "shape", "=", "np", ".", "r_", "[", "(", "n_est", ",", "n_iter", ",", "_score", ".", "shape", ")", "]", "else", ":", "dtype", "=", "type", "(", "_score", ")", "shape", "=", "[", "n_est", ",", "n_iter", "]", "score", "=", "np", ".", "zeros", "(", "shape", ",", "dtype", ")", "score", "[", "ii", ",", "jj", ",", "...", "]", "=", "_score", "return", "score" ]
score _generalizationlight in parallel .
train
false
1,872
def tag_as_test_install(flocker_version, distribution, package_name): repository_url = get_repository_url(distribution=distribution, flocker_version=flocker_version) repository_host = urlparse(repository_url).hostname tag_url = bytes('https://{host}/clusterhq-internal-acceptance-test/{distribution}/{package}/{version}'.format(host=repository_host, distribution=distribution, package=package_name, version=flocker_version)) return http_get(tag_url)
[ "def", "tag_as_test_install", "(", "flocker_version", ",", "distribution", ",", "package_name", ")", ":", "repository_url", "=", "get_repository_url", "(", "distribution", "=", "distribution", ",", "flocker_version", "=", "flocker_version", ")", "repository_host", "=", "urlparse", "(", "repository_url", ")", ".", "hostname", "tag_url", "=", "bytes", "(", "'https://{host}/clusterhq-internal-acceptance-test/{distribution}/{package}/{version}'", ".", "format", "(", "host", "=", "repository_host", ",", "distribution", "=", "distribution", ",", "package", "=", "package_name", ",", "version", "=", "flocker_version", ")", ")", "return", "http_get", "(", "tag_url", ")" ]
creates an effect of making an http get to a specific url in an s3 bucket that has logging enabled .
train
false
1,875
def scalar_symbols(exprs): new_names = (('_%d' % i) for i in itertools.count(1)) scalars = [] names = set() for expr in exprs: if (expr._name and (expr._name not in names)): name = expr._name names.add(name) else: name = next(new_names) s = symbol(name, expr.schema) scalars.append(s) return scalars
[ "def", "scalar_symbols", "(", "exprs", ")", ":", "new_names", "=", "(", "(", "'_%d'", "%", "i", ")", "for", "i", "in", "itertools", ".", "count", "(", "1", ")", ")", "scalars", "=", "[", "]", "names", "=", "set", "(", ")", "for", "expr", "in", "exprs", ":", "if", "(", "expr", ".", "_name", "and", "(", "expr", ".", "_name", "not", "in", "names", ")", ")", ":", "name", "=", "expr", ".", "_name", "names", ".", "add", "(", "name", ")", "else", ":", "name", "=", "next", "(", "new_names", ")", "s", "=", "symbol", "(", "name", ",", "expr", ".", "schema", ")", "scalars", ".", "append", "(", "s", ")", "return", "scalars" ]
gives a sequence of scalar symbols to mirror these expressions .
train
false
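A self-contained sketch, assuming the record's scalar_symbols is defined in the same namespace; FakeExpr and the symbol stub stand in for blaze's expression objects.

import itertools  # needed by the record's scalar_symbols

class FakeExpr(object):
    # Stand-in carrying just the _name and schema attributes scalar_symbols reads.
    def __init__(self, name, schema):
        self._name, self.schema = name, schema
    def __repr__(self):
        return str(self._name)

def symbol(name, dshape):  # stand-in for the real symbol constructor
    return FakeExpr(name, dshape)

print(scalar_symbols([FakeExpr('x', 'int32'), FakeExpr('x', 'int32')]))
# [x, _1] -- the duplicated name falls back to a generated one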
1,876
def connect_to_region(region_name, **kw_params): for region in regions(): if (region.name == region_name): return region.connect(**kw_params) return None
[ "def", "connect_to_region", "(", "region_name", ",", "**", "kw_params", ")", ":", "for", "region", "in", "regions", "(", ")", ":", "if", "(", "region", ".", "name", "==", "region_name", ")", ":", "return", "region", ".", "connect", "(", "**", "kw_params", ")", "return", "None" ]
given a valid region name , return a connection to that region ; otherwise return none .
train
true
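A self-contained sketch; FakeRegion mimics the .name/.connect interface that the boto-style regions() registry would provide.

class FakeRegion(object):
    def __init__(self, name):
        self.name = name
    def connect(self, **kw_params):
        return 'connected to %s with %r' % (self.name, kw_params)

def regions():  # stand-in for the module's real region registry
    return [FakeRegion('us-east-1'), FakeRegion('eu-west-1')]

def connect_to_region(region_name, **kw_params):
    for region in regions():
        if region.name == region_name:
            return region.connect(**kw_params)
    return None

print(connect_to_region('eu-west-1', profile_name='dev'))
print(connect_to_region('nowhere-1'))  # unknown name -> None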
1,877
def github(registry, xml_parent, data): ghtrig = XML.SubElement(xml_parent, 'com.cloudbees.jenkins.GitHubPushTrigger') XML.SubElement(ghtrig, 'spec').text = ''
[ "def", "github", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "ghtrig", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'com.cloudbees.jenkins.GitHubPushTrigger'", ")", "XML", ".", "SubElement", "(", "ghtrig", ",", "'spec'", ")", ".", "text", "=", "''" ]
yaml: github registers a github push trigger so the job runs when the repository is pushed to .
train
false
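A runnable sketch reproducing the record; it assumes, as in Jenkins Job Builder, that XML is an alias for xml.etree.ElementTree, and builds the trigger under a hypothetical <triggers> parent.

import xml.etree.ElementTree as XML

def github(registry, xml_parent, data):
    ghtrig = XML.SubElement(xml_parent, 'com.cloudbees.jenkins.GitHubPushTrigger')
    XML.SubElement(ghtrig, 'spec').text = ''

triggers = XML.Element('triggers')
github(None, triggers, None)
print(XML.tostring(triggers).decode())
# <triggers><com.cloudbees.jenkins.GitHubPushTrigger><spec /></...></triggers>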
1,878
def _matches(event, token_values): if (event.user_id is not None): for attribute_name in ['user_id', 'trustor_id', 'trustee_id']: if (event.user_id == token_values[attribute_name]): break else: return False if (event.domain_id is not None): for attribute_name in ['identity_domain_id', 'assignment_domain_id']: if (event.domain_id == token_values[attribute_name]): break else: return False if (event.domain_scope_id is not None): if (event.domain_scope_id != token_values['assignment_domain_id']): return False attribute_names = ['project_id', 'expires_at', 'trust_id', 'consumer_id', 'access_token_id', 'audit_id', 'audit_chain_id'] for attribute_name in attribute_names: if (getattr(event, attribute_name) is not None): if (getattr(event, attribute_name) != token_values[attribute_name]): return False if (event.role_id is not None): roles = token_values['roles'] for role in roles: if (event.role_id == role): break else: return False if (token_values['issued_at'] > event.issued_before): return False return True
[ "def", "_matches", "(", "event", ",", "token_values", ")", ":", "if", "(", "event", ".", "user_id", "is", "not", "None", ")", ":", "for", "attribute_name", "in", "[", "'user_id'", ",", "'trustor_id'", ",", "'trustee_id'", "]", ":", "if", "(", "event", ".", "user_id", "==", "token_values", "[", "attribute_name", "]", ")", ":", "break", "else", ":", "return", "False", "if", "(", "event", ".", "domain_id", "is", "not", "None", ")", ":", "for", "attribute_name", "in", "[", "'identity_domain_id'", ",", "'assignment_domain_id'", "]", ":", "if", "(", "event", ".", "domain_id", "==", "token_values", "[", "attribute_name", "]", ")", ":", "break", "else", ":", "return", "False", "if", "(", "event", ".", "domain_scope_id", "is", "not", "None", ")", ":", "if", "(", "event", ".", "domain_scope_id", "!=", "token_values", "[", "'assignment_domain_id'", "]", ")", ":", "return", "False", "attribute_names", "=", "[", "'project_id'", ",", "'expires_at'", ",", "'trust_id'", ",", "'consumer_id'", ",", "'access_token_id'", ",", "'audit_id'", ",", "'audit_chain_id'", "]", "for", "attribute_name", "in", "attribute_names", ":", "if", "(", "getattr", "(", "event", ",", "attribute_name", ")", "is", "not", "None", ")", ":", "if", "(", "getattr", "(", "event", ",", "attribute_name", ")", "!=", "token_values", "[", "attribute_name", "]", ")", ":", "return", "False", "if", "(", "event", ".", "role_id", "is", "not", "None", ")", ":", "roles", "=", "token_values", "[", "'roles'", "]", "for", "role", "in", "roles", ":", "if", "(", "event", ".", "role_id", "==", "role", ")", ":", "break", "else", ":", "return", "False", "if", "(", "token_values", "[", "'issued_at'", "]", ">", "event", ".", "issued_before", ")", ":", "return", "False", "return", "True" ]
see if the token matches the revocation event .
train
false
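A worked example, assuming the record's _matches is in scope; the field values and integer timestamps are invented, and None acts as a wildcard for unset event fields.

from types import SimpleNamespace

wildcards = ['user_id', 'domain_id', 'domain_scope_id', 'project_id',
             'expires_at', 'trust_id', 'consumer_id', 'access_token_id',
             'audit_id', 'audit_chain_id', 'role_id']
event = SimpleNamespace(issued_before=200, **dict.fromkeys(wildcards))
event.user_id = 'u1'  # revoke everything issued to this user before t=200

token_values = {'user_id': 'u1', 'trustor_id': None, 'trustee_id': None,
                'identity_domain_id': 'd1', 'assignment_domain_id': 'd1',
                'project_id': 'p1', 'expires_at': None, 'trust_id': None,
                'consumer_id': None, 'access_token_id': None, 'audit_id': None,
                'audit_chain_id': None, 'roles': ['admin'], 'issued_at': 100}
print(_matches(event, token_values))  # True: user matches and issued_at <= issued_before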
1,879
def sanitize_address(addr, encoding): if (not isinstance(addr, tuple)): addr = parseaddr(force_text(addr)) (nm, addr) = addr (localpart, domain) = (None, None) nm = Header(nm, encoding).encode() try: addr.encode('ascii') except UnicodeEncodeError: (localpart, domain) = split_addr(addr, encoding) if (localpart and domain): address = Address(nm, username=localpart, domain=domain) return str(address) try: address = Address(nm, addr_spec=addr) except (InvalidHeaderDefect, NonASCIILocalPartDefect): (localpart, domain) = split_addr(addr, encoding) address = Address(nm, username=localpart, domain=domain) return str(address)
[ "def", "sanitize_address", "(", "addr", ",", "encoding", ")", ":", "if", "(", "not", "isinstance", "(", "addr", ",", "tuple", ")", ")", ":", "addr", "=", "parseaddr", "(", "force_text", "(", "addr", ")", ")", "(", "nm", ",", "addr", ")", "=", "addr", "(", "localpart", ",", "domain", ")", "=", "(", "None", ",", "None", ")", "nm", "=", "Header", "(", "nm", ",", "encoding", ")", ".", "encode", "(", ")", "try", ":", "addr", ".", "encode", "(", "'ascii'", ")", "except", "UnicodeEncodeError", ":", "(", "localpart", ",", "domain", ")", "=", "split_addr", "(", "addr", ",", "encoding", ")", "if", "(", "localpart", "and", "domain", ")", ":", "address", "=", "Address", "(", "nm", ",", "username", "=", "localpart", ",", "domain", "=", "domain", ")", "return", "str", "(", "address", ")", "try", ":", "address", "=", "Address", "(", "nm", ",", "addr_spec", "=", "addr", ")", "except", "(", "InvalidHeaderDefect", ",", "NonASCIILocalPartDefect", ")", ":", "(", "localpart", ",", "domain", ")", "=", "split_addr", "(", "addr", ",", "encoding", ")", "address", "=", "Address", "(", "nm", ",", "username", "=", "localpart", ",", "domain", "=", "domain", ")", "return", "str", "(", "address", ")" ]
format a pair of (name, address) or an email address string .
train
false
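A hypothetical usage, assuming this is Django's sanitize_address from django.core.mail.message; the addresses are made up.

from django.core.mail.message import sanitize_address

print(sanitize_address(('Jane Doe', 'jane@example.com'), 'utf-8'))
# Jane Doe <jane@example.com>
print(sanitize_address('unnamed@example.com', 'utf-8'))
# unnamed@example.com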
1,880
def is_valid_closer(string): openers = ['[', '(', '{'] closers = [']', ')', '}'] stack = [] for ch in string: if ((not stack) and (ch in closers)): return False elif (ch in openers): stack.append(ch) elif (ch == ']'): if (stack.pop() != '['): return False elif (ch == ')'): if (stack.pop() != '('): return False elif (ch == '}'): if (stack.pop() != '{'): return False return (not stack)
[ "def", "is_valid_closer", "(", "string", ")", ":", "openers", "=", "[", "'['", ",", "'('", ",", "'{'", "]", "closers", "=", "[", "']'", ",", "')'", ",", "'}'", "]", "stack", "=", "[", "]", "for", "ch", "in", "string", ":", "if", "(", "(", "not", "stack", ")", "and", "(", "ch", "in", "closers", ")", ")", ":", "return", "False", "elif", "(", "ch", "in", "openers", ")", ":", "stack", ".", "append", "(", "ch", ")", "elif", "(", "ch", "==", "']'", ")", ":", "if", "(", "stack", ".", "pop", "(", ")", "!=", "'['", ")", ":", "return", "False", "elif", "(", "ch", "==", "')'", ")", ":", "if", "(", "stack", ".", "pop", "(", ")", "!=", "'('", ")", ":", "return", "False", "elif", "(", "ch", "==", "'}'", ")", ":", "if", "(", "stack", ".", "pop", "(", ")", "!=", "'{'", ")", ":", "return", "False", "return", "(", "not", "stack", ")" ]
returns true if every opener has a valid closer .
train
false
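Quick checks of the stack-based matcher, assuming the record's is_valid_closer is defined; the inputs are arbitrary bracket strings.

print(is_valid_closer('{[()]}'))  # True: each closer matches the most recent opener
print(is_valid_closer('([)]'))   # False: ')' arrives while '[' is on top of the stack
print(is_valid_closer('((('))    # False: unmatched openers leave the stack non-empty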