id_within_dataset: int64 (1 to 55.5k)
snippet: stringlengths (19 to 14.2k)
tokens: sequencelengths (6 to 1.63k)
nl: stringlengths (6 to 352)
split_within_dataset: stringclasses (1 value)
is_duplicated: bool (2 classes)
1,076
def update_codex(module):
    params = module.params
    changed = False
    codex = codex_list(module)
    fresh = codex_fresh(codex, module)
    if module.check_mode:
        if (not params['name']):
            if (not fresh):
                changed = True
            module.exit_json(changed=changed, msg='would have updated Codex')
    elif ((not fresh) or (params['name'] and (params['state'] == 'latest'))):
        module.run_command_environ_update.update(dict(SILENT='1'))
        cmd_scribe = ('%s update' % SORCERY['scribe'])
        (rc, stdout, stderr) = module.run_command(cmd_scribe)
        if (rc != 0):
            module.fail_json(msg=('unable to update Codex: ' + stdout))
        if (codex != codex_list(module)):
            changed = True
        if (not params['name']):
            module.exit_json(changed=changed, msg='successfully updated Codex')
[ "def", "update_codex", "(", "module", ")", ":", "params", "=", "module", ".", "params", "changed", "=", "False", "codex", "=", "codex_list", "(", "module", ")", "fresh", "=", "codex_fresh", "(", "codex", ",", "module", ")", "if", "module", ".", "check_mode", ":", "if", "(", "not", "params", "[", "'name'", "]", ")", ":", "if", "(", "not", "fresh", ")", ":", "changed", "=", "True", "module", ".", "exit_json", "(", "changed", "=", "changed", ",", "msg", "=", "'would have updated Codex'", ")", "elif", "(", "(", "not", "fresh", ")", "or", "(", "params", "[", "'name'", "]", "and", "(", "params", "[", "'state'", "]", "==", "'latest'", ")", ")", ")", ":", "module", ".", "run_command_environ_update", ".", "update", "(", "dict", "(", "SILENT", "=", "'1'", ")", ")", "cmd_scribe", "=", "(", "'%s update'", "%", "SORCERY", "[", "'scribe'", "]", ")", "(", "rc", ",", "stdout", ",", "stderr", ")", "=", "module", ".", "run_command", "(", "cmd_scribe", ")", "if", "(", "rc", "!=", "0", ")", ":", "module", ".", "fail_json", "(", "msg", "=", "(", "'unable to update Codex: '", "+", "stdout", ")", ")", "if", "(", "codex", "!=", "codex_list", "(", "module", ")", ")", ":", "changed", "=", "True", "if", "(", "not", "params", "[", "'name'", "]", ")", ":", "module", ".", "exit_json", "(", "changed", "=", "changed", ",", "msg", "=", "'successfully updated Codex'", ")" ]
update grimoire collections .
train
false
1,077
def deprecate_option(key, msg=None, rkey=None, removal_ver=None):
    key = key.lower()
    if (key in _deprecated_options):
        raise OptionError(("Option '%s' has already been defined as deprecated." % key))
    _deprecated_options[key] = DeprecatedOption(key, msg, rkey, removal_ver)
[ "def", "deprecate_option", "(", "key", ",", "msg", "=", "None", ",", "rkey", "=", "None", ",", "removal_ver", "=", "None", ")", ":", "key", "=", "key", ".", "lower", "(", ")", "if", "(", "key", "in", "_deprecated_options", ")", ":", "raise", "OptionError", "(", "(", "\"Option '%s' has already been defined as deprecated.\"", "%", "key", ")", ")", "_deprecated_options", "[", "key", "]", "=", "DeprecatedOption", "(", "key", ",", "msg", ",", "rkey", ",", "removal_ver", ")" ]
mark option key as deprecated .
train
true
1,078
def clean_orphan_obj_perms():
    from guardian.models import UserObjectPermission
    from guardian.models import GroupObjectPermission
    deleted = 0
    for perm in chain(UserObjectPermission.objects.all().iterator(), GroupObjectPermission.objects.all().iterator()):
        if (perm.content_object is None):
            logger.debug((u'Removing %s (pk=%d)' % (perm, perm.pk)))
            perm.delete()
            deleted += 1
    logger.info((u'Total removed orphan object permissions instances: %d' % deleted))
    return deleted
[ "def", "clean_orphan_obj_perms", "(", ")", ":", "from", "guardian", ".", "models", "import", "UserObjectPermission", "from", "guardian", ".", "models", "import", "GroupObjectPermission", "deleted", "=", "0", "for", "perm", "in", "chain", "(", "UserObjectPermission", ".", "objects", ".", "all", "(", ")", ".", "iterator", "(", ")", ",", "GroupObjectPermission", ".", "objects", ".", "all", "(", ")", ".", "iterator", "(", ")", ")", ":", "if", "(", "perm", ".", "content_object", "is", "None", ")", ":", "logger", ".", "debug", "(", "(", "u'Removing %s (pk=%d)'", "%", "(", "perm", ",", "perm", ".", "pk", ")", ")", ")", "perm", ".", "delete", "(", ")", "deleted", "+=", "1", "logger", ".", "info", "(", "(", "u'Total removed orphan object permissions instances: %d'", "%", "deleted", ")", ")", "return", "deleted" ]
seeks and removes all object permissions entries pointing at non-existing targets .
train
false
1,079
def import_pylab(user_ns, import_all=True):
    s = 'import numpy\nimport matplotlib\nfrom matplotlib import pylab, mlab, pyplot\nnp = numpy\nplt = pyplot\n'
    exec s in user_ns
    if import_all:
        s = 'from matplotlib.pylab import *\nfrom numpy import *\n'
        exec s in user_ns
    user_ns['figsize'] = figsize
    from IPython.core.display import display
    user_ns['display'] = display
    user_ns['getfigs'] = getfigs
[ "def", "import_pylab", "(", "user_ns", ",", "import_all", "=", "True", ")", ":", "s", "=", "'import numpy\\nimport matplotlib\\nfrom matplotlib import pylab, mlab, pyplot\\nnp = numpy\\nplt = pyplot\\n'", "exec", "s", "in", "user_ns", "if", "import_all", ":", "s", "=", "'from matplotlib.pylab import *\\nfrom numpy import *\\n'", "exec", "s", "in", "user_ns", "user_ns", "[", "'figsize'", "]", "=", "figsize", "from", "IPython", ".", "core", ".", "display", "import", "display", "user_ns", "[", "'display'", "]", "=", "display", "user_ns", "[", "'getfigs'", "]", "=", "getfigs" ]
populate the namespace with pylab-related values .
train
false
1,080
def generate_tracking_message_id(res_id):
    try:
        rnd = random.SystemRandom().random()
    except NotImplementedError:
        rnd = random.random()
    rndstr = ('%.15f' % rnd)[2:]
    return ('<%.15f.%s-openerp-%s@%s>' % (time.time(), rndstr, res_id, socket.gethostname()))
[ "def", "generate_tracking_message_id", "(", "res_id", ")", ":", "try", ":", "rnd", "=", "random", ".", "SystemRandom", "(", ")", ".", "random", "(", ")", "except", "NotImplementedError", ":", "rnd", "=", "random", ".", "random", "(", ")", "rndstr", "=", "(", "'%.15f'", "%", "rnd", ")", "[", "2", ":", "]", "return", "(", "'<%.15f.%s-openerp-%s@%s>'", "%", "(", "time", ".", "time", "(", ")", ",", "rndstr", ",", "res_id", ",", "socket", ".", "gethostname", "(", ")", ")", ")" ]
returns a string that can be used in the message-id rfc822 header field used to track the replies related to a given object thanks to the "in-reply-to" or "references" fields that mail user agents will set .
train
false
1,081
def display_completions_like_readline(event):
    b = event.current_buffer
    if (b.completer is None):
        return
    complete_event = CompleteEvent(completion_requested=True)
    completions = list(b.completer.get_completions(b.document, complete_event))
    common_suffix = get_common_complete_suffix(b.document, completions)
    if (len(completions) == 1):
        b.delete_before_cursor((- completions[0].start_position))
        b.insert_text(completions[0].text)
    elif common_suffix:
        b.insert_text(common_suffix)
    elif completions:
        _display_completions_like_readline(event.cli, completions)
[ "def", "display_completions_like_readline", "(", "event", ")", ":", "b", "=", "event", ".", "current_buffer", "if", "(", "b", ".", "completer", "is", "None", ")", ":", "return", "complete_event", "=", "CompleteEvent", "(", "completion_requested", "=", "True", ")", "completions", "=", "list", "(", "b", ".", "completer", ".", "get_completions", "(", "b", ".", "document", ",", "complete_event", ")", ")", "common_suffix", "=", "get_common_complete_suffix", "(", "b", ".", "document", ",", "completions", ")", "if", "(", "len", "(", "completions", ")", "==", "1", ")", ":", "b", ".", "delete_before_cursor", "(", "(", "-", "completions", "[", "0", "]", ".", "start_position", ")", ")", "b", ".", "insert_text", "(", "completions", "[", "0", "]", ".", "text", ")", "elif", "common_suffix", ":", "b", ".", "insert_text", "(", "common_suffix", ")", "elif", "completions", ":", "_display_completions_like_readline", "(", "event", ".", "cli", ",", "completions", ")" ]
key binding handler for readline-style tab completion .
train
true
1,082
def _clean_alignments(alignments):
    unique_alignments = []
    for align in alignments:
        if (align not in unique_alignments):
            unique_alignments.append(align)
    i = 0
    while (i < len(unique_alignments)):
        (seqA, seqB, score, begin, end) = unique_alignments[i]
        if (end is None):
            end = len(seqA)
        elif (end < 0):
            end = (end + len(seqA))
        if (begin >= end):
            del unique_alignments[i]
            continue
        unique_alignments[i] = (seqA, seqB, score, begin, end)
        i += 1
    return unique_alignments
[ "def", "_clean_alignments", "(", "alignments", ")", ":", "unique_alignments", "=", "[", "]", "for", "align", "in", "alignments", ":", "if", "(", "align", "not", "in", "unique_alignments", ")", ":", "unique_alignments", ".", "append", "(", "align", ")", "i", "=", "0", "while", "(", "i", "<", "len", "(", "unique_alignments", ")", ")", ":", "(", "seqA", ",", "seqB", ",", "score", ",", "begin", ",", "end", ")", "=", "unique_alignments", "[", "i", "]", "if", "(", "end", "is", "None", ")", ":", "end", "=", "len", "(", "seqA", ")", "elif", "(", "end", "<", "0", ")", ":", "end", "=", "(", "end", "+", "len", "(", "seqA", ")", ")", "if", "(", "begin", ">=", "end", ")", ":", "del", "unique_alignments", "[", "i", "]", "continue", "unique_alignments", "[", "i", "]", "=", "(", "seqA", ",", "seqB", ",", "score", ",", "begin", ",", "end", ")", "i", "+=", "1", "return", "unique_alignments" ]
take a list of alignments and return a cleaned version .
train
false
1,083
def add_anonymous_profile(apps, schema_editor):
    User = apps.get_model(u'auth', u'User')
    Group = apps.get_model(u'auth', u'Group')
    Profile = apps.get_model(u'accounts', u'Profile')
    anon_user = User.objects.get_or_create(username=ANONYMOUS_USER_NAME, defaults={u'is_active': False, u'password': make_password(None), u'email': u'noreply@weblate.org'})[0]
    guest_group = Group.objects.get_or_create(name=u'Guests')[0]
    anon_user.groups.clear()
    anon_user.groups.add(guest_group)
    Profile.objects.get_or_create(user=anon_user)
[ "def", "add_anonymous_profile", "(", "apps", ",", "schema_editor", ")", ":", "User", "=", "apps", ".", "get_model", "(", "u'auth'", ",", "u'User'", ")", "Group", "=", "apps", ".", "get_model", "(", "u'auth'", ",", "u'Group'", ")", "Profile", "=", "apps", ".", "get_model", "(", "u'accounts'", ",", "u'Profile'", ")", "anon_user", "=", "User", ".", "objects", ".", "get_or_create", "(", "username", "=", "ANONYMOUS_USER_NAME", ",", "defaults", "=", "{", "u'is_active'", ":", "False", ",", "u'password'", ":", "make_password", "(", "None", ")", ",", "u'email'", ":", "u'noreply@weblate.org'", "}", ")", "[", "0", "]", "guest_group", "=", "Group", ".", "objects", ".", "get_or_create", "(", "name", "=", "u'Guests'", ")", "[", "0", "]", "anon_user", ".", "groups", ".", "clear", "(", ")", "anon_user", ".", "groups", ".", "add", "(", "guest_group", ")", "Profile", ".", "objects", ".", "get_or_create", "(", "user", "=", "anon_user", ")" ]
ensure anonymous user has profile .
train
false
1,084
def _yield_name_and_cat(num, app=None, type=None):
    categories = generate_categories(app=app, type=type)
    if (num > len(generate_names())):
        base_names = islice(cycle(generate_names()), num)
        addons = ['{name} {i}'.format(name=name, i=i) for (i, name) in enumerate(base_names)]
    else:
        addons = random.sample(generate_names(), num)
    num_cats = len(categories)
    for (i, addon_name) in enumerate(addons):
        cat = categories[(i % num_cats)]
        (yield (addon_name, cat))
[ "def", "_yield_name_and_cat", "(", "num", ",", "app", "=", "None", ",", "type", "=", "None", ")", ":", "categories", "=", "generate_categories", "(", "app", "=", "app", ",", "type", "=", "type", ")", "if", "(", "num", ">", "len", "(", "generate_names", "(", ")", ")", ")", ":", "base_names", "=", "islice", "(", "cycle", "(", "generate_names", "(", ")", ")", ",", "num", ")", "addons", "=", "[", "'{name} {i}'", ".", "format", "(", "name", "=", "name", ",", "i", "=", "i", ")", "for", "(", "i", ",", "name", ")", "in", "enumerate", "(", "base_names", ")", "]", "else", ":", "addons", "=", "random", ".", "sample", "(", "generate_names", "(", ")", ",", "num", ")", "num_cats", "=", "len", "(", "categories", ")", "for", "(", "i", ",", "addon_name", ")", "in", "enumerate", "(", "addons", ")", ":", "cat", "=", "categories", "[", "(", "i", "%", "num_cats", ")", "]", "(", "yield", "(", "addon_name", ",", "cat", ")", ")" ]
yield num tuples of (addon name, category) for the given app and type .
train
false
1,085
def validate_trigger_parameters(trigger_type_ref, parameters):
    if (not trigger_type_ref):
        return None
    is_system_trigger = (trigger_type_ref in SYSTEM_TRIGGER_TYPES)
    if is_system_trigger:
        parameters_schema = SYSTEM_TRIGGER_TYPES[trigger_type_ref]['parameters_schema']
    else:
        trigger_type_db = triggers.get_trigger_type_db(trigger_type_ref)
        if (not trigger_type_db):
            return None
        parameters_schema = getattr(trigger_type_db, 'parameters_schema', {})
        if (not parameters_schema):
            return None
    if ((not is_system_trigger) and (not cfg.CONF.system.validate_trigger_parameters)):
        LOG.debug(('Got non-system trigger "%s", but trigger parameter validation for non-systemtriggers is disabled, skipping validation.' % trigger_type_ref))
        return None
    cleaned = util_schema.validate(instance=parameters, schema=parameters_schema, cls=util_schema.CustomValidator, use_default=True, allow_default_none=True)
    if (trigger_type_ref == CRON_TIMER_TRIGGER_REF):
        CronTrigger(**parameters)
    return cleaned
[ "def", "validate_trigger_parameters", "(", "trigger_type_ref", ",", "parameters", ")", ":", "if", "(", "not", "trigger_type_ref", ")", ":", "return", "None", "is_system_trigger", "=", "(", "trigger_type_ref", "in", "SYSTEM_TRIGGER_TYPES", ")", "if", "is_system_trigger", ":", "parameters_schema", "=", "SYSTEM_TRIGGER_TYPES", "[", "trigger_type_ref", "]", "[", "'parameters_schema'", "]", "else", ":", "trigger_type_db", "=", "triggers", ".", "get_trigger_type_db", "(", "trigger_type_ref", ")", "if", "(", "not", "trigger_type_db", ")", ":", "return", "None", "parameters_schema", "=", "getattr", "(", "trigger_type_db", ",", "'parameters_schema'", ",", "{", "}", ")", "if", "(", "not", "parameters_schema", ")", ":", "return", "None", "if", "(", "(", "not", "is_system_trigger", ")", "and", "(", "not", "cfg", ".", "CONF", ".", "system", ".", "validate_trigger_parameters", ")", ")", ":", "LOG", ".", "debug", "(", "(", "'Got non-system trigger \"%s\", but trigger parameter validation for non-systemtriggers is disabled, skipping validation.'", "%", "trigger_type_ref", ")", ")", "return", "None", "cleaned", "=", "util_schema", ".", "validate", "(", "instance", "=", "parameters", ",", "schema", "=", "parameters_schema", ",", "cls", "=", "util_schema", ".", "CustomValidator", ",", "use_default", "=", "True", ",", "allow_default_none", "=", "True", ")", "if", "(", "trigger_type_ref", "==", "CRON_TIMER_TRIGGER_REF", ")", ":", "CronTrigger", "(", "**", "parameters", ")", "return", "cleaned" ]
this function validates parameters for system and user-defined triggers .
train
false
1,086
def _run_until_rpc():
    el = eventloop.get_event_loop()
    while el.current:
        el.run0()
[ "def", "_run_until_rpc", "(", ")", ":", "el", "=", "eventloop", ".", "get_event_loop", "(", ")", "while", "el", ".", "current", ":", "el", ".", "run0", "(", ")" ]
eagerly evaluate tasklets until it is blocking on some rpc .
train
false
1,088
def gmail_login(email, passwd):
    def connect():
        gmail = smtplib.SMTP('smtp.gmail.com', port=587)
        gmail.starttls()
        return gmail
    smtp_login(email, passwd, connect)
[ "def", "gmail_login", "(", "email", ",", "passwd", ")", ":", "def", "connect", "(", ")", ":", "gmail", "=", "smtplib", ".", "SMTP", "(", "'smtp.gmail.com'", ",", "port", "=", "587", ")", "gmail", ".", "starttls", "(", ")", "return", "gmail", "smtp_login", "(", "email", ",", "passwd", ",", "connect", ")" ]
logs into your gmail account with your full email address and password .
train
false
1,089
def child_node_from_list(child_node_name):
    def child_node_from_list_lambda(values):
        return [(child_node_name, value) for value in values]
    return child_node_from_list_lambda
[ "def", "child_node_from_list", "(", "child_node_name", ")", ":", "def", "child_node_from_list_lambda", "(", "values", ")", ":", "return", "[", "(", "child_node_name", ",", "value", ")", "for", "value", "in", "values", "]", "return", "child_node_from_list_lambda" ]
return a value suitable for generating an xml child node on export .
train
false
1,090
@require_GET
def list_documents(request, category=None):
    docs = Document.objects.filter(locale=request.LANGUAGE_CODE).order_by('title')
    if category:
        docs = docs.filter(category=category)
        try:
            category_id = int(category)
        except ValueError:
            raise Http404
        try:
            category = unicode(dict(CATEGORIES)[category_id])
        except KeyError:
            raise Http404
    docs = paginate(request, docs, per_page=DOCUMENTS_PER_PAGE)
    return render(request, 'wiki/list_documents.html', {'documents': docs, 'category': category})
[ "@", "require_GET", "def", "list_documents", "(", "request", ",", "category", "=", "None", ")", ":", "docs", "=", "Document", ".", "objects", ".", "filter", "(", "locale", "=", "request", ".", "LANGUAGE_CODE", ")", ".", "order_by", "(", "'title'", ")", "if", "category", ":", "docs", "=", "docs", ".", "filter", "(", "category", "=", "category", ")", "try", ":", "category_id", "=", "int", "(", "category", ")", "except", "ValueError", ":", "raise", "Http404", "try", ":", "category", "=", "unicode", "(", "dict", "(", "CATEGORIES", ")", "[", "category_id", "]", ")", "except", "KeyError", ":", "raise", "Http404", "docs", "=", "paginate", "(", "request", ",", "docs", ",", "per_page", "=", "DOCUMENTS_PER_PAGE", ")", "return", "render", "(", "request", ",", "'wiki/list_documents.html'", ",", "{", "'documents'", ":", "docs", ",", "'category'", ":", "category", "}", ")" ]
list wiki documents .
train
false
1,091
def reset():
    _runtime.reset()
[ "def", "reset", "(", ")", ":", "_runtime", ".", "reset", "(", ")" ]
reset working tree to the current revision of the branch .
train
false
1,092
@utils.no_4byte_params
def metadef_property_update(context, namespace_name, property_id, property_dict, session=None):
    session = (session or get_session())
    return metadef_property_api.update(context, namespace_name, property_id, property_dict, session)
[ "@", "utils", ".", "no_4byte_params", "def", "metadef_property_update", "(", "context", ",", "namespace_name", ",", "property_id", ",", "property_dict", ",", "session", "=", "None", ")", ":", "session", "=", "(", "session", "or", "get_session", "(", ")", ")", "return", "metadef_property_api", ".", "update", "(", "context", ",", "namespace_name", ",", "property_id", ",", "property_dict", ",", "session", ")" ]
update a metadef property .
train
false
1,093
def squeeze_2x2(input_):
    return squeeze_nxn(input_, n_factor=2)
[ "def", "squeeze_2x2", "(", "input_", ")", ":", "return", "squeeze_nxn", "(", "input_", ",", "n_factor", "=", "2", ")" ]
squeezing operation: reshape to convert space to channels .
train
false
1,094
def is_scalar_zero(builder, value):
    return _scalar_pred_against_zero(builder, value, functools.partial(builder.fcmp_ordered, '=='), '==')
[ "def", "is_scalar_zero", "(", "builder", ",", "value", ")", ":", "return", "_scalar_pred_against_zero", "(", "builder", ",", "value", ",", "functools", ".", "partial", "(", "builder", ".", "fcmp_ordered", ",", "'=='", ")", ",", "'=='", ")" ]
return a predicate representing whether *value* is equal to zero .
train
false
1,095
def extract_substructure(base, substructure):
    if ((type(substructure) is not dict) and (type(base) is not dict)):
        return base
    if (type(base) is not dict):
        raise MissingConfigError('Found non-dict value {} when expecting a sub-configuration {}.'.format(repr(base), repr(substructure)))
    if (type(substructure) is not dict):
        raise MissingConfigError('Found dict value {} when expecting a simple configuration value {}.'.format(repr(base), repr(substructure)))
    try:
        subdict = []
        for (key, value) in substructure.iteritems():
            if (type(value) is Optional):
                base_val = base.get(key, value.default)
            elif _is_optional(value):
                base_val = base.get(key, {})
            else:
                base_val = base[key]
            subdict.append((key, extract_substructure(base_val, value)))
        return dict(subdict)
    except KeyError as e:
        raise MissingConfigError('Missing key {} in configuration'.format(e.args[0]))
[ "def", "extract_substructure", "(", "base", ",", "substructure", ")", ":", "if", "(", "(", "type", "(", "substructure", ")", "is", "not", "dict", ")", "and", "(", "type", "(", "base", ")", "is", "not", "dict", ")", ")", ":", "return", "base", "if", "(", "type", "(", "base", ")", "is", "not", "dict", ")", ":", "raise", "MissingConfigError", "(", "'Found non-dict value {} when expecting a sub-configuration {}.'", ".", "format", "(", "repr", "(", "base", ")", ",", "repr", "(", "substructure", ")", ")", ")", "if", "(", "type", "(", "substructure", ")", "is", "not", "dict", ")", ":", "raise", "MissingConfigError", "(", "'Found dict value {} when expecting a simple configuration value {}.'", ".", "format", "(", "repr", "(", "base", ")", ",", "repr", "(", "substructure", ")", ")", ")", "try", ":", "subdict", "=", "[", "]", "for", "(", "key", ",", "value", ")", "in", "substructure", ".", "iteritems", "(", ")", ":", "if", "(", "type", "(", "value", ")", "is", "Optional", ")", ":", "base_val", "=", "base", ".", "get", "(", "key", ",", "value", ".", "default", ")", "elif", "_is_optional", "(", "value", ")", ":", "base_val", "=", "base", ".", "get", "(", "key", ",", "{", "}", ")", "else", ":", "base_val", "=", "base", "[", "key", "]", "subdict", ".", "append", "(", "(", "key", ",", "extract_substructure", "(", "base_val", ",", "value", ")", ")", ")", "return", "dict", "(", "subdict", ")", "except", "KeyError", "as", "e", ":", "raise", "MissingConfigError", "(", "'Missing key {} in configuration'", ".", "format", "(", "e", ".", "args", "[", "0", "]", ")", ")" ]
assuming that substructure is a possibly nested dictionary .
train
false
1,096
def get_current_language():
    language_code = translation.get_language()
    return get_language_code(language_code)
[ "def", "get_current_language", "(", ")", ":", "language_code", "=", "translation", ".", "get_language", "(", ")", "return", "get_language_code", "(", "language_code", ")" ]
returns the currently active language. it's a replacement for django's translation .
train
false
1,097
def assert_not_none(obj, msg=None, values=True):
    _msg = 'is None'
    if (obj is None):
        if (msg is None):
            msg = _msg
        elif (values is True):
            msg = ('%s: %s' % (msg, _msg))
        _report_failure(msg)
[ "def", "assert_not_none", "(", "obj", ",", "msg", "=", "None", ",", "values", "=", "True", ")", ":", "_msg", "=", "'is None'", "if", "(", "obj", "is", "None", ")", ":", "if", "(", "msg", "is", "None", ")", ":", "msg", "=", "_msg", "elif", "(", "values", "is", "True", ")", ":", "msg", "=", "(", "'%s: %s'", "%", "(", "msg", ",", "_msg", ")", ")", "_report_failure", "(", "msg", ")" ]
verify that item is not none .
train
false
1,100
def test_password_hash():
    password = 'DcosTestingPassword!@#'
    hash_pw = subprocess.check_output(['dcos_installer', '--hash-password', password])
    print hash_pw
    hash_pw = hash_pw.decode('ascii').strip('\n')
    assert passlib.hash.sha512_crypt.verify(password, hash_pw), 'Hash does not match password'
[ "def", "test_password_hash", "(", ")", ":", "password", "=", "'DcosTestingPassword!@#'", "hash_pw", "=", "subprocess", ".", "check_output", "(", "[", "'dcos_installer'", ",", "'--hash-password'", ",", "password", "]", ")", "print", "hash_pw", "hash_pw", "=", "hash_pw", ".", "decode", "(", "'ascii'", ")", ".", "strip", "(", "'\\n'", ")", "assert", "passlib", ".", "hash", ".", "sha512_crypt", ".", "verify", "(", "password", ",", "hash_pw", ")", ",", "'Hash does not match password'" ]
tests that the password hashing method creates de-cryptable hash .
train
false
1,101
def plot_dipole_amplitudes(dipoles, colors=None, show=True):
    import matplotlib.pyplot as plt
    if (colors is None):
        colors = cycle(COLORS)
    (fig, ax) = plt.subplots(1, 1)
    xlim = [np.inf, (- np.inf)]
    for (dip, color) in zip(dipoles, colors):
        ax.plot(dip.times, dip.amplitude, color=color, linewidth=1.5)
        xlim[0] = min(xlim[0], dip.times[0])
        xlim[1] = max(xlim[1], dip.times[(-1)])
    ax.set_xlim(xlim)
    ax.set_xlabel('Time (sec)')
    ax.set_ylabel('Amplitude (nAm)')
    if show:
        fig.show(warn=False)
    return fig
[ "def", "plot_dipole_amplitudes", "(", "dipoles", ",", "colors", "=", "None", ",", "show", "=", "True", ")", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "if", "(", "colors", "is", "None", ")", ":", "colors", "=", "cycle", "(", "COLORS", ")", "(", "fig", ",", "ax", ")", "=", "plt", ".", "subplots", "(", "1", ",", "1", ")", "xlim", "=", "[", "np", ".", "inf", ",", "(", "-", "np", ".", "inf", ")", "]", "for", "(", "dip", ",", "color", ")", "in", "zip", "(", "dipoles", ",", "colors", ")", ":", "ax", ".", "plot", "(", "dip", ".", "times", ",", "dip", ".", "amplitude", ",", "color", "=", "color", ",", "linewidth", "=", "1.5", ")", "xlim", "[", "0", "]", "=", "min", "(", "xlim", "[", "0", "]", ",", "dip", ".", "times", "[", "0", "]", ")", "xlim", "[", "1", "]", "=", "max", "(", "xlim", "[", "1", "]", ",", "dip", ".", "times", "[", "(", "-", "1", ")", "]", ")", "ax", ".", "set_xlim", "(", "xlim", ")", "ax", ".", "set_xlabel", "(", "'Time (sec)'", ")", "ax", ".", "set_ylabel", "(", "'Amplitude (nAm)'", ")", "if", "show", ":", "fig", ".", "show", "(", "warn", "=", "False", ")", "return", "fig" ]
plot the amplitude traces of a set of dipoles .
train
false
1,102
@assignment_tag(takes_context=True)
def page_permissions(context, page):
    if (u'user_page_permissions' not in context):
        context[u'user_page_permissions'] = UserPagePermissionsProxy(context[u'request'].user)
    return context[u'user_page_permissions'].for_page(page)
[ "@", "assignment_tag", "(", "takes_context", "=", "True", ")", "def", "page_permissions", "(", "context", ",", "page", ")", ":", "if", "(", "u'user_page_permissions'", "not", "in", "context", ")", ":", "context", "[", "u'user_page_permissions'", "]", "=", "UserPagePermissionsProxy", "(", "context", "[", "u'request'", "]", ".", "user", ")", "return", "context", "[", "u'user_page_permissions'", "]", ".", "for_page", "(", "page", ")" ]
usage: {% page_permissions page as page_perms %} sets the variable page_perms to a pagepermissiontester object that can be queried to find out what actions the current logged-in user can perform on the given page .
train
false
1,103
def get_normalized_file_path(file_path):
    if hasattr(sys, 'frozen'):
        file_path = ('logging%s__init__%s' % (os.sep, file_path[(-4):]))
    elif (file_path[(-4):].lower() in ['.pyc', '.pyo']):
        file_path = (file_path[:(-4)] + '.py')
    else:
        file_path = file_path
    file_path = os.path.normcase(file_path)
    return file_path
[ "def", "get_normalized_file_path", "(", "file_path", ")", ":", "if", "hasattr", "(", "sys", ",", "'frozen'", ")", ":", "file_path", "=", "(", "'logging%s__init__%s'", "%", "(", "os", ".", "sep", ",", "file_path", "[", "(", "-", "4", ")", ":", "]", ")", ")", "elif", "(", "file_path", "[", "(", "-", "4", ")", ":", "]", ".", "lower", "(", ")", "in", "[", "'.pyc'", ",", "'.pyo'", "]", ")", ":", "file_path", "=", "(", "file_path", "[", ":", "(", "-", "4", ")", "]", "+", "'.py'", ")", "else", ":", "file_path", "=", "file_path", "file_path", "=", "os", ".", "path", ".", "normcase", "(", "file_path", ")", "return", "file_path" ]
return a full normalized file path for the provided path string .
train
false
1,104
def update_git_repos(opts=None, clean=False, masterless=False):
    if (opts is None):
        opts = __opts__
    winrepo_dir = opts['winrepo_dir']
    winrepo_remotes = opts['winrepo_remotes']
    winrepo_cfg = [(winrepo_remotes, winrepo_dir), (opts['winrepo_remotes_ng'], opts['winrepo_dir_ng'])]
    ret = {}
    for (remotes, base_dir) in winrepo_cfg:
        if (not any((salt.utils.gitfs.HAS_GITPYTHON, salt.utils.gitfs.HAS_PYGIT2))):
            winrepo_result = {}
            for remote_info in remotes:
                if ('/' in remote_info):
                    targetname = remote_info.split('/')[(-1)]
                else:
                    targetname = remote_info
                rev = 'HEAD'
                try:
                    (rev, remote_url) = remote_info.strip().split()
                except ValueError:
                    remote_url = remote_info
                gittarget = os.path.join(base_dir, targetname).replace('.', '_')
                if masterless:
                    result = __salt__['state.single']('git.latest', name=remote_url, rev=rev, branch='winrepo', target=gittarget, force_checkout=True, force_reset=True)
                    if isinstance(result, list):
                        raise CommandExecutionError('Failed up update winrepo remotes: {0}'.format('\n'.join(result)))
                    if ('name' not in result):
                        key = next(iter(result))
                        result = result[key]
                else:
                    mminion = salt.minion.MasterMinion(opts)
                    result = mminion.states['git.latest'](remote_url, rev=rev, branch='winrepo', target=gittarget, force_checkout=True, force_reset=True)
                winrepo_result[result['name']] = result['result']
            ret.update(winrepo_result)
        else:
            try:
                winrepo = salt.utils.gitfs.WinRepo(opts, base_dir)
                winrepo.init_remotes(remotes, PER_REMOTE_OVERRIDES)
                winrepo.fetch_remotes()
                if clean:
                    winrepo.clear_old_remotes()
                winrepo.checkout()
            except Exception as exc:
                msg = 'Failed to update winrepo_remotes: {0}'.format(exc)
                log.error(msg, exc_info_on_loglevel=logging.DEBUG)
                return msg
            ret.update(winrepo.winrepo_dirs)
    return ret
[ "def", "update_git_repos", "(", "opts", "=", "None", ",", "clean", "=", "False", ",", "masterless", "=", "False", ")", ":", "if", "(", "opts", "is", "None", ")", ":", "opts", "=", "__opts__", "winrepo_dir", "=", "opts", "[", "'winrepo_dir'", "]", "winrepo_remotes", "=", "opts", "[", "'winrepo_remotes'", "]", "winrepo_cfg", "=", "[", "(", "winrepo_remotes", ",", "winrepo_dir", ")", ",", "(", "opts", "[", "'winrepo_remotes_ng'", "]", ",", "opts", "[", "'winrepo_dir_ng'", "]", ")", "]", "ret", "=", "{", "}", "for", "(", "remotes", ",", "base_dir", ")", "in", "winrepo_cfg", ":", "if", "(", "not", "any", "(", "(", "salt", ".", "utils", ".", "gitfs", ".", "HAS_GITPYTHON", ",", "salt", ".", "utils", ".", "gitfs", ".", "HAS_PYGIT2", ")", ")", ")", ":", "winrepo_result", "=", "{", "}", "for", "remote_info", "in", "remotes", ":", "if", "(", "'/'", "in", "remote_info", ")", ":", "targetname", "=", "remote_info", ".", "split", "(", "'/'", ")", "[", "(", "-", "1", ")", "]", "else", ":", "targetname", "=", "remote_info", "rev", "=", "'HEAD'", "try", ":", "(", "rev", ",", "remote_url", ")", "=", "remote_info", ".", "strip", "(", ")", ".", "split", "(", ")", "except", "ValueError", ":", "remote_url", "=", "remote_info", "gittarget", "=", "os", ".", "path", ".", "join", "(", "base_dir", ",", "targetname", ")", ".", "replace", "(", "'.'", ",", "'_'", ")", "if", "masterless", ":", "result", "=", "__salt__", "[", "'state.single'", "]", "(", "'git.latest'", ",", "name", "=", "remote_url", ",", "rev", "=", "rev", ",", "branch", "=", "'winrepo'", ",", "target", "=", "gittarget", ",", "force_checkout", "=", "True", ",", "force_reset", "=", "True", ")", "if", "isinstance", "(", "result", ",", "list", ")", ":", "raise", "CommandExecutionError", "(", "'Failed up update winrepo remotes: {0}'", ".", "format", "(", "'\\n'", ".", "join", "(", "result", ")", ")", ")", "if", "(", "'name'", "not", "in", "result", ")", ":", "key", "=", "next", "(", "iter", "(", "result", ")", ")", "result", "=", "result", "[", "key", "]", "else", ":", "mminion", "=", "salt", ".", "minion", ".", "MasterMinion", "(", "opts", ")", "result", "=", "mminion", ".", "states", "[", "'git.latest'", "]", "(", "remote_url", ",", "rev", "=", "rev", ",", "branch", "=", "'winrepo'", ",", "target", "=", "gittarget", ",", "force_checkout", "=", "True", ",", "force_reset", "=", "True", ")", "winrepo_result", "[", "result", "[", "'name'", "]", "]", "=", "result", "[", "'result'", "]", "ret", ".", "update", "(", "winrepo_result", ")", "else", ":", "try", ":", "winrepo", "=", "salt", ".", "utils", ".", "gitfs", ".", "WinRepo", "(", "opts", ",", "base_dir", ")", "winrepo", ".", "init_remotes", "(", "remotes", ",", "PER_REMOTE_OVERRIDES", ")", "winrepo", ".", "fetch_remotes", "(", ")", "if", "clean", ":", "winrepo", ".", "clear_old_remotes", "(", ")", "winrepo", ".", "checkout", "(", ")", "except", "Exception", "as", "exc", ":", "msg", "=", "'Failed to update winrepo_remotes: {0}'", ".", "format", "(", "exc", ")", "log", ".", "error", "(", "msg", ",", "exc_info_on_loglevel", "=", "logging", ".", "DEBUG", ")", "return", "msg", "ret", ".", "update", "(", "winrepo", ".", "winrepo_dirs", ")", "return", "ret" ]
checkout git repos containing :ref:`windows software package definitions <windows-package-manager>` .
train
true
1,106
def _validate_center_shape(X, n_centers, centers):
    if (len(centers) != n_centers):
        raise ValueError(('The shape of the initial centers (%s) does not match the number of clusters %i' % (centers.shape, n_centers)))
    if (centers.shape[1] != X.shape[1]):
        raise ValueError(('The number of features of the initial centers %s does not match the number of features of the data %s.' % (centers.shape[1], X.shape[1])))
[ "def", "_validate_center_shape", "(", "X", ",", "n_centers", ",", "centers", ")", ":", "if", "(", "len", "(", "centers", ")", "!=", "n_centers", ")", ":", "raise", "ValueError", "(", "(", "'The shape of the initial centers (%s) does not match the number of clusters %i'", "%", "(", "centers", ".", "shape", ",", "n_centers", ")", ")", ")", "if", "(", "centers", ".", "shape", "[", "1", "]", "!=", "X", ".", "shape", "[", "1", "]", ")", ":", "raise", "ValueError", "(", "(", "'The number of features of the initial centers %s does not match the number of features of the data %s.'", "%", "(", "centers", ".", "shape", "[", "1", "]", ",", "X", ".", "shape", "[", "1", "]", ")", ")", ")" ]
check if centers is compatible with x and n_centers .
train
false
1,107
def _filter_datastores_matching_storage_policy(session, data_stores, storage_policy):
    profile_id = pbm.get_profile_id_by_name(session, storage_policy)
    if profile_id:
        factory = session.pbm.client.factory
        ds_mors = [oc.obj for oc in data_stores.objects]
        hubs = pbm.convert_datastores_to_hubs(factory, ds_mors)
        matching_hubs = pbm.filter_hubs_by_profile(session, hubs, profile_id)
        if matching_hubs:
            matching_ds = pbm.filter_datastores_by_hubs(matching_hubs, ds_mors)
            object_contents = [oc for oc in data_stores.objects if (oc.obj in matching_ds)]
            data_stores.objects = object_contents
            return data_stores
    LOG.error(_LE('Unable to retrieve storage policy with name %s'), storage_policy)
[ "def", "_filter_datastores_matching_storage_policy", "(", "session", ",", "data_stores", ",", "storage_policy", ")", ":", "profile_id", "=", "pbm", ".", "get_profile_id_by_name", "(", "session", ",", "storage_policy", ")", "if", "profile_id", ":", "factory", "=", "session", ".", "pbm", ".", "client", ".", "factory", "ds_mors", "=", "[", "oc", ".", "obj", "for", "oc", "in", "data_stores", ".", "objects", "]", "hubs", "=", "pbm", ".", "convert_datastores_to_hubs", "(", "factory", ",", "ds_mors", ")", "matching_hubs", "=", "pbm", ".", "filter_hubs_by_profile", "(", "session", ",", "hubs", ",", "profile_id", ")", "if", "matching_hubs", ":", "matching_ds", "=", "pbm", ".", "filter_datastores_by_hubs", "(", "matching_hubs", ",", "ds_mors", ")", "object_contents", "=", "[", "oc", "for", "oc", "in", "data_stores", ".", "objects", "if", "(", "oc", ".", "obj", "in", "matching_ds", ")", "]", "data_stores", ".", "objects", "=", "object_contents", "return", "data_stores", "LOG", ".", "error", "(", "_LE", "(", "'Unable to retrieve storage policy with name %s'", ")", ",", "storage_policy", ")" ]
get datastores matching the given storage policy .
train
false
1,109
@pytest.mark.parametrize('value, typ', [(None, None), (42, int)])
def test_conv_default_param(value, typ):
    def func(foo=value):
        pass
    param = inspect.signature(func).parameters['foo']
    assert (argparser.type_conv(param, typ, value, str_choices=['val']) == value)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'value, typ'", ",", "[", "(", "None", ",", "None", ")", ",", "(", "42", ",", "int", ")", "]", ")", "def", "test_conv_default_param", "(", "value", ",", "typ", ")", ":", "def", "func", "(", "foo", "=", "value", ")", ":", "pass", "param", "=", "inspect", ".", "signature", "(", "func", ")", ".", "parameters", "[", "'foo'", "]", "assert", "(", "argparser", ".", "type_conv", "(", "param", ",", "typ", ",", "value", ",", "str_choices", "=", "[", "'val'", "]", ")", "==", "value", ")" ]
the default value should always be a valid choice .
train
false
1,110
def to_csv(df, filename, name_function=None, compression=None, compute=True, get=None, **kwargs):
    values = [_to_csv_chunk(d, **kwargs) for d in df.to_delayed()]
    values = write_bytes(values, filename, name_function, compression, encoding=None)
    if compute:
        delayed(values).compute(get=get)
    else:
        return values
[ "def", "to_csv", "(", "df", ",", "filename", ",", "name_function", "=", "None", ",", "compression", "=", "None", ",", "compute", "=", "True", ",", "get", "=", "None", ",", "**", "kwargs", ")", ":", "values", "=", "[", "_to_csv_chunk", "(", "d", ",", "**", "kwargs", ")", "for", "d", "in", "df", ".", "to_delayed", "(", ")", "]", "values", "=", "write_bytes", "(", "values", ",", "filename", ",", "name_function", ",", "compression", ",", "encoding", "=", "None", ")", "if", "compute", ":", "delayed", "(", "values", ")", ".", "compute", "(", "get", "=", "get", ")", "else", ":", "return", "values" ]
store the dataframe to csv files, one file per partition .
train
false
1,112
def upload_file_to_s3(file_path, s3_filename, content_type='text/html', extra_message=''):
    try:
        conn = boto.connect_s3()
        with open(file_path, 'rb') as f:
            contents = f.read()
        upload = True
    except NoAuthHandlerFound:
        fail('Upload was requested but could not connect to S3.')
        fail('This is expected if you are an external contributor submitting a PR to Bokeh.')
        fail('This could also happen if S3 credentials are not available on the machine where this test is running.')
        upload = False
    except OSError:
        fail(('Upload was requested but file %s was not available.' % file_path))
        upload = False
    if __version__.endswith('-dirty'):
        fail('Uploads are not permitted when working directory is dirty.')
        fail("Make sure that __version__ doesn't contain -dirty suffix.")
        upload = False
    if upload:
        bucket = conn.get_bucket(S3_BUCKET)
        key = S3Key(bucket, s3_filename)
        key.set_metadata('Content-Type', content_type)
        key.set_contents_from_string(contents, policy='public-read')
        ok(('%s | Access upload at: %s' % (extra_message, join(S3_URL, s3_filename))))
[ "def", "upload_file_to_s3", "(", "file_path", ",", "s3_filename", ",", "content_type", "=", "'text/html'", ",", "extra_message", "=", "''", ")", ":", "try", ":", "conn", "=", "boto", ".", "connect_s3", "(", ")", "with", "open", "(", "file_path", ",", "'rb'", ")", "as", "f", ":", "contents", "=", "f", ".", "read", "(", ")", "upload", "=", "True", "except", "NoAuthHandlerFound", ":", "fail", "(", "'Upload was requested but could not connect to S3.'", ")", "fail", "(", "'This is expected if you are an external contributor submitting a PR to Bokeh.'", ")", "fail", "(", "'This could also happen if S3 credentials are not available on the machine where this test is running.'", ")", "upload", "=", "False", "except", "OSError", ":", "fail", "(", "(", "'Upload was requested but file %s was not available.'", "%", "file_path", ")", ")", "upload", "=", "False", "if", "__version__", ".", "endswith", "(", "'-dirty'", ")", ":", "fail", "(", "'Uploads are not permitted when working directory is dirty.'", ")", "fail", "(", "\"Make sure that __version__ doesn't contain -dirty suffix.\"", ")", "upload", "=", "False", "if", "upload", ":", "bucket", "=", "conn", ".", "get_bucket", "(", "S3_BUCKET", ")", "key", "=", "S3Key", "(", "bucket", ",", "s3_filename", ")", "key", ".", "set_metadata", "(", "'Content-Type'", ",", "content_type", ")", "key", ".", "set_contents_from_string", "(", "contents", ",", "policy", "=", "'public-read'", ")", "ok", "(", "(", "'%s | Access upload at: %s'", "%", "(", "extra_message", ",", "join", "(", "S3_URL", ",", "s3_filename", ")", ")", ")", ")" ]
uploads a file to bokeh-travis s3 bucket .
train
false
1,113
def geoip_suggestion(request):
    locales = request.GET.getlist('locales[]')
    response = {'locales': {}}
    for locale in locales:
        response['locales'][locale] = LOCALES[locale]
        with uselocale(locale):
            response[locale] = {'suggestion': _('Would you like to view this page in %(language)s instead?'), 'confirm': _('Yes'), 'cancel': _('No')}
    return HttpResponse(json.dumps(response), content_type='application/json')
[ "def", "geoip_suggestion", "(", "request", ")", ":", "locales", "=", "request", ".", "GET", ".", "getlist", "(", "'locales[]'", ")", "response", "=", "{", "'locales'", ":", "{", "}", "}", "for", "locale", "in", "locales", ":", "response", "[", "'locales'", "]", "[", "locale", "]", "=", "LOCALES", "[", "locale", "]", "with", "uselocale", "(", "locale", ")", ":", "response", "[", "locale", "]", "=", "{", "'suggestion'", ":", "_", "(", "'Would you like to view this page in %(language)s instead?'", ")", ",", "'confirm'", ":", "_", "(", "'Yes'", ")", ",", "'cancel'", ":", "_", "(", "'No'", ")", "}", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "response", ")", ",", "content_type", "=", "'application/json'", ")" ]
ajax view to return the localized text for geoip locale change suggestion .
train
false
1,116
def _get_bootstrap_content(directory='.'):
    try:
        with salt.utils.fopen(os.path.join(os.path.abspath(directory), 'bootstrap.py')) as fic:
            oldcontent = fic.read()
    except (OSError, IOError):
        oldcontent = ''
    return oldcontent
[ "def", "_get_bootstrap_content", "(", "directory", "=", "'.'", ")", ":", "try", ":", "with", "salt", ".", "utils", ".", "fopen", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "abspath", "(", "directory", ")", ",", "'bootstrap.py'", ")", ")", "as", "fic", ":", "oldcontent", "=", "fic", ".", "read", "(", ")", "except", "(", "OSError", ",", "IOError", ")", ":", "oldcontent", "=", "''", "return", "oldcontent" ]
get the current bootstrap .
train
true
1,117
def _prepare_projectors(params):
    import matplotlib.pyplot as plt
    import matplotlib as mpl
    epochs = params['epochs']
    projs = params['projs']
    if ((len(projs) > 0) and (not epochs.proj)):
        ax_button = plt.subplot2grid((10, 15), (9, 14))
        opt_button = mpl.widgets.Button(ax_button, 'Proj')
        callback_option = partial(_toggle_options, params=params)
        opt_button.on_clicked(callback_option)
        params['opt_button'] = opt_button
        params['ax_button'] = ax_button
    params['plot_update_proj_callback'] = _plot_update_epochs_proj
    callback_proj = partial(_toggle_proj, params=params)
    params['callback_proj'] = callback_proj
    callback_proj('none')
[ "def", "_prepare_projectors", "(", "params", ")", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "import", "matplotlib", "as", "mpl", "epochs", "=", "params", "[", "'epochs'", "]", "projs", "=", "params", "[", "'projs'", "]", "if", "(", "(", "len", "(", "projs", ")", ">", "0", ")", "and", "(", "not", "epochs", ".", "proj", ")", ")", ":", "ax_button", "=", "plt", ".", "subplot2grid", "(", "(", "10", ",", "15", ")", ",", "(", "9", ",", "14", ")", ")", "opt_button", "=", "mpl", ".", "widgets", ".", "Button", "(", "ax_button", ",", "'Proj'", ")", "callback_option", "=", "partial", "(", "_toggle_options", ",", "params", "=", "params", ")", "opt_button", ".", "on_clicked", "(", "callback_option", ")", "params", "[", "'opt_button'", "]", "=", "opt_button", "params", "[", "'ax_button'", "]", "=", "ax_button", "params", "[", "'plot_update_proj_callback'", "]", "=", "_plot_update_epochs_proj", "callback_proj", "=", "partial", "(", "_toggle_proj", ",", "params", "=", "params", ")", "params", "[", "'callback_proj'", "]", "=", "callback_proj", "callback_proj", "(", "'none'", ")" ]
set up the projectors for epochs browser .
train
false
1,118
def _force_alphabet(alignment_iterator, alphabet):
    given_base_class = _get_base_alphabet(alphabet).__class__
    for align in alignment_iterator:
        if (not isinstance(_get_base_alphabet(align._alphabet), given_base_class)):
            raise ValueError(('Specified alphabet %s clashes with that determined from the file, %s' % (repr(alphabet), repr(align._alphabet))))
        for record in align:
            if (not isinstance(_get_base_alphabet(record.seq.alphabet), given_base_class)):
                raise ValueError(('Specified alphabet %s clashes with that determined from the file, %s' % (repr(alphabet), repr(record.seq.alphabet))))
            record.seq.alphabet = alphabet
        align._alphabet = alphabet
        (yield align)
[ "def", "_force_alphabet", "(", "alignment_iterator", ",", "alphabet", ")", ":", "given_base_class", "=", "_get_base_alphabet", "(", "alphabet", ")", ".", "__class__", "for", "align", "in", "alignment_iterator", ":", "if", "(", "not", "isinstance", "(", "_get_base_alphabet", "(", "align", ".", "_alphabet", ")", ",", "given_base_class", ")", ")", ":", "raise", "ValueError", "(", "(", "'Specified alphabet %s clashes with that determined from the file, %s'", "%", "(", "repr", "(", "alphabet", ")", ",", "repr", "(", "align", ".", "_alphabet", ")", ")", ")", ")", "for", "record", "in", "align", ":", "if", "(", "not", "isinstance", "(", "_get_base_alphabet", "(", "record", ".", "seq", ".", "alphabet", ")", ",", "given_base_class", ")", ")", ":", "raise", "ValueError", "(", "(", "'Specified alphabet %s clashes with that determined from the file, %s'", "%", "(", "repr", "(", "alphabet", ")", ",", "repr", "(", "record", ".", "seq", ".", "alphabet", ")", ")", ")", ")", "record", ".", "seq", ".", "alphabet", "=", "alphabet", "align", ".", "_alphabet", "=", "alphabet", "(", "yield", "align", ")" ]
iterate over alignments, forcing the given alphabet .
train
false
1,119
def volume_create_attach(name, call=None, **kwargs):
    if (call == 'function'):
        raise SaltCloudSystemExit('The create_attach_volumes action must be called with -a or --action.')
    if (type(kwargs['volumes']) is str):
        volumes = yaml.safe_load(kwargs['volumes'])
    else:
        volumes = kwargs['volumes']
    ret = []
    for volume in volumes:
        created = False
        volume_dict = {'name': volume['name']}
        if ('volume_id' in volume):
            volume_dict['volume_id'] = volume['volume_id']
        elif ('snapshot' in volume):
            volume_dict['snapshot'] = volume['snapshot']
        else:
            volume_dict['size'] = volume['size']
            if ('type' in volume):
                volume_dict['type'] = volume['type']
            if ('iops' in volume):
                volume_dict['iops'] = volume['iops']
        if ('id' not in volume_dict):
            created_volume = create_volume(**volume_dict)
            created = True
            volume_dict.update(created_volume)
        attach = attach_volume(name=volume['name'], server_name=name, device=volume.get('device', None), call='action')
        if attach:
            msg = '{0} attached to {1} (aka {2})'.format(volume_dict['id'], name, volume_dict['name'])
            log.info(msg)
            ret.append(msg)
    return ret
[ "def", "volume_create_attach", "(", "name", ",", "call", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "call", "==", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The create_attach_volumes action must be called with -a or --action.'", ")", "if", "(", "type", "(", "kwargs", "[", "'volumes'", "]", ")", "is", "str", ")", ":", "volumes", "=", "yaml", ".", "safe_load", "(", "kwargs", "[", "'volumes'", "]", ")", "else", ":", "volumes", "=", "kwargs", "[", "'volumes'", "]", "ret", "=", "[", "]", "for", "volume", "in", "volumes", ":", "created", "=", "False", "volume_dict", "=", "{", "'name'", ":", "volume", "[", "'name'", "]", "}", "if", "(", "'volume_id'", "in", "volume", ")", ":", "volume_dict", "[", "'volume_id'", "]", "=", "volume", "[", "'volume_id'", "]", "elif", "(", "'snapshot'", "in", "volume", ")", ":", "volume_dict", "[", "'snapshot'", "]", "=", "volume", "[", "'snapshot'", "]", "else", ":", "volume_dict", "[", "'size'", "]", "=", "volume", "[", "'size'", "]", "if", "(", "'type'", "in", "volume", ")", ":", "volume_dict", "[", "'type'", "]", "=", "volume", "[", "'type'", "]", "if", "(", "'iops'", "in", "volume", ")", ":", "volume_dict", "[", "'iops'", "]", "=", "volume", "[", "'iops'", "]", "if", "(", "'id'", "not", "in", "volume_dict", ")", ":", "created_volume", "=", "create_volume", "(", "**", "volume_dict", ")", "created", "=", "True", "volume_dict", ".", "update", "(", "created_volume", ")", "attach", "=", "attach_volume", "(", "name", "=", "volume", "[", "'name'", "]", ",", "server_name", "=", "name", ",", "device", "=", "volume", ".", "get", "(", "'device'", ",", "None", ")", ",", "call", "=", "'action'", ")", "if", "attach", ":", "msg", "=", "'{0} attached to {1} (aka {2})'", ".", "format", "(", "volume_dict", "[", "'id'", "]", ",", "name", ",", "volume_dict", "[", "'name'", "]", ")", "log", ".", "info", "(", "msg", ")", "ret", ".", "append", "(", "msg", ")", "return", "ret" ]
create and attach volumes to created node .
train
true
1,120
def fix_dashes(view, edit, text_region, dash_region):
    if (len(view.substr(text_region).strip()) == 0):
        return
    old_dashes = view.substr(dash_region)
    first_dash = old_dashes[0]
    new_dashes = (first_dash * text_region.size())
    view.replace(edit, dash_region, new_dashes)
[ "def", "fix_dashes", "(", "view", ",", "edit", ",", "text_region", ",", "dash_region", ")", ":", "if", "(", "len", "(", "view", ".", "substr", "(", "text_region", ")", ".", "strip", "(", ")", ")", "==", "0", ")", ":", "return", "old_dashes", "=", "view", ".", "substr", "(", "dash_region", ")", "first_dash", "=", "old_dashes", "[", "0", "]", "new_dashes", "=", "(", "first_dash", "*", "text_region", ".", "size", "(", ")", ")", "view", ".", "replace", "(", "edit", ",", "dash_region", ",", "new_dashes", ")" ]
replaces the underlined "dash" region of a setext header with a run of dashes or equal-signs that match the length of the header text .
train
false
1,121
def delete_instance_type_info(metadata, *prefixes):
    for key in system_metadata_instance_type_props.keys():
        for prefix in prefixes:
            to_key = ('%sinstance_type_%s' % (prefix, key))
            del metadata[to_key]
    return metadata
[ "def", "delete_instance_type_info", "(", "metadata", ",", "*", "prefixes", ")", ":", "for", "key", "in", "system_metadata_instance_type_props", ".", "keys", "(", ")", ":", "for", "prefix", "in", "prefixes", ":", "to_key", "=", "(", "'%sinstance_type_%s'", "%", "(", "prefix", ",", "key", ")", ")", "del", "metadata", "[", "to_key", "]", "return", "metadata" ]
delete instance_type information from instances system_metadata by prefix .
train
false
1,124
def extends(*args, **kwargs):
    def decorator(func):
        func.wsgi_extends = (func.__name__, kwargs.get('action'))
        return func
    if args:
        return decorator(*args)
    return decorator
[ "def", "extends", "(", "*", "args", ",", "**", "kwargs", ")", ":", "def", "decorator", "(", "func", ")", ":", "func", ".", "wsgi_extends", "=", "(", "func", ".", "__name__", ",", "kwargs", ".", "get", "(", "'action'", ")", ")", "return", "func", "if", "args", ":", "return", "decorator", "(", "*", "args", ")", "return", "decorator" ]
indicate a function extends an operation .
train
false
1,125
def _embl_convert_fasta(in_handle, out_handle, alphabet=None):
    from Bio.GenBank.Scanner import EmblScanner
    records = EmblScanner().parse_records(in_handle, do_features=False)
    return SeqIO.write(records, out_handle, 'fasta')
[ "def", "_embl_convert_fasta", "(", "in_handle", ",", "out_handle", ",", "alphabet", "=", "None", ")", ":", "from", "Bio", ".", "GenBank", ".", "Scanner", "import", "EmblScanner", "records", "=", "EmblScanner", "(", ")", ".", "parse_records", "(", "in_handle", ",", "do_features", "=", "False", ")", "return", "SeqIO", ".", "write", "(", "records", ",", "out_handle", ",", "'fasta'", ")" ]
fast embl to fasta .
train
false
1,126
def get_template_request_context(request=None):
    if (request is None):
        request = get_current_request()
    if (request is None):
        return None
    request_cache_dict = request_cache.get_cache('edxmako')
    cache_key = 'request_context'
    if (cache_key in request_cache_dict):
        return request_cache_dict[cache_key]
    context = RequestContext(request)
    context['is_secure'] = request.is_secure()
    context['site'] = safe_get_host(request)
    for processor in get_template_context_processors():
        context.update(processor(request))
    request_cache_dict[cache_key] = context
    return context
[ "def", "get_template_request_context", "(", "request", "=", "None", ")", ":", "if", "(", "request", "is", "None", ")", ":", "request", "=", "get_current_request", "(", ")", "if", "(", "request", "is", "None", ")", ":", "return", "None", "request_cache_dict", "=", "request_cache", ".", "get_cache", "(", "'edxmako'", ")", "cache_key", "=", "'request_context'", "if", "(", "cache_key", "in", "request_cache_dict", ")", ":", "return", "request_cache_dict", "[", "cache_key", "]", "context", "=", "RequestContext", "(", "request", ")", "context", "[", "'is_secure'", "]", "=", "request", ".", "is_secure", "(", ")", "context", "[", "'site'", "]", "=", "safe_get_host", "(", "request", ")", "for", "processor", "in", "get_template_context_processors", "(", ")", ":", "context", ".", "update", "(", "processor", "(", "request", ")", ")", "request_cache_dict", "[", "cache_key", "]", "=", "context", "return", "context" ]
returns the template processing context to use for the current request .
train
false
1,127
def mocked_i18n_open(*content):
    read_data = ''.join((textwrap.dedent(c) for c in content))
    return patch.object(pavelib.i18n, 'open', create=True, new=mock_open(read_data=read_data))
[ "def", "mocked_i18n_open", "(", "*", "content", ")", ":", "read_data", "=", "''", ".", "join", "(", "(", "textwrap", ".", "dedent", "(", "c", ")", "for", "c", "in", "content", ")", ")", "return", "patch", ".", "object", "(", "pavelib", ".", "i18n", ",", "'open'", ",", "create", "=", "True", ",", "new", "=", "mock_open", "(", "read_data", "=", "read_data", ")", ")" ]
helper decorator to mock open() in pavelib .
train
false
1,129
def decodeInventoryEntry_level1(document):
    count = Inventory(str(document))
    for token in document:
        if (token.pos_ == (u'NOUN' or u'NNS' or u'NN')):
            item = str(token)
            for child in token.children:
                if ((child.dep_ == u'compound') or (child.dep_ == u'ad')):
                    item = (str(child) + str(item))
                elif (child.dep_ == u'nummod'):
                    count.amount = str(child).strip()
                    for numerical_child in child.children:
                        count.amount = (str(numerical_child) + str(count.amount).strip())
                else:
                    print ((('WARNING: unknown child: ' + str(child)) + ':') + str(child.dep_))
            count.item = item
            count.unit = item
    return count
[ "def", "decodeInventoryEntry_level1", "(", "document", ")", ":", "count", "=", "Inventory", "(", "str", "(", "document", ")", ")", "for", "token", "in", "document", ":", "if", "(", "token", ".", "pos_", "==", "(", "u'NOUN'", "or", "u'NNS'", "or", "u'NN'", ")", ")", ":", "item", "=", "str", "(", "token", ")", "for", "child", "in", "token", ".", "children", ":", "if", "(", "(", "child", ".", "dep_", "==", "u'compound'", ")", "or", "(", "child", ".", "dep_", "==", "u'ad'", ")", ")", ":", "item", "=", "(", "str", "(", "child", ")", "+", "str", "(", "item", ")", ")", "elif", "(", "child", ".", "dep_", "==", "u'nummod'", ")", ":", "count", ".", "amount", "=", "str", "(", "child", ")", ".", "strip", "(", ")", "for", "numerical_child", "in", "child", ".", "children", ":", "count", ".", "amount", "=", "(", "str", "(", "numerical_child", ")", "+", "str", "(", "count", ".", "amount", ")", ".", "strip", "(", ")", ")", "else", ":", "print", "(", "(", "(", "'WARNING: unknown child: '", "+", "str", "(", "child", ")", ")", "+", "':'", ")", "+", "str", "(", "child", ".", "dep_", ")", ")", "count", ".", "item", "=", "item", "count", ".", "unit", "=", "item", "return", "count" ]
decodes a basic entry such as: 6 lobster cake or 6 cakes .
train
false
1,130
def create_menu_item(ia, parent_menu, menu_text, image=None, tooltip=None, shortcut=(), triggered=None, is_checked=None):
    if (shortcut is not None):
        if (len(shortcut) == 0):
            shortcut = ()
        else:
            shortcut = _(shortcut)
    ac = ia.create_action(spec=(menu_text, None, tooltip, shortcut), attr=menu_text)
    if image:
        ac.setIcon(get_icon(image))
    if (triggered is not None):
        ac.triggered.connect(triggered)
    if (is_checked is not None):
        ac.setCheckable(True)
        if is_checked:
            ac.setChecked(True)
    parent_menu.addAction(ac)
    return ac
[ "def", "create_menu_item", "(", "ia", ",", "parent_menu", ",", "menu_text", ",", "image", "=", "None", ",", "tooltip", "=", "None", ",", "shortcut", "=", "(", ")", ",", "triggered", "=", "None", ",", "is_checked", "=", "None", ")", ":", "if", "(", "shortcut", "is", "not", "None", ")", ":", "if", "(", "len", "(", "shortcut", ")", "==", "0", ")", ":", "shortcut", "=", "(", ")", "else", ":", "shortcut", "=", "_", "(", "shortcut", ")", "ac", "=", "ia", ".", "create_action", "(", "spec", "=", "(", "menu_text", ",", "None", ",", "tooltip", ",", "shortcut", ")", ",", "attr", "=", "menu_text", ")", "if", "image", ":", "ac", ".", "setIcon", "(", "get_icon", "(", "image", ")", ")", "if", "(", "triggered", "is", "not", "None", ")", ":", "ac", ".", "triggered", ".", "connect", "(", "triggered", ")", "if", "(", "is_checked", "is", "not", "None", ")", ":", "ac", ".", "setCheckable", "(", "True", ")", "if", "is_checked", ":", "ac", ".", "setChecked", "(", "True", ")", "parent_menu", ".", "addAction", "(", "ac", ")", "return", "ac" ]
create a menu action with the specified criteria and action . note that if no shortcut is specified , an empty shortcut tuple is used .
train
false
1,132
@requires_auth('home') def schema_collection_endpoint(): schemas = {} for (resource_name, resource_config) in app.config['DOMAIN'].items(): if resource_name.endswith(config.VERSIONS): continue internal = resource_config.get('internal_resource', False) if internal: continue auth = resource_auth(resource_name) if (auth and (request.method not in resource_config['public_methods'])): roles = list(resource_config['allowed_roles']) roles += resource_config['allowed_read_roles'] if (not auth.authorized(roles, resource_name, request.method)): continue schemas[resource_name] = resource_config['schema'] return send_response(None, (schemas,))
[ "@", "requires_auth", "(", "'home'", ")", "def", "schema_collection_endpoint", "(", ")", ":", "schemas", "=", "{", "}", "for", "(", "resource_name", ",", "resource_config", ")", "in", "app", ".", "config", "[", "'DOMAIN'", "]", ".", "items", "(", ")", ":", "if", "resource_name", ".", "endswith", "(", "config", ".", "VERSIONS", ")", ":", "continue", "internal", "=", "resource_config", ".", "get", "(", "'internal_resource'", ",", "False", ")", "if", "internal", ":", "continue", "auth", "=", "resource_auth", "(", "resource_name", ")", "if", "(", "auth", "and", "(", "request", ".", "method", "not", "in", "resource_config", "[", "'public_methods'", "]", ")", ")", ":", "roles", "=", "list", "(", "resource_config", "[", "'allowed_roles'", "]", ")", "roles", "+=", "resource_config", "[", "'allowed_read_roles'", "]", "if", "(", "not", "auth", ".", "authorized", "(", "roles", ",", "resource_name", ",", "request", ".", "method", ")", ")", ":", "continue", "schemas", "[", "resource_name", "]", "=", "resource_config", "[", "'schema'", "]", "return", "send_response", "(", "None", ",", "(", "schemas", ",", ")", ")" ]
this endpoint is active when schema_endpoint != none ; it returns the schema of every resource the request is authorized to read .
train
false
1,133
def convert_png_to_data_url(filepath): file_contents = get_file_contents(filepath, raw_bytes=True, mode='rb') return convert_png_binary_to_data_url(file_contents)
[ "def", "convert_png_to_data_url", "(", "filepath", ")", ":", "file_contents", "=", "get_file_contents", "(", "filepath", ",", "raw_bytes", "=", "True", ",", "mode", "=", "'rb'", ")", "return", "convert_png_binary_to_data_url", "(", "file_contents", ")" ]
converts the png file at filepath to a data url .
train
false
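a hedged sketch of what the unshown helper convert_png_binary_to_data_url presumably does ( the base64 encoding is an assumption , not confirmed by the source ) :
import base64

def png_binary_to_data_url(file_contents):
    # assumption: a standard base64 data url; the real helper may escape differently
    return 'data:image/png;base64,' + base64.b64encode(file_contents).decode('ascii')

print(png_binary_to_data_url(b'\x89PNG\r\n\x1a\n'))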
1,134
def fixup_indent(suite): kids = suite.children[::(-1)] while kids: node = kids.pop() if (node.type == token.INDENT): break while kids: node = kids.pop() if (isinstance(node, Leaf) and (node.type != token.DEDENT)): if node.prefix: node.prefix = u'' return else: kids.extend(node.children[::(-1)])
[ "def", "fixup_indent", "(", "suite", ")", ":", "kids", "=", "suite", ".", "children", "[", ":", ":", "(", "-", "1", ")", "]", "while", "kids", ":", "node", "=", "kids", ".", "pop", "(", ")", "if", "(", "node", ".", "type", "==", "token", ".", "INDENT", ")", ":", "break", "while", "kids", ":", "node", "=", "kids", ".", "pop", "(", ")", "if", "(", "isinstance", "(", "node", ",", "Leaf", ")", "and", "(", "node", ".", "type", "!=", "token", ".", "DEDENT", ")", ")", ":", "if", "node", ".", "prefix", ":", "node", ".", "prefix", "=", "u''", "return", "else", ":", "kids", ".", "extend", "(", "node", ".", "children", "[", ":", ":", "(", "-", "1", ")", "]", ")" ]
if an indent is followed by a thing with a prefix , nuke the prefix ; otherwise we get in trouble when removing __metaclass__ at suite start .
train
true
1,135
def waitforbuttonpress(*args, **kwargs): return gcf().waitforbuttonpress(*args, **kwargs)
[ "def", "waitforbuttonpress", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "gcf", "(", ")", ".", "waitforbuttonpress", "(", "*", "args", ",", "**", "kwargs", ")" ]
blocking call to interact with the figure .
train
false
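a usage sketch ( needs matplotlib with an interactive backend ) ; the call returns True for a key press , False for a mouse click , and None if the timeout expires :
import matplotlib.pyplot as plt

plt.plot([0, 1], [0, 1])
pressed = plt.waitforbuttonpress(timeout=10)
print('key' if pressed else 'timeout' if pressed is None else 'mouse')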
1,136
def survey_answer_list_represent(value): db = current.db qtable = current.s3db.survey_question answer_text = value answer_list = answer_text.splitlines() result = TABLE() questions = {} xml_decode = S3Codec.xml_decode for line in answer_list: line = xml_decode(line) (question, answer) = line.split(',', 1) question = question.strip('" ') if (question in questions): question = questions[question] else: query = (qtable.code == question) qstn = db(query).select(qtable.name, limitby=(0, 1)).first() if (not qstn): continue questions[question] = qstn.name question = qstn.name answer = answer.strip('" ') result.append(TR(TD(B(question)), TD(answer))) return result
[ "def", "survey_answer_list_represent", "(", "value", ")", ":", "db", "=", "current", ".", "db", "qtable", "=", "current", ".", "s3db", ".", "survey_question", "answer_text", "=", "value", "answer_list", "=", "answer_text", ".", "splitlines", "(", ")", "result", "=", "TABLE", "(", ")", "questions", "=", "{", "}", "xml_decode", "=", "S3Codec", ".", "xml_decode", "for", "line", "in", "answer_list", ":", "line", "=", "xml_decode", "(", "line", ")", "(", "question", ",", "answer", ")", "=", "line", ".", "split", "(", "','", ",", "1", ")", "question", "=", "question", ".", "strip", "(", "'\" '", ")", "if", "(", "question", "in", "questions", ")", ":", "question", "=", "questions", "[", "question", "]", "else", ":", "query", "=", "(", "qtable", ".", "code", "==", "question", ")", "qstn", "=", "db", "(", "query", ")", ".", "select", "(", "qtable", ".", "name", ",", "limitby", "=", "(", "0", ",", "1", ")", ")", ".", "first", "(", ")", "if", "(", "not", "qstn", ")", ":", "continue", "questions", "[", "question", "]", "=", "qstn", ".", "name", "question", "=", "qstn", ".", "name", "answer", "=", "answer", ".", "strip", "(", "'\" '", ")", "result", ".", "append", "(", "TR", "(", "TD", "(", "B", "(", "question", ")", ")", ",", "TD", "(", "answer", ")", ")", ")", "return", "result" ]
display the answer list in a formatted table .
train
false
1,137
def update_list_settings(doctype, list_settings, for_update=False): if for_update: current = json.loads(list_settings) else: current = json.loads(get_list_settings(doctype, for_update=True)) if isinstance(current, basestring): current = {} current.update(list_settings) frappe.cache().hset('_list_settings', '{0}::{1}'.format(doctype, frappe.session.user), json.dumps(current))
[ "def", "update_list_settings", "(", "doctype", ",", "list_settings", ",", "for_update", "=", "False", ")", ":", "if", "for_update", ":", "current", "=", "json", ".", "loads", "(", "list_settings", ")", "else", ":", "current", "=", "json", ".", "loads", "(", "get_list_settings", "(", "doctype", ",", "for_update", "=", "True", ")", ")", "if", "isinstance", "(", "current", ",", "basestring", ")", ":", "current", "=", "{", "}", "current", ".", "update", "(", "list_settings", ")", "frappe", ".", "cache", "(", ")", ".", "hset", "(", "'_list_settings'", ",", "'{0}::{1}'", ".", "format", "(", "doctype", ",", "frappe", ".", "session", ".", "user", ")", ",", "json", ".", "dumps", "(", "current", ")", ")" ]
update list settings in cache .
train
false
1,138
def get_connection_func(service, module=None): return partial(get_connection, service, module=module)
[ "def", "get_connection_func", "(", "service", ",", "module", "=", "None", ")", ":", "return", "partial", "(", "get_connection", ",", "service", ",", "module", "=", "module", ")" ]
returns a partial get_connection function for the provided service .
train
false
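a self-contained sketch of the partial pattern ; the get_connection stub below is hypothetical and only stands in for the real one :
from functools import partial

def get_connection(service, module=None):  # hypothetical stub for illustration
    return (service, module)

get_ec2 = partial(get_connection, 'ec2')
assert get_ec2(module='boto') == ('ec2', 'boto')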
1,140
def notificationResponse(): a = TpPd(pd=6) b = MessageType(mesType=38) c = MobileStationClassmark2() d = MobileId() e = DescriptiveGroupOrBroadcastCallReference() packet = ((((a / b) / c) / d) / e) return packet
[ "def", "notificationResponse", "(", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "6", ")", "b", "=", "MessageType", "(", "mesType", "=", "38", ")", "c", "=", "MobileStationClassmark2", "(", ")", "d", "=", "MobileId", "(", ")", "e", "=", "DescriptiveGroupOrBroadcastCallReference", "(", ")", "packet", "=", "(", "(", "(", "(", "a", "/", "b", ")", "/", "c", ")", "/", "d", ")", "/", "e", ")", "return", "packet" ]
notification response section 9 .
train
true
1,141
def tryInt(s, s_default=0): try: return int(s) except Exception: return s_default
[ "def", "tryInt", "(", "s", ",", "s_default", "=", "0", ")", ":", "try", ":", "return", "int", "(", "s", ")", "except", "Exception", ":", "return", "s_default" ]
try to convert to int .
train
false
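usage , with the function above in scope :
assert tryInt('7') == 7
assert tryInt('oops') == 0
assert tryInt('oops', -1) == -1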
1,142
def sftp_from_config(config): host = config[u'host'] port = config[u'port'] username = config[u'username'] password = config[u'password'] private_key = config[u'private_key'] private_key_pass = config[u'private_key_pass'] conn_conf = ConnectionConfig(host, port, username, password, private_key, private_key_pass) try: sftp = sftp_connect(conn_conf) except Exception as e: raise plugin.PluginError((u'Failed to connect to %s (%s)' % (host, e))) return sftp
[ "def", "sftp_from_config", "(", "config", ")", ":", "host", "=", "config", "[", "u'host'", "]", "port", "=", "config", "[", "u'port'", "]", "username", "=", "config", "[", "u'username'", "]", "password", "=", "config", "[", "u'password'", "]", "private_key", "=", "config", "[", "u'private_key'", "]", "private_key_pass", "=", "config", "[", "u'private_key_pass'", "]", "conn_conf", "=", "ConnectionConfig", "(", "host", ",", "port", ",", "username", ",", "password", ",", "private_key", ",", "private_key_pass", ")", "try", ":", "sftp", "=", "sftp_connect", "(", "conn_conf", ")", "except", "Exception", "as", "e", ":", "raise", "plugin", ".", "PluginError", "(", "(", "u'Failed to connect to %s (%s)'", "%", "(", "host", ",", "e", ")", ")", ")", "return", "sftp" ]
creates an sftp connection from a flexget config object .
train
false
1,143
def _audio_data_generator(buff): stop = False while (not stop): data = [buff.get()] while True: try: data.append(buff.get(block=False)) except queue.Empty: break if (None in data): stop = True data.remove(None) (yield ''.join(data))
[ "def", "_audio_data_generator", "(", "buff", ")", ":", "stop", "=", "False", "while", "(", "not", "stop", ")", ":", "data", "=", "[", "buff", ".", "get", "(", ")", "]", "while", "True", ":", "try", ":", "data", ".", "append", "(", "buff", ".", "get", "(", "block", "=", "False", ")", ")", "except", "queue", ".", "Empty", ":", "break", "if", "(", "None", "in", "data", ")", ":", "stop", "=", "True", "data", ".", "remove", "(", "None", ")", "(", "yield", "''", ".", "join", "(", "data", ")", ")" ]
a generator that yields all available data in the given buffer .
train
false
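a quick demonstration with an in-memory queue , assuming the generator above is defined ; a None sentinel terminates the stream :
import queue

buff = queue.Queue()
for chunk in ('ab', 'cd', None):
    buff.put(chunk)
print(list(_audio_data_generator(buff)))  # ['abcd']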
1,144
def _check_notification_opt_out(event_type, outcome): if ('authenticate' in event_type): event_type = ((event_type + '.') + outcome) if (event_type in CONF.notification_opt_out): return True return False
[ "def", "_check_notification_opt_out", "(", "event_type", ",", "outcome", ")", ":", "if", "(", "'authenticate'", "in", "event_type", ")", ":", "event_type", "=", "(", "(", "event_type", "+", "'.'", ")", "+", "outcome", ")", "if", "(", "event_type", "in", "CONF", ".", "notification_opt_out", ")", ":", "return", "True", "return", "False" ]
check if a particular event_type has been opted-out of .
train
false
1,146
@endpoint(u'/interface-data/book-metadata/{book_id=0}', postprocess=json) def book_metadata(ctx, rd, book_id): (library_id, db) = get_basic_query_data(ctx, rd)[:2] book_ids = ctx.allowed_book_ids(rd, db) def notfound(): raise HTTPNotFound((_(u'No book with id: %d in library') % book_id)) if (not book_ids): notfound() if (not book_id): book_id = random.choice(tuple(book_ids)) elif (book_id not in book_ids): notfound() data = book_as_json(db, book_id) if (data is None): notfound() data[u'id'] = book_id return data
[ "@", "endpoint", "(", "u'/interface-data/book-metadata/{book_id=0}'", ",", "postprocess", "=", "json", ")", "def", "book_metadata", "(", "ctx", ",", "rd", ",", "book_id", ")", ":", "(", "library_id", ",", "db", ")", "=", "get_basic_query_data", "(", "ctx", ",", "rd", ")", "[", ":", "2", "]", "book_ids", "=", "ctx", ".", "allowed_book_ids", "(", "rd", ",", "db", ")", "def", "notfound", "(", ")", ":", "raise", "HTTPNotFound", "(", "(", "_", "(", "u'No book with id: %d in library'", ")", "%", "book_id", ")", ")", "if", "(", "not", "book_ids", ")", ":", "notfound", "(", ")", "if", "(", "not", "book_id", ")", ":", "book_id", "=", "random", ".", "choice", "(", "tuple", "(", "book_ids", ")", ")", "elif", "(", "book_id", "not", "in", "book_ids", ")", ":", "notfound", "(", ")", "data", "=", "book_as_json", "(", "db", ",", "book_id", ")", "if", "(", "data", "is", "None", ")", ":", "notfound", "(", ")", "data", "[", "u'id'", "]", "=", "book_id", "return", "data" ]
get metadata for the specified book .
train
false
1,147
def fingerprint(registry, xml_parent, data): finger = XML.SubElement(xml_parent, 'hudson.tasks.Fingerprinter') mappings = [('files', 'targets', ''), ('record-artifacts', 'recordBuildArtifacts', False)] helpers.convert_mapping_to_xml(finger, data, mappings, fail_required=True)
[ "def", "fingerprint", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "finger", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'hudson.tasks.Fingerprinter'", ")", "mappings", "=", "[", "(", "'files'", ",", "'targets'", ",", "''", ")", ",", "(", "'record-artifacts'", ",", "'recordBuildArtifacts'", ",", "False", ")", "]", "helpers", ".", "convert_mapping_to_xml", "(", "finger", ",", "data", ",", "mappings", ",", "fail_required", "=", "True", ")" ]
fingerprint files to track them across builds .
train
false
1,152
@command(usage='convert thunder:// (and more) to normal url') def decode_url(args): from lixian_url import url_unmask for x in args: print url_unmask(x)
[ "@", "command", "(", "usage", "=", "'convert thunder:// (and more) to normal url'", ")", "def", "decode_url", "(", "args", ")", ":", "from", "lixian_url", "import", "url_unmask", "for", "x", "in", "args", ":", "print", "url_unmask", "(", "x", ")" ]
usage: lx decode-url thunder:// .
train
false
1,154
def socket_read_n(sock, n): buf = '' while (n > 0): data = sock.recv(n) if (data == ''): raise RuntimeError('unexpected connection close') buf += data n -= len(data) return buf
[ "def", "socket_read_n", "(", "sock", ",", "n", ")", ":", "buf", "=", "''", "while", "(", "n", ">", "0", ")", ":", "data", "=", "sock", ".", "recv", "(", "n", ")", "if", "(", "data", "==", "''", ")", ":", "raise", "RuntimeError", "(", "'unexpected connection close'", ")", "buf", "+=", "data", "n", "-=", "len", "(", "data", ")", "return", "buf" ]
read exactly n bytes from the socket ; raise runtimeerror if the connection closes first .
train
false
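a usage sketch ; the snippet concatenates onto a str buffer ( python 2 semantics ) , so on python 3 the buffer and data would need to be bytes :
import socket

a, b = socket.socketpair()  # unix-only convenience for the demo
b.sendall('hello')          # b'hello' on python 3, with a bytes buffer in the function
print(socket_read_n(a, 5))  # 'hello'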
1,155
def get_comment(conn, table, field): rows = query(conn, '\n SELECT d.description AS comment\n FROM pg_class c\n JOIN pg_description d ON c.oid=d.objoid\n JOIN pg_attribute a ON c.oid = a.attrelid\n WHERE c.relname=%s AND a.attname=%s\n AND a.attnum = d.objsubid\n ;', table, field['column_name']) return ((rows and rows[0]['comment']) or None)
[ "def", "get_comment", "(", "conn", ",", "table", ",", "field", ")", ":", "rows", "=", "query", "(", "conn", ",", "'\\n SELECT d.description AS comment\\n FROM pg_class c\\n JOIN pg_description d ON c.oid=d.objoid\\n JOIN pg_attribute a ON c.oid = a.attrelid\\n WHERE c.relname=%s AND a.attname=%s\\n AND a.attnum = d.objsubid\\n ;'", ",", "table", ",", "field", "[", "'column_name'", "]", ")", "return", "(", "(", "rows", "and", "rows", "[", "0", "]", "[", "'comment'", "]", ")", "or", "None", ")" ]
find the column comment .
train
false
1,158
def vb_get_manager(): global _virtualboxManager if ((_virtualboxManager is None) and HAS_LIBS): reload(vboxapi) _virtualboxManager = vboxapi.VirtualBoxManager(None, None) return _virtualboxManager
[ "def", "vb_get_manager", "(", ")", ":", "global", "_virtualboxManager", "if", "(", "(", "_virtualboxManager", "is", "None", ")", "and", "HAS_LIBS", ")", ":", "reload", "(", "vboxapi", ")", "_virtualboxManager", "=", "vboxapi", ".", "VirtualBoxManager", "(", "None", ",", "None", ")", "return", "_virtualboxManager" ]
creates a "singleton" manager to communicate with a local virtualbox hypervisor .
train
false
1,164
def make_shared_replacements(vars, model): othervars = (set(model.vars) - set(vars)) return {var: theano.shared(var.tag.test_value, (var.name + '_shared')) for var in othervars}
[ "def", "make_shared_replacements", "(", "vars", ",", "model", ")", ":", "othervars", "=", "(", "set", "(", "model", ".", "vars", ")", "-", "set", "(", "vars", ")", ")", "return", "{", "var", ":", "theano", ".", "shared", "(", "var", ".", "tag", ".", "test_value", ",", "(", "var", ".", "name", "+", "'_shared'", ")", ")", "for", "var", "in", "othervars", "}" ]
makes shared replacements for all *other* variables than the ones passed .
train
false
1,165
def test_redapp(): saved = [] def saveit(status, headers, exc_info=None): saved.append((status, headers)) def redapp(environ, start_response): raise HTTPFound('/bing/foo') app = HTTPExceptionHandler(redapp) result = list(app({'HTTP_ACCEPT': 'text/html'}, saveit)) assert ('<a href="/bing/foo">' in result[0]) assert ('302 Found' == saved[0][0]) if six.PY3: assert ('text/html; charset=utf8' == header_value(saved[0][1], 'content-type')) else: assert ('text/html' == header_value(saved[0][1], 'content-type')) assert ('/bing/foo' == header_value(saved[0][1], 'location')) result = list(app({'HTTP_ACCEPT': 'text/plain'}, saveit)) assert ('text/plain; charset=utf8' == header_value(saved[1][1], 'content-type')) assert ('/bing/foo' == header_value(saved[1][1], 'location'))
[ "def", "test_redapp", "(", ")", ":", "saved", "=", "[", "]", "def", "saveit", "(", "status", ",", "headers", ",", "exc_info", "=", "None", ")", ":", "saved", ".", "append", "(", "(", "status", ",", "headers", ")", ")", "def", "redapp", "(", "environ", ",", "start_response", ")", ":", "raise", "HTTPFound", "(", "'/bing/foo'", ")", "app", "=", "HTTPExceptionHandler", "(", "redapp", ")", "result", "=", "list", "(", "app", "(", "{", "'HTTP_ACCEPT'", ":", "'text/html'", "}", ",", "saveit", ")", ")", "assert", "(", "'<a href=\"/bing/foo\">'", "in", "result", "[", "0", "]", ")", "assert", "(", "'302 Found'", "==", "saved", "[", "0", "]", "[", "0", "]", ")", "if", "six", ".", "PY3", ":", "assert", "(", "'text/html; charset=utf8'", "==", "header_value", "(", "saved", "[", "0", "]", "[", "1", "]", ",", "'content-type'", ")", ")", "else", ":", "assert", "(", "'text/html'", "==", "header_value", "(", "saved", "[", "0", "]", "[", "1", "]", ",", "'content-type'", ")", ")", "assert", "(", "'/bing/foo'", "==", "header_value", "(", "saved", "[", "0", "]", "[", "1", "]", ",", "'location'", ")", ")", "result", "=", "list", "(", "app", "(", "{", "'HTTP_ACCEPT'", ":", "'text/plain'", "}", ",", "saveit", ")", ")", "assert", "(", "'text/plain; charset=utf8'", "==", "header_value", "(", "saved", "[", "1", "]", "[", "1", "]", ",", "'content-type'", ")", ")", "assert", "(", "'/bing/foo'", "==", "header_value", "(", "saved", "[", "1", "]", "[", "1", "]", ",", "'location'", ")", ")" ]
check that a redirect returns the correct body , status , content-type and location headers .
train
false
1,166
def is_module_or_submodule(name, mod_or_submod): return (name.startswith((mod_or_submod + '.')) or (name == mod_or_submod))
[ "def", "is_module_or_submodule", "(", "name", ",", "mod_or_submod", ")", ":", "return", "(", "name", ".", "startswith", "(", "(", "mod_or_submod", "+", "'.'", ")", ")", "or", "(", "name", "==", "mod_or_submod", ")", ")" ]
this helper function is designed for use in the filter argument of collect_submodules .
train
false
1,167
def getFloat(value): return float(value)
[ "def", "getFloat", "(", "value", ")", ":", "return", "float", "(", "value", ")" ]
get the float .
train
false
1,168
def print_result_from_timeit(stmt='pass', setup='pass', number=1000000): units = ['s', 'ms', 'us', 'ns'] duration = timeit(stmt, setup, number=number) avg_duration = (duration / float(number)) thousands = int(math.floor(math.log(avg_duration, 1000))) print ('Total time: %fs. Average run: %.3f%s.' % (duration, (avg_duration * (1000 ** (- thousands))), units[(- thousands)]))
[ "def", "print_result_from_timeit", "(", "stmt", "=", "'pass'", ",", "setup", "=", "'pass'", ",", "number", "=", "1000000", ")", ":", "units", "=", "[", "'s'", ",", "'ms'", ",", "'us'", ",", "'ns'", "]", "duration", "=", "timeit", "(", "stmt", ",", "setup", ",", "number", "=", "number", ")", "avg_duration", "=", "(", "duration", "/", "float", "(", "number", ")", ")", "thousands", "=", "int", "(", "math", ".", "floor", "(", "math", ".", "log", "(", "avg_duration", ",", "1000", ")", ")", ")", "print", "(", "'Total time: %fs. Average run: %.3f%s.'", "%", "(", "duration", ",", "(", "avg_duration", "*", "(", "1000", "**", "(", "-", "thousands", ")", ")", ")", ",", "units", "[", "(", "-", "thousands", ")", "]", ")", ")" ]
clean function to report how much time the execution of one statement took .
train
true
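usage , assuming timeit and math are imported as the snippet expects :
print_result_from_timeit('sum(range(100))', number=10000)
# prints something like: Total time: 0.012345s. Average run: 1.234us.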
1,169
def substr_in_list(string_to_search_for, list_to_search): return any(((string_to_search_for in s) for s in list_to_search))
[ "def", "substr_in_list", "(", "string_to_search_for", ",", "list_to_search", ")", ":", "return", "any", "(", "(", "(", "string_to_search_for", "in", "s", ")", "for", "s", "in", "list_to_search", ")", ")" ]
return a boolean value that indicates whether the given string is a substring of any of the strings in the list .
train
false
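usage , with the function above in scope :
assert substr_in_list('err', ['no error', 'ok'])
assert not substr_in_list('xyz', ['no error', 'ok'])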
1,171
def db_next_hid(self, n=1): conn = object_session(self).connection() table = self.table trans = conn.begin() try: next_hid = select([table.c.hid_counter], (table.c.id == self.id), for_update=True).scalar() table.update((table.c.id == self.id)).execute(hid_counter=(next_hid + n)) trans.commit() return next_hid except: trans.rollback() raise
[ "def", "db_next_hid", "(", "self", ",", "n", "=", "1", ")", ":", "conn", "=", "object_session", "(", "self", ")", ".", "connection", "(", ")", "table", "=", "self", ".", "table", "trans", "=", "conn", ".", "begin", "(", ")", "try", ":", "next_hid", "=", "select", "(", "[", "table", ".", "c", ".", "hid_counter", "]", ",", "(", "table", ".", "c", ".", "id", "==", "self", ".", "id", ")", ",", "for_update", "=", "True", ")", ".", "scalar", "(", ")", "table", ".", "update", "(", "(", "table", ".", "c", ".", "id", "==", "self", ".", "id", ")", ")", ".", "execute", "(", "hid_counter", "=", "(", "next_hid", "+", "n", ")", ")", "trans", ".", "commit", "(", ")", "return", "next_hid", "except", ":", "trans", ".", "rollback", "(", ")", "raise" ]
override __next_hid to generate the next hid from the database in a concurrency-safe way .
train
false
1,172
def _number_of_set_bits(x): x -= ((x >> 1) & 1431655765) x = (((x >> 2) & 858993459) + (x & 858993459)) x = (((x >> 4) + x) & 252645135) x += (x >> 8) x += (x >> 16) return (x & 63)
[ "def", "_number_of_set_bits", "(", "x", ")", ":", "x", "-=", "(", "(", "x", ">>", "1", ")", "&", "1431655765", ")", "x", "=", "(", "(", "(", "x", ">>", "2", ")", "&", "858993459", ")", "+", "(", "x", "&", "858993459", ")", ")", "x", "=", "(", "(", "(", "x", ">>", "4", ")", "+", "x", ")", "&", "252645135", ")", "x", "+=", "(", "x", ">>", "8", ")", "x", "+=", "(", "x", ">>", "16", ")", "return", "(", "x", "&", "63", ")" ]
returns the number of bits that are set in a 32bit int .
train
true
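a quick check of the bit-twiddling popcount against the naive count , for values up to 32 bits :
for x in (0, 1, 0b1010, 0xffffffff):
    assert _number_of_set_bits(x) == bin(x).count('1')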
1,173
def vm_snapshot_create(vm_name, kwargs=None, call=None): if (call != 'action'): raise SaltCloudSystemExit('The vm_snapshot_create action must be called with -a or --action.') if (kwargs is None): kwargs = {} snapshot_name = kwargs.get('snapshot_name', None) if (snapshot_name is None): raise SaltCloudSystemExit("The vm_snapshot_create function requires a 'snapshot_name' to be provided.") (server, user, password) = _get_xml_rpc() auth = ':'.join([user, password]) vm_id = int(get_vm_id(kwargs={'name': vm_name})) response = server.one.vm.snapshotcreate(auth, vm_id, snapshot_name) data = {'action': 'vm.snapshotcreate', 'snapshot_created': response[0], 'snapshot_id': response[1], 'error_code': response[2]} return data
[ "def", "vm_snapshot_create", "(", "vm_name", ",", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The vm_snapshot_create action must be called with -a or --action.'", ")", "if", "(", "kwargs", "is", "None", ")", ":", "kwargs", "=", "{", "}", "snapshot_name", "=", "kwargs", ".", "get", "(", "'snapshot_name'", ",", "None", ")", "if", "(", "snapshot_name", "is", "None", ")", ":", "raise", "SaltCloudSystemExit", "(", "\"The vm_snapshot_create function requires a 'snapshot_name' to be provided.\"", ")", "(", "server", ",", "user", ",", "password", ")", "=", "_get_xml_rpc", "(", ")", "auth", "=", "':'", ".", "join", "(", "[", "user", ",", "password", "]", ")", "vm_id", "=", "int", "(", "get_vm_id", "(", "kwargs", "=", "{", "'name'", ":", "vm_name", "}", ")", ")", "response", "=", "server", ".", "one", ".", "vm", ".", "snapshotcreate", "(", "auth", ",", "vm_id", ",", "snapshot_name", ")", "data", "=", "{", "'action'", ":", "'vm.snapshotcreate'", ",", "'snapshot_created'", ":", "response", "[", "0", "]", ",", "'snapshot_id'", ":", "response", "[", "1", "]", ",", "'error_code'", ":", "response", "[", "2", "]", "}", "return", "data" ]
creates a new virtual machine snapshot from the provided vm .
train
true
1,174
def test_help_commands_equally_functional(script): results = list(map(script.pip, ('help', '--help'))) results.append(script.pip()) out = map((lambda x: x.stdout), results) ret = map((lambda x: x.returncode), results) msg = '"pip --help" != "pip help" != "pip"' assert (len(set(out)) == 1), ('output of: ' + msg) assert (sum(ret) == 0), ('exit codes of: ' + msg) for (name, cls) in commands.items(): if cls.hidden: continue assert (script.pip('help', name).stdout == script.pip(name, '--help').stdout)
[ "def", "test_help_commands_equally_functional", "(", "script", ")", ":", "results", "=", "list", "(", "map", "(", "script", ".", "pip", ",", "(", "'help'", ",", "'--help'", ")", ")", ")", "results", ".", "append", "(", "script", ".", "pip", "(", ")", ")", "out", "=", "map", "(", "(", "lambda", "x", ":", "x", ".", "stdout", ")", ",", "results", ")", "ret", "=", "map", "(", "(", "lambda", "x", ":", "x", ".", "returncode", ")", ",", "results", ")", "msg", "=", "'\"pip --help\" != \"pip help\" != \"pip\"'", "assert", "(", "len", "(", "set", "(", "out", ")", ")", "==", "1", ")", ",", "(", "'output of: '", "+", "msg", ")", "assert", "(", "sum", "(", "ret", ")", "==", "0", ")", ",", "(", "'exit codes of: '", "+", "msg", ")", "for", "(", "name", ",", "cls", ")", "in", "commands", ".", "items", "(", ")", ":", "if", "cls", ".", "hidden", ":", "continue", "assert", "(", "script", ".", "pip", "(", "'help'", ",", "name", ")", ".", "stdout", "==", "script", ".", "pip", "(", "name", ",", "'--help'", ")", ".", "stdout", ")" ]
test if pip help and pip --help behave the same way .
train
false
1,175
def init_model(): pass
[ "def", "init_model", "(", ")", ":", "pass" ]
call me before using any of the tables or classes in the model .
train
false
1,176
def single_char_or_unicode(argument): char = unicode_code(argument) if (len(char) > 1): raise ValueError(('%r invalid; must be a single character or a Unicode code' % char)) return char
[ "def", "single_char_or_unicode", "(", "argument", ")", ":", "char", "=", "unicode_code", "(", "argument", ")", "if", "(", "len", "(", "char", ")", ">", "1", ")", ":", "raise", "ValueError", "(", "(", "'%r invalid; must be a single character or a Unicode code'", "%", "char", ")", ")", "return", "char" ]
a single character is returned as-is ; unicode character codes are converted to the corresponding character first .
train
false
1,179
def constant_grad(expr): grad = {} for var in expr.variables(): rows = (var.size[0] * var.size[1]) cols = (expr.size[0] * expr.size[1]) if ((rows, cols) == (1, 1)): grad[var] = 0.0 else: grad[var] = sp.csc_matrix((rows, cols), dtype='float64') return grad
[ "def", "constant_grad", "(", "expr", ")", ":", "grad", "=", "{", "}", "for", "var", "in", "expr", ".", "variables", "(", ")", ":", "rows", "=", "(", "var", ".", "size", "[", "0", "]", "*", "var", ".", "size", "[", "1", "]", ")", "cols", "=", "(", "expr", ".", "size", "[", "0", "]", "*", "expr", ".", "size", "[", "1", "]", ")", "if", "(", "(", "rows", ",", "cols", ")", "==", "(", "1", ",", "1", ")", ")", ":", "grad", "[", "var", "]", "=", "0.0", "else", ":", "grad", "[", "var", "]", "=", "sp", ".", "csc_matrix", "(", "(", "rows", ",", "cols", ")", ",", "dtype", "=", "'float64'", ")", "return", "grad" ]
returns the gradient of constant terms in an expression .
train
false
1,180
def _ord_to_namespace(n, _max_length=None): if (_max_length is None): _max_length = MAX_NAMESPACE_LENGTH length = _LEX_DISTANCE[(_max_length - 1)] if (n == 0): return '' n -= 1 return (NAMESPACE_CHARACTERS[(n / length)] + _ord_to_namespace((n % length), (_max_length - 1)))
[ "def", "_ord_to_namespace", "(", "n", ",", "_max_length", "=", "None", ")", ":", "if", "(", "_max_length", "is", "None", ")", ":", "_max_length", "=", "MAX_NAMESPACE_LENGTH", "length", "=", "_LEX_DISTANCE", "[", "(", "_max_length", "-", "1", ")", "]", "if", "(", "n", "==", "0", ")", ":", "return", "''", "n", "-=", "1", "return", "(", "NAMESPACE_CHARACTERS", "[", "(", "n", "/", "length", ")", "]", "+", "_ord_to_namespace", "(", "(", "n", "%", "length", ")", ",", "(", "_max_length", "-", "1", ")", ")", ")" ]
convert a namespace ordinal to a namespace string .
train
true
1,183
@library.global_function @contextfunction def display_context(context, include_callables=False): if (not settings.DEBUG): return '' keys = sorted(context.keys()) parts = ['<dt>{key}</dt><dd>{value}</dd>'.format(key=key, value=repr(context[key])) for key in keys if (include_callables or (not callable(context[key])))] html = '<dl class="jinja-context">{parts}</dl>'.format(parts=''.join(parts)) return Markup(html)
[ "@", "library", ".", "global_function", "@", "contextfunction", "def", "display_context", "(", "context", ",", "include_callables", "=", "False", ")", ":", "if", "(", "not", "settings", ".", "DEBUG", ")", ":", "return", "''", "keys", "=", "sorted", "(", "context", ".", "keys", "(", ")", ")", "parts", "=", "[", "'<dt>{key}</dt><dd>{value}</dd>'", ".", "format", "(", "key", "=", "key", ",", "value", "=", "repr", "(", "context", "[", "key", "]", ")", ")", "for", "key", "in", "keys", "if", "(", "include_callables", "or", "(", "not", "callable", "(", "context", "[", "key", "]", ")", ")", ")", "]", "html", "=", "'<dl class=\"jinja-context\">{parts}</dl>'", ".", "format", "(", "parts", "=", "''", ".", "join", "(", "parts", ")", ")", "return", "Markup", "(", "html", ")" ]
return a marked-up chunk of content containing the items in the template context .
train
false
1,185
def db_add_user(**kwargs): groups_post = kwargs.pop('groups') admin_groups = kwargs.pop('admin_groups') role = kwargs.get('role', 'CU') user = User(**kwargs) user.set_password(kwargs.get('password')) user.save() if groups_post: group_select = [] for group_id in groups_post: group = UserGroup.objects.filter(id=group_id) group_select.extend(group) user.group = group_select if (admin_groups and (role == 'GA')): for group_id in admin_groups: group = get_object(UserGroup, id=group_id) if group: AdminGroup(user=user, group=group).save() return user
[ "def", "db_add_user", "(", "**", "kwargs", ")", ":", "groups_post", "=", "kwargs", ".", "pop", "(", "'groups'", ")", "admin_groups", "=", "kwargs", ".", "pop", "(", "'admin_groups'", ")", "role", "=", "kwargs", ".", "get", "(", "'role'", ",", "'CU'", ")", "user", "=", "User", "(", "**", "kwargs", ")", "user", ".", "set_password", "(", "kwargs", ".", "get", "(", "'password'", ")", ")", "user", ".", "save", "(", ")", "if", "groups_post", ":", "group_select", "=", "[", "]", "for", "group_id", "in", "groups_post", ":", "group", "=", "UserGroup", ".", "objects", ".", "filter", "(", "id", "=", "group_id", ")", "group_select", ".", "extend", "(", "group", ")", "user", ".", "group", "=", "group_select", "if", "(", "admin_groups", "and", "(", "role", "==", "'GA'", ")", ")", ":", "for", "group_id", "in", "admin_groups", ":", "group", "=", "get_object", "(", "UserGroup", ",", "id", "=", "group_id", ")", "if", "group", ":", "AdminGroup", "(", "user", "=", "user", ",", "group", "=", "group", ")", ".", "save", "(", ")", "return", "user" ]
add a user in database .
train
false
1,186
def volume_create(**kwargs): return create_volume(kwargs, 'function')
[ "def", "volume_create", "(", "**", "kwargs", ")", ":", "return", "create_volume", "(", "kwargs", ",", "'function'", ")" ]
create block storage device .
train
false
1,187
def test_previtem_single(hist, monkeypatch): hist.start('f') monkeypatch.setattr(hist._tmphist, 'previtem', (lambda : 'item')) assert (hist.previtem() == 'item')
[ "def", "test_previtem_single", "(", "hist", ",", "monkeypatch", ")", ":", "hist", ".", "start", "(", "'f'", ")", "monkeypatch", ".", "setattr", "(", "hist", ".", "_tmphist", ",", "'previtem'", ",", "(", "lambda", ":", "'item'", ")", ")", "assert", "(", "hist", ".", "previtem", "(", ")", "==", "'item'", ")" ]
test previtem() with valid input .
train
false
1,188
def unique_everseen(iterable, key=None): seen = set() seen_add = seen.add if (key is None): for element in filterfalse(seen.__contains__, iterable): seen_add(element) (yield element) else: for element in iterable: k = key(element) if (k not in seen): seen_add(k) (yield element)
[ "def", "unique_everseen", "(", "iterable", ",", "key", "=", "None", ")", ":", "seen", "=", "set", "(", ")", "seen_add", "=", "seen", ".", "add", "if", "(", "key", "is", "None", ")", ":", "for", "element", "in", "filterfalse", "(", "seen", ".", "__contains__", ",", "iterable", ")", ":", "seen_add", "(", "element", ")", "(", "yield", "element", ")", "else", ":", "for", "element", "in", "iterable", ":", "k", "=", "key", "(", "element", ")", "if", "(", "k", "not", "in", "seen", ")", ":", "seen_add", "(", "k", ")", "(", "yield", "element", ")" ]
list unique elements , preserving order ; remember all elements ever seen .
train
true
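usage ( this is the classic itertools recipe ) ; filterfalse must be imported from itertools for the key=None branch :
from itertools import filterfalse

print(list(unique_everseen('AAAABBBCCDAABBB')))     # ['A', 'B', 'C', 'D']
print(list(unique_everseen('ABBCcAD', str.lower)))  # ['A', 'B', 'C', 'D']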
1,190
def publish_from_doctree(document, destination_path=None, writer=None, writer_name='pseudoxml', settings=None, settings_spec=None, settings_overrides=None, config_section=None, enable_exit_status=False): reader = docutils.readers.doctree.Reader(parser_name='null') pub = Publisher(reader, None, writer, source=io.DocTreeInput(document), destination_class=io.StringOutput, settings=settings) if ((not writer) and writer_name): pub.set_writer(writer_name) pub.process_programmatic_settings(settings_spec, settings_overrides, config_section) pub.set_destination(None, destination_path) return pub.publish(enable_exit_status=enable_exit_status)
[ "def", "publish_from_doctree", "(", "document", ",", "destination_path", "=", "None", ",", "writer", "=", "None", ",", "writer_name", "=", "'pseudoxml'", ",", "settings", "=", "None", ",", "settings_spec", "=", "None", ",", "settings_overrides", "=", "None", ",", "config_section", "=", "None", ",", "enable_exit_status", "=", "False", ")", ":", "reader", "=", "docutils", ".", "readers", ".", "doctree", ".", "Reader", "(", "parser_name", "=", "'null'", ")", "pub", "=", "Publisher", "(", "reader", ",", "None", ",", "writer", ",", "source", "=", "io", ".", "DocTreeInput", "(", "document", ")", ",", "destination_class", "=", "io", ".", "StringOutput", ",", "settings", "=", "settings", ")", "if", "(", "(", "not", "writer", ")", "and", "writer_name", ")", ":", "pub", ".", "set_writer", "(", "writer_name", ")", "pub", ".", "process_programmatic_settings", "(", "settings_spec", ",", "settings_overrides", ",", "config_section", ")", "pub", ".", "set_destination", "(", "None", ",", "destination_path", ")", "return", "pub", ".", "publish", "(", "enable_exit_status", "=", "enable_exit_status", ")" ]
set up & run a publisher to render from an existing document tree data structure .
train
false
1,191
def checkpassword_dict(user_password_dict): def checkpassword(realm, user, password): p = user_password_dict.get(user) return ((p and (p == password)) or False) return checkpassword
[ "def", "checkpassword_dict", "(", "user_password_dict", ")", ":", "def", "checkpassword", "(", "realm", ",", "user", ",", "password", ")", ":", "p", "=", "user_password_dict", ".", "get", "(", "user", ")", "return", "(", "(", "p", "and", "(", "p", "==", "password", ")", ")", "or", "False", ")", "return", "checkpassword" ]
returns a checkpassword function which checks credentials against a dictionary of the form: {username : password} .
train
false
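usage sketch ; the realm argument is accepted but ignored by the returned checker :
check = checkpassword_dict({'alice': 's3cret'})
assert check('myrealm', 'alice', 's3cret') is True
assert check('myrealm', 'alice', 'wrong') is False
assert check('myrealm', 'bob', 'whatever') is False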
1,192
def get_availability_zones(context, get_only_available=False, with_hosts=False): enabled_services = objects.ServiceList.get_all(context, disabled=False, set_zones=True) available_zones = [] for (zone, host) in [(service['availability_zone'], service['host']) for service in enabled_services]: if ((not with_hosts) and (zone not in available_zones)): available_zones.append(zone) elif with_hosts: _available_zones = dict(available_zones) zone_hosts = _available_zones.setdefault(zone, set()) zone_hosts.add(host) available_zones = list(_available_zones.items()) if (not get_only_available): disabled_services = objects.ServiceList.get_all(context, disabled=True, set_zones=True) not_available_zones = [] azs = (available_zones if (not with_hosts) else dict(available_zones)) zones = [(service['availability_zone'], service['host']) for service in disabled_services if (service['availability_zone'] not in azs)] for (zone, host) in zones: if ((not with_hosts) and (zone not in not_available_zones)): not_available_zones.append(zone) elif with_hosts: _not_available_zones = dict(not_available_zones) zone_hosts = _not_available_zones.setdefault(zone, set()) zone_hosts.add(host) not_available_zones = list(_not_available_zones.items()) return (available_zones, not_available_zones) else: return available_zones
[ "def", "get_availability_zones", "(", "context", ",", "get_only_available", "=", "False", ",", "with_hosts", "=", "False", ")", ":", "enabled_services", "=", "objects", ".", "ServiceList", ".", "get_all", "(", "context", ",", "disabled", "=", "False", ",", "set_zones", "=", "True", ")", "available_zones", "=", "[", "]", "for", "(", "zone", ",", "host", ")", "in", "[", "(", "service", "[", "'availability_zone'", "]", ",", "service", "[", "'host'", "]", ")", "for", "service", "in", "enabled_services", "]", ":", "if", "(", "(", "not", "with_hosts", ")", "and", "(", "zone", "not", "in", "available_zones", ")", ")", ":", "available_zones", ".", "append", "(", "zone", ")", "elif", "with_hosts", ":", "_available_zones", "=", "dict", "(", "available_zones", ")", "zone_hosts", "=", "_available_zones", ".", "setdefault", "(", "zone", ",", "set", "(", ")", ")", "zone_hosts", ".", "add", "(", "host", ")", "available_zones", "=", "list", "(", "_available_zones", ".", "items", "(", ")", ")", "if", "(", "not", "get_only_available", ")", ":", "disabled_services", "=", "objects", ".", "ServiceList", ".", "get_all", "(", "context", ",", "disabled", "=", "True", ",", "set_zones", "=", "True", ")", "not_available_zones", "=", "[", "]", "azs", "=", "(", "available_zones", "if", "(", "not", "with_hosts", ")", "else", "dict", "(", "available_zones", ")", ")", "zones", "=", "[", "(", "service", "[", "'availability_zone'", "]", ",", "service", "[", "'host'", "]", ")", "for", "service", "in", "disabled_services", "if", "(", "service", "[", "'availability_zone'", "]", "not", "in", "azs", ")", "]", "for", "(", "zone", ",", "host", ")", "in", "zones", ":", "if", "(", "(", "not", "with_hosts", ")", "and", "(", "zone", "not", "in", "not_available_zones", ")", ")", ":", "not_available_zones", ".", "append", "(", "zone", ")", "elif", "with_hosts", ":", "_not_available_zones", "=", "dict", "(", "not_available_zones", ")", "zone_hosts", "=", "_not_available_zones", ".", "setdefault", "(", "zone", ",", "set", "(", ")", ")", "zone_hosts", ".", "add", "(", "host", ")", "not_available_zones", "=", "list", "(", "_not_available_zones", ".", "items", "(", ")", ")", "return", "(", "available_zones", ",", "not_available_zones", ")", "else", ":", "return", "available_zones" ]
return available and unavailable zones .
train
false
1,195
def _configure_learning_rate(num_samples_per_epoch, global_step): decay_steps = int(((num_samples_per_epoch / FLAGS.batch_size) * FLAGS.num_epochs_per_decay)) if FLAGS.sync_replicas: decay_steps /= FLAGS.replicas_to_aggregate if (FLAGS.learning_rate_decay_type == 'exponential'): return tf.train.exponential_decay(FLAGS.learning_rate, global_step, decay_steps, FLAGS.learning_rate_decay_factor, staircase=True, name='exponential_decay_learning_rate') elif (FLAGS.learning_rate_decay_type == 'fixed'): return tf.constant(FLAGS.learning_rate, name='fixed_learning_rate') elif (FLAGS.learning_rate_decay_type == 'polynomial'): return tf.train.polynomial_decay(FLAGS.learning_rate, global_step, decay_steps, FLAGS.end_learning_rate, power=1.0, cycle=False, name='polynomial_decay_learning_rate') else: raise ValueError('learning_rate_decay_type [%s] was not recognized', FLAGS.learning_rate_decay_type)
[ "def", "_configure_learning_rate", "(", "num_samples_per_epoch", ",", "global_step", ")", ":", "decay_steps", "=", "int", "(", "(", "(", "num_samples_per_epoch", "/", "FLAGS", ".", "batch_size", ")", "*", "FLAGS", ".", "num_epochs_per_decay", ")", ")", "if", "FLAGS", ".", "sync_replicas", ":", "decay_steps", "/=", "FLAGS", ".", "replicas_to_aggregate", "if", "(", "FLAGS", ".", "learning_rate_decay_type", "==", "'exponential'", ")", ":", "return", "tf", ".", "train", ".", "exponential_decay", "(", "FLAGS", ".", "learning_rate", ",", "global_step", ",", "decay_steps", ",", "FLAGS", ".", "learning_rate_decay_factor", ",", "staircase", "=", "True", ",", "name", "=", "'exponential_decay_learning_rate'", ")", "elif", "(", "FLAGS", ".", "learning_rate_decay_type", "==", "'fixed'", ")", ":", "return", "tf", ".", "constant", "(", "FLAGS", ".", "learning_rate", ",", "name", "=", "'fixed_learning_rate'", ")", "elif", "(", "FLAGS", ".", "learning_rate_decay_type", "==", "'polynomial'", ")", ":", "return", "tf", ".", "train", ".", "polynomial_decay", "(", "FLAGS", ".", "learning_rate", ",", "global_step", ",", "decay_steps", ",", "FLAGS", ".", "end_learning_rate", ",", "power", "=", "1.0", ",", "cycle", "=", "False", ",", "name", "=", "'polynomial_decay_learning_rate'", ")", "else", ":", "raise", "ValueError", "(", "'learning_rate_decay_type [%s] was not recognized'", ",", "FLAGS", ".", "learning_rate_decay_type", ")" ]
configures the learning rate .
train
false
1,196
def make_s3_files(session, key1='text1.txt', key2='text2.txt', size=None): region = 'us-west-2' bucket = create_bucket(session) if size: string1 = ('*' * size) string2 = string1 else: string1 = 'This is a test.' string2 = 'This is another test.' client = session.create_client('s3', region_name=region) client.put_object(Bucket=bucket, Key=key1, Body=string1) if (key2 is not None): client.put_object(Bucket=bucket, Key='another_directory/') client.put_object(Bucket=bucket, Key=('another_directory/%s' % key2), Body=string2) return bucket
[ "def", "make_s3_files", "(", "session", ",", "key1", "=", "'text1.txt'", ",", "key2", "=", "'text2.txt'", ",", "size", "=", "None", ")", ":", "region", "=", "'us-west-2'", "bucket", "=", "create_bucket", "(", "session", ")", "if", "size", ":", "string1", "=", "(", "'*'", "*", "size", ")", "string2", "=", "string1", "else", ":", "string1", "=", "'This is a test.'", "string2", "=", "'This is another test.'", "client", "=", "session", ".", "create_client", "(", "'s3'", ",", "region_name", "=", "region", ")", "client", ".", "put_object", "(", "Bucket", "=", "bucket", ",", "Key", "=", "key1", ",", "Body", "=", "string1", ")", "if", "(", "key2", "is", "not", "None", ")", ":", "client", ".", "put_object", "(", "Bucket", "=", "bucket", ",", "Key", "=", "'another_directory/'", ")", "client", ".", "put_object", "(", "Bucket", "=", "bucket", ",", "Key", "=", "(", "'another_directory/%s'", "%", "key2", ")", ",", "Body", "=", "string2", ")", "return", "bucket" ]
creates a randomly generated bucket in s3 with the files text1.txt and another_directory/text2.txt .
train
false
1,197
def test_huge_cross(): x = rng.rand(100000, 3) y = rng.rand(1, 3) z = np.cross(x, y) zz = fast_cross_3d(x, y) assert_array_equal(z, zz)
[ "def", "test_huge_cross", "(", ")", ":", "x", "=", "rng", ".", "rand", "(", "100000", ",", "3", ")", "y", "=", "rng", ".", "rand", "(", "1", ",", "3", ")", "z", "=", "np", ".", "cross", "(", "x", ",", "y", ")", "zz", "=", "fast_cross_3d", "(", "x", ",", "y", ")", "assert_array_equal", "(", "z", ",", "zz", ")" ]
test cross product with lots of elements .
train
false
1,199
def from_files(job, form): if form.textfile_use_local_files.data: job.labels_file = form.textfile_local_labels_file.data.strip() else: flask.request.files[form.textfile_labels_file.name].save(os.path.join(job.dir(), utils.constants.LABELS_FILE)) job.labels_file = utils.constants.LABELS_FILE shuffle = bool(form.textfile_shuffle.data) backend = form.backend.data encoding = form.encoding.data compression = form.compression.data if form.textfile_use_local_files.data: train_file = form.textfile_local_train_images.data.strip() else: flask.request.files[form.textfile_train_images.name].save(os.path.join(job.dir(), utils.constants.TRAIN_FILE)) train_file = utils.constants.TRAIN_FILE image_folder = form.textfile_train_folder.data.strip() if (not image_folder): image_folder = None job.tasks.append(tasks.CreateDbTask(job_dir=job.dir(), input_file=train_file, db_name=utils.constants.TRAIN_DB, backend=backend, image_dims=job.image_dims, image_folder=image_folder, resize_mode=job.resize_mode, encoding=encoding, compression=compression, mean_file=utils.constants.MEAN_FILE_CAFFE, labels_file=job.labels_file, shuffle=shuffle)) if form.textfile_use_val.data: if form.textfile_use_local_files.data: val_file = form.textfile_local_val_images.data.strip() else: flask.request.files[form.textfile_val_images.name].save(os.path.join(job.dir(), utils.constants.VAL_FILE)) val_file = utils.constants.VAL_FILE image_folder = form.textfile_val_folder.data.strip() if (not image_folder): image_folder = None job.tasks.append(tasks.CreateDbTask(job_dir=job.dir(), input_file=val_file, db_name=utils.constants.VAL_DB, backend=backend, image_dims=job.image_dims, image_folder=image_folder, resize_mode=job.resize_mode, encoding=encoding, compression=compression, labels_file=job.labels_file, shuffle=shuffle)) if form.textfile_use_test.data: if form.textfile_use_local_files.data: test_file = form.textfile_local_test_images.data.strip() else: flask.request.files[form.textfile_test_images.name].save(os.path.join(job.dir(), utils.constants.TEST_FILE)) test_file = utils.constants.TEST_FILE image_folder = form.textfile_test_folder.data.strip() if (not image_folder): image_folder = None job.tasks.append(tasks.CreateDbTask(job_dir=job.dir(), input_file=test_file, db_name=utils.constants.TEST_DB, backend=backend, image_dims=job.image_dims, image_folder=image_folder, resize_mode=job.resize_mode, encoding=encoding, compression=compression, labels_file=job.labels_file, shuffle=shuffle))
[ "def", "from_files", "(", "job", ",", "form", ")", ":", "if", "form", ".", "textfile_use_local_files", ".", "data", ":", "job", ".", "labels_file", "=", "form", ".", "textfile_local_labels_file", ".", "data", ".", "strip", "(", ")", "else", ":", "flask", ".", "request", ".", "files", "[", "form", ".", "textfile_labels_file", ".", "name", "]", ".", "save", "(", "os", ".", "path", ".", "join", "(", "job", ".", "dir", "(", ")", ",", "utils", ".", "constants", ".", "LABELS_FILE", ")", ")", "job", ".", "labels_file", "=", "utils", ".", "constants", ".", "LABELS_FILE", "shuffle", "=", "bool", "(", "form", ".", "textfile_shuffle", ".", "data", ")", "backend", "=", "form", ".", "backend", ".", "data", "encoding", "=", "form", ".", "encoding", ".", "data", "compression", "=", "form", ".", "compression", ".", "data", "if", "form", ".", "textfile_use_local_files", ".", "data", ":", "train_file", "=", "form", ".", "textfile_local_train_images", ".", "data", ".", "strip", "(", ")", "else", ":", "flask", ".", "request", ".", "files", "[", "form", ".", "textfile_train_images", ".", "name", "]", ".", "save", "(", "os", ".", "path", ".", "join", "(", "job", ".", "dir", "(", ")", ",", "utils", ".", "constants", ".", "TRAIN_FILE", ")", ")", "train_file", "=", "utils", ".", "constants", ".", "TRAIN_FILE", "image_folder", "=", "form", ".", "textfile_train_folder", ".", "data", ".", "strip", "(", ")", "if", "(", "not", "image_folder", ")", ":", "image_folder", "=", "None", "job", ".", "tasks", ".", "append", "(", "tasks", ".", "CreateDbTask", "(", "job_dir", "=", "job", ".", "dir", "(", ")", ",", "input_file", "=", "train_file", ",", "db_name", "=", "utils", ".", "constants", ".", "TRAIN_DB", ",", "backend", "=", "backend", ",", "image_dims", "=", "job", ".", "image_dims", ",", "image_folder", "=", "image_folder", ",", "resize_mode", "=", "job", ".", "resize_mode", ",", "encoding", "=", "encoding", ",", "compression", "=", "compression", ",", "mean_file", "=", "utils", ".", "constants", ".", "MEAN_FILE_CAFFE", ",", "labels_file", "=", "job", ".", "labels_file", ",", "shuffle", "=", "shuffle", ")", ")", "if", "form", ".", "textfile_use_val", ".", "data", ":", "if", "form", ".", "textfile_use_local_files", ".", "data", ":", "val_file", "=", "form", ".", "textfile_local_val_images", ".", "data", ".", "strip", "(", ")", "else", ":", "flask", ".", "request", ".", "files", "[", "form", ".", "textfile_val_images", ".", "name", "]", ".", "save", "(", "os", ".", "path", ".", "join", "(", "job", ".", "dir", "(", ")", ",", "utils", ".", "constants", ".", "VAL_FILE", ")", ")", "val_file", "=", "utils", ".", "constants", ".", "VAL_FILE", "image_folder", "=", "form", ".", "textfile_val_folder", ".", "data", ".", "strip", "(", ")", "if", "(", "not", "image_folder", ")", ":", "image_folder", "=", "None", "job", ".", "tasks", ".", "append", "(", "tasks", ".", "CreateDbTask", "(", "job_dir", "=", "job", ".", "dir", "(", ")", ",", "input_file", "=", "val_file", ",", "db_name", "=", "utils", ".", "constants", ".", "VAL_DB", ",", "backend", "=", "backend", ",", "image_dims", "=", "job", ".", "image_dims", ",", "image_folder", "=", "image_folder", ",", "resize_mode", "=", "job", ".", "resize_mode", ",", "encoding", "=", "encoding", ",", "compression", "=", "compression", ",", "labels_file", "=", "job", ".", "labels_file", ",", "shuffle", "=", "shuffle", ")", ")", "if", "form", ".", "textfile_use_test", ".", "data", ":", "if", "form", ".", "textfile_use_local_files", ".", "data", ":", "test_file", "=", "form", ".", 
"textfile_local_test_images", ".", "data", ".", "strip", "(", ")", "else", ":", "flask", ".", "request", ".", "files", "[", "form", ".", "textfile_test_images", ".", "name", "]", ".", "save", "(", "os", ".", "path", ".", "join", "(", "job", ".", "dir", "(", ")", ",", "utils", ".", "constants", ".", "TEST_FILE", ")", ")", "test_file", "=", "utils", ".", "constants", ".", "TEST_FILE", "image_folder", "=", "form", ".", "textfile_test_folder", ".", "data", ".", "strip", "(", ")", "if", "(", "not", "image_folder", ")", ":", "image_folder", "=", "None", "job", ".", "tasks", ".", "append", "(", "tasks", ".", "CreateDbTask", "(", "job_dir", "=", "job", ".", "dir", "(", ")", ",", "input_file", "=", "test_file", ",", "db_name", "=", "utils", ".", "constants", ".", "TEST_DB", ",", "backend", "=", "backend", ",", "image_dims", "=", "job", ".", "image_dims", ",", "image_folder", "=", "image_folder", ",", "resize_mode", "=", "job", ".", "resize_mode", ",", "encoding", "=", "encoding", ",", "compression", "=", "compression", ",", "labels_file", "=", "job", ".", "labels_file", ",", "shuffle", "=", "shuffle", ")", ")" ]
adds create-db tasks to a dataset job for the train , validation and test sets defined by user-supplied text files .
train
false
1,200
def hpy(ht=None): r = Root() if (ht is not None): r.guppy.heapy.View._hiding_tag_ = ht return r.guppy.heapy.Use
[ "def", "hpy", "(", "ht", "=", "None", ")", ":", "r", "=", "Root", "(", ")", "if", "(", "ht", "is", "not", "None", ")", ":", "r", ".", "guppy", ".", "heapy", ".", "View", ".", "_hiding_tag_", "=", "ht", "return", "r", ".", "guppy", ".", "heapy", ".", "Use" ]
main entry point to the heapy system .
train
false
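typical usage ( the snippet is guppy's entry point , so this needs the guppy package installed ) ; h.heap() partitions all reachable objects by type :
from guppy import hpy

h = hpy()
print(h.heap())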
1,201
def read_qual_score_filter(seq, qual, max_run_length, threshold): last_good_slice_end_pos = 0 bad_run_length = 0 mask = (qual <= threshold) for (starts, ends) in _contiguous_regions(mask): if ((ends - starts) > max_run_length): return (seq[:starts], qual[:starts]) return (seq, qual)
[ "def", "read_qual_score_filter", "(", "seq", ",", "qual", ",", "max_run_length", ",", "threshold", ")", ":", "last_good_slice_end_pos", "=", "0", "bad_run_length", "=", "0", "mask", "=", "(", "qual", "<=", "threshold", ")", "for", "(", "starts", ",", "ends", ")", "in", "_contiguous_regions", "(", "mask", ")", ":", "if", "(", "(", "ends", "-", "starts", ")", ">", "max_run_length", ")", ":", "return", "(", "seq", "[", ":", "starts", "]", ",", "qual", "[", ":", "starts", "]", ")", "return", "(", "seq", ",", "qual", ")" ]
slices illumina sequence and quality line based on quality filter .
train
false
1,202
def cloud_init(names, host=None, quiet=False, **kwargs): if quiet: log.warning("'quiet' argument is being deprecated. Please migrate to --quiet") return __salt__['lxc.init'](names=names, host=host, saltcloud_mode=True, quiet=quiet, **kwargs)
[ "def", "cloud_init", "(", "names", ",", "host", "=", "None", ",", "quiet", "=", "False", ",", "**", "kwargs", ")", ":", "if", "quiet", ":", "log", ".", "warning", "(", "\"'quiet' argument is being deprecated. Please migrate to --quiet\"", ")", "return", "__salt__", "[", "'lxc.init'", "]", "(", "names", "=", "names", ",", "host", "=", "host", ",", "saltcloud_mode", "=", "True", ",", "quiet", "=", "quiet", ",", "**", "kwargs", ")" ]
thin wrapper to lxc.init , run in saltcloud mode .
train
true
1,203
@register.function @jinja2.contextfunction def get_doc_path(context, path, extension): lang = getattr(context['request'], 'LANG', 'en-US') if (lang in settings.AMO_LANGUAGES): try: localized_file_path = ('%s/%s.%s' % (path, lang, extension)) with open(localized_file_path): return localized_file_path except IOError: return ('%s/en-US.%s' % (path, extension))
[ "@", "register", ".", "function", "@", "jinja2", ".", "contextfunction", "def", "get_doc_path", "(", "context", ",", "path", ",", "extension", ")", ":", "lang", "=", "getattr", "(", "context", "[", "'request'", "]", ",", "'LANG'", ",", "'en-US'", ")", "if", "(", "lang", "in", "settings", ".", "AMO_LANGUAGES", ")", ":", "try", ":", "localized_file_path", "=", "(", "'%s/%s.%s'", "%", "(", "path", ",", "lang", ",", "extension", ")", ")", "with", "open", "(", "localized_file_path", ")", ":", "return", "localized_file_path", "except", "IOError", ":", "return", "(", "'%s/en-US.%s'", "%", "(", "path", ",", "extension", ")", ")" ]
gets the path to a localizable document in the current language with fallback to en-us .
train
false
1,204
def load_model(model_file_name): model = liblinear.load_model(model_file_name.encode()) if (not model): print ("can't open model file %s" % model_file_name) return None model = toPyModel(model) return model
[ "def", "load_model", "(", "model_file_name", ")", ":", "model", "=", "liblinear", ".", "load_model", "(", "model_file_name", ".", "encode", "(", ")", ")", "if", "(", "not", "model", ")", ":", "print", "(", "\"can't open model file %s\"", "%", "model_file_name", ")", "return", "None", "model", "=", "toPyModel", "(", "model", ")", "return", "model" ]
load a trained model for decoding .
train
false
1,205
def fib(n): if (n <= 2): return n if (n in _fib_cache): return _fib_cache[n] result = (fib((n - 1)) + fib((n - 2))) _fib_cache[n] = result return result
[ "def", "fib", "(", "n", ")", ":", "if", "(", "n", "<=", "2", ")", ":", "return", "n", "if", "(", "n", "in", "_fib_cache", ")", ":", "return", "_fib_cache", "[", "n", "]", "result", "=", "(", "fib", "(", "(", "n", "-", "1", ")", ")", "+", "fib", "(", "(", "n", "-", "2", ")", ")", ")", "_fib_cache", "[", "n", "]", "=", "result", "return", "result" ]
return the n-th fibonacci number .
train
false
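usage ; note the base case ( n <= 2 returns n ) gives a shifted sequence , and the module-level _fib_cache must exist :
_fib_cache = {}
print([fib(n) for n in range(1, 8)])  # [1, 2, 3, 5, 8, 13, 21]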
1,206
def create_membership_push_to_timeline(sender, instance, created, **kwargs): if (created and instance.user and (instance.user != instance.project.owner)): created_datetime = instance.created_at _push_to_timelines(instance.project, instance.user, instance, 'create', created_datetime)
[ "def", "create_membership_push_to_timeline", "(", "sender", ",", "instance", ",", "created", ",", "**", "kwargs", ")", ":", "if", "(", "created", "and", "instance", ".", "user", "and", "(", "instance", ".", "user", "!=", "instance", ".", "project", ".", "owner", ")", ")", ":", "created_datetime", "=", "instance", ".", "created_at", "_push_to_timelines", "(", "instance", ".", "project", ",", "instance", ".", "user", ",", "instance", ",", "'create'", ",", "created_datetime", ")" ]
creating new membership with associated user .
train
false
1,207
def GetMachineKey(): return platform.node()
[ "def", "GetMachineKey", "(", ")", ":", "return", "platform", ".", "node", "(", ")" ]
gets the machine key to be used for metrics uploaded from this process .
train
false
1,208
def _euler_step(xf_traj, yf_traj, dmap, f):
    (ny, nx) = dmap.grid.shape
    xi = xf_traj[(-1)]
    yi = yf_traj[(-1)]
    (cx, cy) = f(xi, yi)
    if (cx == 0):
        dsx = np.inf
    elif (cx < 0):
        dsx = (xi / (- cx))
    else:
        dsx = (((nx - 1) - xi) / cx)
    if (cy == 0):
        dsy = np.inf
    elif (cy < 0):
        dsy = (yi / (- cy))
    else:
        dsy = (((ny - 1) - yi) / cy)
    ds = min(dsx, dsy)
    xf_traj.append((xi + (cx * ds)))
    yf_traj.append((yi + (cy * ds)))
    return (ds, xf_traj, yf_traj)
[ "def", "_euler_step", "(", "xf_traj", ",", "yf_traj", ",", "dmap", ",", "f", ")", ":", "(", "ny", ",", "nx", ")", "=", "dmap", ".", "grid", ".", "shape", "xi", "=", "xf_traj", "[", "(", "-", "1", ")", "]", "yi", "=", "yf_traj", "[", "(", "-", "1", ")", "]", "(", "cx", ",", "cy", ")", "=", "f", "(", "xi", ",", "yi", ")", "if", "(", "cx", "==", "0", ")", ":", "dsx", "=", "np", ".", "inf", "elif", "(", "cx", "<", "0", ")", ":", "dsx", "=", "(", "xi", "/", "(", "-", "cx", ")", ")", "else", ":", "dsx", "=", "(", "(", "(", "nx", "-", "1", ")", "-", "xi", ")", "/", "cx", ")", "if", "(", "cy", "==", "0", ")", ":", "dsy", "=", "np", ".", "inf", "elif", "(", "cy", "<", "0", ")", ":", "dsy", "=", "(", "yi", "/", "(", "-", "cy", ")", ")", "else", ":", "dsy", "=", "(", "(", "(", "ny", "-", "1", ")", "-", "yi", ")", "/", "cy", ")", "ds", "=", "min", "(", "dsx", ",", "dsy", ")", "xf_traj", ".", "append", "(", "(", "xi", "+", "(", "cx", "*", "ds", ")", ")", ")", "yf_traj", ".", "append", "(", "(", "yi", "+", "(", "cy", "*", "ds", ")", ")", ")", "return", "(", "ds", ",", "xf_traj", ",", "yf_traj", ")" ]
simple euler integration step that extends streamline to boundary .
train
false
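A small self-contained check of the step above: _euler_step only reads dmap.grid.shape, so a stub object suffices, and the constant velocity field is an arbitrary choice.

import numpy as np
from types import SimpleNamespace

dmap = SimpleNamespace(grid=np.zeros((10, 10)))  # stub; only .grid.shape is read
f = lambda x, y: (1.0, 0.5)  # constant field pointing up and to the right

(ds, xf, yf) = _euler_step([0.0], [0.0], dmap, f)
# ds == 9.0: the x boundary (nx - 1 = 9) is reached first, so the
# trajectory is extended to (9.0, 4.5).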
1,211
def pick_channels_evoked(orig, include=[], exclude='bads'):
    if ((len(include) == 0) and (len(exclude) == 0)):
        return orig
    exclude = _check_excludes_includes(exclude, info=orig.info, allow_bads=True)
    sel = pick_channels(orig.info['ch_names'], include=include, exclude=exclude)
    if (len(sel) == 0):
        raise ValueError('Warning : No channels match the selection.')
    res = deepcopy(orig)
    res.info = pick_info(res.info, sel)
    res.data = res.data[sel, :]
    return res
[ "def", "pick_channels_evoked", "(", "orig", ",", "include", "=", "[", "]", ",", "exclude", "=", "'bads'", ")", ":", "if", "(", "(", "len", "(", "include", ")", "==", "0", ")", "and", "(", "len", "(", "exclude", ")", "==", "0", ")", ")", ":", "return", "orig", "exclude", "=", "_check_excludes_includes", "(", "exclude", ",", "info", "=", "orig", ".", "info", ",", "allow_bads", "=", "True", ")", "sel", "=", "pick_channels", "(", "orig", ".", "info", "[", "'ch_names'", "]", ",", "include", "=", "include", ",", "exclude", "=", "exclude", ")", "if", "(", "len", "(", "sel", ")", "==", "0", ")", ":", "raise", "ValueError", "(", "'Warning : No channels match the selection.'", ")", "res", "=", "deepcopy", "(", "orig", ")", "res", ".", "info", "=", "pick_info", "(", "res", ".", "info", ",", "sel", ")", "res", ".", "data", "=", "res", ".", "data", "[", "sel", ",", ":", "]", "return", "res" ]
pick channels from evoked data .
train
false
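A hedged usage sketch with MNE-Python; the file and channel names are illustrative (they follow the MNE sample-dataset conventions):

import mne

evoked = mne.read_evokeds('sample_audvis-ave.fif', condition=0)
two_meg = pick_channels_evoked(evoked, include=['MEG 0111', 'MEG 0121'])
# two_meg.data now has shape (2, n_times); with the default
# exclude='bads', any bad channels would have been dropped as well.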
1,212
def campaign_keyword():
    return s3_rest_controller()
[ "def", "campaign_keyword", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
restful crud controller .
train
false
1,213
def OSXFontDirectory():
    fontpaths = []
    def add(arg, directory, files):
        fontpaths.append(directory)
    for fontdir in OSXFontDirectories:
        try:
            if os.path.isdir(fontdir):
                os.path.walk(fontdir, add, None)
        except (IOError, OSError, TypeError, ValueError):
            pass
    return fontpaths
[ "def", "OSXFontDirectory", "(", ")", ":", "fontpaths", "=", "[", "]", "def", "add", "(", "arg", ",", "directory", ",", "files", ")", ":", "fontpaths", ".", "append", "(", "directory", ")", "for", "fontdir", "in", "OSXFontDirectories", ":", "try", ":", "if", "os", ".", "path", ".", "isdir", "(", "fontdir", ")", ":", "os", ".", "path", ".", "walk", "(", "fontdir", ",", "add", ",", "None", ")", "except", "(", "IOError", ",", "OSError", ",", "TypeError", ",", "ValueError", ")", ":", "pass", "return", "fontpaths" ]
return the system font directories for os x .
train
false
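os.path.walk, used above, only exists in Python 2 (it was removed in Python 3); a rough Python 3 equivalent built on os.walk, assuming the same OSXFontDirectories list of root paths, might look like:

import os

def osx_font_directory(roots):
    fontpaths = []
    for fontdir in roots:
        try:
            if os.path.isdir(fontdir):
                for (dirpath, dirnames, filenames) in os.walk(fontdir):
                    fontpaths.append(dirpath)
        except (IOError, OSError, TypeError, ValueError):
            pass
    return fontpaths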