Dataset columns and value ranges:
id_within_dataset: int64 (1 to 55.5k)
snippet: string (lengths 19 to 14.2k)
tokens: sequence of strings (lengths 6 to 1.63k)
nl: string (lengths 6 to 352)
split_within_dataset: string (1 value)
is_duplicated: bool (2 classes)
1,341
def test_censure_on_rectangular_images(): rect_image = np.random.rand(300, 200) square_image = np.random.rand(200, 200) CENSURE().detect(square_image) CENSURE().detect(rect_image)
[ "def", "test_censure_on_rectangular_images", "(", ")", ":", "rect_image", "=", "np", ".", "random", ".", "rand", "(", "300", ",", "200", ")", "square_image", "=", "np", ".", "random", ".", "rand", "(", "200", ",", "200", ")", "CENSURE", "(", ")", ".", "detect", "(", "square_image", ")", "CENSURE", "(", ")", ".", "detect", "(", "rect_image", ")" ]
censure feature detector should work on 2d image of any shape .
train
false
1,343
def vm_disk_snapshot_create(name, kwargs=None, call=None): if (call != 'action'): raise SaltCloudSystemExit('The vm_disk_snapshot_create action must be called with -a or --action.') if (kwargs is None): kwargs = {} disk_id = kwargs.get('disk_id', None) description = kwargs.get('description', None) if ((disk_id is None) or (description is None)): raise SaltCloudSystemExit("The vm_disk_snapshot_create function requires a 'disk_id' and a 'description' to be provided.") (server, user, password) = _get_xml_rpc() auth = ':'.join([user, password]) vm_id = int(get_vm_id(kwargs={'name': name})) response = server.one.vm.disksnapshotcreate(auth, vm_id, int(disk_id), description) data = {'action': 'vm.disksnapshotcreate', 'created': response[0], 'snapshot_id': response[1], 'error_code': response[2]} return data
[ "def", "vm_disk_snapshot_create", "(", "name", ",", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The vm_disk_snapshot_create action must be called with -a or --action.'", ")", "if", "(", "kwargs", "is", "None", ")", ":", "kwargs", "=", "{", "}", "disk_id", "=", "kwargs", ".", "get", "(", "'disk_id'", ",", "None", ")", "description", "=", "kwargs", ".", "get", "(", "'description'", ",", "None", ")", "if", "(", "(", "disk_id", "is", "None", ")", "or", "(", "description", "is", "None", ")", ")", ":", "raise", "SaltCloudSystemExit", "(", "\"The vm_disk_snapshot_create function requires a 'disk_id' and a 'description' to be provided.\"", ")", "(", "server", ",", "user", ",", "password", ")", "=", "_get_xml_rpc", "(", ")", "auth", "=", "':'", ".", "join", "(", "[", "user", ",", "password", "]", ")", "vm_id", "=", "int", "(", "get_vm_id", "(", "kwargs", "=", "{", "'name'", ":", "name", "}", ")", ")", "response", "=", "server", ".", "one", ".", "vm", ".", "disksnapshotcreate", "(", "auth", ",", "vm_id", ",", "int", "(", "disk_id", ")", ",", "description", ")", "data", "=", "{", "'action'", ":", "'vm.disksnapshotcreate'", ",", "'created'", ":", "response", "[", "0", "]", ",", "'snapshot_id'", ":", "response", "[", "1", "]", ",", "'error_code'", ":", "response", "[", "2", "]", "}", "return", "data" ]
takes a new snapshot of the disk image .
train
true
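For illustration, a minimal sketch of invoking this salt-cloud action directly from Python; the VM name and disk id are hypothetical, and in normal use the function is reached through salt-cloud's -a/--action dispatch, which supplies call='action'.

```python
# Hypothetical direct call; the usual entry point is the CLI:
#   salt-cloud -a vm_disk_snapshot_create myvm disk_id=0 description='pre-upgrade'
result = vm_disk_snapshot_create(
    'myvm',                                              # VM name (hypothetical)
    kwargs={'disk_id': 0, 'description': 'pre-upgrade'},
    call='action',                                       # anything else raises SaltCloudSystemExit
)
# result: {'action': 'vm.disksnapshotcreate', 'created': ...,
#          'snapshot_id': ..., 'error_code': ...}
```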
1,344
@identity_loaded.connect def on_identity_loaded(sender, identity): user = User.query.filter((User.id == identity.id)).first() identity.provides.add(UserNeed(identity.id)) if hasattr(user, u'roles'): for role in user.roles: identity.provides.add(RoleNeed(role.name)) g.user = user
[ "@", "identity_loaded", ".", "connect", "def", "on_identity_loaded", "(", "sender", ",", "identity", ")", ":", "user", "=", "User", ".", "query", ".", "filter", "(", "(", "User", ".", "id", "==", "identity", ".", "id", ")", ")", ".", "first", "(", ")", "identity", ".", "provides", ".", "add", "(", "UserNeed", "(", "identity", ".", "id", ")", ")", "if", "hasattr", "(", "user", ",", "u'roles'", ")", ":", "for", "role", "in", "user", ".", "roles", ":", "identity", ".", "provides", ".", "add", "(", "RoleNeed", "(", "role", ".", "name", ")", ")", "g", ".", "user", "=", "user" ]
sets the identity and role needs for the user whose identity was loaded .
train
false
1,347
def enable_trace(case, trace_exclusions=None, f=sys.stdout): if (trace_exclusions is None): trace_exclusions = ['twisted', 'worker_transition.py', 'util/tu', 'log.py', '/mq/', '/db/', 'buildbot/data/', 'fake/reactor.py'] bbbase = os.path.dirname(buildbot.__file__) state = {'indent': 0} def tracefunc(frame, event, arg): if frame.f_code.co_filename.startswith(bbbase): if (not any(((te in frame.f_code.co_filename) for te in trace_exclusions))): if (event == 'call'): state['indent'] += 2 print(('-' * state['indent']), frame.f_code.co_filename.replace(bbbase, ''), frame.f_code.co_name, frame.f_code.co_varnames, file=f) if (event == 'return'): state['indent'] -= 2 return tracefunc sys.settrace(tracefunc) case.addCleanup(sys.settrace, (lambda _a, _b, _c: None))
[ "def", "enable_trace", "(", "case", ",", "trace_exclusions", "=", "None", ",", "f", "=", "sys", ".", "stdout", ")", ":", "if", "(", "trace_exclusions", "is", "None", ")", ":", "trace_exclusions", "=", "[", "'twisted'", ",", "'worker_transition.py'", ",", "'util/tu'", ",", "'log.py'", ",", "'/mq/'", ",", "'/db/'", ",", "'buildbot/data/'", ",", "'fake/reactor.py'", "]", "bbbase", "=", "os", ".", "path", ".", "dirname", "(", "buildbot", ".", "__file__", ")", "state", "=", "{", "'indent'", ":", "0", "}", "def", "tracefunc", "(", "frame", ",", "event", ",", "arg", ")", ":", "if", "frame", ".", "f_code", ".", "co_filename", ".", "startswith", "(", "bbbase", ")", ":", "if", "(", "not", "any", "(", "(", "(", "te", "in", "frame", ".", "f_code", ".", "co_filename", ")", "for", "te", "in", "trace_exclusions", ")", ")", ")", ":", "if", "(", "event", "==", "'call'", ")", ":", "state", "[", "'indent'", "]", "+=", "2", "print", "(", "(", "'-'", "*", "state", "[", "'indent'", "]", ")", ",", "frame", ".", "f_code", ".", "co_filename", ".", "replace", "(", "bbbase", ",", "''", ")", ",", "frame", ".", "f_code", ".", "co_name", ",", "frame", ".", "f_code", ".", "co_varnames", ",", "file", "=", "f", ")", "if", "(", "event", "==", "'return'", ")", ":", "state", "[", "'indent'", "]", "-=", "2", "return", "tracefunc", "sys", ".", "settrace", "(", "tracefunc", ")", "case", ".", "addCleanup", "(", "sys", ".", "settrace", ",", "(", "lambda", "_a", ",", "_b", ",", "_c", ":", "None", ")", ")" ]
enable tracing of app instances .
train
false
1,348
def unloadhook(h): def processor(handler): try: result = handler() is_gen = is_iter(result) except: h() raise if is_gen: return wrap(result) else: h() return result def wrap(result): def next_hook(): try: return next(result) except: h() raise result = iter(result) while True: (yield next_hook()) return processor
[ "def", "unloadhook", "(", "h", ")", ":", "def", "processor", "(", "handler", ")", ":", "try", ":", "result", "=", "handler", "(", ")", "is_gen", "=", "is_iter", "(", "result", ")", "except", ":", "h", "(", ")", "raise", "if", "is_gen", ":", "return", "wrap", "(", "result", ")", "else", ":", "h", "(", ")", "return", "result", "def", "wrap", "(", "result", ")", ":", "def", "next_hook", "(", ")", ":", "try", ":", "return", "next", "(", "result", ")", "except", ":", "h", "(", ")", "raise", "result", "=", "iter", "(", "result", ")", "while", "True", ":", "(", "yield", "next_hook", "(", ")", ")", "return", "processor" ]
converts an unload hook into an application processor .
train
false
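A minimal usage sketch in the web.py style this helper comes from; close_db is a hypothetical cleanup callable, and is_iter is assumed to be web.py's iterator check. For a plain (non-generator) result the hook fires before the result is returned; for a generator it fires when iteration ends or fails.

```python
def close_db():
    print('cleanup')   # hypothetical per-request teardown

processor = unloadhook(close_db)

def handler():
    return 'response body'

print(processor(handler))   # prints 'cleanup', then 'response body'

# in a web.py app this would typically be registered as:
#   app.add_processor(unloadhook(close_db))
```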
1,351
def get_fd_or_server(config): lockfile = get_lockfile(config) while True: try: return (os.open(lockfile, ((os.O_CREAT | os.O_EXCL) | os.O_WRONLY)), None) except OSError: pass server = get_server(config) if (server is not None): return (None, server) remove_lockfile(lockfile)
[ "def", "get_fd_or_server", "(", "config", ")", ":", "lockfile", "=", "get_lockfile", "(", "config", ")", "while", "True", ":", "try", ":", "return", "(", "os", ".", "open", "(", "lockfile", ",", "(", "(", "os", ".", "O_CREAT", "|", "os", ".", "O_EXCL", ")", "|", "os", ".", "O_WRONLY", ")", ")", ",", "None", ")", "except", "OSError", ":", "pass", "server", "=", "get_server", "(", "config", ")", "if", "(", "server", "is", "not", "None", ")", ":", "return", "(", "None", ",", "server", ")", "remove_lockfile", "(", "lockfile", ")" ]
tries to create the lockfile .
train
false
1,352
def clear_script_prefix(): try: del _prefixes.value except AttributeError: pass
[ "def", "clear_script_prefix", "(", ")", ":", "try", ":", "del", "_prefixes", ".", "value", "except", "AttributeError", ":", "pass" ]
unsets the script prefix for the current thread .
train
false
1,353
def inject_ownership_variables(registry, xml_parent, data): ownership = XML.SubElement(xml_parent, 'com.synopsys.arc.jenkins.plugins.ownership.wrappers.OwnershipBuildWrapper') XML.SubElement(ownership, 'injectNodeOwnership').text = str(data.get('node-variables', False)).lower() XML.SubElement(ownership, 'injectJobOwnership').text = str(data.get('job-variables', False)).lower()
[ "def", "inject_ownership_variables", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "ownership", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'com.synopsys.arc.jenkins.plugins.ownership.wrappers.OwnershipBuildWrapper'", ")", "XML", ".", "SubElement", "(", "ownership", ",", "'injectNodeOwnership'", ")", ".", "text", "=", "str", "(", "data", ".", "get", "(", "'node-variables'", ",", "False", ")", ")", ".", "lower", "(", ")", "XML", ".", "SubElement", "(", "ownership", ",", "'injectJobOwnership'", ")", ".", "text", "=", "str", "(", "data", ".", "get", "(", "'job-variables'", ",", "False", ")", ")", ".", "lower", "(", ")" ]
yaml: inject-ownership-variables . inject ownership variables to the build as environment variables .
train
false
1,355
def show_chain(*chains, **kw): backrefs = kw.pop('backrefs', True) chains = [chain for chain in chains if chain] def in_chains(x, ids=set(map(id, itertools.chain(*chains)))): return (id(x) in ids) max_depth = (max(map(len, chains)) - 1) if backrefs: show_backrefs([chain[(-1)] for chain in chains], max_depth=max_depth, filter=in_chains, **kw) else: show_refs([chain[0] for chain in chains], max_depth=max_depth, filter=in_chains, **kw)
[ "def", "show_chain", "(", "*", "chains", ",", "**", "kw", ")", ":", "backrefs", "=", "kw", ".", "pop", "(", "'backrefs'", ",", "True", ")", "chains", "=", "[", "chain", "for", "chain", "in", "chains", "if", "chain", "]", "def", "in_chains", "(", "x", ",", "ids", "=", "set", "(", "map", "(", "id", ",", "itertools", ".", "chain", "(", "*", "chains", ")", ")", ")", ")", ":", "return", "(", "id", "(", "x", ")", "in", "ids", ")", "max_depth", "=", "(", "max", "(", "map", "(", "len", ",", "chains", ")", ")", "-", "1", ")", "if", "backrefs", ":", "show_backrefs", "(", "[", "chain", "[", "(", "-", "1", ")", "]", "for", "chain", "in", "chains", "]", ",", "max_depth", "=", "max_depth", ",", "filter", "=", "in_chains", ",", "**", "kw", ")", "else", ":", "show_refs", "(", "[", "chain", "[", "0", "]", "for", "chain", "in", "chains", "]", ",", "max_depth", "=", "max_depth", ",", "filter", "=", "in_chains", ",", "**", "kw", ")" ]
show a chain of object references .
train
false
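This matches objgraph's show_chain, so a typical pairing is with objgraph.find_backref_chain; a minimal sketch follows (rendering the graph additionally requires graphviz to be installed).

```python
import objgraph

class Leaky(object):
    pass

leaked = Leaky()
cache = {'kept': leaked}   # an unexpected reference keeping the object alive

# one chain of backreferences from a module-level object down to `leaked`
chain = objgraph.find_backref_chain(leaked, objgraph.is_proper_module)
show_chain(chain)          # graphs just that chain
```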
1,356
def network_create_safe(context, values): return IMPL.network_create_safe(context, values)
[ "def", "network_create_safe", "(", "context", ",", "values", ")", ":", "return", "IMPL", ".", "network_create_safe", "(", "context", ",", "values", ")" ]
create a network from the values dict .
train
false
1,357
def formatter(): output = s3_rest_controller() return output
[ "def", "formatter", "(", ")", ":", "output", "=", "s3_rest_controller", "(", ")", "return", "output" ]
restful crud controller .
train
false
1,358
def alignment_error_rate(reference, hypothesis, possible=None): if (possible is None): possible = reference else: assert reference.issubset(possible) return (1.0 - ((len((hypothesis & reference)) + len((hypothesis & possible))) / float((len(hypothesis) + len(reference)))))
[ "def", "alignment_error_rate", "(", "reference", ",", "hypothesis", ",", "possible", "=", "None", ")", ":", "if", "(", "possible", "is", "None", ")", ":", "possible", "=", "reference", "else", ":", "assert", "reference", ".", "issubset", "(", "possible", ")", "return", "(", "1.0", "-", "(", "(", "len", "(", "(", "hypothesis", "&", "reference", ")", ")", "+", "len", "(", "(", "hypothesis", "&", "possible", ")", ")", ")", "/", "float", "(", "(", "len", "(", "hypothesis", ")", "+", "len", "(", "reference", ")", ")", ")", ")", ")" ]
return the alignment error rate of an alignment with respect to a "gold standard" reference alignment .
train
false
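A small worked example, treating alignments as sets of (i, j) word-index pairs as nltk does: one hypothesis link is in the sure reference set and two are in the possible set, so AER = 1 - (1 + 2) / (2 + 2) = 0.25.

```python
reference = {(0, 0), (1, 1)}            # sure links
possible = {(0, 0), (1, 1), (2, 2)}     # sure plus possible links
hypothesis = {(0, 0), (2, 2)}

# 1 - (|H & S| + |H & P|) / (|H| + |S|) = 1 - (1 + 2) / (2 + 2)
assert alignment_error_rate(reference, hypothesis, possible) == 0.25
```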
1,360
def _newer(a, b): if (not os.path.exists(a)): return False if (not os.path.exists(b)): return True return (os.path.getmtime(a) >= os.path.getmtime(b))
[ "def", "_newer", "(", "a", ",", "b", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "a", ")", ")", ":", "return", "False", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "b", ")", ")", ":", "return", "True", "return", "(", "os", ".", "path", ".", "getmtime", "(", "a", ")", ">=", "os", ".", "path", ".", "getmtime", "(", "b", ")", ")" ]
inquire whether file a was written since file b .
train
true
1,361
def simple_moving_average(iterable, k=10): a = (iterable if isinstance(iterable, list) else list(iterable)) for m in xrange(len(a)): i = (m - k) j = ((m + k) + 1) w = a[max(0, i):j] (yield (float(sum(w)) / (len(w) or 1)))
[ "def", "simple_moving_average", "(", "iterable", ",", "k", "=", "10", ")", ":", "a", "=", "(", "iterable", "if", "isinstance", "(", "iterable", ",", "list", ")", "else", "list", "(", "iterable", ")", ")", "for", "m", "in", "xrange", "(", "len", "(", "a", ")", ")", ":", "i", "=", "(", "m", "-", "k", ")", "j", "=", "(", "(", "m", "+", "k", ")", "+", "1", ")", "w", "=", "a", "[", "max", "(", "0", ",", "i", ")", ":", "j", "]", "(", "yield", "(", "float", "(", "sum", "(", "w", ")", ")", "/", "(", "len", "(", "w", ")", "or", "1", ")", ")", ")" ]
returns an iterator over the simple moving average of the given list of values .
train
false
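A worked example with window parameter k = 1, so each output averages up to 2k + 1 = 3 neighboring values, truncated at the edges; note the snippet uses Python 2's xrange.

```python
values = [1, 2, 3, 4, 5]
# windows: [1,2], [1,2,3], [2,3,4], [3,4,5], [4,5]
assert list(simple_moving_average(values, k=1)) == [1.5, 2.0, 3.0, 4.0, 4.5]
```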
1,362
@register.filter @template.defaultfilters.stringfilter def cssid(value): return value.replace(u'.', u'-').replace(u'@', u'-').replace(u'+', u'-').replace(u' ', u'-')
[ "@", "register", ".", "filter", "@", "template", ".", "defaultfilters", ".", "stringfilter", "def", "cssid", "(", "value", ")", ":", "return", "value", ".", "replace", "(", "u'.'", ",", "u'-'", ")", ".", "replace", "(", "u'@'", ",", "u'-'", ")", ".", "replace", "(", "u'+'", ",", "u'-'", ")", ".", "replace", "(", "u' '", ",", "u'-'", ")" ]
replaces all '.', '@', '+', and ' ' characters in the value with '-' .
train
false
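For example, applied to an email address (each of '.', '@', '+', and space becomes '-'); in a Django template the filter form would be {{ user.email|cssid }}.

```python
assert cssid(u'user+test@example.com') == u'user-test-example-com'
```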
1,364
def files_with_suffix(base_path, suffix): if os.path.isfile(base_path): if base_path.endswith(suffix): (yield base_path) else: for (root, _, files) in os.walk(base_path): for filename in files: if filename.endswith(suffix): (yield os.path.join(root, filename))
[ "def", "files_with_suffix", "(", "base_path", ",", "suffix", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "base_path", ")", ":", "if", "base_path", ".", "endswith", "(", "suffix", ")", ":", "(", "yield", "base_path", ")", "else", ":", "for", "(", "root", ",", "_", ",", "files", ")", "in", "os", ".", "walk", "(", "base_path", ")", ":", "for", "filename", "in", "files", ":", "if", "filename", ".", "endswith", "(", "suffix", ")", ":", "(", "yield", "os", ".", "path", ".", "join", "(", "root", ",", "filename", ")", ")" ]
iterates over files under a given path that end with the given suffix .
train
false
1,365
def retry(func, retry_classes=None, fail_classes=None, exc_callback=None, backoff_delay=BACKOFF_DELAY): if (fail_classes and retry_classes and set(fail_classes).intersection(retry_classes)): raise ValueError("Can't include exception classes in both fail_on and retry_on") def should_retry_on(exc): if (fail_classes and isinstance(exc, tuple(fail_classes))): return False if (retry_classes and (not isinstance(exc, tuple(retry_classes)))): return False return True @functools.wraps(func) def wrapped(*args, **kwargs): while True: try: return func(*args, **kwargs) except gevent.GreenletExit as e: raise except Exception as e: if (not should_retry_on(e)): raise if (exc_callback is not None): exc_callback(e) gevent.sleep((backoff_delay + int(random.uniform(1, 10)))) return wrapped
[ "def", "retry", "(", "func", ",", "retry_classes", "=", "None", ",", "fail_classes", "=", "None", ",", "exc_callback", "=", "None", ",", "backoff_delay", "=", "BACKOFF_DELAY", ")", ":", "if", "(", "fail_classes", "and", "retry_classes", "and", "set", "(", "fail_classes", ")", ".", "intersection", "(", "retry_classes", ")", ")", ":", "raise", "ValueError", "(", "\"Can't include exception classes in both fail_on and retry_on\"", ")", "def", "should_retry_on", "(", "exc", ")", ":", "if", "(", "fail_classes", "and", "isinstance", "(", "exc", ",", "tuple", "(", "fail_classes", ")", ")", ")", ":", "return", "False", "if", "(", "retry_classes", "and", "(", "not", "isinstance", "(", "exc", ",", "tuple", "(", "retry_classes", ")", ")", ")", ")", ":", "return", "False", "return", "True", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapped", "(", "*", "args", ",", "**", "kwargs", ")", ":", "while", "True", ":", "try", ":", "return", "func", "(", "*", "args", ",", "**", "kwargs", ")", "except", "gevent", ".", "GreenletExit", "as", "e", ":", "raise", "except", "Exception", "as", "e", ":", "if", "(", "not", "should_retry_on", "(", "e", ")", ")", ":", "raise", "if", "(", "exc_callback", "is", "not", "None", ")", ":", "exc_callback", "(", "e", ")", "gevent", ".", "sleep", "(", "(", "backoff_delay", "+", "int", "(", "random", ".", "uniform", "(", "1", ",", "10", ")", ")", ")", ")", "return", "wrapped" ]
retry calling the decorated function using an exponential backoff .
train
false
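A usage sketch with hypothetical names (do_request does not exist here); note that, despite the docstring, the snippet sleeps for a fixed BACKOFF_DELAY plus random jitter rather than a true exponential backoff.

```python
import socket

def fetch():
    return do_request()   # hypothetical network call that may raise socket.error

safe_fetch = retry(
    fetch,
    retry_classes=[socket.error],   # keep retrying on these
    fail_classes=[ValueError],      # give up immediately on these
    exc_callback=lambda e: print('retrying after: %r' % e),
)
result = safe_fetch()
```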
1,367
def setup_config(test, control_address=u'10.0.0.1', control_port=1234, name=None, log_config=None): if (name is None): name = random_name(test) ca_set = get_credential_sets()[0] scratch_directory = test.make_temporary_directory() contents = {u'control-service': {u'hostname': control_address, u'port': control_port}, u'dataset': {u'backend': u'zfs', u'name': name, u'mount_root': scratch_directory.child('mount_root').path, u'volume_config_path': scratch_directory.child('volume_config.json').path}, u'version': 1} if (log_config is not None): contents[u'logging'] = log_config test.config = scratch_directory.child('dataset-config.yml') test.config.setContent(yaml.safe_dump(contents)) ca_set.copy_to(scratch_directory, node=True) test.ca_set = ca_set
[ "def", "setup_config", "(", "test", ",", "control_address", "=", "u'10.0.0.1'", ",", "control_port", "=", "1234", ",", "name", "=", "None", ",", "log_config", "=", "None", ")", ":", "if", "(", "name", "is", "None", ")", ":", "name", "=", "random_name", "(", "test", ")", "ca_set", "=", "get_credential_sets", "(", ")", "[", "0", "]", "scratch_directory", "=", "test", ".", "make_temporary_directory", "(", ")", "contents", "=", "{", "u'control-service'", ":", "{", "u'hostname'", ":", "control_address", ",", "u'port'", ":", "control_port", "}", ",", "u'dataset'", ":", "{", "u'backend'", ":", "u'zfs'", ",", "u'name'", ":", "name", ",", "u'mount_root'", ":", "scratch_directory", ".", "child", "(", "'mount_root'", ")", ".", "path", ",", "u'volume_config_path'", ":", "scratch_directory", ".", "child", "(", "'volume_config.json'", ")", ".", "path", "}", ",", "u'version'", ":", "1", "}", "if", "(", "log_config", "is", "not", "None", ")", ":", "contents", "[", "u'logging'", "]", "=", "log_config", "test", ".", "config", "=", "scratch_directory", ".", "child", "(", "'dataset-config.yml'", ")", "test", ".", "config", ".", "setContent", "(", "yaml", ".", "safe_dump", "(", "contents", ")", ")", "ca_set", ".", "copy_to", "(", "scratch_directory", ",", "node", "=", "True", ")", "test", ".", "ca_set", "=", "ca_set" ]
overwrite or make config .
train
false
1,368
def _compile_func(body): body = u'def {0}():\n {1}'.format(FUNC_NAME, body.replace('\n', '\n ')) code = compile(body, 'inline', 'exec') env = {} eval(code, env) return env[FUNC_NAME]
[ "def", "_compile_func", "(", "body", ")", ":", "body", "=", "u'def {0}():\\n {1}'", ".", "format", "(", "FUNC_NAME", ",", "body", ".", "replace", "(", "'\\n'", ",", "'\\n '", ")", ")", "code", "=", "compile", "(", "body", ",", "'inline'", ",", "'exec'", ")", "env", "=", "{", "}", "eval", "(", "code", ",", "env", ")", "return", "env", "[", "FUNC_NAME", "]" ]
given python code for a function body, return a compiled callable for it .
train
false
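A quick sketch of the helper's behavior; FUNC_NAME is a module-level constant in the original source, so a stand-in is defined here.

```python
FUNC_NAME = '_inline_func'   # stand-in for the module constant

func = _compile_func(u'x = 6 * 7\nreturn x')
assert func() == 42
```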
1,369
def get_user_profile(user): try: listeners = get_memcached(get_key('listeners')) return listeners[user]['profile'] except: return user
[ "def", "get_user_profile", "(", "user", ")", ":", "try", ":", "listeners", "=", "get_memcached", "(", "get_key", "(", "'listeners'", ")", ")", "return", "listeners", "[", "user", "]", "[", "'profile'", "]", "except", ":", "return", "user" ]
return user profile .
train
false
1,370
def clear_inputhook(): pyos_inputhook_ptr = ctypes.c_void_p.in_dll(ctypes.pythonapi, 'PyOS_InputHook') pyos_inputhook_ptr.value = ctypes.c_void_p(None).value allow_CTRL_C()
[ "def", "clear_inputhook", "(", ")", ":", "pyos_inputhook_ptr", "=", "ctypes", ".", "c_void_p", ".", "in_dll", "(", "ctypes", ".", "pythonapi", ",", "'PyOS_InputHook'", ")", "pyos_inputhook_ptr", ".", "value", "=", "ctypes", ".", "c_void_p", "(", "None", ")", ".", "value", "allow_CTRL_C", "(", ")" ]
set pyos_inputhook to null and return the previous one .
train
false
1,372
def posixpath_normpath(pathname): backslash = '\\' pathname2 = (os.path.normpath(pathname) or '.') if (backslash in pathname2): pathname2 = pathname2.replace(backslash, '/') return pathname2
[ "def", "posixpath_normpath", "(", "pathname", ")", ":", "backslash", "=", "'\\\\'", "pathname2", "=", "(", "os", ".", "path", ".", "normpath", "(", "pathname", ")", "or", "'.'", ")", "if", "(", "backslash", "in", "pathname2", ")", ":", "pathname2", "=", "pathname2", ".", "replace", "(", "backslash", ",", "'/'", ")", "return", "pathname2" ]
convert path into posix path: normalize the path and replace backslashes with slashes .
train
true
1,373
def get_token_expiry(public=True): if public: return (now() + EXPIRE_DELTA_PUBLIC) else: return (now() + EXPIRE_DELTA)
[ "def", "get_token_expiry", "(", "public", "=", "True", ")", ":", "if", "public", ":", "return", "(", "now", "(", ")", "+", "EXPIRE_DELTA_PUBLIC", ")", "else", ":", "return", "(", "now", "(", ")", "+", "EXPIRE_DELTA", ")" ]
return a datetime object indicating when an access token should expire .
train
false
1,376
def _ls_emr_bootstrap_stderr_logs(fs, log_dir_stream, action_num=None, node_id=None): matches = _ls_logs(fs, log_dir_stream, _match_emr_bootstrap_stderr_path, action_num=None, node_id=None) return sorted(matches, key=(lambda m: ((- m['action_num']), m['node_id'])))
[ "def", "_ls_emr_bootstrap_stderr_logs", "(", "fs", ",", "log_dir_stream", ",", "action_num", "=", "None", ",", "node_id", "=", "None", ")", ":", "matches", "=", "_ls_logs", "(", "fs", ",", "log_dir_stream", ",", "_match_emr_bootstrap_stderr_path", ",", "action_num", "=", "None", ",", "node_id", "=", "None", ")", "return", "sorted", "(", "matches", ",", "key", "=", "(", "lambda", "m", ":", "(", "(", "-", "m", "[", "'action_num'", "]", ")", ",", "m", "[", "'node_id'", "]", ")", ")", ")" ]
find all stderr from bootstrap actions in the given dir .
train
false
1,377
def skipUnlessGISLookup(*gis_lookups): def decorator(test_func): @wraps(test_func) def skip_wrapper(*args, **kwargs): if any(((key not in connection.ops.gis_operators) for key in gis_lookups)): raise unittest.SkipTest(("Database doesn't support all the lookups: %s" % ', '.join(gis_lookups))) return test_func(*args, **kwargs) return skip_wrapper return decorator
[ "def", "skipUnlessGISLookup", "(", "*", "gis_lookups", ")", ":", "def", "decorator", "(", "test_func", ")", ":", "@", "wraps", "(", "test_func", ")", "def", "skip_wrapper", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "any", "(", "(", "(", "key", "not", "in", "connection", ".", "ops", ".", "gis_operators", ")", "for", "key", "in", "gis_lookups", ")", ")", ":", "raise", "unittest", ".", "SkipTest", "(", "(", "\"Database doesn't support all the lookups: %s\"", "%", "', '", ".", "join", "(", "gis_lookups", ")", ")", ")", "return", "test_func", "(", "*", "args", ",", "**", "kwargs", ")", "return", "skip_wrapper", "return", "decorator" ]
skip a test unless a database supports all of gis_lookups .
train
false
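Typical usage in Django's GIS test suite, decorating a test that needs specific spatial lookups; the model and geometry below are hypothetical.

```python
from django.test import TestCase

class GeoLookupTests(TestCase):
    @skipUnlessGISLookup('bbcontains', 'bboverlaps')
    def test_bbox_lookups(self):
        # runs only when the backend's gis_operators include both lookups
        qs = City.objects.filter(point__bbcontains=self.poly)  # hypothetical model/geometry
        self.assertIsNotNone(qs)
```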
1,379
def MAMA(ds, count, fastlimit=(-4e+37), slowlimit=(-4e+37)): ret = call_talib_with_ds(ds, count, talib.MAMA, fastlimit, slowlimit) if (ret is None): ret = (None, None) return ret
[ "def", "MAMA", "(", "ds", ",", "count", ",", "fastlimit", "=", "(", "-", "4e+37", ")", ",", "slowlimit", "=", "(", "-", "4e+37", ")", ")", ":", "ret", "=", "call_talib_with_ds", "(", "ds", ",", "count", ",", "talib", ".", "MAMA", ",", "fastlimit", ",", "slowlimit", ")", "if", "(", "ret", "is", "None", ")", ":", "ret", "=", "(", "None", ",", "None", ")", "return", "ret" ]
mesa adaptive moving average .
train
false
1,380
def get_enabled(): return _get_svc_list('YES')
[ "def", "get_enabled", "(", ")", ":", "return", "_get_svc_list", "(", "'YES'", ")" ]
return what services are set to run on boot .
train
false
1,381
def task_enable_updates_testing(distribution): raise DistributionNotSupported(distribution=distribution)
[ "def", "task_enable_updates_testing", "(", "distribution", ")", ":", "raise", "DistributionNotSupported", "(", "distribution", "=", "distribution", ")" ]
enable the distribution's proposed updates repository .
train
false
1,382
def is_listlike(x): return (hasattr(x, '__iter__') and (not isinstance(x, (six.text_type, bytes))))
[ "def", "is_listlike", "(", "x", ")", ":", "return", "(", "hasattr", "(", "x", ",", "'__iter__'", ")", "and", "(", "not", "isinstance", "(", "x", ",", "(", "six", ".", "text_type", ",", "bytes", ")", ")", ")", ")" ]
is this a sequence that isn't like a string or bytes? .
train
false
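A few illustrative cases: anything with __iter__ counts except text and bytes.

```python
assert is_listlike([1, 2])              # list
assert is_listlike((1, 2))              # tuple
assert is_listlike({'a': 1})            # dict (iterates over keys)
assert is_listlike(x for x in [1])      # generator
assert not is_listlike('text')          # text excluded
assert not is_listlike(b'bytes')        # bytes excluded
assert not is_listlike(5)               # not iterable
```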
1,383
def increment_lineno(node, n=1): for node in zip((node,), walk(node)): if ('lineno' in node._attributes): node.lineno = (getattr(node, 'lineno', 0) + n)
[ "def", "increment_lineno", "(", "node", ",", "n", "=", "1", ")", ":", "for", "node", "in", "zip", "(", "(", "node", ",", ")", ",", "walk", "(", "node", ")", ")", ":", "if", "(", "'lineno'", "in", "node", ".", "_attributes", ")", ":", "node", ".", "lineno", "=", "(", "getattr", "(", "node", ",", "'lineno'", ",", "0", ")", "+", "n", ")" ]
increment the line number of each node in the tree starting at *node* by *n* .
train
true
1,385
def ensure_home_directory(fs, user): userprofile = get_profile(user) if ((userprofile is not None) and userprofile.home_directory): fs.do_as_user(user.username, fs.create_home_dir, userprofile.home_directory) else: LOG.warn(('Not creating home directory of %s as his profile is empty' % user))
[ "def", "ensure_home_directory", "(", "fs", ",", "user", ")", ":", "userprofile", "=", "get_profile", "(", "user", ")", "if", "(", "(", "userprofile", "is", "not", "None", ")", "and", "userprofile", ".", "home_directory", ")", ":", "fs", ".", "do_as_user", "(", "user", ".", "username", ",", "fs", ".", "create_home_dir", ",", "userprofile", ".", "home_directory", ")", "else", ":", "LOG", ".", "warn", "(", "(", "'Not creating home directory of %s as his profile is empty'", "%", "user", ")", ")" ]
adds a user's home directory if it doesn't already exist .
train
false
1,386
def test_logarithmic_big_scale(): line = Line(logarithmic=True) line.add('_', [(10 ** (-10)), (10 ** 10), 1]) q = line.render_pyquery() assert (len(q('.y.axis .guides')) == 21)
[ "def", "test_logarithmic_big_scale", "(", ")", ":", "line", "=", "Line", "(", "logarithmic", "=", "True", ")", "line", ".", "add", "(", "'_'", ",", "[", "(", "10", "**", "(", "-", "10", ")", ")", ",", "(", "10", "**", "10", ")", ",", "1", "]", ")", "q", "=", "line", ".", "render_pyquery", "(", ")", "assert", "(", "len", "(", "q", "(", "'.y.axis .guides'", ")", ")", "==", "21", ")" ]
test logarithmic option with a large range of values .
train
false
1,387
def difftool_run(): files = selection.selected_group() if (not files): return s = selection.selection() model = main.model() difftool_launch_with_head(files, bool(s.staged), model.head)
[ "def", "difftool_run", "(", ")", ":", "files", "=", "selection", ".", "selected_group", "(", ")", "if", "(", "not", "files", ")", ":", "return", "s", "=", "selection", ".", "selection", "(", ")", "model", "=", "main", ".", "model", "(", ")", "difftool_launch_with_head", "(", "files", ",", "bool", "(", "s", ".", "staged", ")", ",", "model", ".", "head", ")" ]
start a default difftool session .
train
false
1,388
def verify_int(signed, ekey, n): return encrypt_int(signed, ekey, n)
[ "def", "verify_int", "(", "signed", ",", "ekey", ",", "n", ")", ":", "return", "encrypt_int", "(", "signed", ",", "ekey", ",", "n", ")" ]
verifies signed using key ekey .
train
false
1,389
def saturate(color, percent): return adjust(color, 1, percent)
[ "def", "saturate", "(", "color", ",", "percent", ")", ":", "return", "adjust", "(", "color", ",", "1", ",", "percent", ")" ]
return a fully saturated color with the same hue .
train
false
1,390
def idst(x, type=2, n=None, axis=(-1), norm=None, overwrite_x=False): if ((type == 1) and (norm is not None)): raise NotImplementedError('Orthonormalization not yet supported for IDCT-I') _TP = {1: 1, 2: 3, 3: 2} return _dst(x, _TP[type], n, axis, normalize=norm, overwrite_x=overwrite_x)
[ "def", "idst", "(", "x", ",", "type", "=", "2", ",", "n", "=", "None", ",", "axis", "=", "(", "-", "1", ")", ",", "norm", "=", "None", ",", "overwrite_x", "=", "False", ")", ":", "if", "(", "(", "type", "==", "1", ")", "and", "(", "norm", "is", "not", "None", ")", ")", ":", "raise", "NotImplementedError", "(", "'Orthonormalization not yet supported for IDCT-I'", ")", "_TP", "=", "{", "1", ":", "1", ",", "2", ":", "3", ",", "3", ":", "2", "}", "return", "_dst", "(", "x", ",", "_TP", "[", "type", "]", ",", "n", ",", "axis", ",", "normalize", "=", "norm", ",", "overwrite_x", "=", "overwrite_x", ")" ]
return the inverse discrete sine transform of an arbitrary type sequence .
train
false
1,391
def import_submodule(mod, subname, fullname): global found_now if ((fullname in found_now) and (fullname in sys.modules)): m = sys.modules[fullname] else: print ('Reloading', fullname) found_now[fullname] = 1 oldm = sys.modules.get(fullname, None) if (mod is None): path = None elif hasattr(mod, '__path__'): path = mod.__path__ else: return None try: with replace_import_hook(original_import): (fp, filename, stuff) = imp.find_module(subname, path) except ImportError: return None try: m = imp.load_module(fullname, fp, filename, stuff) except: if oldm: sys.modules[fullname] = oldm raise finally: if fp: fp.close() add_submodule(mod, m, fullname, subname) return m
[ "def", "import_submodule", "(", "mod", ",", "subname", ",", "fullname", ")", ":", "global", "found_now", "if", "(", "(", "fullname", "in", "found_now", ")", "and", "(", "fullname", "in", "sys", ".", "modules", ")", ")", ":", "m", "=", "sys", ".", "modules", "[", "fullname", "]", "else", ":", "print", "(", "'Reloading'", ",", "fullname", ")", "found_now", "[", "fullname", "]", "=", "1", "oldm", "=", "sys", ".", "modules", ".", "get", "(", "fullname", ",", "None", ")", "if", "(", "mod", "is", "None", ")", ":", "path", "=", "None", "elif", "hasattr", "(", "mod", ",", "'__path__'", ")", ":", "path", "=", "mod", ".", "__path__", "else", ":", "return", "None", "try", ":", "with", "replace_import_hook", "(", "original_import", ")", ":", "(", "fp", ",", "filename", ",", "stuff", ")", "=", "imp", ".", "find_module", "(", "subname", ",", "path", ")", "except", "ImportError", ":", "return", "None", "try", ":", "m", "=", "imp", ".", "load_module", "(", "fullname", ",", "fp", ",", "filename", ",", "stuff", ")", "except", ":", "if", "oldm", ":", "sys", ".", "modules", "[", "fullname", "]", "=", "oldm", "raise", "finally", ":", "if", "fp", ":", "fp", ".", "close", "(", ")", "add_submodule", "(", "mod", ",", "m", ",", "fullname", ",", "subname", ")", "return", "m" ]
m = import_submodule(mod, subname, fullname) .
train
true
1,392
def p_declaration_specifiers_5(t): pass
[ "def", "p_declaration_specifiers_5", "(", "t", ")", ":", "pass" ]
declaration_specifiers : type_specifier .
train
false
1,393
def release_vlanid(vlan_id): LOG.debug(_('release_vlanid() called')) session = db.get_session() try: vlanid = session.query(l2network_models.VlanID).filter_by(vlan_id=vlan_id).one() vlanid['vlan_used'] = False session.merge(vlanid) session.flush() return vlanid['vlan_used'] except exc.NoResultFound: raise c_exc.VlanIDNotFound(vlan_id=vlan_id) return
[ "def", "release_vlanid", "(", "vlan_id", ")", ":", "LOG", ".", "debug", "(", "_", "(", "'release_vlanid() called'", ")", ")", "session", "=", "db", ".", "get_session", "(", ")", "try", ":", "vlanid", "=", "session", ".", "query", "(", "l2network_models", ".", "VlanID", ")", ".", "filter_by", "(", "vlan_id", "=", "vlan_id", ")", ".", "one", "(", ")", "vlanid", "[", "'vlan_used'", "]", "=", "False", "session", ".", "merge", "(", "vlanid", ")", "session", ".", "flush", "(", ")", "return", "vlanid", "[", "'vlan_used'", "]", "except", "exc", ".", "NoResultFound", ":", "raise", "c_exc", ".", "VlanIDNotFound", "(", "vlan_id", "=", "vlan_id", ")", "return" ]
sets the vlanid state to be unused .
train
false
1,394
@with_setup(step_runner_environ) def test_failing_behave_as_step_doesnt_pass(): runnable_step = Step.from_string('Given I have a step which calls the "other step fails" step with behave_as') try: runnable_step.run(True) except: pass assert_false(runnable_step.passed)
[ "@", "with_setup", "(", "step_runner_environ", ")", "def", "test_failing_behave_as_step_doesnt_pass", "(", ")", ":", "runnable_step", "=", "Step", ".", "from_string", "(", "'Given I have a step which calls the \"other step fails\" step with behave_as'", ")", "try", ":", "runnable_step", ".", "run", "(", "True", ")", "except", ":", "pass", "assert_false", "(", "runnable_step", ".", "passed", ")" ]
when a step definition calls another step definition with behave_as and the called step fails, the calling step should not pass .
train
false
1,396
def _hashed_key(key): return md5(key.encode(u'utf-8')).hexdigest()
[ "def", "_hashed_key", "(", "key", ")", ":", "return", "md5", "(", "key", ".", "encode", "(", "u'utf-8'", ")", ")", ".", "hexdigest", "(", ")" ]
hash keys when talking directly to the cache api .
train
false
1,397
def isIPAddress(addr, family=AF_INET): if isinstance(addr, bytes): try: addr = addr.decode('ascii') except UnicodeDecodeError: return False if (family == AF_INET6): addr = addr.split(u'%', 1)[0] elif (family == AF_INET): if (addr.count(u'.') != 3): return False else: raise ValueError('unknown address family {!r}'.format(family)) try: inet_pton(family, addr) except (ValueError, error): return False return True
[ "def", "isIPAddress", "(", "addr", ",", "family", "=", "AF_INET", ")", ":", "if", "isinstance", "(", "addr", ",", "bytes", ")", ":", "try", ":", "addr", "=", "addr", ".", "decode", "(", "'ascii'", ")", "except", "UnicodeDecodeError", ":", "return", "False", "if", "(", "family", "==", "AF_INET6", ")", ":", "addr", "=", "addr", ".", "split", "(", "u'%'", ",", "1", ")", "[", "0", "]", "elif", "(", "family", "==", "AF_INET", ")", ":", "if", "(", "addr", ".", "count", "(", "u'.'", ")", "!=", "3", ")", ":", "return", "False", "else", ":", "raise", "ValueError", "(", "'unknown address family {!r}'", ".", "format", "(", "family", ")", ")", "try", ":", "inet_pton", "(", "family", ",", "addr", ")", "except", "(", "ValueError", ",", "error", ")", ":", "return", "False", "return", "True" ]
determine whether the given string represents an ipv4 address .
train
false
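A few cases following inet_pton's behavior; the address-family constants come from the socket module.

```python
from socket import AF_INET, AF_INET6

assert isIPAddress('127.0.0.1')
assert not isIPAddress('256.0.0.1')             # octet out of range
assert not isIPAddress('1.2.3')                 # wrong number of dots
assert isIPAddress('fe80::1', AF_INET6)
assert isIPAddress(u'fe80::1%eth0', AF_INET6)   # scope id is stripped first
assert not isIPAddress('::1')                   # IPv6 literal under AF_INET
```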
1,398
def load_class(dotted_path): dotted_path_split = dotted_path.split('.') if (len(dotted_path_split) > 1): klass_name = dotted_path_split[(-1)] module_name = '.'.join(dotted_path_split[:(-1)]) module = load_module(module_name) if has_attribute(module, klass_name): klass = getattr(module, klass_name) return klass else: raise AttributeError(('Module %s does not have class attribute %s' % (module_name, klass_name))) else: raise ValueError(('Dotted module path %s must contain a module name and a classname' % dotted_path))
[ "def", "load_class", "(", "dotted_path", ")", ":", "dotted_path_split", "=", "dotted_path", ".", "split", "(", "'.'", ")", "if", "(", "len", "(", "dotted_path_split", ")", ">", "1", ")", ":", "klass_name", "=", "dotted_path_split", "[", "(", "-", "1", ")", "]", "module_name", "=", "'.'", ".", "join", "(", "dotted_path_split", "[", ":", "(", "-", "1", ")", "]", ")", "module", "=", "load_module", "(", "module_name", ")", "if", "has_attribute", "(", "module", ",", "klass_name", ")", ":", "klass", "=", "getattr", "(", "module", ",", "klass_name", ")", "return", "klass", "else", ":", "raise", "AttributeError", "(", "(", "'Module %s does not have class attribute %s'", "%", "(", "module_name", ",", "klass_name", ")", ")", ")", "else", ":", "raise", "ValueError", "(", "(", "'Dotted module path %s must contain a module name and a classname'", "%", "dotted_path", ")", ")" ]
loads and returns the class at the given dotted module path .
train
false
1,399
def value_for_key(tuple_of_tuples, key): for t in tuple_of_tuples: if (t[0] == key): return t[1] else: return key
[ "def", "value_for_key", "(", "tuple_of_tuples", ",", "key", ")", ":", "for", "t", "in", "tuple_of_tuples", ":", "if", "(", "t", "[", "0", "]", "==", "key", ")", ":", "return", "t", "[", "1", "]", "else", ":", "return", "key" ]
processes a tuple of 2-element tuples and returns the value corresponding to the given key .
train
false
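A worked example with a Django-style choices tuple; when no element matches, the for/else branch returns the key unchanged.

```python
CHOICES = (('a', 'Apple'), ('b', 'Banana'))

assert value_for_key(CHOICES, 'b') == 'Banana'
assert value_for_key(CHOICES, 'z') == 'z'   # no match: key returned as-is
```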
1,400
def unrepr(s): if (not s): return s obj = _astnode(s) return _Builder().build(obj)
[ "def", "unrepr", "(", "s", ")", ":", "if", "(", "not", "s", ")", ":", "return", "s", "obj", "=", "_astnode", "(", "s", ")", "return", "_Builder", "(", ")", ".", "build", "(", "obj", ")" ]
return a python object compiled from a string .
train
false
1,401
def chisquare_power(effect_size, nobs, n_bins, alpha=0.05, ddof=0): crit = stats.chi2.isf(alpha, ((n_bins - 1) - ddof)) power = stats.ncx2.sf(crit, ((n_bins - 1) - ddof), ((effect_size ** 2) * nobs)) return power
[ "def", "chisquare_power", "(", "effect_size", ",", "nobs", ",", "n_bins", ",", "alpha", "=", "0.05", ",", "ddof", "=", "0", ")", ":", "crit", "=", "stats", ".", "chi2", ".", "isf", "(", "alpha", ",", "(", "(", "n_bins", "-", "1", ")", "-", "ddof", ")", ")", "power", "=", "stats", ".", "ncx2", ".", "sf", "(", "crit", ",", "(", "(", "n_bins", "-", "1", ")", "-", "ddof", ")", ",", "(", "(", "effect_size", "**", "2", ")", "*", "nobs", ")", ")", "return", "power" ]
power of chisquare goodness of fit test . effect size is sqrt of chisquare statistic divided by nobs . parameters: effect_size (float) : the deviation from the null of the normalized chi_square statistic .
train
false
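A usage sketch; the snippet expects scipy.stats to be imported as stats in its namespace. With effect_size 0.1 and nobs 1000 the noncentrality parameter is 0.1 ** 2 * 1000 = 10.0 on df = n_bins - 1 = 4.

```python
from scipy import stats  # assumed to be in scope as `stats`

p = chisquare_power(effect_size=0.1, nobs=1000, n_bins=5, alpha=0.05)
assert 0.0 < p < 1.0
print(round(p, 3))
```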
1,402
def test_scharr_h_mask(): np.random.seed(0) result = filters.scharr_h(np.random.uniform(size=(10, 10)), np.zeros((10, 10), bool)) assert_allclose(result, 0)
[ "def", "test_scharr_h_mask", "(", ")", ":", "np", ".", "random", ".", "seed", "(", "0", ")", "result", "=", "filters", ".", "scharr_h", "(", "np", ".", "random", ".", "uniform", "(", "size", "=", "(", "10", ",", "10", ")", ")", ",", "np", ".", "zeros", "(", "(", "10", ",", "10", ")", ",", "bool", ")", ")", "assert_allclose", "(", "result", ",", "0", ")" ]
horizontal scharr on a masked array should be zero .
train
false
1,403
def memory_usage(): return _GetSystemStats().memory()
[ "def", "memory_usage", "(", ")", ":", "return", "_GetSystemStats", "(", ")", ".", "memory", "(", ")" ]
provides the memory usage in bytes for the given process .
train
false
1,404
def test_close_process_when_normal(): with pipeline.get_cat_pipeline(pipeline.PIPE, pipeline.PIPE) as pl: assert (len(pl.commands) == 1) assert (pl.commands[0]._process.poll() is None) pipeline_wait(pl)
[ "def", "test_close_process_when_normal", "(", ")", ":", "with", "pipeline", ".", "get_cat_pipeline", "(", "pipeline", ".", "PIPE", ",", "pipeline", ".", "PIPE", ")", "as", "pl", ":", "assert", "(", "len", "(", "pl", ".", "commands", ")", "==", "1", ")", "assert", "(", "pl", ".", "commands", "[", "0", "]", ".", "_process", ".", "poll", "(", ")", "is", "None", ")", "pipeline_wait", "(", "pl", ")" ]
process leaks must not occur in successful cases .
train
false
1,405
def _has_kit_refs(info, picks): for p in picks: if (info['chs'][p]['coil_type'] == FIFF.FIFFV_COIL_KIT_REF_MAG): return True return False
[ "def", "_has_kit_refs", "(", "info", ",", "picks", ")", ":", "for", "p", "in", "picks", ":", "if", "(", "info", "[", "'chs'", "]", "[", "p", "]", "[", "'coil_type'", "]", "==", "FIFF", ".", "FIFFV_COIL_KIT_REF_MAG", ")", ":", "return", "True", "return", "False" ]
determine if kit ref channels are chosen .
train
false
1,406
def resolve_reverse_ipv6(packed_ip, flags=0): waiter = Waiter() core.dns_resolve_reverse_ipv6(packed_ip, flags, waiter.switch_args) (result, _type, ttl, addrs) = waiter.get() if (result != core.DNS_ERR_NONE): raise DNSError(result) return (ttl, addrs)
[ "def", "resolve_reverse_ipv6", "(", "packed_ip", ",", "flags", "=", "0", ")", ":", "waiter", "=", "Waiter", "(", ")", "core", ".", "dns_resolve_reverse_ipv6", "(", "packed_ip", ",", "flags", ",", "waiter", ".", "switch_args", ")", "(", "result", ",", "_type", ",", "ttl", ",", "addrs", ")", "=", "waiter", ".", "get", "(", ")", "if", "(", "result", "!=", "core", ".", "DNS_ERR_NONE", ")", ":", "raise", "DNSError", "(", "result", ")", "return", "(", "ttl", ",", "addrs", ")" ]
lookup a ptr record for a given ipv6 address .
train
false
1,407
def server_services(): from evennia.server.sessionhandler import SESSIONS if (hasattr(SESSIONS, 'server') and hasattr(SESSIONS.server, 'services')): server = SESSIONS.server.services.namedServices else: server = {} del SESSIONS return server
[ "def", "server_services", "(", ")", ":", "from", "evennia", ".", "server", ".", "sessionhandler", "import", "SESSIONS", "if", "(", "hasattr", "(", "SESSIONS", ",", "'server'", ")", "and", "hasattr", "(", "SESSIONS", ".", "server", ",", "'services'", ")", ")", ":", "server", "=", "SESSIONS", ".", "server", ".", "services", ".", "namedServices", "else", ":", "server", "=", "{", "}", "del", "SESSIONS", "return", "server" ]
lists all services active on the server .
train
false
1,408
def interpret_distro_name(location, basename, metadata, py_version=None, precedence=SOURCE_DIST, platform=None): parts = basename.split('-') if (not py_version): for (i, p) in enumerate(parts[2:]): if ((len(p) == 5) and p.startswith('py2.')): return for p in range(1, (len(parts) + 1)): (yield Distribution(location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), py_version=py_version, precedence=precedence, platform=platform))
[ "def", "interpret_distro_name", "(", "location", ",", "basename", ",", "metadata", ",", "py_version", "=", "None", ",", "precedence", "=", "SOURCE_DIST", ",", "platform", "=", "None", ")", ":", "parts", "=", "basename", ".", "split", "(", "'-'", ")", "if", "(", "not", "py_version", ")", ":", "for", "(", "i", ",", "p", ")", "in", "enumerate", "(", "parts", "[", "2", ":", "]", ")", ":", "if", "(", "(", "len", "(", "p", ")", "==", "5", ")", "and", "p", ".", "startswith", "(", "'py2.'", ")", ")", ":", "return", "for", "p", "in", "range", "(", "1", ",", "(", "len", "(", "parts", ")", "+", "1", ")", ")", ":", "(", "yield", "Distribution", "(", "location", ",", "metadata", ",", "'-'", ".", "join", "(", "parts", "[", ":", "p", "]", ")", ",", "'-'", ".", "join", "(", "parts", "[", "p", ":", "]", ")", ",", "py_version", "=", "py_version", ",", "precedence", "=", "precedence", ",", "platform", "=", "platform", ")", ")" ]
generate alternative interpretations of a source distro name . note: if location is a filesystem filename, it should be passed through pkg_resources.normalize_path() first .
train
true
1,409
def get_default_role(request): global DEFAULT_ROLE default = getattr(settings, 'OPENSTACK_KEYSTONE_DEFAULT_ROLE', None) if (default and (DEFAULT_ROLE is None)): try: roles = keystoneclient(request, admin=True).roles.list() except Exception: roles = [] exceptions.handle(request) for role in roles: if ((role.id == default) or (role.name == default)): DEFAULT_ROLE = role break return DEFAULT_ROLE
[ "def", "get_default_role", "(", "request", ")", ":", "global", "DEFAULT_ROLE", "default", "=", "getattr", "(", "settings", ",", "'OPENSTACK_KEYSTONE_DEFAULT_ROLE'", ",", "None", ")", "if", "(", "default", "and", "(", "DEFAULT_ROLE", "is", "None", ")", ")", ":", "try", ":", "roles", "=", "keystoneclient", "(", "request", ",", "admin", "=", "True", ")", ".", "roles", ".", "list", "(", ")", "except", "Exception", ":", "roles", "=", "[", "]", "exceptions", ".", "handle", "(", "request", ")", "for", "role", "in", "roles", ":", "if", "(", "(", "role", ".", "id", "==", "default", ")", "or", "(", "role", ".", "name", "==", "default", ")", ")", ":", "DEFAULT_ROLE", "=", "role", "break", "return", "DEFAULT_ROLE" ]
gets the default role object from keystone and saves it as a global .
train
true
1,410
def _MakeSyncSearchServiceCall(call, request, response, deadline): _ValidateDeadline(deadline) logging.warning('_MakeSyncSearchServiceCall is deprecated; please use API.') try: if (deadline is None): apiproxy_stub_map.MakeSyncCall('search', call, request, response) else: rpc = apiproxy_stub_map.UserRPC('search', deadline=deadline) rpc.make_call(call, request, response) rpc.wait() rpc.check_success() except apiproxy_errors.ApplicationError as e: raise _ToSearchError(e)
[ "def", "_MakeSyncSearchServiceCall", "(", "call", ",", "request", ",", "response", ",", "deadline", ")", ":", "_ValidateDeadline", "(", "deadline", ")", "logging", ".", "warning", "(", "'_MakeSyncSearchServiceCall is deprecated; please use API.'", ")", "try", ":", "if", "(", "deadline", "is", "None", ")", ":", "apiproxy_stub_map", ".", "MakeSyncCall", "(", "'search'", ",", "call", ",", "request", ",", "response", ")", "else", ":", "rpc", "=", "apiproxy_stub_map", ".", "UserRPC", "(", "'search'", ",", "deadline", "=", "deadline", ")", "rpc", ".", "make_call", "(", "call", ",", "request", ",", "response", ")", "rpc", ".", "wait", "(", ")", "rpc", ".", "check_success", "(", ")", "except", "apiproxy_errors", ".", "ApplicationError", "as", "e", ":", "raise", "_ToSearchError", "(", "e", ")" ]
deprecated: make a synchronous call to search service .
train
false
1,411
def get_displayable_exp_summary_dicts_matching_ids(exploration_ids, editor_user_id=None): exploration_summaries = exp_services.get_exploration_summaries_matching_ids(exploration_ids) filtered_exploration_summaries = [] for exploration_summary in exploration_summaries: if (exploration_summary is None): continue if (exploration_summary.status == rights_manager.ACTIVITY_STATUS_PRIVATE): if (editor_user_id is None): continue if (not rights_manager.Actor(editor_user_id).can_edit(feconf.ACTIVITY_TYPE_EXPLORATION, exploration_summary.id)): continue filtered_exploration_summaries.append(exploration_summary) return get_displayable_exp_summary_dicts(filtered_exploration_summaries)
[ "def", "get_displayable_exp_summary_dicts_matching_ids", "(", "exploration_ids", ",", "editor_user_id", "=", "None", ")", ":", "exploration_summaries", "=", "exp_services", ".", "get_exploration_summaries_matching_ids", "(", "exploration_ids", ")", "filtered_exploration_summaries", "=", "[", "]", "for", "exploration_summary", "in", "exploration_summaries", ":", "if", "(", "exploration_summary", "is", "None", ")", ":", "continue", "if", "(", "exploration_summary", ".", "status", "==", "rights_manager", ".", "ACTIVITY_STATUS_PRIVATE", ")", ":", "if", "(", "editor_user_id", "is", "None", ")", ":", "continue", "if", "(", "not", "rights_manager", ".", "Actor", "(", "editor_user_id", ")", ".", "can_edit", "(", "feconf", ".", "ACTIVITY_TYPE_EXPLORATION", ",", "exploration_summary", ".", "id", ")", ")", ":", "continue", "filtered_exploration_summaries", ".", "append", "(", "exploration_summary", ")", "return", "get_displayable_exp_summary_dicts", "(", "filtered_exploration_summaries", ")" ]
given a list of exploration ids, returns displayable summary dicts for the explorations the given editor is allowed to view .
train
false
1,412
def convert_background_to_jpg(background_url): file_path = get_path_of_temp_url(background_url) im = Image.open(file_path) out_im = file_path.replace('png', 'jpg') bg = Image.new('RGB', im.size, (255, 255, 255)) bg.paste(im, (0, 0), im) bg.save(out_im, quality=55) return out_im
[ "def", "convert_background_to_jpg", "(", "background_url", ")", ":", "file_path", "=", "get_path_of_temp_url", "(", "background_url", ")", "im", "=", "Image", ".", "open", "(", "file_path", ")", "out_im", "=", "file_path", ".", "replace", "(", "'png'", ",", "'jpg'", ")", "bg", "=", "Image", ".", "new", "(", "'RGB'", ",", "im", ".", "size", ",", "(", "255", ",", "255", ",", "255", ")", ")", "bg", ".", "paste", "(", "im", ",", "(", "0", ",", "0", ")", ",", "im", ")", "bg", ".", "save", "(", "out_im", ",", "quality", "=", "55", ")", "return", "out_im" ]
convert the background image to jpg to reduce the file size .
train
false
1,414
def _lombscargle_cdf(z, N, normalization, dH=1, dK=3): if ((dK - dH) != 2): raise NotImplementedError('Degrees of freedom != 2') Nk = (N - dK) if (normalization == 'psd'): return (1 - np.exp((- z))) elif (normalization == 'standard'): return (1 - ((1 + z) ** ((-0.5) * Nk))) elif (normalization == 'model'): return (1 - ((1 - z) ** (0.5 * Nk))) elif (normalization == 'log'): return (1 - np.exp((((-0.5) * Nk) * z))) else: raise ValueError("normalization='{0}' is not recognized".format(normalization))
[ "def", "_lombscargle_cdf", "(", "z", ",", "N", ",", "normalization", ",", "dH", "=", "1", ",", "dK", "=", "3", ")", ":", "if", "(", "(", "dK", "-", "dH", ")", "!=", "2", ")", ":", "raise", "NotImplementedError", "(", "'Degrees of freedom != 2'", ")", "Nk", "=", "(", "N", "-", "dK", ")", "if", "(", "normalization", "==", "'psd'", ")", ":", "return", "(", "1", "-", "np", ".", "exp", "(", "(", "-", "z", ")", ")", ")", "elif", "(", "normalization", "==", "'standard'", ")", ":", "return", "(", "1", "-", "(", "(", "1", "+", "z", ")", "**", "(", "(", "-", "0.5", ")", "*", "Nk", ")", ")", ")", "elif", "(", "normalization", "==", "'model'", ")", ":", "return", "(", "1", "-", "(", "(", "1", "-", "z", ")", "**", "(", "0.5", "*", "Nk", ")", ")", ")", "elif", "(", "normalization", "==", "'log'", ")", ":", "return", "(", "1", "-", "np", ".", "exp", "(", "(", "(", "(", "-", "0.5", ")", "*", "Nk", ")", "*", "z", ")", ")", ")", "else", ":", "raise", "ValueError", "(", "\"normalization='{0}' is not recognized\"", ".", "format", "(", "normalization", ")", ")" ]
cumulative distribution for the lomb-scargle periodogram . compute the expected cumulative distribution of the periodogram for the null hypothesis, i.e. data consisting of gaussian noise .
train
true
1,415
def get_unpack_formats(): formats = [(name, info[0], info[3]) for (name, info) in _UNPACK_FORMATS.items()] formats.sort() return formats
[ "def", "get_unpack_formats", "(", ")", ":", "formats", "=", "[", "(", "name", ",", "info", "[", "0", "]", ",", "info", "[", "3", "]", ")", "for", "(", "name", ",", "info", ")", "in", "_UNPACK_FORMATS", ".", "items", "(", ")", "]", "formats", ".", "sort", "(", ")", "return", "formats" ]
returns a list of supported formats for unpacking .
train
true
1,417
def copy_table(dataset_name, table_name, new_table_name, project=None): bigquery_client = bigquery.Client(project=project) dataset = bigquery_client.dataset(dataset_name) table = dataset.table(table_name) destination_table = dataset.table(new_table_name) job_id = str(uuid.uuid4()) job = bigquery_client.copy_table(job_id, destination_table, table) job.create_disposition = google.cloud.bigquery.job.CreateDisposition.CREATE_IF_NEEDED job.begin() print 'Waiting for job to finish...' wait_for_job(job) print 'Table {} copied to {}.'.format(table_name, new_table_name)
[ "def", "copy_table", "(", "dataset_name", ",", "table_name", ",", "new_table_name", ",", "project", "=", "None", ")", ":", "bigquery_client", "=", "bigquery", ".", "Client", "(", "project", "=", "project", ")", "dataset", "=", "bigquery_client", ".", "dataset", "(", "dataset_name", ")", "table", "=", "dataset", ".", "table", "(", "table_name", ")", "destination_table", "=", "dataset", ".", "table", "(", "new_table_name", ")", "job_id", "=", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "job", "=", "bigquery_client", ".", "copy_table", "(", "job_id", ",", "destination_table", ",", "table", ")", "job", ".", "create_disposition", "=", "google", ".", "cloud", ".", "bigquery", ".", "job", ".", "CreateDisposition", ".", "CREATE_IF_NEEDED", "job", ".", "begin", "(", ")", "print", "'Waiting for job to finish...'", "wait_for_job", "(", "job", ")", "print", "'Table {} copied to {}.'", ".", "format", "(", "table_name", ",", "new_table_name", ")" ]
copies a table .
train
false
1,420
def get_fields(node): return dict(iter_fields(node))
[ "def", "get_fields", "(", "node", ")", ":", "return", "dict", "(", "iter_fields", "(", "node", ")", ")" ]
return a dict of the fields of the given ast node .
train
false
1,421
def show_hc(kwargs=None, call=None): if (call != 'function'): raise SaltCloudSystemExit('The show_hc function must be called with -f or --function.') if ((not kwargs) or ('name' not in kwargs)): log.error('Must specify name of health check.') return False conn = get_conn() return _expand_item(conn.ex_get_healthcheck(kwargs['name']))
[ "def", "show_hc", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The show_hc function must be called with -f or --function.'", ")", "if", "(", "(", "not", "kwargs", ")", "or", "(", "'name'", "not", "in", "kwargs", ")", ")", ":", "log", ".", "error", "(", "'Must specify name of health check.'", ")", "return", "False", "conn", "=", "get_conn", "(", ")", "return", "_expand_item", "(", "conn", ".", "ex_get_healthcheck", "(", "kwargs", "[", "'name'", "]", ")", ")" ]
show the details of an existing health check .
train
true
1,422
def test_crop_append(): raw = _test_raw_reader(read_raw_bti, pdf_fname=pdf_fnames[0], config_fname=config_fnames[0], head_shape_fname=hs_fnames[0]) (y, t) = raw[:] (t0, t1) = ((0.25 * t[(-1)]), (0.75 * t[(-1)])) mask = ((t0 <= t) * (t <= t1)) raw_ = raw.copy().crop(t0, t1) (y_, _) = raw_[:] assert_true((y_.shape[1] == mask.sum())) assert_true((y_.shape[0] == y.shape[0]))
[ "def", "test_crop_append", "(", ")", ":", "raw", "=", "_test_raw_reader", "(", "read_raw_bti", ",", "pdf_fname", "=", "pdf_fnames", "[", "0", "]", ",", "config_fname", "=", "config_fnames", "[", "0", "]", ",", "head_shape_fname", "=", "hs_fnames", "[", "0", "]", ")", "(", "y", ",", "t", ")", "=", "raw", "[", ":", "]", "(", "t0", ",", "t1", ")", "=", "(", "(", "0.25", "*", "t", "[", "(", "-", "1", ")", "]", ")", ",", "(", "0.75", "*", "t", "[", "(", "-", "1", ")", "]", ")", ")", "mask", "=", "(", "(", "t0", "<=", "t", ")", "*", "(", "t", "<=", "t1", ")", ")", "raw_", "=", "raw", ".", "copy", "(", ")", ".", "crop", "(", "t0", ",", "t1", ")", "(", "y_", ",", "_", ")", "=", "raw_", "[", ":", "]", "assert_true", "(", "(", "y_", ".", "shape", "[", "1", "]", "==", "mask", ".", "sum", "(", ")", ")", ")", "assert_true", "(", "(", "y_", ".", "shape", "[", "0", "]", "==", "y", ".", "shape", "[", "0", "]", ")", ")" ]
test crop and append raw .
train
false
1,423
def timeconvert(timestr): timestamp = None timetuple = email.utils.parsedate_tz(timestr) if (timetuple is not None): timestamp = email.utils.mktime_tz(timetuple) return timestamp
[ "def", "timeconvert", "(", "timestr", ")", ":", "timestamp", "=", "None", "timetuple", "=", "email", ".", "utils", ".", "parsedate_tz", "(", "timestr", ")", "if", "(", "timetuple", "is", "not", "None", ")", ":", "timestamp", "=", "email", ".", "utils", ".", "mktime_tz", "(", "timetuple", ")", "return", "timestamp" ]
convert rfc 2822 defined time string into system timestamp .
train
false
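Example usage with an RFC 2822 date string of the kind found in email and HTTP headers; an unparseable string leaves the timestamp as None.

```python
ts = timeconvert('Mon, 20 Nov 1995 19:12:08 -0500')
assert ts is not None                      # epoch seconds, timezone-corrected
assert timeconvert('not a date') is None
```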
1,426
@verbose def resample(x, up, down, npad=100, axis=(-1), window='boxcar', n_jobs=1, verbose=None): from scipy.signal import get_window if (not isinstance(axis, int)): err = ('The axis parameter needs to be an integer (got %s). The axis parameter was missing from this function for a period of time, you might be intending to specify the subsequent window parameter.' % repr(axis)) raise TypeError(err) x = np.asanyarray(x) ratio = (float(up) / down) if (axis < 0): axis = (x.ndim + axis) orig_last_axis = (x.ndim - 1) if (axis != orig_last_axis): x = x.swapaxes(axis, orig_last_axis) orig_shape = x.shape x_len = orig_shape[(-1)] if (x_len == 0): warn('x has zero length along last axis, returning a copy of x') return x.copy() bad_msg = 'npad must be "auto" or an integer' if isinstance(npad, string_types): if (npad != 'auto'): raise ValueError(bad_msg) min_add = (min((x_len // 8), 100) * 2) npad = ((2 ** int(np.ceil(np.log2((x_len + min_add))))) - x_len) (npad, extra) = divmod(npad, 2) npads = np.array([npad, (npad + extra)], int) else: if (npad != int(npad)): raise ValueError(bad_msg) npads = np.array([npad, npad], int) del npad x_flat = x.reshape(((-1), x_len)) orig_len = (x_len + npads.sum()) new_len = int(round((ratio * orig_len))) final_len = int(round((ratio * x_len))) to_removes = [int(round((ratio * npads[0])))] to_removes.append(((new_len - final_len) - to_removes[0])) to_removes = np.array(to_removes) if (window is not None): if callable(window): W = window(fftfreq(orig_len)) elif (isinstance(window, np.ndarray) and (window.shape == (orig_len,))): W = window else: W = ifftshift(get_window(window, orig_len)) else: W = np.ones(orig_len) W *= (float(new_len) / float(orig_len)) W = W.astype(np.complex128) (n_jobs, cuda_dict, W) = setup_cuda_fft_resample(n_jobs, W, new_len) if (n_jobs == 1): y = np.zeros((len(x_flat), (new_len - to_removes.sum())), dtype=x.dtype) for (xi, x_) in enumerate(x_flat): y[xi] = fft_resample(x_, W, new_len, npads, to_removes, cuda_dict) else: (parallel, p_fun, _) = parallel_func(fft_resample, n_jobs) y = parallel((p_fun(x_, W, new_len, npads, to_removes, cuda_dict) for x_ in x_flat)) y = np.array(y) y.shape = (orig_shape[:(-1)] + (y.shape[1],)) if (axis != orig_last_axis): y = y.swapaxes(axis, orig_last_axis) return y
[ "@", "verbose", "def", "resample", "(", "x", ",", "up", ",", "down", ",", "npad", "=", "100", ",", "axis", "=", "(", "-", "1", ")", ",", "window", "=", "'boxcar'", ",", "n_jobs", "=", "1", ",", "verbose", "=", "None", ")", ":", "from", "scipy", ".", "signal", "import", "get_window", "if", "(", "not", "isinstance", "(", "axis", ",", "int", ")", ")", ":", "err", "=", "(", "'The axis parameter needs to be an integer (got %s). The axis parameter was missing from this function for a period of time, you might be intending to specify the subsequent window parameter.'", "%", "repr", "(", "axis", ")", ")", "raise", "TypeError", "(", "err", ")", "x", "=", "np", ".", "asanyarray", "(", "x", ")", "ratio", "=", "(", "float", "(", "up", ")", "/", "down", ")", "if", "(", "axis", "<", "0", ")", ":", "axis", "=", "(", "x", ".", "ndim", "+", "axis", ")", "orig_last_axis", "=", "(", "x", ".", "ndim", "-", "1", ")", "if", "(", "axis", "!=", "orig_last_axis", ")", ":", "x", "=", "x", ".", "swapaxes", "(", "axis", ",", "orig_last_axis", ")", "orig_shape", "=", "x", ".", "shape", "x_len", "=", "orig_shape", "[", "(", "-", "1", ")", "]", "if", "(", "x_len", "==", "0", ")", ":", "warn", "(", "'x has zero length along last axis, returning a copy of x'", ")", "return", "x", ".", "copy", "(", ")", "bad_msg", "=", "'npad must be \"auto\" or an integer'", "if", "isinstance", "(", "npad", ",", "string_types", ")", ":", "if", "(", "npad", "!=", "'auto'", ")", ":", "raise", "ValueError", "(", "bad_msg", ")", "min_add", "=", "(", "min", "(", "(", "x_len", "//", "8", ")", ",", "100", ")", "*", "2", ")", "npad", "=", "(", "(", "2", "**", "int", "(", "np", ".", "ceil", "(", "np", ".", "log2", "(", "(", "x_len", "+", "min_add", ")", ")", ")", ")", ")", "-", "x_len", ")", "(", "npad", ",", "extra", ")", "=", "divmod", "(", "npad", ",", "2", ")", "npads", "=", "np", ".", "array", "(", "[", "npad", ",", "(", "npad", "+", "extra", ")", "]", ",", "int", ")", "else", ":", "if", "(", "npad", "!=", "int", "(", "npad", ")", ")", ":", "raise", "ValueError", "(", "bad_msg", ")", "npads", "=", "np", ".", "array", "(", "[", "npad", ",", "npad", "]", ",", "int", ")", "del", "npad", "x_flat", "=", "x", ".", "reshape", "(", "(", "(", "-", "1", ")", ",", "x_len", ")", ")", "orig_len", "=", "(", "x_len", "+", "npads", ".", "sum", "(", ")", ")", "new_len", "=", "int", "(", "round", "(", "(", "ratio", "*", "orig_len", ")", ")", ")", "final_len", "=", "int", "(", "round", "(", "(", "ratio", "*", "x_len", ")", ")", ")", "to_removes", "=", "[", "int", "(", "round", "(", "(", "ratio", "*", "npads", "[", "0", "]", ")", ")", ")", "]", "to_removes", ".", "append", "(", "(", "(", "new_len", "-", "final_len", ")", "-", "to_removes", "[", "0", "]", ")", ")", "to_removes", "=", "np", ".", "array", "(", "to_removes", ")", "if", "(", "window", "is", "not", "None", ")", ":", "if", "callable", "(", "window", ")", ":", "W", "=", "window", "(", "fftfreq", "(", "orig_len", ")", ")", "elif", "(", "isinstance", "(", "window", ",", "np", ".", "ndarray", ")", "and", "(", "window", ".", "shape", "==", "(", "orig_len", ",", ")", ")", ")", ":", "W", "=", "window", "else", ":", "W", "=", "ifftshift", "(", "get_window", "(", "window", ",", "orig_len", ")", ")", "else", ":", "W", "=", "np", ".", "ones", "(", "orig_len", ")", "W", "*=", "(", "float", "(", "new_len", ")", "/", "float", "(", "orig_len", ")", ")", "W", "=", "W", ".", "astype", "(", "np", ".", "complex128", ")", "(", "n_jobs", ",", "cuda_dict", ",", "W", ")", "=", "setup_cuda_fft_resample", "(", "n_jobs", ",", 
"W", ",", "new_len", ")", "if", "(", "n_jobs", "==", "1", ")", ":", "y", "=", "np", ".", "zeros", "(", "(", "len", "(", "x_flat", ")", ",", "(", "new_len", "-", "to_removes", ".", "sum", "(", ")", ")", ")", ",", "dtype", "=", "x", ".", "dtype", ")", "for", "(", "xi", ",", "x_", ")", "in", "enumerate", "(", "x_flat", ")", ":", "y", "[", "xi", "]", "=", "fft_resample", "(", "x_", ",", "W", ",", "new_len", ",", "npads", ",", "to_removes", ",", "cuda_dict", ")", "else", ":", "(", "parallel", ",", "p_fun", ",", "_", ")", "=", "parallel_func", "(", "fft_resample", ",", "n_jobs", ")", "y", "=", "parallel", "(", "(", "p_fun", "(", "x_", ",", "W", ",", "new_len", ",", "npads", ",", "to_removes", ",", "cuda_dict", ")", "for", "x_", "in", "x_flat", ")", ")", "y", "=", "np", ".", "array", "(", "y", ")", "y", ".", "shape", "=", "(", "orig_shape", "[", ":", "(", "-", "1", ")", "]", "+", "(", "y", ".", "shape", "[", "1", "]", ",", ")", ")", "if", "(", "axis", "!=", "orig_last_axis", ")", ":", "y", "=", "y", ".", "swapaxes", "(", "axis", ",", "orig_last_axis", ")", "return", "y" ]
resample the array x along its last axis by the rational factor up / down using fft-based resampling .
train
false
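a minimal usage sketch for the resample snippet above, assuming it and its helpers are importable as shown; the signal is illustrative:

    import numpy as np

    # one channel, 100 samples; upsample by a factor of 2 (up=2, down=1)
    x = np.sin(2 * np.pi * 5 * np.linspace(0, 1, 100, endpoint=False))[np.newaxis, :]
    y = resample(x, up=2, down=1, npad='auto')
    assert y.shape == (1, 200)  # output length scales by up / down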
1,427
def test_reshape_configure(backend_default): bsz = backend_default.bsz reshape_0 = Reshape((0, 10, 10)) reshape_0.configure((10, 2, 50)) assert (reshape_0.out_shape == (10, 10, 10)) reshape_1 = Reshape((10, 25, (-1))) reshape_1.configure((10, 2, 50)) assert (reshape_1.out_shape == (10, 25, 4)) reshape_2 = Reshape((5, (-1))) reshape_2.configure((10, 2, 25)) assert (reshape_2.out_shape == (5, 100)) assert (reshape_2.out_shape_t == (5, (100 * bsz))) reshape_3 = Reshape((5, (-1), 5)) reshape_3.configure((10, 25)) assert (reshape_3.out_shape == (5, 10, 5))
[ "def", "test_reshape_configure", "(", "backend_default", ")", ":", "bsz", "=", "backend_default", ".", "bsz", "reshape_0", "=", "Reshape", "(", "(", "0", ",", "10", ",", "10", ")", ")", "reshape_0", ".", "configure", "(", "(", "10", ",", "2", ",", "50", ")", ")", "assert", "(", "reshape_0", ".", "out_shape", "==", "(", "10", ",", "10", ",", "10", ")", ")", "reshape_1", "=", "Reshape", "(", "(", "10", ",", "25", ",", "(", "-", "1", ")", ")", ")", "reshape_1", ".", "configure", "(", "(", "10", ",", "2", ",", "50", ")", ")", "assert", "(", "reshape_1", ".", "out_shape", "==", "(", "10", ",", "25", ",", "4", ")", ")", "reshape_2", "=", "Reshape", "(", "(", "5", ",", "(", "-", "1", ")", ")", ")", "reshape_2", ".", "configure", "(", "(", "10", ",", "2", ",", "25", ")", ")", "assert", "(", "reshape_2", ".", "out_shape", "==", "(", "5", ",", "100", ")", ")", "assert", "(", "reshape_2", ".", "out_shape_t", "==", "(", "5", ",", "(", "100", "*", "bsz", ")", ")", ")", "reshape_3", "=", "Reshape", "(", "(", "5", ",", "(", "-", "1", ")", ",", "5", ")", ")", "reshape_3", ".", "configure", "(", "(", "10", ",", "25", ")", ")", "assert", "(", "reshape_3", ".", "out_shape", "==", "(", "5", ",", "10", ",", "5", ")", ")" ]
test cases: - reshape with 0 - reshape with -1 - reshape with collapsing dimensions - reshape with expanding dimensions .
train
false
1,428
def network_count_reserved_ips(context, network_id): return IMPL.network_count_reserved_ips(context, network_id)
[ "def", "network_count_reserved_ips", "(", "context", ",", "network_id", ")", ":", "return", "IMPL", ".", "network_count_reserved_ips", "(", "context", ",", "network_id", ")" ]
return the number of reserved ips in the network .
train
false
1,430
def merge_mappings(target, other, function=(lambda x, y: (x + y))): assert (set(target) == set(other)), 'keys must match' return {k: function(v, other[k]) for (k, v) in target.items()}
[ "def", "merge_mappings", "(", "target", ",", "other", ",", "function", "=", "(", "lambda", "x", ",", "y", ":", "(", "x", "+", "y", ")", ")", ")", ":", "assert", "(", "set", "(", "target", ")", "==", "set", "(", "other", ")", ")", ",", "'keys must match'", "return", "{", "k", ":", "function", "(", "v", ",", "other", "[", "k", "]", ")", "for", "(", "k", ",", "v", ")", "in", "target", ".", "items", "(", ")", "}" ]
merge two mappings into a single mapping .
train
false
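a short example of the merge_mappings snippet above; both mappings must share the same keys:

    a = {'x': 1, 'y': 2}
    b = {'x': 10, 'y': 20}
    merge_mappings(a, b)                # {'x': 11, 'y': 22} (default: addition)
    merge_mappings(a, b, function=max)  # {'x': 10, 'y': 20}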
1,432
def notify_about_aggregate_update(context, event_suffix, aggregate_payload): aggregate_identifier = aggregate_payload.get('aggregate_id', None) if (not aggregate_identifier): aggregate_identifier = aggregate_payload.get('name', None) if (not aggregate_identifier): LOG.debug('No aggregate id or name specified for this notification and it will be ignored') return notifier = rpc.get_notifier(service='aggregate', host=aggregate_identifier) notifier.info(context, ('aggregate.%s' % event_suffix), aggregate_payload)
[ "def", "notify_about_aggregate_update", "(", "context", ",", "event_suffix", ",", "aggregate_payload", ")", ":", "aggregate_identifier", "=", "aggregate_payload", ".", "get", "(", "'aggregate_id'", ",", "None", ")", "if", "(", "not", "aggregate_identifier", ")", ":", "aggregate_identifier", "=", "aggregate_payload", ".", "get", "(", "'name'", ",", "None", ")", "if", "(", "not", "aggregate_identifier", ")", ":", "LOG", ".", "debug", "(", "'No aggregate id or name specified for this notification and it will be ignored'", ")", "return", "notifier", "=", "rpc", ".", "get_notifier", "(", "service", "=", "'aggregate'", ",", "host", "=", "aggregate_identifier", ")", "notifier", ".", "info", "(", "context", ",", "(", "'aggregate.%s'", "%", "event_suffix", ")", ",", "aggregate_payload", ")" ]
send a notification about aggregate update .
train
false
1,433
def _reg_dir(): return os.path.join(__opts__['cachedir'], 'thorium')
[ "def", "_reg_dir", "(", ")", ":", "return", "os", ".", "path", ".", "join", "(", "__opts__", "[", "'cachedir'", "]", ",", "'thorium'", ")" ]
return the thorium register directory under the configured cachedir .
train
false
1,434
@register.inclusion_tag('zinnia/tags/dummy.html', takes_context=True) def get_categories_tree(context, template='zinnia/tags/categories_tree.html'): return {'template': template, 'categories': Category.objects.all().annotate(count_entries=Count('entries')), 'context_category': context.get('category')}
[ "@", "register", ".", "inclusion_tag", "(", "'zinnia/tags/dummy.html'", ",", "takes_context", "=", "True", ")", "def", "get_categories_tree", "(", "context", ",", "template", "=", "'zinnia/tags/categories_tree.html'", ")", ":", "return", "{", "'template'", ":", "template", ",", "'categories'", ":", "Category", ".", "objects", ".", "all", "(", ")", ".", "annotate", "(", "count_entries", "=", "Count", "(", "'entries'", ")", ")", ",", "'context_category'", ":", "context", ".", "get", "(", "'category'", ")", "}" ]
return the categories as a tree .
train
false
1,435
def load_le32(buf, pos): end = (pos + 4) if (end > len(buf)): raise BadRarFile('cannot load le32') return (S_LONG.unpack_from(buf, pos)[0], (pos + 4))
[ "def", "load_le32", "(", "buf", ",", "pos", ")", ":", "end", "=", "(", "pos", "+", "4", ")", "if", "(", "end", ">", "len", "(", "buf", ")", ")", ":", "raise", "BadRarFile", "(", "'cannot load le32'", ")", "return", "(", "S_LONG", ".", "unpack_from", "(", "buf", ",", "pos", ")", "[", "0", "]", ",", "(", "pos", "+", "4", ")", ")" ]
load little-endian 32-bit integer .
train
true
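a sketch of the load_le32 snippet above; S_LONG is assumed to be a module-level struct.Struct('<L'), which matches the unpacking it performs:

    import struct

    S_LONG = struct.Struct('<L')  # assumption: little-endian unsigned 32-bit

    buf = b'\x2a\x00\x00\x00tail'
    value, pos = load_le32(buf, 0)
    # value == 42 (0x2a little-endian), pos == 4 (offset of the next field)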
1,436
def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0, link=None, verbose=1, dry_run=0): from distutils.dep_util import newer from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE if (not os.path.isfile(src)): raise DistutilsFileError(("can't copy '%s': doesn't exist or not a regular file" % src)) if os.path.isdir(dst): dir = dst dst = os.path.join(dst, os.path.basename(src)) else: dir = os.path.dirname(dst) if (update and (not newer(src, dst))): if (verbose >= 1): log.debug('not copying %s (output up-to-date)', src) return (dst, 0) try: action = _copy_action[link] except KeyError: raise ValueError(("invalid value '%s' for 'link' argument" % link)) if (verbose >= 1): if (os.path.basename(dst) == os.path.basename(src)): log.info('%s %s -> %s', action, src, dir) else: log.info('%s %s -> %s', action, src, dst) if dry_run: return (dst, 1) elif (link == 'hard'): if (not (os.path.exists(dst) and os.path.samefile(src, dst))): try: os.link(src, dst) return (dst, 1) except OSError: pass elif (link == 'sym'): if (not (os.path.exists(dst) and os.path.samefile(src, dst))): os.symlink(src, dst) return (dst, 1) _copy_file_contents(src, dst) if (preserve_mode or preserve_times): st = os.stat(src) if preserve_times: os.utime(dst, (st[ST_ATIME], st[ST_MTIME])) if preserve_mode: os.chmod(dst, S_IMODE(st[ST_MODE])) return (dst, 1)
[ "def", "copy_file", "(", "src", ",", "dst", ",", "preserve_mode", "=", "1", ",", "preserve_times", "=", "1", ",", "update", "=", "0", ",", "link", "=", "None", ",", "verbose", "=", "1", ",", "dry_run", "=", "0", ")", ":", "from", "distutils", ".", "dep_util", "import", "newer", "from", "stat", "import", "ST_ATIME", ",", "ST_MTIME", ",", "ST_MODE", ",", "S_IMODE", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "src", ")", ")", ":", "raise", "DistutilsFileError", "(", "(", "\"can't copy '%s': doesn't exist or not a regular file\"", "%", "src", ")", ")", "if", "os", ".", "path", ".", "isdir", "(", "dst", ")", ":", "dir", "=", "dst", "dst", "=", "os", ".", "path", ".", "join", "(", "dst", ",", "os", ".", "path", ".", "basename", "(", "src", ")", ")", "else", ":", "dir", "=", "os", ".", "path", ".", "dirname", "(", "dst", ")", "if", "(", "update", "and", "(", "not", "newer", "(", "src", ",", "dst", ")", ")", ")", ":", "if", "(", "verbose", ">=", "1", ")", ":", "log", ".", "debug", "(", "'not copying %s (output up-to-date)'", ",", "src", ")", "return", "(", "dst", ",", "0", ")", "try", ":", "action", "=", "_copy_action", "[", "link", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "(", "\"invalid value '%s' for 'link' argument\"", "%", "link", ")", ")", "if", "(", "verbose", ">=", "1", ")", ":", "if", "(", "os", ".", "path", ".", "basename", "(", "dst", ")", "==", "os", ".", "path", ".", "basename", "(", "src", ")", ")", ":", "log", ".", "info", "(", "'%s %s -> %s'", ",", "action", ",", "src", ",", "dir", ")", "else", ":", "log", ".", "info", "(", "'%s %s -> %s'", ",", "action", ",", "src", ",", "dst", ")", "if", "dry_run", ":", "return", "(", "dst", ",", "1", ")", "elif", "(", "link", "==", "'hard'", ")", ":", "if", "(", "not", "(", "os", ".", "path", ".", "exists", "(", "dst", ")", "and", "os", ".", "path", ".", "samefile", "(", "src", ",", "dst", ")", ")", ")", ":", "try", ":", "os", ".", "link", "(", "src", ",", "dst", ")", "return", "(", "dst", ",", "1", ")", "except", "OSError", ":", "pass", "elif", "(", "link", "==", "'sym'", ")", ":", "if", "(", "not", "(", "os", ".", "path", ".", "exists", "(", "dst", ")", "and", "os", ".", "path", ".", "samefile", "(", "src", ",", "dst", ")", ")", ")", ":", "os", ".", "symlink", "(", "src", ",", "dst", ")", "return", "(", "dst", ",", "1", ")", "_copy_file_contents", "(", "src", ",", "dst", ")", "if", "(", "preserve_mode", "or", "preserve_times", ")", ":", "st", "=", "os", ".", "stat", "(", "src", ")", "if", "preserve_times", ":", "os", ".", "utime", "(", "dst", ",", "(", "st", "[", "ST_ATIME", "]", ",", "st", "[", "ST_MTIME", "]", ")", ")", "if", "preserve_mode", ":", "os", ".", "chmod", "(", "dst", ",", "S_IMODE", "(", "st", "[", "ST_MODE", "]", ")", ")", "return", "(", "dst", ",", "1", ")" ]
copy file src to dst, optionally preserving mode and timestamps .
train
false
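this appears to be distutils.file_util.copy_file; a typical call, with hypothetical paths:

    from distutils.file_util import copy_file

    # copying into a directory appends the basename; returns (dst, copied_flag)
    dst, copied = copy_file('README.txt', 'build/', update=1)
    # copied == 0 when update=1 and the destination is already up to date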
1,437
def create_ssh_wrapper(): ssh_wrapper = ssh_file(SSH_WRAPPER) with open(ssh_wrapper, u'w') as handle: handle.write(SSH_WRAPPER_TEMPLATE.format(known_hosts=ssh_file(KNOWN_HOSTS), identity=ssh_file(RSA_KEY))) os.chmod(ssh_wrapper, 493)
[ "def", "create_ssh_wrapper", "(", ")", ":", "ssh_wrapper", "=", "ssh_file", "(", "SSH_WRAPPER", ")", "with", "open", "(", "ssh_wrapper", ",", "u'w'", ")", "as", "handle", ":", "handle", ".", "write", "(", "SSH_WRAPPER_TEMPLATE", ".", "format", "(", "known_hosts", "=", "ssh_file", "(", "KNOWN_HOSTS", ")", ",", "identity", "=", "ssh_file", "(", "RSA_KEY", ")", ")", ")", "os", ".", "chmod", "(", "ssh_wrapper", ",", "493", ")" ]
creates wrapper for ssh to pass custom known hosts and key .
train
false
1,438
def test_nvidia_driver3(): var = cuda.fvector() f = theano.function([var], (var + 1), mode=mode_with_gpu, profile=False) topo = f.maker.fgraph.toposort() assert any([isinstance(node.op, cuda.GpuElemwise) for node in topo]) assert (theano.sandbox.cuda.use.device_number is not None)
[ "def", "test_nvidia_driver3", "(", ")", ":", "var", "=", "cuda", ".", "fvector", "(", ")", "f", "=", "theano", ".", "function", "(", "[", "var", "]", ",", "(", "var", "+", "1", ")", ",", "mode", "=", "mode_with_gpu", ",", "profile", "=", "False", ")", "topo", "=", "f", ".", "maker", ".", "fgraph", ".", "toposort", "(", ")", "assert", "any", "(", "[", "isinstance", "(", "node", ".", "op", ",", "cuda", ".", "GpuElemwise", ")", "for", "node", "in", "topo", "]", ")", "assert", "(", "theano", ".", "sandbox", ".", "cuda", ".", "use", ".", "device_number", "is", "not", "None", ")" ]
test that the gpu device is initialized by theano when we build a function with gpu op .
train
false
1,439
def _get_dpi_from(cmd, pattern, func): try: (out, _) = run_subprocess([cmd]) except (OSError, CalledProcessError): pass else: match = re.search(pattern, out) if match: return func(*map(float, match.groups()))
[ "def", "_get_dpi_from", "(", "cmd", ",", "pattern", ",", "func", ")", ":", "try", ":", "(", "out", ",", "_", ")", "=", "run_subprocess", "(", "[", "cmd", "]", ")", "except", "(", "OSError", ",", "CalledProcessError", ")", ":", "pass", "else", ":", "match", "=", "re", ".", "search", "(", "pattern", ",", "out", ")", "if", "match", ":", "return", "func", "(", "*", "map", "(", "float", ",", "match", ".", "groups", "(", ")", ")", ")" ]
run cmd, match pattern against its output, and apply func to the captured groups .
train
true
1,440
def status_show(context, data_dict): return {'site_title': config.get('ckan.site_title'), 'site_description': config.get('ckan.site_description'), 'site_url': config.get('ckan.site_url'), 'ckan_version': ckan.__version__, 'error_emails_to': config.get('email_to'), 'locale_default': config.get('ckan.locale_default'), 'extensions': config.get('ckan.plugins').split()}
[ "def", "status_show", "(", "context", ",", "data_dict", ")", ":", "return", "{", "'site_title'", ":", "config", ".", "get", "(", "'ckan.site_title'", ")", ",", "'site_description'", ":", "config", ".", "get", "(", "'ckan.site_description'", ")", ",", "'site_url'", ":", "config", ".", "get", "(", "'ckan.site_url'", ")", ",", "'ckan_version'", ":", "ckan", ".", "__version__", ",", "'error_emails_to'", ":", "config", ".", "get", "(", "'email_to'", ")", ",", "'locale_default'", ":", "config", ".", "get", "(", "'ckan.locale_default'", ")", ",", "'extensions'", ":", "config", ".", "get", "(", "'ckan.plugins'", ")", ".", "split", "(", ")", "}" ]
return a dictionary with information about the site's configuration .
train
false
1,442
def tools_nuget(): nuget_url = CONFIG['nuget']['url'] mobsf_subdir_tools = CONFIG['MobSF']['tools'] nuget_file_path = CONFIG['nuget']['file'] nuget_file_local = open(os.path.join(mobsf_subdir_tools, nuget_file_path), 'wb') print '[*] Downloading nuget..' nuget_file = urlrequest.urlopen(nuget_url) print '[*] Saving to File {}'.format(nuget_file_path) nuget_file_local.write(bytes(nuget_file.read())) nuget_file_local.close()
[ "def", "tools_nuget", "(", ")", ":", "nuget_url", "=", "CONFIG", "[", "'nuget'", "]", "[", "'url'", "]", "mobsf_subdir_tools", "=", "CONFIG", "[", "'MobSF'", "]", "[", "'tools'", "]", "nuget_file_path", "=", "CONFIG", "[", "'nuget'", "]", "[", "'file'", "]", "nuget_file_local", "=", "open", "(", "os", ".", "path", ".", "join", "(", "mobsf_subdir_tools", ",", "nuget_file_path", ")", ",", "'wb'", ")", "print", "'[*] Downloading nuget..'", "nuget_file", "=", "urlrequest", ".", "urlopen", "(", "nuget_url", ")", "print", "'[*] Saving to File {}'", ".", "format", "(", "nuget_file_path", ")", "nuget_file_local", ".", "write", "(", "bytes", "(", "nuget_file", ".", "read", "(", ")", ")", ")", "nuget_file_local", ".", "close", "(", ")" ]
download nuget .
train
false
1,444
def get_configuration_dict(name, default=None): default = (default or {}) output = default.copy() output.update((get_configuration_value(name, {}) or {})) return output
[ "def", "get_configuration_dict", "(", "name", ",", "default", "=", "None", ")", ":", "default", "=", "(", "default", "or", "{", "}", ")", "output", "=", "default", ".", "copy", "(", ")", "output", ".", "update", "(", "(", "get_configuration_value", "(", "name", ",", "{", "}", ")", "or", "{", "}", ")", ")", "return", "output" ]
return a dictionary produced by merging the current site's configuration with the default value .
train
false
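a sketch of the get_configuration_dict snippet above, with get_configuration_value stubbed out for illustration:

    def get_configuration_value(name, default=None):
        # stub: pretend the site config defines only a timeout override
        return {'timeout': 30} if name == 'API_SETTINGS' else default

    get_configuration_dict('API_SETTINGS', {'timeout': 10, 'retries': 3})
    # -> {'timeout': 30, 'retries': 3}: site values override the defaults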
1,445
def has_required(programs): try: return check_required(programs) except exception.CommandNotFound: return False
[ "def", "has_required", "(", "programs", ")", ":", "try", ":", "return", "check_required", "(", "programs", ")", "except", "exception", ".", "CommandNotFound", ":", "return", "False" ]
same as check_required but returns false if not all commands exist .
train
false
1,447
def _add_metadata(bt, md_key, lines): taxonomy_md = biom_taxonomy_formatter(bt, md_key) if (taxonomy_md is not None): for i in range((len(lines) - 1)): lines[(i + 1)] = ((lines[(i + 1)] + ' DCTB ') + taxonomy_md[i]) return lines else: nls = ([' DCTB '.join(lines[0].split(' DCTB ')[:(-1)])] + lines[1:]) return nls
[ "def", "_add_metadata", "(", "bt", ",", "md_key", ",", "lines", ")", ":", "taxonomy_md", "=", "biom_taxonomy_formatter", "(", "bt", ",", "md_key", ")", "if", "(", "taxonomy_md", "is", "not", "None", ")", ":", "for", "i", "in", "range", "(", "(", "len", "(", "lines", ")", "-", "1", ")", ")", ":", "lines", "[", "(", "i", "+", "1", ")", "]", "=", "(", "(", "lines", "[", "(", "i", "+", "1", ")", "]", "+", "' DCTB '", ")", "+", "taxonomy_md", "[", "i", "]", ")", "return", "lines", "else", ":", "nls", "=", "(", "[", "' DCTB '", ".", "join", "(", "lines", "[", "0", "]", ".", "split", "(", "' DCTB '", ")", "[", ":", "(", "-", "1", ")", "]", ")", "]", "+", "lines", "[", "1", ":", "]", ")", "return", "nls" ]
add metadata to formatted correlation output lines .
train
false
1,448
def get_component(app, id): sa_session = app.model.context.current return sa_session.query(app.model.Component).get(app.security.decode_id(id))
[ "def", "get_component", "(", "app", ",", "id", ")", ":", "sa_session", "=", "app", ".", "model", ".", "context", ".", "current", "return", "sa_session", ".", "query", "(", "app", ".", "model", ".", "Component", ")", ".", "get", "(", "app", ".", "security", ".", "decode_id", "(", "id", ")", ")" ]
return the component with the given encoded id .
train
false
1,449
def is_aa(residue, standard=False): if (not isinstance(residue, basestring)): residue = residue.get_resname() residue = residue.upper() if standard: return (residue in d3_to_index) else: return (residue in SCOPData.protein_letters_3to1)
[ "def", "is_aa", "(", "residue", ",", "standard", "=", "False", ")", ":", "if", "(", "not", "isinstance", "(", "residue", ",", "basestring", ")", ")", ":", "residue", "=", "residue", ".", "get_resname", "(", ")", "residue", "=", "residue", ".", "upper", "(", ")", "if", "standard", ":", "return", "(", "residue", "in", "d3_to_index", ")", "else", ":", "return", "(", "residue", "in", "SCOPData", ".", "protein_letters_3to1", ")" ]
return true if residue object/string is an amino acid .
train
false
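a hedged usage sketch; this helper looks like Biopython's Bio.PDB.Polypeptide.is_aa, so the import path below is an assumption:

    from Bio.PDB.Polypeptide import is_aa  # assumed location of the snippet

    is_aa('ALA')                 # True: a standard amino acid
    is_aa('MSE', standard=True)  # False: selenomethionine is non-standard
    is_aa('HOH')                 # False: water is not in either lookup table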
1,450
def test_negative_distance(): with pytest.raises(ValueError): Distance([(-2), 3.1], u.kpc) with pytest.raises(ValueError): Distance([(-2), (-3.1)], u.kpc) with pytest.raises(ValueError): Distance((-2), u.kpc) d = Distance((-2), u.kpc, allow_negative=True) assert (d.value == (-2))
[ "def", "test_negative_distance", "(", ")", ":", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "Distance", "(", "[", "(", "-", "2", ")", ",", "3.1", "]", ",", "u", ".", "kpc", ")", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "Distance", "(", "[", "(", "-", "2", ")", ",", "(", "-", "3.1", ")", "]", ",", "u", ".", "kpc", ")", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "Distance", "(", "(", "-", "2", ")", ",", "u", ".", "kpc", ")", "d", "=", "Distance", "(", "(", "-", "2", ")", ",", "u", ".", "kpc", ",", "allow_negative", "=", "True", ")", "assert", "(", "d", ".", "value", "==", "(", "-", "2", ")", ")" ]
test optional kwarg allow_negative .
train
false
1,451
def pts_to_prestep(x, *args): steps = np.zeros(((1 + len(args)), ((2 * len(x)) - 1))) steps[0, 0::2] = x steps[0, 1::2] = steps[0, 0:(-2):2] steps[1:, 0::2] = args steps[1:, 1::2] = steps[1:, 2::2] return steps
[ "def", "pts_to_prestep", "(", "x", ",", "*", "args", ")", ":", "steps", "=", "np", ".", "zeros", "(", "(", "(", "1", "+", "len", "(", "args", ")", ")", ",", "(", "(", "2", "*", "len", "(", "x", ")", ")", "-", "1", ")", ")", ")", "steps", "[", "0", ",", "0", ":", ":", "2", "]", "=", "x", "steps", "[", "0", ",", "1", ":", ":", "2", "]", "=", "steps", "[", "0", ",", "0", ":", "(", "-", "2", ")", ":", "2", "]", "steps", "[", "1", ":", ",", "0", ":", ":", "2", "]", "=", "args", "steps", "[", "1", ":", ",", "1", ":", ":", "2", "]", "=", "steps", "[", "1", ":", ",", "2", ":", ":", "2", "]", "return", "steps" ]
convert continuous line to pre-steps .
train
false
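a worked example of the pts_to_prestep snippet above (matplotlib's steps-pre interpolation):

    import numpy as np

    steps = pts_to_prestep(np.array([1, 2, 3]), np.array([4, 5, 6]))
    # steps[0] == [1, 1, 2, 2, 3]   x values repeat before each step
    # steps[1] == [4, 5, 5, 6, 6]   y jumps first, then holds (steps-pre)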
1,452
def getVoronoiLoopByPoints(inside, loop, outsides): for outside in outsides: loop = getVoronoiLoopByPoint(inside, loop, outside) return loop
[ "def", "getVoronoiLoopByPoints", "(", "inside", ",", "loop", ",", "outsides", ")", ":", "for", "outside", "in", "outsides", ":", "loop", "=", "getVoronoiLoopByPoint", "(", "inside", ",", "loop", ",", "outside", ")", "return", "loop" ]
get voronoi loop enclosing the inside .
train
false
1,453
def list_tab(user): data = raw_cron(user) ret = {'pre': [], 'crons': [], 'special': [], 'env': []} flag = False comment = None identifier = None for line in data.splitlines(): if (line == '# Lines below here are managed by Salt, do not edit'): flag = True continue if flag: commented_cron_job = False if line.startswith('#DISABLED#'): line = line[10:] commented_cron_job = True if line.startswith('@'): dat = {} comps = line.split() if (len(comps) < 2): continue dat['spec'] = comps[0] dat['cmd'] = ' '.join(comps[1:]) ret['special'].append(dat) elif line.startswith('#'): comment_line = line.lstrip('# ') if (SALT_CRON_IDENTIFIER in comment_line): parts = comment_line.split(SALT_CRON_IDENTIFIER) comment_line = parts[0].rstrip() if (len(parts[1]) > 1): identifier = parts[1][1:] if (comment is None): comment = comment_line else: comment += ('\n' + comment_line) elif ((line.find('=') > 0) and ((' ' not in line) or (line.index('=') < line.index(' ')))): comps = line.split('=', 1) dat = {} dat['name'] = comps[0] dat['value'] = comps[1] ret['env'].append(dat) elif (len(line.split(' ')) > 5): comps = line.split(' ') dat = {'minute': comps[0], 'hour': comps[1], 'daymonth': comps[2], 'month': comps[3], 'dayweek': comps[4], 'identifier': identifier, 'cmd': ' '.join(comps[5:]), 'comment': comment, 'commented': False} if commented_cron_job: dat['commented'] = True ret['crons'].append(dat) identifier = None comment = None commented_cron_job = False else: ret['pre'].append(line) return ret
[ "def", "list_tab", "(", "user", ")", ":", "data", "=", "raw_cron", "(", "user", ")", "ret", "=", "{", "'pre'", ":", "[", "]", ",", "'crons'", ":", "[", "]", ",", "'special'", ":", "[", "]", ",", "'env'", ":", "[", "]", "}", "flag", "=", "False", "comment", "=", "None", "identifier", "=", "None", "for", "line", "in", "data", ".", "splitlines", "(", ")", ":", "if", "(", "line", "==", "'# Lines below here are managed by Salt, do not edit'", ")", ":", "flag", "=", "True", "continue", "if", "flag", ":", "commented_cron_job", "=", "False", "if", "line", ".", "startswith", "(", "'#DISABLED#'", ")", ":", "line", "=", "line", "[", "10", ":", "]", "commented_cron_job", "=", "True", "if", "line", ".", "startswith", "(", "'@'", ")", ":", "dat", "=", "{", "}", "comps", "=", "line", ".", "split", "(", ")", "if", "(", "len", "(", "comps", ")", "<", "2", ")", ":", "continue", "dat", "[", "'spec'", "]", "=", "comps", "[", "0", "]", "dat", "[", "'cmd'", "]", "=", "' '", ".", "join", "(", "comps", "[", "1", ":", "]", ")", "ret", "[", "'special'", "]", ".", "append", "(", "dat", ")", "elif", "line", ".", "startswith", "(", "'#'", ")", ":", "comment_line", "=", "line", ".", "lstrip", "(", "'# '", ")", "if", "(", "SALT_CRON_IDENTIFIER", "in", "comment_line", ")", ":", "parts", "=", "comment_line", ".", "split", "(", "SALT_CRON_IDENTIFIER", ")", "comment_line", "=", "parts", "[", "0", "]", ".", "rstrip", "(", ")", "if", "(", "len", "(", "parts", "[", "1", "]", ")", ">", "1", ")", ":", "identifier", "=", "parts", "[", "1", "]", "[", "1", ":", "]", "if", "(", "comment", "is", "None", ")", ":", "comment", "=", "comment_line", "else", ":", "comment", "+=", "(", "'\\n'", "+", "comment_line", ")", "elif", "(", "(", "line", ".", "find", "(", "'='", ")", ">", "0", ")", "and", "(", "(", "' '", "not", "in", "line", ")", "or", "(", "line", ".", "index", "(", "'='", ")", "<", "line", ".", "index", "(", "' '", ")", ")", ")", ")", ":", "comps", "=", "line", ".", "split", "(", "'='", ",", "1", ")", "dat", "=", "{", "}", "dat", "[", "'name'", "]", "=", "comps", "[", "0", "]", "dat", "[", "'value'", "]", "=", "comps", "[", "1", "]", "ret", "[", "'env'", "]", ".", "append", "(", "dat", ")", "elif", "(", "len", "(", "line", ".", "split", "(", "' '", ")", ")", ">", "5", ")", ":", "comps", "=", "line", ".", "split", "(", "' '", ")", "dat", "=", "{", "'minute'", ":", "comps", "[", "0", "]", ",", "'hour'", ":", "comps", "[", "1", "]", ",", "'daymonth'", ":", "comps", "[", "2", "]", ",", "'month'", ":", "comps", "[", "3", "]", ",", "'dayweek'", ":", "comps", "[", "4", "]", ",", "'identifier'", ":", "identifier", ",", "'cmd'", ":", "' '", ".", "join", "(", "comps", "[", "5", ":", "]", ")", ",", "'comment'", ":", "comment", ",", "'commented'", ":", "False", "}", "if", "commented_cron_job", ":", "dat", "[", "'commented'", "]", "=", "True", "ret", "[", "'crons'", "]", ".", "append", "(", "dat", ")", "identifier", "=", "None", "comment", "=", "None", "commented_cron_job", "=", "False", "else", ":", "ret", "[", "'pre'", "]", ".", "append", "(", "line", ")", "return", "ret" ]
return the contents of the specified user's crontab as a structured dict .
train
true
1,454
def _sss(l1, l2, l3): c1 = Circle((0, 0), l3) c2 = Circle((l1, 0), l2) inter = [a for a in c1.intersection(c2) if a.y.is_nonnegative] if (not inter): return None pt = inter[0] return Triangle((0, 0), (l1, 0), pt)
[ "def", "_sss", "(", "l1", ",", "l2", ",", "l3", ")", ":", "c1", "=", "Circle", "(", "(", "0", ",", "0", ")", ",", "l3", ")", "c2", "=", "Circle", "(", "(", "l1", ",", "0", ")", ",", "l2", ")", "inter", "=", "[", "a", "for", "a", "in", "c1", ".", "intersection", "(", "c2", ")", "if", "a", ".", "y", ".", "is_nonnegative", "]", "if", "(", "not", "inter", ")", ":", "return", "None", "pt", "=", "inter", "[", "0", "]", "return", "Triangle", "(", "(", "0", ",", "0", ")", ",", "(", "l1", ",", "0", ")", ",", "pt", ")" ]
return triangle having side of length l1 on the x-axis .
train
false
1,455
def standard_normal(size=None, dtype=float): return normal(size=size, dtype=dtype)
[ "def", "standard_normal", "(", "size", "=", "None", ",", "dtype", "=", "float", ")", ":", "return", "normal", "(", "size", "=", "size", ",", "dtype", "=", "dtype", ")" ]
returns an array of samples drawn from the standard normal distribution .
train
false
1,456
@skip('silverlight') @skip('netstandard') def test_xequals_call_for_optimization(): import clr clr.AddReference('System.Configuration') from System.Configuration import ConfigurationManager c = ConfigurationManager.ConnectionStrings for i in xrange(3): if is_posix: AreEqual(2, c.Count) else: AreEqual(1, c.Count) for i in xrange(3): count = c.Count if is_posix: AreEqual(2, count) else: AreEqual(1, count) AreEqual(c.Count, count) for i in xrange(3): c[0].Name c.Count c.Count
[ "@", "skip", "(", "'silverlight'", ")", "@", "skip", "(", "'netstandard'", ")", "def", "test_xequals_call_for_optimization", "(", ")", ":", "import", "clr", "clr", ".", "AddReference", "(", "'System.Configuration'", ")", "from", "System", ".", "Configuration", "import", "ConfigurationManager", "c", "=", "ConfigurationManager", ".", "ConnectionStrings", "for", "i", "in", "xrange", "(", "3", ")", ":", "if", "is_posix", ":", "AreEqual", "(", "2", ",", "c", ".", "Count", ")", "else", ":", "AreEqual", "(", "1", ",", "c", ".", "Count", ")", "for", "i", "in", "xrange", "(", "3", ")", ":", "count", "=", "c", ".", "Count", "if", "is_posix", ":", "AreEqual", "(", "2", ",", "count", ")", "else", ":", "AreEqual", "(", "1", ",", "count", ")", "AreEqual", "(", "c", ".", "Count", ",", "count", ")", "for", "i", "in", "xrange", "(", "3", ")", ":", "c", "[", "0", "]", ".", "Name", "c", ".", "Count", "c", ".", "Count" ]
test x.Equals(y) call optimization, exercised via System.Configuration .
train
false
1,457
def lookup_lastlines(lastlines_dirpath, path): underscored = path.replace('/', '_') try: lastlines_file = open(os.path.join(lastlines_dirpath, underscored)) except (OSError, IOError): return lastlines = lastlines_file.read() lastlines_file.close() os.remove(lastlines_file.name) if (not lastlines): return try: target_file = open(path) except (OSError, IOError): return target_data = target_file.read() target_file.close() loc = target_data.rfind(lastlines) if (loc == (-1)): return reverse_lineno = target_data.count('\n', (loc + len(lastlines))) return reverse_lineno
[ "def", "lookup_lastlines", "(", "lastlines_dirpath", ",", "path", ")", ":", "underscored", "=", "path", ".", "replace", "(", "'/'", ",", "'_'", ")", "try", ":", "lastlines_file", "=", "open", "(", "os", ".", "path", ".", "join", "(", "lastlines_dirpath", ",", "underscored", ")", ")", "except", "(", "OSError", ",", "IOError", ")", ":", "return", "lastlines", "=", "lastlines_file", ".", "read", "(", ")", "lastlines_file", ".", "close", "(", ")", "os", ".", "remove", "(", "lastlines_file", ".", "name", ")", "if", "(", "not", "lastlines", ")", ":", "return", "try", ":", "target_file", "=", "open", "(", "path", ")", "except", "(", "OSError", ",", "IOError", ")", ":", "return", "target_data", "=", "target_file", ".", "read", "(", ")", "target_file", ".", "close", "(", ")", "loc", "=", "target_data", ".", "rfind", "(", "lastlines", ")", "if", "(", "loc", "==", "(", "-", "1", ")", ")", ":", "return", "reverse_lineno", "=", "target_data", ".", "count", "(", "'\\n'", ",", "(", "loc", "+", "len", "(", "lastlines", ")", ")", ")", "return", "reverse_lineno" ]
retrieve last lines seen for path .
train
false
1,459
def directory_exists(path): return os.path.isdir(os.path.expanduser(path))
[ "def", "directory_exists", "(", "path", ")", ":", "return", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "expanduser", "(", "path", ")", ")" ]
tests to see if path is a valid directory .
train
false
1,460
def _get_data_id(annotation): if isinstance(annotation, HtmlTag): return annotation.attributes[TAGID]
[ "def", "_get_data_id", "(", "annotation", ")", ":", "if", "isinstance", "(", "annotation", ",", "HtmlTag", ")", ":", "return", "annotation", ".", "attributes", "[", "TAGID", "]" ]
get id of an annotation .
train
false
1,461
def chunks(l, n): for i in xrange(0, len(l), n): (yield l[i:(i + n)])
[ "def", "chunks", "(", "l", ",", "n", ")", ":", "for", "i", "in", "xrange", "(", "0", ",", "len", "(", "l", ")", ",", "n", ")", ":", "(", "yield", "l", "[", "i", ":", "(", "i", "+", "n", ")", "]", ")" ]
yield successive n-sized chunks from l .
train
true
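usage of the chunks snippet above (note it is python 2 code; replace xrange with range on python 3):

    list(chunks([1, 2, 3, 4, 5, 6, 7], 3))
    # -> [[1, 2, 3], [4, 5, 6], [7]]   the last chunk may be shorter than n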
1,462
def create_local_srs(): for host_ref in _db_content['host'].keys(): create_sr(name_label='Local storage', type='lvm', other_config={'i18n-original-value-name_label': 'Local storage', 'i18n-key': 'local-storage'}, physical_size=40000, physical_utilisation=20000, virtual_allocation=10000, host_ref=host_ref) create_sr(name_label='Local storage ISO', type='iso', other_config={'i18n-original-value-name_label': 'Local storage ISO', 'i18n-key': 'local-storage-iso'}, physical_size=80000, physical_utilisation=40000, virtual_allocation=80000, host_ref=host_ref)
[ "def", "create_local_srs", "(", ")", ":", "for", "host_ref", "in", "_db_content", "[", "'host'", "]", ".", "keys", "(", ")", ":", "create_sr", "(", "name_label", "=", "'Local storage'", ",", "type", "=", "'lvm'", ",", "other_config", "=", "{", "'i18n-original-value-name_label'", ":", "'Local storage'", ",", "'i18n-key'", ":", "'local-storage'", "}", ",", "physical_size", "=", "40000", ",", "physical_utilisation", "=", "20000", ",", "virtual_allocation", "=", "10000", ",", "host_ref", "=", "host_ref", ")", "create_sr", "(", "name_label", "=", "'Local storage ISO'", ",", "type", "=", "'iso'", ",", "other_config", "=", "{", "'i18n-original-value-name_label'", ":", "'Local storage ISO'", ",", "'i18n-key'", ":", "'local-storage-iso'", "}", ",", "physical_size", "=", "80000", ",", "physical_utilisation", "=", "40000", ",", "virtual_allocation", "=", "80000", ",", "host_ref", "=", "host_ref", ")" ]
create local srs (lvm and iso) on each host that look like the ones created on the local disk by default by the xenserver installer .
train
false
1,463
def get_profile_from_user(user): for field in user._meta.get_fields(): try: if hasattr(user, field.name): attribute = getattr(user, field.name) if (get_profile_model() == type(attribute)): return attribute except Exception: logger.exception('Error getting profile attribute from user.') logger.info('Could not find profile attribute.') return None
[ "def", "get_profile_from_user", "(", "user", ")", ":", "for", "field", "in", "user", ".", "_meta", ".", "get_fields", "(", ")", ":", "try", ":", "if", "hasattr", "(", "user", ",", "field", ".", "name", ")", ":", "attribute", "=", "getattr", "(", "user", ",", "field", ".", "name", ")", "if", "(", "get_profile_model", "(", ")", "==", "type", "(", "attribute", ")", ")", ":", "return", "attribute", "except", "Exception", ":", "logger", ".", "exception", "(", "'Error getting profile attribute from user.'", ")", "logger", ".", "info", "(", "'Could not find profile attribute.'", ")", "return", "None" ]
tries to get the profile according to the class configured on auth_profile_module .
train
false
1,464
def inverse_sine_transform(F, k, x, **hints): return InverseSineTransform(F, k, x).doit(**hints)
[ "def", "inverse_sine_transform", "(", "F", ",", "k", ",", "x", ",", "**", "hints", ")", ":", "return", "InverseSineTransform", "(", "F", ",", "k", ",", "x", ")", ".", "doit", "(", "**", "hints", ")" ]
compute the unitary, ordinary-frequency inverse sine transform of F .
train
false
1,465
def match_paren(parens): stack = Stack() for b in parens: if (b == '('): stack.push(1) elif (not stack.isEmpty()): stack.pop() else: return False return stack.isEmpty()
[ "def", "match_paren", "(", "parens", ")", ":", "stack", "=", "Stack", "(", ")", "for", "b", "in", "parens", ":", "if", "(", "b", "==", "'('", ")", ":", "stack", ".", "push", "(", "1", ")", "elif", "(", "not", "stack", ".", "isEmpty", "(", ")", ")", ":", "stack", ".", "pop", "(", ")", "else", ":", "return", "False", "return", "stack", ".", "isEmpty", "(", ")" ]
return true if the parenthesis expression passed is balanced, false otherwise .
train
false
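examples for the match_paren snippet above; note it assumes the input contains only parentheses, since any non-'(' character pops the stack:

    match_paren('(())')  # True: every '(' is eventually closed
    match_paren('(()')   # False: one '(' is left on the stack
    match_paren('())')   # False: ')' arrives while the stack is empty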
1,466
def listen_fds(unset_environment=True): fds = int(os.environ.get('LISTEN_FDS', 0)) listen_pid = int(os.environ.get('LISTEN_PID', 0)) if (listen_pid != os.getpid()): return 0 if unset_environment: os.environ.pop('LISTEN_PID', None) os.environ.pop('LISTEN_FDS', None) return fds
[ "def", "listen_fds", "(", "unset_environment", "=", "True", ")", ":", "fds", "=", "int", "(", "os", ".", "environ", ".", "get", "(", "'LISTEN_FDS'", ",", "0", ")", ")", "listen_pid", "=", "int", "(", "os", ".", "environ", ".", "get", "(", "'LISTEN_PID'", ",", "0", ")", ")", "if", "(", "listen_pid", "!=", "os", ".", "getpid", "(", ")", ")", ":", "return", "0", "if", "unset_environment", ":", "os", ".", "environ", ".", "pop", "(", "'LISTEN_PID'", ",", "None", ")", "os", ".", "environ", ".", "pop", "(", "'LISTEN_FDS'", ",", "None", ")", "return", "fds" ]
get the number of sockets inherited from systemd socket activation .
train
false
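a hedged sketch of consuming the inherited descriptors; per the sd_listen_fds(3) convention they start at fd 3 (an assumption stated in the comment):

    import socket

    SD_LISTEN_FDS_START = 3  # first activated fd per the systemd convention

    n = listen_fds()
    for fd in range(SD_LISTEN_FDS_START, SD_LISTEN_FDS_START + n):
        sock = socket.socket(fileno=fd)  # python 3.4+: adopt an existing fd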
1,467
def convert_uptime_hours(sys_uptime): return ((int(sys_uptime) / 100.0) / 3600.0)
[ "def", "convert_uptime_hours", "(", "sys_uptime", ")", ":", "return", "(", "(", "int", "(", "sys_uptime", ")", "/", "100.0", ")", "/", "3600.0", ")" ]
convert sys_uptime, given in hundredths of a second, to hours; returns a float .
train
false
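worked arithmetic for the convert_uptime_hours snippet above; snmp sysUpTime is reported in hundredths of a second (TimeTicks):

    convert_uptime_hours(360000)   # 1.0  (360000 ticks = 3600 s = 1 hour)
    convert_uptime_hours(8640000)  # 24.0 (one full day of uptime)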