Columns:
id_within_dataset: int64, 1 to 55.5k
snippet: string, lengths 19 to 14.2k
tokens: sequence, lengths 6 to 1.63k
nl: string, lengths 6 to 352
split_within_dataset: string, 1 value
is_duplicated: bool, 2 classes
2,157
def re_meta(line, match=None):
    if match:
        reStr = re.compile(u'^\\.\\. {0}: (.*)'.format(re.escape(match)))
    else:
        reStr = re.compile(u'^\\.\\. (.*?): (.*)')
    result = reStr.findall(line.strip())
    if match and result:
        return (match, result[0])
    elif (not match) and result:
        return (result[0][0], result[0][1].strip())
    else:
        return (None,)
find metadata using regular expressions .
train
false
2,158
def _gitConfig(path):
    runCommand(['git', 'config', '--file', path.child('.git').child('config').path,
                'user.name', '"someone"'])
    runCommand(['git', 'config', '--file', path.child('.git').child('config').path,
                'user.email', '"someone@someplace.com"'])
set some config in the repo that git requires to make commits .
train
false
2,159
def computeLRC(data):
    lrc = sum(ord(a) for a in data) & 255
    lrc = (lrc ^ 255) + 1
    return lrc & 255
used to compute the longitudinal redundancy check against a string .
train
false
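A quick worked check of the LRC snippet above (a sketch, assuming computeLRC is in scope; the frame bytes are made up for illustration):

# byte values of the frame sum to 1 + 3 + 0 + 0 + 0 + 10 = 14
frame = '\x01\x03\x00\x00\x00\x0a'
# two's complement of the byte-sum, kept to one byte: (14 ^ 255) + 1 == 242
assert computeLRC(frame) == 242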
2,160
def exc_info_to_str(exc_info):
    return ''.join(traceback.format_exception(*exc_info))
given some exception info , format it into a traceback string .
train
false
2,162
def package_info(package, image=None):
    cmd = ['DISM', '/English',
           '/Image:{0}'.format(image) if image else '/Online',
           '/Get-PackageInfo']
    if '~' in package:
        cmd.append('/PackageName:{0}'.format(package))
    else:
        cmd.append('/PackagePath:{0}'.format(package))
    out = __salt__['cmd.run_all'](cmd)
    if out['retcode'] == 0:
        ret = dict()
        for line in str(out['stdout']).splitlines():
            if ' : ' in line:
                info = line.split(' : ')
                if len(info) < 2:
                    continue
                ret[info[0]] = info[1]
    else:
        ret = out
    return ret
display information about a package . args: package : the full path to the package .
train
true
2,163
@pytest.mark.skipif('sys.version_info < (3,3)')
def test_find_module_py33():
    assert find_module_py33('_io') == (None, '_io', False)
needs to work like the old find_module .
train
false
2,164
@pytest.mark.skipif(u'sys.version_info[:2] >= (3, 5)', reason=u'Infinite recursion on Python 3.5')
@pytest.mark.xfail(str(u'ip is not None'))
def test_exception_logging_argless_exception():
    try:
        log.enable_exception_logging()
        with log.log_to_list() as log_list:
            raise Exception()
    except Exception as exc:
        sys.excepthook(*sys.exc_info())
    else:
        assert False
    assert len(log_list) == 1
    assert log_list[0].levelname == u'ERROR'
    assert log_list[0].message == u'Exception [astropy.tests.test_logger]'
    assert log_list[0].origin == u'astropy.tests.test_logger'
regression test for a crash that occurred on python 3 when logging an exception that was instantiated with no arguments . regression test for URL .
train
false
2,165
def fulltext(html, language='en'):
    from .cleaners import DocumentCleaner
    from .configuration import Configuration
    from .extractors import ContentExtractor
    from .outputformatters import OutputFormatter
    config = Configuration()
    config.language = language
    extractor = ContentExtractor(config)
    document_cleaner = DocumentCleaner(config)
    output_formatter = OutputFormatter(config)
    doc = config.get_parser().fromstring(html)
    doc = document_cleaner.clean(doc)
    top_node = extractor.calculate_best_node(doc)
    top_node = extractor.post_cleanup(top_node)
    (text, article_html) = output_formatter.get_formatted(top_node)
    return text
takes article html string input and outputs the fulltext . input string is decoded via unicodedammit if needed .
train
false
2,166
def list_bucket(bucket_name):
    url = 'https://www.googleapis.com/storage/v1/b/{0}/o'.format(bucket_name)
    try:
        response = gcs_get_request(url)
        if response.status_code != HTTP_OK:
            logging.error('Error on listing objects in GCS bucket: {0}. Error: {1}'.format(bucket_name, response.status_code))
            return []
        content = json.loads(response.content)
    except requests.HTTPError as error:
        logging.error('Error on listing objects in GCS bucket: {0}. Error: {1}'.format(bucket_name, error))
        return []
    if 'items' not in content.keys():
        return []
    objects = []
    for item in content['items']:
        objects.append(item['name'])
    logging.debug('Bucket contents: {0}'.format(objects))
    return objects
returns a list of metadata of the objects within the given bucket .
train
false
2,167
@with_session
def create_list(list_name, session=None):
    regexp_list = get_list_by_exact_name(list_name, session=session)
    if not regexp_list:
        regexp_list = RegexpListList(name=list_name)
        session.merge(regexp_list)
        session.commit()
    return regexp_list
only creates the list if it doesnt exist .
train
false
2,170
def calculated_stat(base_stat, level, iv, effort, nature=None):
    stat = ((base_stat * 2 + iv + effort // 4) * level) // 100 + 5
    if nature:
        stat = int(stat * nature)
    return stat
returns the calculated stat -- i .
train
false
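A worked example of the stat formula above (a sketch, assuming calculated_stat is in scope; the input values are illustrative):

# ((100*2 + 31 + 252//4) * 50) // 100 + 5 == (294 * 50) // 100 + 5 == 152
assert calculated_stat(100, 50, 31, 252) == 152
# a nature multiplier is applied last and truncated: int(152 * 1.1) == 167
assert calculated_stat(100, 50, 31, 252, nature=1.1) == 167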
2,171
def update_translation_catalogs():
    from django.core.management import call_command
    prev_cwd = os.getcwd()
    os.chdir(proj_dir)
    call_command('makemessages')
    call_command('compilemessages')
    pofile = os.path.join(proj_dir, 'locale', 'fr', 'LC_MESSAGES', 'django.po')
    with open(pofile) as f:
        content = f.read()
    content = re.sub('^"POT-Creation-Date.+$\\s', '', content, flags=re.MULTILINE)
    with open(pofile, 'w') as f:
        f.write(content)
    os.chdir(prev_cwd)
run makemessages and compilemessages in sampleproject .
train
false
2,172
def instance_add_security_group(context, instance_uuid, security_group_id):
    sec_group_ref = models.SecurityGroupInstanceAssociation()
    sec_group_ref.update({'instance_uuid': instance_uuid,
                          'security_group_id': security_group_id})
    sec_group_ref.save()
associate the given security group with the given instance .
train
false
2,173
def binary_dilation(input, structure=None, iterations=1, mask=None, output=None,
                    border_value=0, origin=0, brute_force=False):
    input = numpy.asarray(input)
    if structure is None:
        structure = generate_binary_structure(input.ndim, 1)
    origin = _ni_support._normalize_sequence(origin, input.ndim)
    structure = numpy.asarray(structure)
    structure = structure[tuple([slice(None, None, -1)] * structure.ndim)]
    for ii in range(len(origin)):
        origin[ii] = -origin[ii]
        if not (structure.shape[ii] & 1):
            origin[ii] -= 1
    return _binary_erosion(input, structure, iterations, mask, output,
                           border_value, origin, 1, brute_force)
return fast binary morphological dilation of an image .
train
false
2,174
def get_valid_mentioned_users_guids(comment, contributors):
    new_mentions = set(re.findall('\\[[@|\\+].*?\\]\\(htt[ps]{1,2}:\\/\\/[a-z\\d:.]+?\\/([a-z\\d]{5})\\/\\)', comment.content))
    new_mentions = [m for m in new_mentions
                    if (m not in comment.ever_mentioned) and validate_contributor(m, contributors)]
    return new_mentions
get a list of valid users that are mentioned in the comment content .
train
false
2,175
def userkey_required():
    def _decorator(view):
        def wrapped_view(request, *args, **kwargs):
            try:
                uk = UserKey.objects.get(user=request.user)
            except UserKey.DoesNotExist:
                messages.warning(request, u"This operation requires an active user key, but you don't have one.")
                return redirect('users:userkey')
            if not uk.is_active():
                messages.warning(request, u'This operation is not available. Your user key has not been activated.')
                return redirect('users:userkey')
            return view(request, *args, **kwargs)
        return wrapped_view
    return _decorator
decorator for views which require that the user has an active userkey .
train
false
2,176
def select_bandwidth(x, bw, kernel):
    bw = bw.lower()
    if bw not in bandwidth_funcs:
        raise ValueError('Bandwidth %s not understood' % bw)
    return bandwidth_funcs[bw](x, kernel)
selects bandwidth for a selection rule bw . this is a wrapper around existing bandwidth selection rules . parameters: x : array-like , array for which to get the bandwidth ; bw : string , name of bandwidth selection rule .
train
false
2,177
def configure_custom(debug=False, stdout=None, stderr=None):
    top_level_logger = logging.getLogger(__name__.split('.')[0])
    top_level_logger.propagate = False
    top_level_logger.setLevel(logging.DEBUG if debug else logging.INFO)
    while top_level_logger.handlers:
        top_level_logger.handlers.pop()
    fmt = CustomFormatter()
    handler = CustomStreamHandler(stdout=stdout, stderr=stderr, formatter=fmt)
    top_level_logger.addHandler(handler)
configure the logging module to output logging messages to the console via stdout and stderr .
train
false
2,178
def create_symlink(src, dest):
    os.symlink(src, dest)
    atexit.register(delete_symlink, dest)
creates a symbolic link which will be deleted when the process ends .
train
false
2,179
def _GenerateRulesForMSVS(p, output_dir, options, spec, sources, excluded_sources, actions_to_add):
    rules = spec.get('rules', [])
    rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
    rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
    if rules_native:
        _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)
    if rules_external:
        _GenerateExternalRules(rules_external, output_dir, spec, sources, options, actions_to_add)
    _AdjustSourcesForRules(rules, sources, excluded_sources, False)
generate all the rules for a particular project .
train
false
2,180
@pytest.fixture
def aioclient_mock():
    with mock_aiohttp_client() as mock_session:
        yield mock_session
fixture to mock aioclient calls .
train
false
2,181
@deprecated(CallDeprecatedTests.version, replacement='newMethod')
def oldMethodReplaced(x):
    return 2 * x
another deprecated method .
train
false
2,182
def MakeSyncCall(service, call, request, response, stubmap=None):
    if stubmap is None:
        stubmap = apiproxy
    return stubmap.MakeSyncCall(service, call, request, response)
makes a synchronous api call within the specified package for the specified call method .
train
false
2,184
def pos(a):
    return +a
alias for max_elemwise{x .
train
false
2,185
def _stdout_list_split(retcode, stdout='', splitstring='\n'):
    if retcode == 0:
        ret = stdout.split(splitstring)
        return ret
    else:
        return False
evaluates open vswitch commands retcode value .
train
true
2,187
def dn2str(dn):
    return ','.join([
        '+'.join(['='.join((atype, escape_dn_chars(avalue or '')))
                  for (atype, avalue, dummy) in rdn])
        for rdn in dn
    ])
this function takes a decomposed dn as parameter and returns a single string .
train
false
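A usage sketch for the snippet above, assuming dn2str and an escape_dn_chars helper (as in python-ldap) are in scope; the DN is illustrative:

# each RDN is a list of (type, value, flags) triples; multi-valued RDNs join with '+'
dn = [[('cn', 'John Doe', 1)], [('dc', 'example', 1)], [('dc', 'com', 1)]]
assert dn2str(dn) == 'cn=John Doe,dc=example,dc=com'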
2,188
def dictDiffAndReport(da, db):
    differences = dictDiff(da, db)
    if not differences:
        return differences
    if differences['inAButNotInB']:
        print ('>>> inAButNotInB: %s' % differences['inAButNotInB'])
    if differences['inBButNotInA']:
        print ('>>> inBButNotInA: %s' % differences['inBButNotInA'])
    for key in differences['differentValues']:
        print ('>>> da[%s] != db[%s]' % (key, key))
        print ('da[%s] = %r' % (key, da[key]))
        print ('db[%s] = %r' % (key, db[key]))
    return differences
compares two python dictionaries at the top level and reports differences .
train
true
2,189
def clone_dir(src, dest):
    for x in os.listdir(src):
        dpath = os.path.join(dest, x)
        spath = os.path.join(src, x)
        if os.path.isdir(spath):
            os.mkdir(dpath)
            clone_dir(spath, dpath)
        else:
            try:
                hardlink_file(spath, dpath)
            except:
                shutil.copy2(spath, dpath)
simulates creation of a directory called clone_dir inside of tmpdir .
train
false
2,190
def sql_query(dbname, query):
    import sqlite3
    try:
        path = nltk.data.find(dbname)
        connection = sqlite3.connect(str(path))
        cur = connection.cursor()
        return cur.execute(query)
    except (ValueError, sqlite3.OperationalError):
        import warnings
        warnings.warn(u'Make sure the database file %s is installed and uncompressed.' % dbname)
        raise
execute an sql query over a database .
train
false
2,193
def sedgewick_maze_graph(create_using=None):
    G = empty_graph(0, create_using)
    G.add_nodes_from(range(8))
    G.add_edges_from([[0, 2], [0, 7], [0, 5]])
    G.add_edges_from([[1, 7], [2, 6]])
    G.add_edges_from([[3, 4], [3, 5]])
    G.add_edges_from([[4, 5], [4, 7], [4, 6]])
    G.name = 'Sedgewick Maze'
    return G
return a small maze with a cycle .
train
false
2,194
def _edit_string_for_tags(tags):
    names = []
    for tag in tags:
        name = tag.name
        if (u',' in name) or (u' ' in name):
            names.append(u'"%s"' % name)
        else:
            names.append(name)
    return u', '.join(sorted(names))
given list of tag instances , creates a string representation of the list suitable for editing .
train
false
2,195
@must_have_permission(ADMIN)
@must_be_branched_from_node
def delete_draft_registration(auth, node, draft, *args, **kwargs):
    if draft.registered_node and not draft.registered_node.is_deleted:
        raise HTTPError(http.FORBIDDEN, data={
            'message_short': "Can't delete draft",
            'message_long': 'This draft has already been registered and cannot be deleted.'
        })
    DraftRegistration.remove_one(draft)
    return (None, http.NO_CONTENT)
permanently delete a draft registration :return: none :rtype: nonetype .
train
false
2,196
def _user_has_perm(user, perm, obj):
    for backend in auth.get_backends():
        if not hasattr(backend, 'has_perm'):
            continue
        try:
            if backend.has_perm(user, perm, obj):
                return True
        except PermissionDenied:
            return False
    return False
a backend can raise permissiondenied to short-circuit permission checking .
train
false
2,197
def get_fmri(name, **kwargs):
    if name.startswith('pkg://'):
        return name
    cmd = '/bin/pkg list -aHv {0}'.format(name)
    lines = __salt__['cmd.run_stdout'](cmd).splitlines()
    if not lines:
        return ''
    ret = []
    for line in lines:
        ret.append(_ips_get_pkgname(line))
    return ret
returns fmri from partial name .
train
true
2,198
def fczmdriver(cls):
    _fczm_register.append(cls)
    return cls
decorator for concrete fibre channel zone manager drivers .
train
false
2,199
def write_worksheet_cols(doc, worksheet):
    if worksheet.column_dimensions:
        start_tag(doc, 'cols')
        for (column_string, columndimension) in worksheet.column_dimensions.items():
            col_index = column_index_from_string(column_string)
            col_def = {}
            col_def['collapsed'] = str(columndimension.style_index)
            col_def['min'] = str(col_index)
            col_def['max'] = str(col_index)
            if columndimension.width != worksheet.default_column_dimension.width:
                col_def['customWidth'] = 'true'
            if not columndimension.visible:
                col_def['hidden'] = 'true'
            if columndimension.outline_level > 0:
                col_def['outlineLevel'] = str(columndimension.outline_level)
            if columndimension.collapsed:
                col_def['collapsed'] = 'true'
            if columndimension.auto_size:
                col_def['bestFit'] = 'true'
            if columndimension.width > 0:
                col_def['width'] = str(columndimension.width)
            else:
                col_def['width'] = '9.10'
            tag(doc, 'col', col_def)
        end_tag(doc, 'cols')
write worksheet columns to xml .
train
false
2,200
def monit_status(summary, service):
    for line in summary.split('\n'):
        if service in line:
            return ' '.join(line.split()[2:])
    raise ServiceException('Unable to find Monit entry for {}'.format(service))
retrieves the status of a monit service .
train
false
2,201
@memoize
def InvertRelativePath(path, toplevel_dir=None):
    if not path:
        return path
    toplevel_dir = '.' if (toplevel_dir is None) else toplevel_dir
    return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))
given a path like foo/bar that is relative to toplevel_dir .
train
false
2,202
def p_statement_expr():
    print t[1]
statement : expression .
train
false
2,203
def yi(phenny, input):
    (quadraels, remainder) = divide(int(time.time()), 1753200)
    raels = quadraels * 4
    (extraraels, remainder) = divide(remainder, 432000)
    if extraraels == 4:
        return phenny.say('Yes! PARTAI!')
    else:
        phenny.say('Not yet...')
shows whether it is currently yi or not .
train
false
2,204
def get_can_use_amazon_s3():
    try:
        if has_module(u'storages.backends.s3boto', members=[u'S3BotoStorage']):
            return (True, None)
        else:
            return (False, _(u'Amazon S3 depends on django-storages, which is not installed'))
    except ImproperlyConfigured as e:
        return (False, _(u'Amazon S3 backend failed to load: %s') % e)
check whether django-storages is installed .
train
false
2,205
def history_changed(proc, TIMEOUT, to):
    proc.send('\x1b[A')
    assert proc.expect([TIMEOUT, to])
ensures that history changed .
train
false
2,206
def _tx_resource_for_name(name):
    if name == 'core':
        return 'django.core'
    else:
        return 'django.contrib-%s' % name
return the transifex resource name .
train
false
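A usage sketch, assuming the function above is in scope ('admin' is an illustrative app name):

assert _tx_resource_for_name('core') == 'django.core'
assert _tx_resource_for_name('admin') == 'django.contrib-admin'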
2,208
def takes_all_arguments(function, *named_arguments):
    return bool(takes_arguments(function, *named_arguments) == set(named_arguments))
returns true if all supplied arguments are found in the function .
train
false
2,210
def get_variables_by_name(given_name, scope=None):
    return get_variables(scope=scope, suffix=given_name)
gets the list of variables that were given that name .
train
false
2,212
def pkcs1Pad(data, messageLength):
    lenPad = messageLength - 2 - len(data)
    return '\x01' + ('\xff' * lenPad) + '\x00' + data
pad out data to messagelength according to the pkcs#1 standard .
train
false
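A worked example of the padding layout above, assuming pkcs1Pad is in scope; the payload and length are illustrative:

# total length is messageLength: 0x01, then 0xff filler, then 0x00, then the data
padded = pkcs1Pad('hello', 12)
assert padded == '\x01\xff\xff\xff\xff\xff\x00hello'
assert len(padded) == 12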
2,213
def competency():
    s3.filter = (FS('person_id$human_resource.type') == 2)
    field = s3db.hrm_competency.person_id
    field.widget = S3PersonAutocompleteWidget(ajax_filter='~.human_resource.type=2')
    return s3db.hrm_competency_controller()
restful crud controller used to allow searching for people by skill .
train
false
2,214
@pytest.fixture
def tp0_store_fs(tp0_store):
    from pootle_fs.models import StoreFS
    return StoreFS.objects.create(store=tp0_store, path='/some/fs/path')
require the /en/project0/project0 .
train
false
2,216
def _sqrt_nearest(n, a):
    if n <= 0 or a <= 0:
        raise ValueError('Both arguments to _sqrt_nearest should be positive.')
    b = 0
    while a != b:
        (b, a) = (a, (a - ((-n) // a)) >> 1)
    return a
closest integer to the square root of the positive integer n .
train
false
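A quick check of the iteration above (a sketch, assuming _sqrt_nearest is in scope; inputs are illustrative):

# Newton-style iteration converges to the integer closest to sqrt(n)
assert _sqrt_nearest(10, 3) == 3     # sqrt(10) ~ 3.16
assert _sqrt_nearest(121, 1) == 11   # converges even from a poor initial guess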
2,217
def _parse_qstat_state(qstat_out, job_id):
    if qstat_out.strip() == '':
        return 'u'
    lines = qstat_out.split('\n')
    while not lines.pop(0).startswith('---'):
        pass
    for line in lines:
        if line:
            (job, prior, name, user, state) = line.strip().split()[0:5]
            if int(job) == int(job_id):
                return state
    return 'u'
parse "state" column from qstat output for given job_id . returns state for the *first* job matching job_id .
train
true
2,218
def append_var(var, value):
    makeconf = _get_makeconf()
    old_value = get_var(var)
    if old_value is not None:
        appended_value = '{0} {1}'.format(old_value, value)
        __salt__['file.sed'](makeconf, '^{0}=.*'.format(var), '{0}="{1}"'.format(var, appended_value))
    else:
        _add_var(var, value)
    new_value = get_var(var)
    return {var: {'old': old_value, 'new': new_value}}
add to or create a new variable in the make.conf .
train
true
2,220
def onlyOnPOSIX(testMethod):
    if resource is None:
        testMethod.skip = 'Test only applies to POSIX platforms.'
    return testMethod
only run this test on posix platforms .
train
false
2,221
def config_dirs():
    paths = []
    if platform.system() == u'Darwin':
        paths.append(MAC_DIR)
        paths.append(UNIX_DIR_FALLBACK)
        if UNIX_DIR_VAR in os.environ:
            paths.append(os.environ[UNIX_DIR_VAR])
    elif platform.system() == u'Windows':
        paths.append(WINDOWS_DIR_FALLBACK)
        if WINDOWS_DIR_VAR in os.environ:
            paths.append(os.environ[WINDOWS_DIR_VAR])
    else:
        paths.append(UNIX_DIR_FALLBACK)
        if UNIX_DIR_VAR in os.environ:
            paths.append(os.environ[UNIX_DIR_VAR])
    out = []
    for path in paths:
        path = os.path.abspath(os.path.expanduser(path))
        if path not in out:
            out.append(path)
    return out
return a platform-specific list of candidates for user configuration directories on the system .
train
false
2,222
def has_disk_dev(mapping, disk_dev):
    for disk in mapping:
        info = mapping[disk]
        if info['dev'] == disk_dev:
            return True
    return False
determine if a disk device name has already been used .
train
false
2,223
@require_POST
@csrf_protect
def replicate(request):
    if not test_user_authenticated(request):
        return login(request, next='/cobbler_web/replicate', expired=True)
    return HttpResponseRedirect('/cobbler_web/task_created')
replicate configuration from the central cobbler server .
train
false
2,224
def get_in(keys, coll, default=None, no_default=False):
    try:
        return reduce(operator.getitem, keys, coll)
    except (KeyError, IndexError, TypeError):
        if no_default:
            raise
        return default
returns coll[i0][i1] ... [iX] where [i0 , i1 , ... , iX] == keys .
train
true
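A usage sketch, assuming get_in plus its reduce/operator imports are in scope; the dict is illustrative:

purchase = {'name': 'Alice', 'order': {'items': ['Apple', 'Orange']}}
assert get_in(['order', 'items', 0], purchase) == 'Apple'
# missing paths fall back to the default instead of raising
assert get_in(['order', 'total'], purchase, default=0) == 0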
2,225
def test_scharr_h_horizontal():
    (i, j) = np.mgrid[-5:6, -5:6]
    image = (i >= 0).astype(float)
    result = filters.scharr_h(image)
    i[np.abs(j) == 5] = 10000
    assert np.all(result[i == 0] == 1)
    assert np.all(result[np.abs(i) > 1] == 0)
horizontal scharr on an edge should be a horizontal line .
train
false
2,226
def sort_pkglist(pkgs):
    try:
        for key in pkgs:
            pkgs[key] = sorted(set(pkgs[key]))
    except AttributeError as exc:
        log.exception(exc)
accepts a dict obtained from pkg .
train
true
2,227
def save_as(filename, title=u'Save As...'):
    result = compat.getsavefilename(parent=active_window(), caption=title, basedir=filename)
    return result[0]
creates a save file dialog and returns a filename .
train
false
2,228
def test_e():
    from .. import e
    E = Q(100, u'V/m')
    pytest.raises(TypeError, lambda: e * E)
    pytest.raises(TypeError, lambda: e * E)
    pytest.raises(TypeError, lambda: e.cgs * E)
    assert isinstance(e.si, Q)
    assert isinstance(e.gauss, Q)
    assert isinstance(e.esu, Q)
    assert (e.si * E) == Q(100, u'eV/m')
    assert (e.gauss * E) == Q(e.gauss.value * E.value, u'Fr V/m')
    assert (e.esu * E) == Q(e.esu.value * E.value, u'Fr V/m')
tests for #572 demonstrating how em constants should behave .
train
false
2,229
def writeOutput(fileName=''):
    fileName = fabmetheus_interpret.getFirstTranslatorFileNameUnmodified(fileName)
    if fileName != '':
        skeinforge_craft.writeChainTextWithNounMessage(fileName, 'unpause')
speed a gcode linear move file .
train
false
2,230
def get_system_date(utc_offset=None):
    offset_time = _get_offset_time(utc_offset)
    return datetime.strftime(offset_time, '%a %m/%d/%Y')
get the windows system date :return: returns the system date .
train
false
2,231
def quote_etag(etag):
    return '"%s"' % etag.replace('\\', '\\\\').replace('"', '\\"')
quote an etag .
train
false
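A quick check, assuming quote_etag is in scope:

assert quote_etag('etag') == '"etag"'
# embedded quotes and backslashes are escaped before wrapping
assert quote_etag('a"b') == '"a\\"b"'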
2,232
def _password_digest(username, password):
    if not isinstance(password, string_type):
        raise TypeError('password must be an instance of %s' % (string_type.__name__,))
    if len(password) == 0:
        raise ValueError("password can't be empty")
    if not isinstance(username, string_type):
        # note: the original message said "password" here despite checking username
        raise TypeError('username must be an instance of %s' % (string_type.__name__,))
    md5hash = md5()
    data = '%s:mongo:%s' % (username, password)
    md5hash.update(data.encode('utf-8'))
    return _unicode(md5hash.hexdigest())
get a password digest to use for authentication .
train
true
2,233
@handle_response_format
@treeio_login_required
def weblink_edit(request, weblink_id, response_format='html'):
    link = get_object_or_404(WebLink, pk=weblink_id)
    if not request.user.profile.has_permission(link, mode='w'):
        return user_denied(request, message="You don't have access to this Web Link")
    if request.POST:
        if 'cancel' not in request.POST:
            form = WebLinkForm(request.user.profile, None, request.POST, instance=link)
            if form.is_valid():
                link = form.save()
                return HttpResponseRedirect(reverse('documents_weblink_view', args=[link.id]))
        else:
            return HttpResponseRedirect(reverse('documents_weblink_view', args=[link.id]))
    else:
        form = WebLinkForm(request.user.profile, None, instance=link)
    context = _get_default_context(request)
    context.update({'form': form, 'link': link})
    return render_to_response('documents/weblink_edit', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
weblink edit page .
train
false
2,235
def inline_markdown_extension(pelicanobj, config): try: pelicanobj.settings['MD_EXTENSIONS'].append(PelicanInlineMarkdownExtension(config)) except: sys.excepthook(*sys.exc_info()) sys.stderr.write('\nError - the pelican Markdown extension failed to configure. Inline Markdown extension is non-functional.\n') sys.stderr.flush()
[ "def", "inline_markdown_extension", "(", "pelicanobj", ",", "config", ")", ":", "try", ":", "pelicanobj", ".", "settings", "[", "'MD_EXTENSIONS'", "]", ".", "append", "(", "PelicanInlineMarkdownExtension", "(", "config", ")", ")", "except", ":", "sys", ".", "excepthook", "(", "*", "sys", ".", "exc_info", "(", ")", ")", "sys", ".", "stderr", ".", "write", "(", "'\\nError - the pelican Markdown extension failed to configure. Inline Markdown extension is non-functional.\\n'", ")", "sys", ".", "stderr", ".", "flush", "(", ")" ]
instantiates a customized markdown extension .
train
false
2,236
def part_key(part): rval = [] for ps in part: rval.append(ps.u) rval.append(ps.v) return tuple(rval)
[ "def", "part_key", "(", "part", ")", ":", "rval", "=", "[", "]", "for", "ps", "in", "part", ":", "rval", ".", "append", "(", "ps", ".", "u", ")", "rval", ".", "append", "(", "ps", ".", "v", ")", "return", "tuple", "(", "rval", ")" ]
helper for multisetpartitiontraverser .
train
false
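A usage sketch; PS below is a hypothetical stand-in for the partition-component objects, since part_key only reads the u and v fields:

from collections import namedtuple

PS = namedtuple('PS', ['u', 'v'])
part = [PS(u=2, v=1), PS(u=1, v=1)]
print(part_key(part))  # (2, 1, 1, 1) -- a hashable key suitable for memoization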
2,237
def get_volume_type_from_volume(volume): type_id = volume.get('volume_type_id') if (type_id is None): return {} ctxt = context.get_admin_context() return volume_types.get_volume_type(ctxt, type_id)
[ "def", "get_volume_type_from_volume", "(", "volume", ")", ":", "type_id", "=", "volume", ".", "get", "(", "'volume_type_id'", ")", "if", "(", "type_id", "is", "None", ")", ":", "return", "{", "}", "ctxt", "=", "context", ".", "get_admin_context", "(", ")", "return", "volume_types", ".", "get_volume_type", "(", "ctxt", ",", "type_id", ")" ]
provides volume type associated with volume .
train
false
2,238
def _search_type_in_pep0484(code): for p in PEP0484_PATTERNS: match = p.search(code) if match: return [match.group(1)]
[ "def", "_search_type_in_pep0484", "(", "code", ")", ":", "for", "p", "in", "PEP0484_PATTERNS", ":", "match", "=", "p", ".", "search", "(", "code", ")", "if", "match", ":", "return", "[", "match", ".", "group", "(", "1", ")", "]" ]
search code for a pep 0484 type comment ; for more info see: URL#type-comments .
train
false
2,239
def skip_unless_has_memory_collection(cls): if (platform.system() not in ['Darwin', 'Linux']): return unittest.skip('Memory tests only supported on mac/linux.')(cls) return cls
[ "def", "skip_unless_has_memory_collection", "(", "cls", ")", ":", "if", "(", "platform", ".", "system", "(", ")", "not", "in", "[", "'Darwin'", ",", "'Linux'", "]", ")", ":", "return", "unittest", ".", "skip", "(", "'Memory tests only supported on mac/linux.'", ")", "(", "cls", ")", "return", "cls" ]
class decorator to skip tests that require memory collection .
train
false
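A usage sketch applying the decorator to a hypothetical test case:

import unittest

@skip_unless_has_memory_collection
class MemoryUsageTest(unittest.TestCase):
    def test_peak_memory(self):
        pass  # the whole class is skipped on platforms other than Darwin/Linux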
2,240
@login_required @mobile_template('users/{mobile/}pw_change.html') def password_change(request, template): if (request.method == 'POST'): form = PasswordChangeForm(user=request.user, data=request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('users.pw_change_complete')) else: form = PasswordChangeForm(user=request.user) return render(request, template, {'form': form})
[ "@", "login_required", "@", "mobile_template", "(", "'users/{mobile/}pw_change.html'", ")", "def", "password_change", "(", "request", ",", "template", ")", ":", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "form", "=", "PasswordChangeForm", "(", "user", "=", "request", ".", "user", ",", "data", "=", "request", ".", "POST", ")", "if", "form", ".", "is_valid", "(", ")", ":", "form", ".", "save", "(", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'users.pw_change_complete'", ")", ")", "else", ":", "form", "=", "PasswordChangeForm", "(", "user", "=", "request", ".", "user", ")", "return", "render", "(", "request", ",", "template", ",", "{", "'form'", ":", "form", "}", ")" ]
change password of user .
train
false
2,243
def extractBests(query, choices, processor=default_processor, scorer=default_scorer, score_cutoff=0, limit=5): best_list = extractWithoutOrder(query, choices, processor, scorer, score_cutoff) return (heapq.nlargest(limit, best_list, key=(lambda i: i[1])) if (limit is not None) else sorted(best_list, key=(lambda i: i[1]), reverse=True))
[ "def", "extractBests", "(", "query", ",", "choices", ",", "processor", "=", "default_processor", ",", "scorer", "=", "default_scorer", ",", "score_cutoff", "=", "0", ",", "limit", "=", "5", ")", ":", "best_list", "=", "extractWithoutOrder", "(", "query", ",", "choices", ",", "processor", ",", "scorer", ",", "score_cutoff", ")", "return", "(", "heapq", ".", "nlargest", "(", "limit", ",", "best_list", ",", "key", "=", "(", "lambda", "i", ":", "i", "[", "1", "]", ")", ")", "if", "(", "limit", "is", "not", "None", ")", "else", "sorted", "(", "best_list", ",", "key", "=", "(", "lambda", "i", ":", "i", "[", "1", "]", ")", ",", "reverse", "=", "True", ")", ")" ]
get a list of the best matches to a collection of choices .
train
true
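A usage sketch assuming the fuzzywuzzy-style defaults referenced above; the choices and scores are illustrative only:

choices = ['new york jets', 'new york giants', 'dallas cowboys']
print(extractBests('new york', choices, limit=2))
# e.g. [('new york jets', 90), ('new york giants', 90)] -- exact scores depend on the scorer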
2,244
def test_read_no_header_names(): table = '\n| John | 555-1234 |192.168.1.10|\n| Mary | 555-2134 |192.168.1.12|\n| Bob | 555-4527 | 192.168.1.9|\n' dat = ascii.read(table, Reader=ascii.FixedWidth, guess=False, header_start=None, data_start=0, names=('Name', 'Phone', 'TCP')) assert_equal(tuple(dat.dtype.names), ('Name', 'Phone', 'TCP')) assert_equal(dat[1][0], 'Mary') assert_equal(dat[0][1], '555-1234') assert_equal(dat[2][2], '192.168.1.9')
[ "def", "test_read_no_header_names", "(", ")", ":", "table", "=", "'\\n| John | 555-1234 |192.168.1.10|\\n| Mary | 555-2134 |192.168.1.12|\\n| Bob | 555-4527 | 192.168.1.9|\\n'", "dat", "=", "ascii", ".", "read", "(", "table", ",", "Reader", "=", "ascii", ".", "FixedWidth", ",", "guess", "=", "False", ",", "header_start", "=", "None", ",", "data_start", "=", "0", ",", "names", "=", "(", "'Name'", ",", "'Phone'", ",", "'TCP'", ")", ")", "assert_equal", "(", "tuple", "(", "dat", ".", "dtype", ".", "names", ")", ",", "(", "'Name'", ",", "'Phone'", ",", "'TCP'", ")", ")", "assert_equal", "(", "dat", "[", "1", "]", "[", "0", "]", ",", "'Mary'", ")", "assert_equal", "(", "dat", "[", "0", "]", "[", "1", "]", ",", "'555-1234'", ")", "assert_equal", "(", "dat", "[", "2", "]", "[", "2", "]", ",", "'192.168.1.9'", ")" ]
table with no header row and with col names provided .
train
false
2,245
def test_empty_givens_updates(): x = T.scalar() y = (x * 2) function([theano.In(x)], y, givens={}) function([theano.In(x)], y, updates={})
[ "def", "test_empty_givens_updates", "(", ")", ":", "x", "=", "T", ".", "scalar", "(", ")", "y", "=", "(", "x", "*", "2", ")", "function", "(", "[", "theano", ".", "In", "(", "x", ")", "]", ",", "y", ",", "givens", "=", "{", "}", ")", "function", "(", "[", "theano", ".", "In", "(", "x", ")", "]", ",", "y", ",", "updates", "=", "{", "}", ")" ]
regression test for bug fixed in 8625e03 .
train
false
2,246
def openable(string, **kwargs): f = tempfile.NamedTemporaryFile(**kwargs) f.write(string) f.seek(0) _TEMPORARY_FILES.append(f) return f.name
[ "def", "openable", "(", "string", ",", "**", "kwargs", ")", ":", "f", "=", "tempfile", ".", "NamedTemporaryFile", "(", "**", "kwargs", ")", "f", ".", "write", "(", "string", ")", "f", ".", "seek", "(", "0", ")", "_TEMPORARY_FILES", ".", "append", "(", "f", ")", "return", "f", ".", "name" ]
returns the path to a temporary file that contains the given string .
train
false
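A usage sketch; NamedTemporaryFile defaults to binary mode, so bytes are written, and on POSIX systems the still-open temp file can be reopened by name:

path = openable(b'hello world')
with open(path, 'rb') as f:
    assert f.read() == b'hello world'  # file persists via _TEMPORARY_FILES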
2,247
def _check_and_fix_fs_dir(gcs_uri): if (not is_gcs_uri(gcs_uri)): raise ValueError(('Invalid GCS URI: %r' % gcs_uri)) if (not gcs_uri.endswith('/')): gcs_uri += '/' return gcs_uri
[ "def", "_check_and_fix_fs_dir", "(", "gcs_uri", ")", ":", "if", "(", "not", "is_gcs_uri", "(", "gcs_uri", ")", ")", ":", "raise", "ValueError", "(", "(", "'Invalid GCS URI: %r'", "%", "gcs_uri", ")", ")", "if", "(", "not", "gcs_uri", ".", "endswith", "(", "'/'", ")", ")", ":", "gcs_uri", "+=", "'/'", "return", "gcs_uri" ]
helper for __init__ ; validate a gcs uri and ensure it ends in a trailing slash .
train
false
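A usage sketch, assuming is_gcs_uri() accepts gs:// URIs:

print(_check_and_fix_fs_dir('gs://my-bucket/tmp'))   # 'gs://my-bucket/tmp/'
print(_check_and_fix_fs_dir('gs://my-bucket/tmp/'))  # already normalized, unchanged
_check_and_fix_fs_dir('/local/path')                 # raises ValueError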
2,248
def liveobj_valid(obj): return (obj != None)
[ "def", "liveobj_valid", "(", "obj", ")", ":", "return", "(", "obj", "!=", "None", ")" ]
check whether obj represents a valid live api obj .
train
false
2,249
def _id_type(ityp): if (ityp.lower() == 'rackspace'): ityp = 'rax_identity.RaxIdentity' elif (ityp.lower() == 'keystone'): ityp = 'keystone_identity.KeystoneIdentity' return ityp
[ "def", "_id_type", "(", "ityp", ")", ":", "if", "(", "ityp", ".", "lower", "(", ")", "==", "'rackspace'", ")", ":", "ityp", "=", "'rax_identity.RaxIdentity'", "elif", "(", "ityp", ".", "lower", "(", ")", "==", "'keystone'", ")", ":", "ityp", "=", "'keystone_identity.KeystoneIdentity'", "return", "ityp" ]
allow for shorthand names for the most common types .
train
false
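A usage sketch; names other than the two shorthands pass through unchanged:

print(_id_type('Rackspace'))      # 'rax_identity.RaxIdentity'
print(_id_type('keystone'))       # 'keystone_identity.KeystoneIdentity'
print(_id_type('custom.Custom'))  # 'custom.Custom'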
2,250
@mobile_template('questions/{mobile/}marketplace_success.html') def marketplace_success(request, template=None): return render(request, template)
[ "@", "mobile_template", "(", "'questions/{mobile/}marketplace_success.html'", ")", "def", "marketplace_success", "(", "request", ",", "template", "=", "None", ")", ":", "return", "render", "(", "request", ",", "template", ")" ]
confirmation that a ticket was submitted successfully .
train
false
2,252
def assert_attribute_is(output, path, attribute, text): assert_attribute_matches(output, path, attribute, re.escape(text))
[ "def", "assert_attribute_is", "(", "output", ",", "path", ",", "attribute", ",", "text", ")", ":", "assert_attribute_matches", "(", "output", ",", "path", ",", "attribute", ",", "re", ".", "escape", "(", "text", ")", ")" ]
asserts the specified attribute of the first element matching the specified path matches exactly the specified text .
train
false
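A usage sketch with hypothetical XML output; the XPath matching inside assert_attribute_matches is assumed:

output = '<root><a href="http://example.org/x">link</a></root>'
assert_attribute_is(output, './/a', 'href', 'http://example.org/x')
# re.escape() means the '/' and '.' characters are matched literally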
2,254
def get_offset(name): if (name not in _dont_uppercase): name = name.upper() name = _lite_rule_alias.get(name, name) name = _lite_rule_alias.get(name.lower(), name) else: name = _lite_rule_alias.get(name, name) if (name not in _offset_map): try: split = name.split('-') klass = prefix_mapping[split[0]] offset = klass._from_name(*split[1:]) except (ValueError, TypeError, KeyError): raise ValueError(_INVALID_FREQ_ERROR.format(name)) _offset_map[name] = offset return _offset_map[name].copy()
[ "def", "get_offset", "(", "name", ")", ":", "if", "(", "name", "not", "in", "_dont_uppercase", ")", ":", "name", "=", "name", ".", "upper", "(", ")", "name", "=", "_lite_rule_alias", ".", "get", "(", "name", ",", "name", ")", "name", "=", "_lite_rule_alias", ".", "get", "(", "name", ".", "lower", "(", ")", ",", "name", ")", "else", ":", "name", "=", "_lite_rule_alias", ".", "get", "(", "name", ",", "name", ")", "if", "(", "name", "not", "in", "_offset_map", ")", ":", "try", ":", "split", "=", "name", ".", "split", "(", "'-'", ")", "klass", "=", "prefix_mapping", "[", "split", "[", "0", "]", "]", "offset", "=", "klass", ".", "_from_name", "(", "*", "split", "[", "1", ":", "]", ")", "except", "(", "ValueError", ",", "TypeError", ",", "KeyError", ")", ":", "raise", "ValueError", "(", "_INVALID_FREQ_ERROR", ".", "format", "(", "name", ")", ")", "_offset_map", "[", "name", "]", "=", "offset", "return", "_offset_map", "[", "name", "]", ".", "copy", "(", ")" ]
return a copy of the dateoffset object associated with the given frequency rule name , caching lookups in _offset_map ; raises valueerror for an invalid frequency name .
train
true
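A usage sketch, assuming the pandas-style prefix_mapping referenced above:

offset = get_offset('W-MON')          # weekly frequency anchored on Monday
assert get_offset('w-mon') == offset  # most rule names are upper-cased first
get_offset('NOT-A-FREQ')              # raises ValueError(_INVALID_FREQ_ERROR)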
2,256
def test_active(timer): assert (not timer.isActive()) timer.start() assert timer.isActive() timer.stop() assert (not timer.isActive())
[ "def", "test_active", "(", "timer", ")", ":", "assert", "(", "not", "timer", ".", "isActive", "(", ")", ")", "timer", ".", "start", "(", ")", "assert", "timer", ".", "isActive", "(", ")", "timer", ".", "stop", "(", ")", "assert", "(", "not", "timer", ".", "isActive", "(", ")", ")" ]
test isactive .
train
false
2,257
def resume_file_upload(vault, upload_id, part_size, fobj, part_hash_map, chunk_size=_ONE_MEGABYTE): uploader = _Uploader(vault, upload_id, part_size, chunk_size) for (part_index, part_data) in enumerate(generate_parts_from_fobj(fobj, part_size)): part_tree_hash = tree_hash(chunk_hashes(part_data, chunk_size)) if ((part_index not in part_hash_map) or (part_hash_map[part_index] != part_tree_hash)): uploader.upload_part(part_index, part_data) else: uploader.skip_part(part_index, part_tree_hash, len(part_data)) uploader.close() return uploader.archive_id
[ "def", "resume_file_upload", "(", "vault", ",", "upload_id", ",", "part_size", ",", "fobj", ",", "part_hash_map", ",", "chunk_size", "=", "_ONE_MEGABYTE", ")", ":", "uploader", "=", "_Uploader", "(", "vault", ",", "upload_id", ",", "part_size", ",", "chunk_size", ")", "for", "(", "part_index", ",", "part_data", ")", "in", "enumerate", "(", "generate_parts_from_fobj", "(", "fobj", ",", "part_size", ")", ")", ":", "part_tree_hash", "=", "tree_hash", "(", "chunk_hashes", "(", "part_data", ",", "chunk_size", ")", ")", "if", "(", "(", "part_index", "not", "in", "part_hash_map", ")", "or", "(", "part_hash_map", "[", "part_index", "]", "!=", "part_tree_hash", ")", ")", ":", "uploader", ".", "upload_part", "(", "part_index", ",", "part_data", ")", "else", ":", "uploader", ".", "skip_part", "(", "part_index", ",", "part_tree_hash", ",", "len", "(", "part_data", ")", ")", "uploader", ".", "close", "(", ")", "return", "uploader", ".", "archive_id" ]
resume upload of a file already part-uploaded to glacier .
train
false
2,258
def send_warning(message, request=None, e=None, **extra_data): username = None if (request and request.user.is_authenticated()): username = request.user.username error_message = None if e: error_message = unicode(e) data = {'username': username, 'body': error_message} data.update(extra_data) logger.warn(message, exc_info=sys.exc_info(), extra={'request': request, 'data': data})
[ "def", "send_warning", "(", "message", ",", "request", "=", "None", ",", "e", "=", "None", ",", "**", "extra_data", ")", ":", "username", "=", "None", "if", "(", "request", "and", "request", ".", "user", ".", "is_authenticated", "(", ")", ")", ":", "username", "=", "request", ".", "user", ".", "username", "error_message", "=", "None", "if", "e", ":", "error_message", "=", "unicode", "(", "e", ")", "data", "=", "{", "'username'", ":", "username", ",", "'body'", ":", "error_message", "}", "data", ".", "update", "(", "extra_data", ")", "logger", ".", "warn", "(", "message", ",", "exc_info", "=", "sys", ".", "exc_info", "(", ")", ",", "extra", "=", "{", "'request'", ":", "request", ",", "'data'", ":", "data", "}", ")" ]
uses the logging system to send a warning message to the log and to sentry , attaching request and user context .
train
false
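A usage sketch; risky_operation, the request variable, and the order_id keyword are hypothetical:

try:
    risky_operation()  # hypothetical call that may fail
except Exception as e:
    send_warning('Operation failed', request=request, e=e, order_id=42)
# order_id lands in the 'data' dict alongside username and the error body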
2,260
def demo_sent_subjectivity(text): from nltk.classify import NaiveBayesClassifier from nltk.tokenize import regexp word_tokenizer = regexp.WhitespaceTokenizer() try: sentim_analyzer = load('sa_subjectivity.pickle') except LookupError: print 'Cannot find the sentiment analyzer you want to load.' print 'Training a new one using NaiveBayesClassifier.' sentim_analyzer = demo_subjectivity(NaiveBayesClassifier.train, True) tokenized_text = [word.lower() for word in word_tokenizer.tokenize(text)] print sentim_analyzer.classify(tokenized_text)
[ "def", "demo_sent_subjectivity", "(", "text", ")", ":", "from", "nltk", ".", "classify", "import", "NaiveBayesClassifier", "from", "nltk", ".", "tokenize", "import", "regexp", "word_tokenizer", "=", "regexp", ".", "WhitespaceTokenizer", "(", ")", "try", ":", "sentim_analyzer", "=", "load", "(", "'sa_subjectivity.pickle'", ")", "except", "LookupError", ":", "print", "'Cannot find the sentiment analyzer you want to load.'", "print", "'Training a new one using NaiveBayesClassifier.'", "sentim_analyzer", "=", "demo_subjectivity", "(", "NaiveBayesClassifier", ".", "train", ",", "True", ")", "tokenized_text", "=", "[", "word", ".", "lower", "(", ")", "for", "word", "in", "word_tokenizer", ".", "tokenize", "(", "text", ")", "]", "print", "sentim_analyzer", ".", "classify", "(", "tokenized_text", ")" ]
classify a single sentence as subjective or objective using a stored sentimentanalyzer .
train
false
2,261
def get_box_folder_location(): box_prefs_path = 'Library/Application Support/Box/Box Sync/sync_root_folder.txt' box_home = None box_prefs = os.path.join(os.environ['HOME'], box_prefs_path) try: with open(box_prefs, 'r') as sync_path: data = sync_path.read() box_home = data except IOError: error('Unable to find your Box prefs =(') return box_home
[ "def", "get_box_folder_location", "(", ")", ":", "box_prefs_path", "=", "'Library/Application Support/Box/Box Sync/sync_root_folder.txt'", "box_home", "=", "None", "box_prefs", "=", "os", ".", "path", ".", "join", "(", "os", ".", "environ", "[", "'HOME'", "]", ",", "box_prefs_path", ")", "try", ":", "with", "open", "(", "box_prefs", ",", "'r'", ")", "as", "sync_path", ":", "data", "=", "sync_path", ".", "read", "(", ")", "box_home", "=", "data", "except", "IOError", ":", "error", "(", "'Unable to find your Box prefs =('", ")", "return", "box_home" ]
try to locate the box folder .
train
true
2,262
def cleanup_unused_files(quiet=False): from sentry.models import File, FileBlob, FileBlobIndex if quiet: from sentry.utils.query import RangeQuerySetWrapper else: from sentry.utils.query import RangeQuerySetWrapperWithProgressBar as RangeQuerySetWrapper cutoff = (timezone.now() - timedelta(days=1)) queryset = FileBlob.objects.filter(timestamp__lte=cutoff) for blob in RangeQuerySetWrapper(queryset): if FileBlobIndex.objects.filter(blob=blob).exists(): continue if File.objects.filter(blob=blob).exists(): continue blob.delete()
[ "def", "cleanup_unused_files", "(", "quiet", "=", "False", ")", ":", "from", "sentry", ".", "models", "import", "File", ",", "FileBlob", ",", "FileBlobIndex", "if", "quiet", ":", "from", "sentry", ".", "utils", ".", "query", "import", "RangeQuerySetWrapper", "else", ":", "from", "sentry", ".", "utils", ".", "query", "import", "RangeQuerySetWrapperWithProgressBar", "as", "RangeQuerySetWrapper", "cutoff", "=", "(", "timezone", ".", "now", "(", ")", "-", "timedelta", "(", "days", "=", "1", ")", ")", "queryset", "=", "FileBlob", ".", "objects", ".", "filter", "(", "timestamp__lte", "=", "cutoff", ")", "for", "blob", "in", "RangeQuerySetWrapper", "(", "queryset", ")", ":", "if", "FileBlobIndex", ".", "objects", ".", "filter", "(", "blob", "=", "blob", ")", ".", "exists", "(", ")", ":", "continue", "if", "File", ".", "objects", ".", "filter", "(", "blob", "=", "blob", ")", ".", "exists", "(", ")", ":", "continue", "blob", ".", "delete", "(", ")" ]
remove fileblobs if they are no longer referenced by any file .
train
false
2,263
@require_POST @login_required def move_thread(request, forum_slug, thread_id): forum = get_object_or_404(Forum, slug=forum_slug) thread = get_object_or_404(Thread, pk=thread_id, forum=forum) user = request.user new_forum_id = request.POST.get('forum') new_forum = get_object_or_404(Forum, id=new_forum_id) if (not (forum.allows_viewing_by(user) and new_forum.allows_viewing_by(user))): raise Http404 if (not new_forum.allows_posting_by(user)): raise PermissionDenied if (not (has_perm(user, 'forums_forum.thread_move_forum', new_forum) and has_perm(user, 'forums_forum.thread_move_forum', forum))): raise PermissionDenied log.warning(('User %s is moving thread with id=%s to forum with id=%s' % (user, thread.id, new_forum_id))) thread.forum = new_forum thread.save() return HttpResponseRedirect(thread.get_absolute_url())
[ "@", "require_POST", "@", "login_required", "def", "move_thread", "(", "request", ",", "forum_slug", ",", "thread_id", ")", ":", "forum", "=", "get_object_or_404", "(", "Forum", ",", "slug", "=", "forum_slug", ")", "thread", "=", "get_object_or_404", "(", "Thread", ",", "pk", "=", "thread_id", ",", "forum", "=", "forum", ")", "user", "=", "request", ".", "user", "new_forum_id", "=", "request", ".", "POST", ".", "get", "(", "'forum'", ")", "new_forum", "=", "get_object_or_404", "(", "Forum", ",", "id", "=", "new_forum_id", ")", "if", "(", "not", "(", "forum", ".", "allows_viewing_by", "(", "user", ")", "and", "new_forum", ".", "allows_viewing_by", "(", "user", ")", ")", ")", ":", "raise", "Http404", "if", "(", "not", "new_forum", ".", "allows_posting_by", "(", "user", ")", ")", ":", "raise", "PermissionDenied", "if", "(", "not", "(", "has_perm", "(", "user", ",", "'forums_forum.thread_move_forum'", ",", "new_forum", ")", "and", "has_perm", "(", "user", ",", "'forums_forum.thread_move_forum'", ",", "forum", ")", ")", ")", ":", "raise", "PermissionDenied", "log", ".", "warning", "(", "(", "'User %s is moving thread with id=%s to forum with id=%s'", "%", "(", "user", ",", "thread", ".", "id", ",", "new_forum_id", ")", ")", ")", "thread", ".", "forum", "=", "new_forum", "thread", ".", "save", "(", ")", "return", "HttpResponseRedirect", "(", "thread", ".", "get_absolute_url", "(", ")", ")" ]
move a thread .
train
false
2,267
@register_opt() @local_optimizer([tensor.Join]) def local_gpu_join(node): if isinstance(node.op, tensor.Join): axis_and_tensors = node.inputs matches = [((t.dtype == 'float32') and (((t.owner is not None) and isinstance(t.owner.op, HostFromGpu)) or isinstance(t, gof.Constant))) for t in axis_and_tensors[1:]] if all(matches): new_tensors = [as_cuda_ndarray_variable(t) for t in axis_and_tensors[1:]] new_a_and_t = ([axis_and_tensors[0]] + new_tensors) replacement_node = host_from_gpu(gpu_join(*new_a_and_t)) return [replacement_node]
[ "@", "register_opt", "(", ")", "@", "local_optimizer", "(", "[", "tensor", ".", "Join", "]", ")", "def", "local_gpu_join", "(", "node", ")", ":", "if", "isinstance", "(", "node", ".", "op", ",", "tensor", ".", "Join", ")", ":", "axis_and_tensors", "=", "node", ".", "inputs", "matches", "=", "[", "(", "(", "t", ".", "dtype", "==", "'float32'", ")", "and", "(", "(", "(", "t", ".", "owner", "is", "not", "None", ")", "and", "isinstance", "(", "t", ".", "owner", ".", "op", ",", "HostFromGpu", ")", ")", "or", "isinstance", "(", "t", ",", "gof", ".", "Constant", ")", ")", ")", "for", "t", "in", "axis_and_tensors", "[", "1", ":", "]", "]", "if", "all", "(", "matches", ")", ":", "new_tensors", "=", "[", "as_cuda_ndarray_variable", "(", "t", ")", "for", "t", "in", "axis_and_tensors", "[", "1", ":", "]", "]", "new_a_and_t", "=", "(", "[", "axis_and_tensors", "[", "0", "]", "]", "+", "new_tensors", ")", "replacement_node", "=", "host_from_gpu", "(", "gpu_join", "(", "*", "new_a_and_t", ")", ")", "return", "[", "replacement_node", "]" ]
move join onto the gpu ; inspired by the opt for convop .
train
false
2,268
def _boolrelextrema(data, comparator, axis=0, order=1, mode='clip'): if ((int(order) != order) or (order < 1)): raise ValueError('Order must be an int >= 1') datalen = data.shape[axis] locs = np.arange(0, datalen) results = np.ones(data.shape, dtype=bool) main = data.take(locs, axis=axis, mode=mode) for shift in xrange(1, (order + 1)): plus = data.take((locs + shift), axis=axis, mode=mode) minus = data.take((locs - shift), axis=axis, mode=mode) results &= comparator(main, plus) results &= comparator(main, minus) if (~ results.any()): return results return results
[ "def", "_boolrelextrema", "(", "data", ",", "comparator", ",", "axis", "=", "0", ",", "order", "=", "1", ",", "mode", "=", "'clip'", ")", ":", "if", "(", "(", "int", "(", "order", ")", "!=", "order", ")", "or", "(", "order", "<", "1", ")", ")", ":", "raise", "ValueError", "(", "'Order must be an int >= 1'", ")", "datalen", "=", "data", ".", "shape", "[", "axis", "]", "locs", "=", "np", ".", "arange", "(", "0", ",", "datalen", ")", "results", "=", "np", ".", "ones", "(", "data", ".", "shape", ",", "dtype", "=", "bool", ")", "main", "=", "data", ".", "take", "(", "locs", ",", "axis", "=", "axis", ",", "mode", "=", "mode", ")", "for", "shift", "in", "xrange", "(", "1", ",", "(", "order", "+", "1", ")", ")", ":", "plus", "=", "data", ".", "take", "(", "(", "locs", "+", "shift", ")", ",", "axis", "=", "axis", ",", "mode", "=", "mode", ")", "minus", "=", "data", ".", "take", "(", "(", "locs", "-", "shift", ")", ",", "axis", "=", "axis", ",", "mode", "=", "mode", ")", "results", "&=", "comparator", "(", "main", ",", "plus", ")", "results", "&=", "comparator", "(", "main", ",", "minus", ")", "if", "(", "~", "results", ".", "any", "(", ")", ")", ":", "return", "results", "return", "results" ]
calculate the relative extrema of data .
train
true
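A worked example on a small array; with a strict comparator and mode='clip', the boundary points compare against themselves and come out False:

import numpy as np

data = np.array([1, 3, 2, 5, 4])
print(_boolrelextrema(data, np.greater))  # [False  True False  True False]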
2,269
def enhance_contrast_percentile(image, selem, out=None, mask=None, shift_x=False, shift_y=False, p0=0, p1=1): return _apply(percentile_cy._enhance_contrast, image, selem, out=out, mask=mask, shift_x=shift_x, shift_y=shift_y, p0=p0, p1=p1)
[ "def", "enhance_contrast_percentile", "(", "image", ",", "selem", ",", "out", "=", "None", ",", "mask", "=", "None", ",", "shift_x", "=", "False", ",", "shift_y", "=", "False", ",", "p0", "=", "0", ",", "p1", "=", "1", ")", ":", "return", "_apply", "(", "percentile_cy", ".", "_enhance_contrast", ",", "image", ",", "selem", ",", "out", "=", "out", ",", "mask", "=", "mask", ",", "shift_x", "=", "shift_x", ",", "shift_y", "=", "shift_y", ",", "p0", "=", "p0", ",", "p1", "=", "p1", ")" ]
enhance contrast of an image .
train
false
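A usage sketch assuming the scikit-image rank-filter conventions above; the image here is random noise:

import numpy as np
from skimage.morphology import disk

image = (np.random.default_rng(0).random((64, 64)) * 255).astype(np.uint8)
out = enhance_contrast_percentile(image, disk(3), p0=0.1, p1=0.9)
# each pixel snaps to the local p0 or p1 percentile, whichever is closer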
2,270
def scaling(x=1, y=1, z=1): s = np.array([[x, 0, 0, 0], [0, y, 0, 0], [0, 0, z, 0], [0, 0, 0, 1]], dtype=float) return s
[ "def", "scaling", "(", "x", "=", "1", ",", "y", "=", "1", ",", "z", "=", "1", ")", ":", "s", "=", "np", ".", "array", "(", "[", "[", "x", ",", "0", ",", "0", ",", "0", "]", ",", "[", "0", ",", "y", ",", "0", ",", "0", "]", ",", "[", "0", ",", "0", ",", "z", ",", "0", "]", ",", "[", "0", ",", "0", ",", "0", ",", "1", "]", "]", ",", "dtype", "=", "float", ")", "return", "s" ]
create an array with a scaling matrix .
train
false
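A quick check of the matrix acting on a homogeneous point (column-vector convention):

import numpy as np

s = scaling(2, 3, 4)
point = np.array([1.0, 1.0, 1.0, 1.0])  # (x, y, z, w)
print(s.dot(point))  # [2. 3. 4. 1.]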
2,271
def get_extensions(cert_type): assert X509_EXT_ENABLED, 'X509 extensions are not supported in pyOpenSSL prior to version 0.15.1. Your version: {0}'.format(OpenSSL_version) ext = {} if (cert_type == ''): log.error('cert_type set to empty in tls_ca.get_extensions(); defaulting to ``server``') cert_type = 'server' try: ext['common'] = __salt__['pillar.get']('tls.extensions:common', False) except NameError as err: log.debug(err) if ((not ext['common']) or (ext['common'] == '')): ext['common'] = {'csr': {'basicConstraints': 'CA:FALSE'}, 'cert': {'authorityKeyIdentifier': 'keyid,issuer:always', 'subjectKeyIdentifier': 'hash'}} try: ext['server'] = __salt__['pillar.get']('tls.extensions:server', False) except NameError as err: log.debug(err) if ((not ext['server']) or (ext['server'] == '')): ext['server'] = {'csr': {'extendedKeyUsage': 'serverAuth', 'keyUsage': 'digitalSignature, keyEncipherment'}, 'cert': {}} try: ext['client'] = __salt__['pillar.get']('tls.extensions:client', False) except NameError as err: log.debug(err) if ((not ext['client']) or (ext['client'] == '')): ext['client'] = {'csr': {'extendedKeyUsage': 'clientAuth', 'keyUsage': 'nonRepudiation, digitalSignature, keyEncipherment'}, 'cert': {}} if (cert_type not in ext): try: ext[cert_type] = __salt__['pillar.get']('tls.extensions:{0}'.format(cert_type)) except NameError as e: log.debug('pillar, tls:extensions:{0} not available or not operating in a salt context\n{1}'.format(cert_type, e)) retval = ext['common'] for Use in retval: retval[Use].update(ext[cert_type][Use]) return retval
[ "def", "get_extensions", "(", "cert_type", ")", ":", "assert", "X509_EXT_ENABLED", ",", "'X509 extensions are not supported in pyOpenSSL prior to version 0.15.1. Your version: {0}'", ".", "format", "(", "OpenSSL_version", ")", "ext", "=", "{", "}", "if", "(", "cert_type", "==", "''", ")", ":", "log", ".", "error", "(", "'cert_type set to empty in tls_ca.get_extensions(); defaulting to ``server``'", ")", "cert_type", "=", "'server'", "try", ":", "ext", "[", "'common'", "]", "=", "__salt__", "[", "'pillar.get'", "]", "(", "'tls.extensions:common'", ",", "False", ")", "except", "NameError", "as", "err", ":", "log", ".", "debug", "(", "err", ")", "if", "(", "(", "not", "ext", "[", "'common'", "]", ")", "or", "(", "ext", "[", "'common'", "]", "==", "''", ")", ")", ":", "ext", "[", "'common'", "]", "=", "{", "'csr'", ":", "{", "'basicConstraints'", ":", "'CA:FALSE'", "}", ",", "'cert'", ":", "{", "'authorityKeyIdentifier'", ":", "'keyid,issuer:always'", ",", "'subjectKeyIdentifier'", ":", "'hash'", "}", "}", "try", ":", "ext", "[", "'server'", "]", "=", "__salt__", "[", "'pillar.get'", "]", "(", "'tls.extensions:server'", ",", "False", ")", "except", "NameError", "as", "err", ":", "log", ".", "debug", "(", "err", ")", "if", "(", "(", "not", "ext", "[", "'server'", "]", ")", "or", "(", "ext", "[", "'server'", "]", "==", "''", ")", ")", ":", "ext", "[", "'server'", "]", "=", "{", "'csr'", ":", "{", "'extendedKeyUsage'", ":", "'serverAuth'", ",", "'keyUsage'", ":", "'digitalSignature, keyEncipherment'", "}", ",", "'cert'", ":", "{", "}", "}", "try", ":", "ext", "[", "'client'", "]", "=", "__salt__", "[", "'pillar.get'", "]", "(", "'tls.extensions:client'", ",", "False", ")", "except", "NameError", "as", "err", ":", "log", ".", "debug", "(", "err", ")", "if", "(", "(", "not", "ext", "[", "'client'", "]", ")", "or", "(", "ext", "[", "'client'", "]", "==", "''", ")", ")", ":", "ext", "[", "'client'", "]", "=", "{", "'csr'", ":", "{", "'extendedKeyUsage'", ":", "'clientAuth'", ",", "'keyUsage'", ":", "'nonRepudiation, digitalSignature, keyEncipherment'", "}", ",", "'cert'", ":", "{", "}", "}", "if", "(", "cert_type", "not", "in", "ext", ")", ":", "try", ":", "ext", "[", "cert_type", "]", "=", "__salt__", "[", "'pillar.get'", "]", "(", "'tls.extensions:{0}'", ".", "format", "(", "cert_type", ")", ")", "except", "NameError", "as", "e", ":", "log", ".", "debug", "(", "'pillar, tls:extensions:{0} not available or not operating in a salt context\\n{1}'", ".", "format", "(", "cert_type", ",", "e", ")", ")", "retval", "=", "ext", "[", "'common'", "]", "for", "Use", "in", "retval", ":", "retval", "[", "Use", "]", ".", "update", "(", "ext", "[", "cert_type", "]", "[", "Use", "]", ")", "return", "retval" ]
return the x509 extension set to use for the given certificate type , merging common and per-type settings .
train
true
2,272
def authorized_keys(name): ssh_dir = posixpath.join(home_directory(name), '.ssh') authorized_keys_filename = posixpath.join(ssh_dir, 'authorized_keys') return uncommented_lines(authorized_keys_filename, use_sudo=True)
[ "def", "authorized_keys", "(", "name", ")", ":", "ssh_dir", "=", "posixpath", ".", "join", "(", "home_directory", "(", "name", ")", ",", "'.ssh'", ")", "authorized_keys_filename", "=", "posixpath", ".", "join", "(", "ssh_dir", ",", "'authorized_keys'", ")", "return", "uncommented_lines", "(", "authorized_keys_filename", ",", "use_sudo", "=", "True", ")" ]
get the list of authorized ssh public keys for the user .
train
false
2,273
def plot_pacf(x, ax=None, lags=None, alpha=0.05, method='ywm', use_vlines=True, title='Partial Autocorrelation', zero=True, **kwargs): (fig, ax) = utils.create_mpl_ax(ax) (lags, nlags, irregular) = _prepare_data_corr_plot(x, lags, zero) confint = None if (alpha is None): acf_x = pacf(x, nlags=nlags, alpha=alpha, method=method) else: (acf_x, confint) = pacf(x, nlags=nlags, alpha=alpha, method=method) _plot_corr(ax, title, acf_x, confint, lags, irregular, use_vlines, **kwargs) return fig
[ "def", "plot_pacf", "(", "x", ",", "ax", "=", "None", ",", "lags", "=", "None", ",", "alpha", "=", "0.05", ",", "method", "=", "'ywm'", ",", "use_vlines", "=", "True", ",", "title", "=", "'Partial Autocorrelation'", ",", "zero", "=", "True", ",", "**", "kwargs", ")", ":", "(", "fig", ",", "ax", ")", "=", "utils", ".", "create_mpl_ax", "(", "ax", ")", "(", "lags", ",", "nlags", ",", "irregular", ")", "=", "_prepare_data_corr_plot", "(", "x", ",", "lags", ",", "zero", ")", "confint", "=", "None", "if", "(", "alpha", "is", "None", ")", ":", "acf_x", "=", "pacf", "(", "x", ",", "nlags", "=", "nlags", ",", "alpha", "=", "alpha", ",", "method", "=", "method", ")", "else", ":", "(", "acf_x", ",", "confint", ")", "=", "pacf", "(", "x", ",", "nlags", "=", "nlags", ",", "alpha", "=", "alpha", ",", "method", "=", "method", ")", "_plot_corr", "(", "ax", ",", "title", ",", "acf_x", ",", "confint", ",", "lags", ",", "irregular", ",", "use_vlines", ",", "**", "kwargs", ")", "return", "fig" ]
plot the partial autocorrelation function ; lags are on the horizontal axis and correlations on the vertical axis .
train
false
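A usage sketch on white noise, assuming the statsmodels-style helpers referenced above:

import numpy as np

x = np.random.default_rng(0).standard_normal(200)
fig = plot_pacf(x, lags=20)  # returns the matplotlib Figure
fig.savefig('pacf.png')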
2,274
def get_wake_on_modem(): ret = salt.utils.mac_utils.execute_return_result('systemsetup -getwakeonmodem') return (salt.utils.mac_utils.validate_enabled(salt.utils.mac_utils.parse_return(ret)) == 'on')
[ "def", "get_wake_on_modem", "(", ")", ":", "ret", "=", "salt", ".", "utils", ".", "mac_utils", ".", "execute_return_result", "(", "'systemsetup -getwakeonmodem'", ")", "return", "(", "salt", ".", "utils", ".", "mac_utils", ".", "validate_enabled", "(", "salt", ".", "utils", ".", "mac_utils", ".", "parse_return", "(", "ret", ")", ")", "==", "'on'", ")" ]
displays whether "wake on modem" is on or off , if supported :return: true if enabled , false otherwise :rtype: bool .
train
true
2,275
def set_committed_value(instance, key, value): (state, dict_) = (instance_state(instance), instance_dict(instance)) state.manager[key].impl.set_committed_value(state, dict_, value)
[ "def", "set_committed_value", "(", "instance", ",", "key", ",", "value", ")", ":", "(", "state", ",", "dict_", ")", "=", "(", "instance_state", "(", "instance", ")", ",", "instance_dict", "(", "instance", ")", ")", "state", ".", "manager", "[", "key", "]", ".", "impl", ".", "set_committed_value", "(", "state", ",", "dict_", ",", "value", ")" ]
set the value of an attribute with no history events .
train
false
2,276
def zlib_encode(input, errors='strict'): assert (errors == 'strict') output = zlib.compress(input) return (output, len(input))
[ "def", "zlib_encode", "(", "input", ",", "errors", "=", "'strict'", ")", ":", "assert", "(", "errors", "==", "'strict'", ")", "output", "=", "zlib", ".", "compress", "(", "input", ")", "return", "(", "output", ",", "len", "(", "input", ")", ")" ]
encodes the input with zlib and returns a tuple of .
train
false
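A round-trip check; the second element of the returned tuple is the number of input bytes consumed:

import zlib

data = b'hello hello hello'
compressed, consumed = zlib_encode(data)
assert consumed == len(data)
assert zlib.decompress(compressed) == data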
2,279
def get_user_from_cas_resp(cas_resp): if cas_resp.user: user = User.objects.filter(guids___id=cas_resp.user).first() if user: return (user, None, 'authenticate') else: external_credential = validate_external_credential(cas_resp.user) if (not external_credential): return (None, None, None) user = get_user(external_id_provider=external_credential['provider'], external_id=external_credential['id']) if user: return (user, external_credential, 'authenticate') else: return (None, external_credential, 'external_first_login')
[ "def", "get_user_from_cas_resp", "(", "cas_resp", ")", ":", "if", "cas_resp", ".", "user", ":", "user", "=", "User", ".", "objects", ".", "filter", "(", "guids___id", "=", "cas_resp", ".", "user", ")", ".", "first", "(", ")", "if", "user", ":", "return", "(", "user", ",", "None", ",", "'authenticate'", ")", "else", ":", "external_credential", "=", "validate_external_credential", "(", "cas_resp", ".", "user", ")", "if", "(", "not", "external_credential", ")", ":", "return", "(", "None", ",", "None", ",", "None", ")", "user", "=", "get_user", "(", "external_id_provider", "=", "external_credential", "[", "'provider'", "]", ",", "external_id", "=", "external_credential", "[", "'id'", "]", ")", "if", "user", ":", "return", "(", "user", ",", "external_credential", ",", "'authenticate'", ")", "else", ":", "return", "(", "None", ",", "external_credential", ",", "'external_first_login'", ")" ]
given a cas service validation response , return a tuple of the matching user , the external credential if any , and the next action to take .
train
false
2,280
def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, residual=None): if (flow_func is None): flow_func = default_flow_func if (auxiliary is None): H = build_auxiliary_edge_connectivity(G) else: H = auxiliary kwargs = dict(capacity='capacity', flow_func=flow_func, residual=residual) (cut_value, partition) = nx.minimum_cut(H, s, t, **kwargs) (reachable, non_reachable) = partition cutset = set() for (u, nbrs) in ((n, G[n]) for n in reachable): cutset.update(((u, v) for v in nbrs if (v in non_reachable))) return cutset
[ "def", "minimum_st_edge_cut", "(", "G", ",", "s", ",", "t", ",", "flow_func", "=", "None", ",", "auxiliary", "=", "None", ",", "residual", "=", "None", ")", ":", "if", "(", "flow_func", "is", "None", ")", ":", "flow_func", "=", "default_flow_func", "if", "(", "auxiliary", "is", "None", ")", ":", "H", "=", "build_auxiliary_edge_connectivity", "(", "G", ")", "else", ":", "H", "=", "auxiliary", "kwargs", "=", "dict", "(", "capacity", "=", "'capacity'", ",", "flow_func", "=", "flow_func", ",", "residual", "=", "residual", ")", "(", "cut_value", ",", "partition", ")", "=", "nx", ".", "minimum_cut", "(", "H", ",", "s", ",", "t", ",", "**", "kwargs", ")", "(", "reachable", ",", "non_reachable", ")", "=", "partition", "cutset", "=", "set", "(", ")", "for", "(", "u", ",", "nbrs", ")", "in", "(", "(", "n", ",", "G", "[", "n", "]", ")", "for", "n", "in", "reachable", ")", ":", "cutset", ".", "update", "(", "(", "(", "u", ",", "v", ")", "for", "v", "in", "nbrs", "if", "(", "v", "in", "non_reachable", ")", ")", ")", "return", "cutset" ]
returns the edges of the cut-set of a minimum (s , t)-cut .
train
false
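A usage sketch on a graph whose only s-t bottleneck is the bridge (2, 3); networkx and its connectivity helpers are assumed to be in scope as in the module above:

import networkx as nx

G = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3), (3, 4), (3, 5), (4, 5)])
print(minimum_st_edge_cut(G, 0, 5))  # {(2, 3)}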