id_within_dataset
int64 1
55.5k
| snippet
stringlengths 19
14.2k
| tokens
sequencelengths 6
1.63k
| nl
stringlengths 6
352
| split_within_dataset
stringclasses 1
value | is_duplicated
bool 2
classes |
---|---|---|---|---|---|
def cookie_is_encoded(data):
    """Return True if ``data`` looks like an encoded (signed) cookie.

    Deprecated: emits a deprecation warning via ``depr()``; do not use
    this API directly.
    """
    depr(0, 13, 'cookie_is_encoded() will be removed soon.', 'Do not use this API directly.')
    # Encoded cookies start with '!' and contain a '?' separator.
    prefix_ok = data.startswith(tob('!'))
    return bool(prefix_ok and (tob('?') in data))
| [
"def",
"cookie_is_encoded",
"(",
"data",
")",
":",
"depr",
"(",
"0",
",",
"13",
",",
"'cookie_is_encoded() will be removed soon.'",
",",
"'Do not use this API directly.'",
")",
"return",
"bool",
"(",
"(",
"data",
".",
"startswith",
"(",
"tob",
"(",
"'!'",
")",
")",
"and",
"(",
"tob",
"(",
"'?'",
")",
"in",
"data",
")",
")",
")"
] | return true if the argument looks like an encoded cookie . | train | false |
def denormalize_column_names(parsed_data):
    """Attempt to flatten hierarchical (dotted) column names, e.g.
    'payload.user.id' -> 'id', as produced by JSON normalization.

    A column is only renamed when the shortened name is unambiguous:
    if two columns would collapse to the same base name, both keep
    their full names. (The original implementation keyed its grouping
    by the *full* column name, so every group had exactly one entry,
    the uniqueness check was vacuous, and colliding columns could be
    renamed to duplicates.)

    :param parsed_data: a pandas DataFrame.
    :return: a DataFrame with renamed columns, or the input object
        unchanged when no rename applies.
    """
    base_columns = defaultdict(list)
    for col in parsed_data.columns.tolist():
        if '.' in col:
            # Group full names under their would-be short name so that
            # collisions are detectable.
            base_columns[col.split('.')[-1]].append(col)
    rename = {
        full_names[0]: base
        for base, full_names in base_columns.items()
        if len(full_names) == 1
    }
    if rename:
        return parsed_data.rename(columns=rename)
    return parsed_data
| [
"def",
"denormalize_column_names",
"(",
"parsed_data",
")",
":",
"cols",
"=",
"parsed_data",
".",
"columns",
".",
"tolist",
"(",
")",
"base_columns",
"=",
"defaultdict",
"(",
"list",
")",
"for",
"col",
"in",
"cols",
":",
"if",
"(",
"'.'",
"in",
"col",
")",
":",
"base_columns",
"[",
"col",
"]",
".",
"append",
"(",
"col",
".",
"split",
"(",
"'.'",
")",
"[",
"(",
"-",
"1",
")",
"]",
")",
"rename",
"=",
"{",
"}",
"for",
"(",
"col",
",",
"new_cols",
")",
"in",
"iteritems",
"(",
"base_columns",
")",
":",
"if",
"(",
"len",
"(",
"new_cols",
")",
"==",
"1",
")",
":",
"rename",
"[",
"col",
"]",
"=",
"new_cols",
"[",
"0",
"]",
"if",
"(",
"len",
"(",
"list",
"(",
"rename",
".",
"keys",
"(",
")",
")",
")",
">",
"0",
")",
":",
"return",
"parsed_data",
".",
"rename",
"(",
"columns",
"=",
"rename",
")",
"else",
":",
"return",
"parsed_data"
] | attempts to remove the column hierarchy if possible when parsing from json . | train | false |
def libvlc_vlm_set_input(p_instance, psz_name, psz_input):
    """Set a media's input MRL via the VLM interface.

    :param p_instance: the libvlc Instance handle.
    :param psz_name: name of the VLM media (C string).
    :param psz_input: the input MRL to set (C string).
    :return: result of the native call (int status — presumably 0 on
        success; confirm against the libvlc API docs).
    """
    # Look up the binding in the _Cfunctions registry, falling back to
    # constructing it with _Cfunction. The ((1,), (1,), (1,)) flags mark
    # all three parameters as plain inputs.
    f = (_Cfunctions.get('libvlc_vlm_set_input', None) or _Cfunction('libvlc_vlm_set_input', ((1,), (1,), (1,)), None, ctypes.c_int, Instance, ctypes.c_char_p, ctypes.c_char_p))
    return f(p_instance, psz_name, psz_input)
| [
"def",
"libvlc_vlm_set_input",
"(",
"p_instance",
",",
"psz_name",
",",
"psz_input",
")",
":",
"f",
"=",
"(",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_vlm_set_input'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_vlm_set_input'",
",",
"(",
"(",
"1",
",",
")",
",",
"(",
"1",
",",
")",
",",
"(",
"1",
",",
")",
")",
",",
"None",
",",
"ctypes",
".",
"c_int",
",",
"Instance",
",",
"ctypes",
".",
"c_char_p",
",",
"ctypes",
".",
"c_char_p",
")",
")",
"return",
"f",
"(",
"p_instance",
",",
"psz_name",
",",
"psz_input",
")"
] | set a medias input mrl . | train | true |
1,885 | def _search_by_lun(disks_service, lun_id):
res = [disk for disk in disks_service.list(search='disk_type=lun') if (disk.lun_storage.id == lun_id)]
return (res[0] if res else None)
| [
"def",
"_search_by_lun",
"(",
"disks_service",
",",
"lun_id",
")",
":",
"res",
"=",
"[",
"disk",
"for",
"disk",
"in",
"disks_service",
".",
"list",
"(",
"search",
"=",
"'disk_type=lun'",
")",
"if",
"(",
"disk",
".",
"lun_storage",
".",
"id",
"==",
"lun_id",
")",
"]",
"return",
"(",
"res",
"[",
"0",
"]",
"if",
"res",
"else",
"None",
")"
] | find disk by lun id . | train | false |
def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, data):
    """Return only the targets that are deep dependencies of |root_targets|.

    :param targets: dict mapping qualified target name -> target dict.
    :param flat_list: ordered list of qualified target names.
    :param dependency_nodes: dict mapping qualified name -> node that
        exposes DeepDependencies().
    :param root_targets: iterable of (possibly unqualified) target names
        to keep.
    :param data: per-build-file data; each build file's 'targets' list is
        pruned in place as a side effect.
    :return: tuple (wanted_targets dict, pruned flat_list).
    :raises GypError: if a root target cannot be resolved.
    """
    # Resolve each requested root target to its qualified name(s).
    qualified_root_targets = []
    for target in root_targets:
        target = target.strip()
        qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
        if (not qualified_targets):
            raise GypError(('Could not find target %s' % target))
        qualified_root_targets.extend(qualified_targets)
    # Keep every root target plus everything it transitively depends on.
    wanted_targets = {}
    for target in qualified_root_targets:
        wanted_targets[target] = targets[target]
        for dependency in dependency_nodes[target].DeepDependencies():
            wanted_targets[dependency] = targets[dependency]
    # Preserve the original relative ordering of flat_list.
    wanted_flat_list = [t for t in flat_list if (t in wanted_targets)]
    # Drop unwanted targets from each build file's data, in place.
    for build_file in data['target_build_files']:
        if (not ('targets' in data[build_file])):
            continue
        new_targets = []
        for target in data[build_file]['targets']:
            qualified_name = gyp.common.QualifiedTarget(build_file, target['target_name'], target['toolset'])
            if (qualified_name in wanted_targets):
                new_targets.append(target)
        data[build_file]['targets'] = new_targets
    return (wanted_targets, wanted_flat_list)
| [
"def",
"PruneUnwantedTargets",
"(",
"targets",
",",
"flat_list",
",",
"dependency_nodes",
",",
"root_targets",
",",
"data",
")",
":",
"qualified_root_targets",
"=",
"[",
"]",
"for",
"target",
"in",
"root_targets",
":",
"target",
"=",
"target",
".",
"strip",
"(",
")",
"qualified_targets",
"=",
"gyp",
".",
"common",
".",
"FindQualifiedTargets",
"(",
"target",
",",
"flat_list",
")",
"if",
"(",
"not",
"qualified_targets",
")",
":",
"raise",
"GypError",
"(",
"(",
"'Could not find target %s'",
"%",
"target",
")",
")",
"qualified_root_targets",
".",
"extend",
"(",
"qualified_targets",
")",
"wanted_targets",
"=",
"{",
"}",
"for",
"target",
"in",
"qualified_root_targets",
":",
"wanted_targets",
"[",
"target",
"]",
"=",
"targets",
"[",
"target",
"]",
"for",
"dependency",
"in",
"dependency_nodes",
"[",
"target",
"]",
".",
"DeepDependencies",
"(",
")",
":",
"wanted_targets",
"[",
"dependency",
"]",
"=",
"targets",
"[",
"dependency",
"]",
"wanted_flat_list",
"=",
"[",
"t",
"for",
"t",
"in",
"flat_list",
"if",
"(",
"t",
"in",
"wanted_targets",
")",
"]",
"for",
"build_file",
"in",
"data",
"[",
"'target_build_files'",
"]",
":",
"if",
"(",
"not",
"(",
"'targets'",
"in",
"data",
"[",
"build_file",
"]",
")",
")",
":",
"continue",
"new_targets",
"=",
"[",
"]",
"for",
"target",
"in",
"data",
"[",
"build_file",
"]",
"[",
"'targets'",
"]",
":",
"qualified_name",
"=",
"gyp",
".",
"common",
".",
"QualifiedTarget",
"(",
"build_file",
",",
"target",
"[",
"'target_name'",
"]",
",",
"target",
"[",
"'toolset'",
"]",
")",
"if",
"(",
"qualified_name",
"in",
"wanted_targets",
")",
":",
"new_targets",
".",
"append",
"(",
"target",
")",
"data",
"[",
"build_file",
"]",
"[",
"'targets'",
"]",
"=",
"new_targets",
"return",
"(",
"wanted_targets",
",",
"wanted_flat_list",
")"
] | return only the targets that are deep dependencies of |root_targets| . | train | false |
def safe_classpath(classpath, synthetic_jar_dir, custom_name=None):
    """Bundle `classpath` into one synthetic jar whose manifest Class-Path
    entry references the (relativized) original classpath entries
    (presumably to keep java command lines short — confirm callers).

    :param classpath: classpath entries to bundle.
    :param synthetic_jar_dir: directory for the synthetic jar; created
        if given, otherwise a fresh temporary directory is used.
    :param custom_name: optional fixed basename for the jar; otherwise
        the random temporary .jar name is kept.
    :return: single-element list containing the synthetic jar's path.
    """
    if synthetic_jar_dir:
        safe_mkdir(synthetic_jar_dir)
    else:
        synthetic_jar_dir = safe_mkdtemp()
    # Manifest Class-Path entries must be relative to the jar location.
    bundled_classpath = relativize_classpath(classpath, synthetic_jar_dir)
    manifest = Manifest()
    manifest.addentry(Manifest.CLASS_PATH, u' '.join(bundled_classpath))
    # cleanup=False: the jar must outlive this function for its consumers.
    with temporary_file(root_dir=synthetic_jar_dir, cleanup=False, suffix=u'.jar') as jar_file:
        with open_zip(jar_file, mode=u'w', compression=ZIP_STORED) as jar:
            jar.writestr(Manifest.PATH, manifest.contents())
        if custom_name:
            custom_path = os.path.join(synthetic_jar_dir, custom_name)
            safe_concurrent_rename(jar_file.name, custom_path)
            return [custom_path]
        else:
            return [jar_file.name]
| [
"def",
"safe_classpath",
"(",
"classpath",
",",
"synthetic_jar_dir",
",",
"custom_name",
"=",
"None",
")",
":",
"if",
"synthetic_jar_dir",
":",
"safe_mkdir",
"(",
"synthetic_jar_dir",
")",
"else",
":",
"synthetic_jar_dir",
"=",
"safe_mkdtemp",
"(",
")",
"bundled_classpath",
"=",
"relativize_classpath",
"(",
"classpath",
",",
"synthetic_jar_dir",
")",
"manifest",
"=",
"Manifest",
"(",
")",
"manifest",
".",
"addentry",
"(",
"Manifest",
".",
"CLASS_PATH",
",",
"u' '",
".",
"join",
"(",
"bundled_classpath",
")",
")",
"with",
"temporary_file",
"(",
"root_dir",
"=",
"synthetic_jar_dir",
",",
"cleanup",
"=",
"False",
",",
"suffix",
"=",
"u'.jar'",
")",
"as",
"jar_file",
":",
"with",
"open_zip",
"(",
"jar_file",
",",
"mode",
"=",
"u'w'",
",",
"compression",
"=",
"ZIP_STORED",
")",
"as",
"jar",
":",
"jar",
".",
"writestr",
"(",
"Manifest",
".",
"PATH",
",",
"manifest",
".",
"contents",
"(",
")",
")",
"if",
"custom_name",
":",
"custom_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"synthetic_jar_dir",
",",
"custom_name",
")",
"safe_concurrent_rename",
"(",
"jar_file",
".",
"name",
",",
"custom_path",
")",
"return",
"[",
"custom_path",
"]",
"else",
":",
"return",
"[",
"jar_file",
".",
"name",
"]"
] | bundles classpath into one synthetic jar that includes original classpath in its manifest . | train | true |
1,890 | @task
def mailserver(ctx, port=1025):
    """Run a local SMTP debugging server that prints received messages.

    :param ctx: invoke task context used to run the command.
    :param port: localhost port to listen on (default 1025).

    NOTE(review): relies on the stdlib `smtpd` module, which was removed
    in Python 3.12 — confirm the supported interpreter range.
    """
    cmd = 'python -m smtpd -n -c DebuggingServer localhost:{port}'.format(port=port)
    ctx.run(bin_prefix(cmd), pty=True)
| [
"@",
"task",
"def",
"mailserver",
"(",
"ctx",
",",
"port",
"=",
"1025",
")",
":",
"cmd",
"=",
"'python -m smtpd -n -c DebuggingServer localhost:{port}'",
".",
"format",
"(",
"port",
"=",
"port",
")",
"ctx",
".",
"run",
"(",
"bin_prefix",
"(",
"cmd",
")",
",",
"pty",
"=",
"True",
")"
] | run a smtp test server . | train | false |
def is_valid_ipv4(address):
    """Return True if ``address`` is a valid IPv4 address, else False.

    Any exception raised by netaddr (e.g. for non-string input) is
    treated as "not valid".
    """
    try:
        valid = netaddr.valid_ipv4(address)
    except Exception:
        return False
    return valid
| [
"def",
"is_valid_ipv4",
"(",
"address",
")",
":",
"try",
":",
"return",
"netaddr",
".",
"valid_ipv4",
"(",
"address",
")",
"except",
"Exception",
":",
"return",
"False"
] | returns true if given is a valid ipv4 address . | train | false |
def WCLPRICE(barDs, count):
    """Weighted close price (TA-Lib WCLPRICE).

    Delegates to call_talib_with_hlc, which presumably extracts the
    high/low/close series from `barDs` — see that helper for the exact
    contract.
    """
    return call_talib_with_hlc(barDs, count, talib.WCLPRICE)
| [
"def",
"WCLPRICE",
"(",
"barDs",
",",
"count",
")",
":",
"return",
"call_talib_with_hlc",
"(",
"barDs",
",",
"count",
",",
"talib",
".",
"WCLPRICE",
")"
] | weighted close price . | train | false |
def cummingsify(poem):
    """Randomly do one of: return the poem lowercased, raise
    GibberishError, or raise CannotCummingsify(poem).

    Each outcome is chosen with equal probability.
    """
    def lowercase():
        return poem.lower()

    def fail_gibberish():
        raise GibberishError()

    def fail_bug():
        raise CannotCummingsify(poem)

    chosen = random.choice([lowercase, fail_gibberish, fail_bug])
    return chosen()
| [
"def",
"cummingsify",
"(",
"poem",
")",
":",
"def",
"success",
"(",
")",
":",
"return",
"poem",
".",
"lower",
"(",
")",
"def",
"gibberish",
"(",
")",
":",
"raise",
"GibberishError",
"(",
")",
"def",
"bug",
"(",
")",
":",
"raise",
"CannotCummingsify",
"(",
"poem",
")",
"return",
"random",
".",
"choice",
"(",
"[",
"success",
",",
"gibberish",
",",
"bug",
"]",
")",
"(",
")"
] | randomly do one of the following: 1 . | train | false |
1,895 | @with_setup(prepare_stdout)
def test_blank_step_hash_value():
    """Syntax check: a blank cell in a step-hash column reads as ''."""
    from lettuce import step

    @step('ignore step')
    def ignore_step(step):
        pass

    @step('string length calc')
    def string_lenth_calc(step):
        # Each row's 'length' must equal the combined length of the two
        # string columns; blank cells must therefore count as ''.
        for hash in step.hashes:
            if ((len(hash['string']) + len(hash['string2'])) != int(hash['length'])):
                raise AssertionError('fail')

    filename = syntax_feature_name('blank_values_in_hash')
    runner = Runner(filename, verbosity=1)
    runner.run()
    assert_stdout_lines('.\n1 feature (1 passed)\n1 scenario (1 passed)\n4 steps (4 passed)\n')
| [
"@",
"with_setup",
"(",
"prepare_stdout",
")",
"def",
"test_blank_step_hash_value",
"(",
")",
":",
"from",
"lettuce",
"import",
"step",
"@",
"step",
"(",
"'ignore step'",
")",
"def",
"ignore_step",
"(",
"step",
")",
":",
"pass",
"@",
"step",
"(",
"'string length calc'",
")",
"def",
"string_lenth_calc",
"(",
"step",
")",
":",
"for",
"hash",
"in",
"step",
".",
"hashes",
":",
"if",
"(",
"(",
"len",
"(",
"hash",
"[",
"'string'",
"]",
")",
"+",
"len",
"(",
"hash",
"[",
"'string2'",
"]",
")",
")",
"!=",
"int",
"(",
"hash",
"[",
"'length'",
"]",
")",
")",
":",
"raise",
"AssertionError",
"(",
"'fail'",
")",
"filename",
"=",
"syntax_feature_name",
"(",
"'blank_values_in_hash'",
")",
"runner",
"=",
"Runner",
"(",
"filename",
",",
"verbosity",
"=",
"1",
")",
"runner",
".",
"run",
"(",
")",
"assert_stdout_lines",
"(",
"'.\\n1 feature (1 passed)\\n1 scenario (1 passed)\\n4 steps (4 passed)\\n'",
")"
] | syntax checking: blank in step hash column = empty string . | train | false |
def validate_token_and_source(token_in_headers, token_in_query_params):
    """Validate an auth token supplied via headers or query parameters.

    Audits where the token came from, then delegates to validate_token
    (headers take precedence when both are present).

    :raises exceptions.TokenNotProvidedError: when neither source has a
        token.
    """
    if not (token_in_headers or token_in_query_params):
        LOG.audit('Token is not found in header or query parameters.')
        raise exceptions.TokenNotProvidedError('Token is not provided.')
    if token_in_headers:
        LOG.audit('Token provided in headers')
    if token_in_query_params:
        LOG.audit('Token provided in query parameters')
    return validate_token(token_in_headers or token_in_query_params)
| [
"def",
"validate_token_and_source",
"(",
"token_in_headers",
",",
"token_in_query_params",
")",
":",
"if",
"(",
"(",
"not",
"token_in_headers",
")",
"and",
"(",
"not",
"token_in_query_params",
")",
")",
":",
"LOG",
".",
"audit",
"(",
"'Token is not found in header or query parameters.'",
")",
"raise",
"exceptions",
".",
"TokenNotProvidedError",
"(",
"'Token is not provided.'",
")",
"if",
"token_in_headers",
":",
"LOG",
".",
"audit",
"(",
"'Token provided in headers'",
")",
"if",
"token_in_query_params",
":",
"LOG",
".",
"audit",
"(",
"'Token provided in query parameters'",
")",
"return",
"validate_token",
"(",
"(",
"token_in_headers",
"or",
"token_in_query_params",
")",
")"
] | validate the provided authentication token . | train | false |
def clear_course_from_cache(course_key):
    """Drop the cached block structure for the given course.

    :param course_key: key identifying the course whose cached block
        structure should be cleared.
    """
    get_block_structure_manager(course_key).clear()
| [
"def",
"clear_course_from_cache",
"(",
"course_key",
")",
":",
"get_block_structure_manager",
"(",
"course_key",
")",
".",
"clear",
"(",
")"
] | a higher order function implemented on top of the block_structure . | train | false |
def is_marketing_link_set(name):
    """Return True if the named marketing link is configured.

    When the marketing site is enabled, the link must be present in the
    configured MKTG_URLS; otherwise it must be present in
    settings.MKTG_URL_LINK_MAP.
    """
    enable_mktg_site = configuration_helpers.get_value(
        'ENABLE_MKTG_SITE', settings.FEATURES.get('ENABLE_MKTG_SITE', False))
    marketing_urls = configuration_helpers.get_value('MKTG_URLS', settings.MKTG_URLS)
    if enable_mktg_site:
        return name in marketing_urls
    return name in settings.MKTG_URL_LINK_MAP
| [
"def",
"is_marketing_link_set",
"(",
"name",
")",
":",
"enable_mktg_site",
"=",
"configuration_helpers",
".",
"get_value",
"(",
"'ENABLE_MKTG_SITE'",
",",
"settings",
".",
"FEATURES",
".",
"get",
"(",
"'ENABLE_MKTG_SITE'",
",",
"False",
")",
")",
"marketing_urls",
"=",
"configuration_helpers",
".",
"get_value",
"(",
"'MKTG_URLS'",
",",
"settings",
".",
"MKTG_URLS",
")",
"if",
"enable_mktg_site",
":",
"return",
"(",
"name",
"in",
"marketing_urls",
")",
"else",
":",
"return",
"(",
"name",
"in",
"settings",
".",
"MKTG_URL_LINK_MAP",
")"
] | returns a boolean if a given named marketing link is configured . | train | false |
def scatter_add(a, slices, value):
    """Add ``value`` into the elements of ``a`` selected by ``slices``,
    in place.

    Thin wrapper over ``a.scatter_add``; duplicate indices presumably
    accumulate — confirm against the array type's scatter_add contract.
    Returns None.
    """
    a.scatter_add(slices, value)
| [
"def",
"scatter_add",
"(",
"a",
",",
"slices",
",",
"value",
")",
":",
"a",
".",
"scatter_add",
"(",
"slices",
",",
"value",
")"
] | adds given values to specified elements of an array . | train | false |
1,900 | @contextlib.contextmanager
def arg_scope(list_ops_or_scope, **kwargs):
    """Store default keyword arguments for the given set of ops.

    Two modes:
      * dict argument: re-enter a previously captured scope unchanged
        (``kwargs`` must then be empty);
      * list/tuple of @add_arg_scope-decorated ops: push a new scope in
        which each op's inherited defaults are updated with ``kwargs``.

    Yields the current scope dict; the scope is popped from the stack on
    exit.

    :raises ValueError: kwargs supplied with a dict scope, or an op that
        is not decorated with @add_arg_scope.
    :raises TypeError: argument is neither a dict nor a list/tuple.
    """
    if isinstance(list_ops_or_scope, dict):
        # Reusing a captured scope: no overrides are allowed.
        if kwargs:
            raise ValueError('When attempting to re-use a scope by suppling adictionary, kwargs must be empty.')
        current_scope = list_ops_or_scope.copy()
        try:
            _get_arg_stack().append(current_scope)
            (yield current_scope)
        finally:
            _get_arg_stack().pop()
    else:
        if (not isinstance(list_ops_or_scope, (list, tuple))):
            raise TypeError('list_ops_or_scope must either be a list/tuple or reusedscope (i.e. dict)')
        try:
            # Start from the innermost active scope and layer kwargs on.
            # NOTE(review): if an exception is raised below before the
            # append, the finally still pops — potentially popping an
            # outer scope; confirm this is the intended behavior.
            current_scope = _current_arg_scope().copy()
            for op in list_ops_or_scope:
                key_op = (op.__module__, op.__name__)
                if (not has_arg_scope(op)):
                    raise ValueError('%s is not decorated with @add_arg_scope', key_op)
                if (key_op in current_scope):
                    # Merge: new kwargs override the inherited defaults.
                    current_kwargs = current_scope[key_op].copy()
                    current_kwargs.update(kwargs)
                    current_scope[key_op] = current_kwargs
                else:
                    current_scope[key_op] = kwargs.copy()
            _get_arg_stack().append(current_scope)
            (yield current_scope)
        finally:
            _get_arg_stack().pop()
| [
"@",
"contextlib",
".",
"contextmanager",
"def",
"arg_scope",
"(",
"list_ops_or_scope",
",",
"**",
"kwargs",
")",
":",
"if",
"isinstance",
"(",
"list_ops_or_scope",
",",
"dict",
")",
":",
"if",
"kwargs",
":",
"raise",
"ValueError",
"(",
"'When attempting to re-use a scope by suppling adictionary, kwargs must be empty.'",
")",
"current_scope",
"=",
"list_ops_or_scope",
".",
"copy",
"(",
")",
"try",
":",
"_get_arg_stack",
"(",
")",
".",
"append",
"(",
"current_scope",
")",
"(",
"yield",
"current_scope",
")",
"finally",
":",
"_get_arg_stack",
"(",
")",
".",
"pop",
"(",
")",
"else",
":",
"if",
"(",
"not",
"isinstance",
"(",
"list_ops_or_scope",
",",
"(",
"list",
",",
"tuple",
")",
")",
")",
":",
"raise",
"TypeError",
"(",
"'list_ops_or_scope must either be a list/tuple or reusedscope (i.e. dict)'",
")",
"try",
":",
"current_scope",
"=",
"_current_arg_scope",
"(",
")",
".",
"copy",
"(",
")",
"for",
"op",
"in",
"list_ops_or_scope",
":",
"key_op",
"=",
"(",
"op",
".",
"__module__",
",",
"op",
".",
"__name__",
")",
"if",
"(",
"not",
"has_arg_scope",
"(",
"op",
")",
")",
":",
"raise",
"ValueError",
"(",
"'%s is not decorated with @add_arg_scope'",
",",
"key_op",
")",
"if",
"(",
"key_op",
"in",
"current_scope",
")",
":",
"current_kwargs",
"=",
"current_scope",
"[",
"key_op",
"]",
".",
"copy",
"(",
")",
"current_kwargs",
".",
"update",
"(",
"kwargs",
")",
"current_scope",
"[",
"key_op",
"]",
"=",
"current_kwargs",
"else",
":",
"current_scope",
"[",
"key_op",
"]",
"=",
"kwargs",
".",
"copy",
"(",
")",
"_get_arg_stack",
"(",
")",
".",
"append",
"(",
"current_scope",
")",
"(",
"yield",
"current_scope",
")",
"finally",
":",
"_get_arg_stack",
"(",
")",
".",
"pop",
"(",
")"
] | stores the default arguments for the given set of list_ops . | train | true |
def order_history(user, **kwargs):
    """Return a summary of the user's past purchased orders.

    :param user: the user whose orders to list.
    :keyword course_org_filter: when given, only include items whose
        course org matches it exactly.
    :keyword org_filter_out_set: orgs to exclude when no
        course_org_filter is given.
    :return: list of dicts with 'order_id', 'receipt_url' and
        'order_date', ordered newest fulfilled first, one entry per
        order.
    """
    course_org_filter = (kwargs['course_org_filter'] if ('course_org_filter' in kwargs) else None)
    org_filter_out_set = (kwargs['org_filter_out_set'] if ('org_filter_out_set' in kwargs) else [])
    order_history_list = []
    purchased_order_items = OrderItem.objects.filter(user=user, status='purchased').select_subclasses().order_by('-fulfilled_time')
    for order_item in purchased_order_items:
        # Emit each order only once, even when it holds several items.
        if (order_item.order.id not in [item['order_id'] for item in order_history_list]):
            order_item_course_id = getattr(order_item, 'course_id', None)
            if order_item_course_id:
                # Apply the org allow/deny filtering described above.
                if ((course_org_filter and (course_org_filter == order_item_course_id.org)) or ((course_org_filter is None) and (order_item_course_id.org not in org_filter_out_set))):
                    order_history_list.append({'order_id': order_item.order.id, 'receipt_url': reverse('shoppingcart.views.show_receipt', kwargs={'ordernum': order_item.order.id}), 'order_date': ModuleI18nService().strftime(order_item.order.purchase_time, 'SHORT_DATE')})
    return order_history_list
| [
"def",
"order_history",
"(",
"user",
",",
"**",
"kwargs",
")",
":",
"course_org_filter",
"=",
"(",
"kwargs",
"[",
"'course_org_filter'",
"]",
"if",
"(",
"'course_org_filter'",
"in",
"kwargs",
")",
"else",
"None",
")",
"org_filter_out_set",
"=",
"(",
"kwargs",
"[",
"'org_filter_out_set'",
"]",
"if",
"(",
"'org_filter_out_set'",
"in",
"kwargs",
")",
"else",
"[",
"]",
")",
"order_history_list",
"=",
"[",
"]",
"purchased_order_items",
"=",
"OrderItem",
".",
"objects",
".",
"filter",
"(",
"user",
"=",
"user",
",",
"status",
"=",
"'purchased'",
")",
".",
"select_subclasses",
"(",
")",
".",
"order_by",
"(",
"'-fulfilled_time'",
")",
"for",
"order_item",
"in",
"purchased_order_items",
":",
"if",
"(",
"order_item",
".",
"order",
".",
"id",
"not",
"in",
"[",
"item",
"[",
"'order_id'",
"]",
"for",
"item",
"in",
"order_history_list",
"]",
")",
":",
"order_item_course_id",
"=",
"getattr",
"(",
"order_item",
",",
"'course_id'",
",",
"None",
")",
"if",
"order_item_course_id",
":",
"if",
"(",
"(",
"course_org_filter",
"and",
"(",
"course_org_filter",
"==",
"order_item_course_id",
".",
"org",
")",
")",
"or",
"(",
"(",
"course_org_filter",
"is",
"None",
")",
"and",
"(",
"order_item_course_id",
".",
"org",
"not",
"in",
"org_filter_out_set",
")",
")",
")",
":",
"order_history_list",
".",
"append",
"(",
"{",
"'order_id'",
":",
"order_item",
".",
"order",
".",
"id",
",",
"'receipt_url'",
":",
"reverse",
"(",
"'shoppingcart.views.show_receipt'",
",",
"kwargs",
"=",
"{",
"'ordernum'",
":",
"order_item",
".",
"order",
".",
"id",
"}",
")",
",",
"'order_date'",
":",
"ModuleI18nService",
"(",
")",
".",
"strftime",
"(",
"order_item",
".",
"order",
".",
"purchase_time",
",",
"'SHORT_DATE'",
")",
"}",
")",
"return",
"order_history_list"
] | display a list of the currently logged-in users past orders . | train | false |
def _get_candidate(version):
    """Return the candidate component of ``version``.

    The position is resolved by the sibling helper _get_candidate_pos.
    """
    pos = _get_candidate_pos(version)
    return version[pos]
| [
"def",
"_get_candidate",
"(",
"version",
")",
":",
"return",
"version",
"[",
"_get_candidate_pos",
"(",
"version",
")",
"]"
] | returns the candidate . | train | false |
def path_split(pathname_spec, separator=os.path.sep):
    """Split a path specification into its components.

    :param pathname_spec: the path string to split.
    :param separator: separator to split on (default: os.path.sep).
    :return: list of path components.
    """
    # str.split already returns a fresh list, so the previous
    # list(...) wrapper was redundant.
    return pathname_spec.split(separator)
| [
"def",
"path_split",
"(",
"pathname_spec",
",",
"separator",
"=",
"os",
".",
"path",
".",
"sep",
")",
":",
"return",
"list",
"(",
"pathname_spec",
".",
"split",
"(",
"separator",
")",
")"
] | find the right place in the expression tree/line to parallelize . | train | false |
def KeyLEQ(key1, key2):
    """Compare two keys for less-than-or-equal-to.

    A None key stands for an unbounded endpoint, so any comparison
    involving None succeeds.
    """
    if key1 is None or key2 is None:
        return True
    return key1 <= key2
| [
"def",
"KeyLEQ",
"(",
"key1",
",",
"key2",
")",
":",
"if",
"(",
"(",
"key1",
"is",
"None",
")",
"or",
"(",
"key2",
"is",
"None",
")",
")",
":",
"return",
"True",
"return",
"(",
"key1",
"<=",
"key2",
")"
] | compare two keys for less-than-or-equal-to . | train | false |
def writebytes(filename, bytesource):
    """Write every bytes chunk yielded by ``bytesource`` to ``filename``.

    :param filename: destination path, opened in binary write mode.
    :param bytesource: iterable of bytes objects to emit in order.

    The original left the file handle open; the with-statement now
    guarantees it is flushed and closed even on error.
    """
    with open(filename, 'wb') as outfile:
        for chunk in bytesource:
            outfile.write(chunk)
| [
"def",
"writebytes",
"(",
"filename",
",",
"bytesource",
")",
":",
"outfile",
"=",
"open",
"(",
"filename",
",",
"'wb'",
")",
"for",
"bt",
"in",
"bytesource",
":",
"outfile",
".",
"write",
"(",
"bt",
")"
] | convenience for emitting the bytes we generate to a file . | train | false |
def _blade_action_postfunc(closing_message):
    """Log ``closing_message`` and persist SCons' signature database.

    NOTE(review): presumably used to finish up after Blade's own actions
    fail so that work done so far is still recorded — confirm call
    sites.
    """
    console.info(closing_message)
    SCons.SConsign.write()
| [
"def",
"_blade_action_postfunc",
"(",
"closing_message",
")",
":",
"console",
".",
"info",
"(",
"closing_message",
")",
"SCons",
".",
"SConsign",
".",
"write",
"(",
")"
] | to do post jobs if blades own actions failed to build . | train | false |
def test_forum_delete_with_user_and_topic(topic, user):
    """Deleting a forum that contains a topic removes the forum and
    resets the topic author's post count."""
    assert (user.post_count == 1)
    topic.forum.delete([user])
    # The forum row must be gone from the database.
    forum = Forum.query.filter_by(id=topic.forum_id).first()
    assert (forum is None)
    # The author's counter is recalculated after the delete.
    assert (user.post_count == 0)
| [
"def",
"test_forum_delete_with_user_and_topic",
"(",
"topic",
",",
"user",
")",
":",
"assert",
"(",
"user",
".",
"post_count",
"==",
"1",
")",
"topic",
".",
"forum",
".",
"delete",
"(",
"[",
"user",
"]",
")",
"forum",
"=",
"Forum",
".",
"query",
".",
"filter_by",
"(",
"id",
"=",
"topic",
".",
"forum_id",
")",
".",
"first",
"(",
")",
"assert",
"(",
"forum",
"is",
"None",
")",
"assert",
"(",
"user",
".",
"post_count",
"==",
"0",
")"
] | now test the delete forum method with a topic inside . | train | false |
def word2vec2tensor(word2vec_model_path, tensor_filename, binary=False):
    """Convert a word2vec model into TensorBoard projector files:
    ``<tensor_filename>_tensor.tsv`` (one vector per line) and
    ``<tensor_filename>_metadata.tsv`` (one word per line).

    :param word2vec_model_path: path to the word2vec model file.
    :param tensor_filename: output filename prefix.
    :param binary: True if the model file is in binary word2vec format.

    NOTE(review): Python-2-era code — ``word.encode('utf-8') + '\\n'``
    mixes bytes with str and would raise TypeError on Python 3; confirm
    the target interpreter.
    NOTE(review): the ``' DCTB '`` join separator looks like a tab
    character ('\\t') mangled by data extraction — confirm against the
    original source.
    """
    model = gensim.models.Word2Vec.load_word2vec_format(word2vec_model_path, binary=binary)
    outfiletsv = (tensor_filename + '_tensor.tsv')
    outfiletsvmeta = (tensor_filename + '_metadata.tsv')
    with open(outfiletsv, 'w+') as file_vector:
        with open(outfiletsvmeta, 'w+') as file_metadata:
            for word in model.index2word:
                file_metadata.write((word.encode('utf-8') + '\n'))
                vector_row = ' DCTB '.join(map(str, model[word]))
                file_vector.write((vector_row + '\n'))
    logger.info(('2D tensor file saved to %s' % outfiletsv))
    logger.info(('Tensor metadata file saved to %s' % outfiletsvmeta))
| [
"def",
"word2vec2tensor",
"(",
"word2vec_model_path",
",",
"tensor_filename",
",",
"binary",
"=",
"False",
")",
":",
"model",
"=",
"gensim",
".",
"models",
".",
"Word2Vec",
".",
"load_word2vec_format",
"(",
"word2vec_model_path",
",",
"binary",
"=",
"binary",
")",
"outfiletsv",
"=",
"(",
"tensor_filename",
"+",
"'_tensor.tsv'",
")",
"outfiletsvmeta",
"=",
"(",
"tensor_filename",
"+",
"'_metadata.tsv'",
")",
"with",
"open",
"(",
"outfiletsv",
",",
"'w+'",
")",
"as",
"file_vector",
":",
"with",
"open",
"(",
"outfiletsvmeta",
",",
"'w+'",
")",
"as",
"file_metadata",
":",
"for",
"word",
"in",
"model",
".",
"index2word",
":",
"file_metadata",
".",
"write",
"(",
"(",
"word",
".",
"encode",
"(",
"'utf-8'",
")",
"+",
"'\\n'",
")",
")",
"vector_row",
"=",
"' DCTB '",
".",
"join",
"(",
"map",
"(",
"str",
",",
"model",
"[",
"word",
"]",
")",
")",
"file_vector",
".",
"write",
"(",
"(",
"vector_row",
"+",
"'\\n'",
")",
")",
"logger",
".",
"info",
"(",
"(",
"'2D tensor file saved to %s'",
"%",
"outfiletsv",
")",
")",
"logger",
".",
"info",
"(",
"(",
"'Tensor metadata file saved to %s'",
"%",
"outfiletsvmeta",
")",
")"
] | convert word2vec mode to 2d tensor tsv file and metadata file args: param1 : word2vec model file path param2 : filename prefix param2 : set true to use a binary word2vec model . | train | false |
def get_all_thread_participants(exploration_id, thread_id):
    """Return the set of author ids of registered users who posted in
    the given feedback thread.

    :param exploration_id: id of the exploration the thread belongs to.
    :param thread_id: id of the thread whose messages are scanned.
    :return: set of author ids.
    """
    return {
        message.author_id
        for message in get_messages(exploration_id, thread_id)
        if user_services.is_user_registered(message.author_id)
    }
| [
"def",
"get_all_thread_participants",
"(",
"exploration_id",
",",
"thread_id",
")",
":",
"return",
"set",
"(",
"[",
"m",
".",
"author_id",
"for",
"m",
"in",
"get_messages",
"(",
"exploration_id",
",",
"thread_id",
")",
"if",
"user_services",
".",
"is_user_registered",
"(",
"m",
".",
"author_id",
")",
"]",
")"
] | fetches all participants of the given thread . | train | false |
def request_server_info(server):
    """Launch a background thread to retrieve the server's info.

    Starts at most one request per server object: the ``request`` flag
    guards against re-entry.

    NOTE(review): the check-then-set on server.request is not atomic,
    so concurrent callers could both start a thread — confirm callers
    are single-threaded.
    """
    if (not server.request):
        server.request = True
        Thread(target=_retrieve_info, args=(server,)).start()
| [
"def",
"request_server_info",
"(",
"server",
")",
":",
"if",
"(",
"not",
"server",
".",
"request",
")",
":",
"server",
".",
"request",
"=",
"True",
"Thread",
"(",
"target",
"=",
"_retrieve_info",
",",
"args",
"=",
"(",
"server",
",",
")",
")",
".",
"start",
"(",
")"
] | launch async request to resolve server address . | train | false |
def create_empty_copy(G, with_data=True):
    """Return a copy of graph ``G`` with all edges removed.

    :param G: graph to copy; the copy is built via ``G.__class__()``.
    :param with_data: when True, node data and the graph-level attribute
        dict are carried over; when False only bare nodes are copied.
    :return: a new, edgeless graph of the same class.
    """
    copy = G.__class__()
    copy.add_nodes_from(G.nodes(data=with_data))
    if with_data:
        copy.graph.update(G.graph)
    return copy
| [
"def",
"create_empty_copy",
"(",
"G",
",",
"with_data",
"=",
"True",
")",
":",
"H",
"=",
"G",
".",
"__class__",
"(",
")",
"H",
".",
"add_nodes_from",
"(",
"G",
".",
"nodes",
"(",
"data",
"=",
"with_data",
")",
")",
"if",
"with_data",
":",
"H",
".",
"graph",
".",
"update",
"(",
"G",
".",
"graph",
")",
"return",
"H"
] | return a copy of the graph g with all of the edges removed . | train | true |
def retry_on_timeout(retries=1):
    """Decorator factory: retry the wrapped call when it raises
    messaging.MessagingTimeout.

    :param retries: number of retries after the initial attempt; once
        exhausted, the MessagingTimeout propagates to the caller.
    """
    def outer(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            attempt = 0
            while True:
                try:
                    return func(*args, **kwargs)
                except messaging.MessagingTimeout:
                    attempt += 1
                    if (attempt <= retries):
                        # Log and loop for another attempt.
                        LOG.warning(_LW('Retrying %(name)s after a MessagingTimeout, attempt %(attempt)s of %(retries)s.'), {'attempt': attempt, 'retries': retries, 'name': func.__name__})
                    else:
                        raise
        return wrapped
    return outer
| [
"def",
"retry_on_timeout",
"(",
"retries",
"=",
"1",
")",
":",
"def",
"outer",
"(",
"func",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
"def",
"wrapped",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"attempt",
"=",
"0",
"while",
"True",
":",
"try",
":",
"return",
"func",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
"except",
"messaging",
".",
"MessagingTimeout",
":",
"attempt",
"+=",
"1",
"if",
"(",
"attempt",
"<=",
"retries",
")",
":",
"LOG",
".",
"warning",
"(",
"_LW",
"(",
"'Retrying %(name)s after a MessagingTimeout, attempt %(attempt)s of %(retries)s.'",
")",
",",
"{",
"'attempt'",
":",
"attempt",
",",
"'retries'",
":",
"retries",
",",
"'name'",
":",
"func",
".",
"__name__",
"}",
")",
"else",
":",
"raise",
"return",
"wrapped",
"return",
"outer"
] | retry the call in case a messagingtimeout is raised . | train | false |
1,923 | def _encode_timestamp(name, value, dummy0, dummy1):
return (('\x11' + name) + _PACK_TIMESTAMP(value.inc, value.time))
| [
"def",
"_encode_timestamp",
"(",
"name",
",",
"value",
",",
"dummy0",
",",
"dummy1",
")",
":",
"return",
"(",
"(",
"'\\x11'",
"+",
"name",
")",
"+",
"_PACK_TIMESTAMP",
"(",
"value",
".",
"inc",
",",
"value",
".",
"time",
")",
")"
] | encode bson . | train | true |
1,926 | def ssm_describe_association(name=None, kwargs=None, instance_id=None, call=None):
if (call != 'action'):
raise SaltCloudSystemExit('The ssm_describe_association action must be called with -a or --action.')
if (not kwargs):
kwargs = {}
if ('instance_id' in kwargs):
instance_id = kwargs['instance_id']
if (name and (not instance_id)):
instance_id = _get_node(name)['instanceId']
if ((not name) and (not instance_id)):
log.error('Either a name or an instance_id is required.')
return False
if ('ssm_document' not in kwargs):
log.error('A ssm_document is required.')
return False
params = {'Action': 'DescribeAssociation', 'InstanceId': instance_id, 'Name': kwargs['ssm_document']}
result = aws.query(params, return_root=True, location=get_location(), provider=get_provider(), product='ssm', opts=__opts__, sigver='4')
log.info(result)
return result
| [
"def",
"ssm_describe_association",
"(",
"name",
"=",
"None",
",",
"kwargs",
"=",
"None",
",",
"instance_id",
"=",
"None",
",",
"call",
"=",
"None",
")",
":",
"if",
"(",
"call",
"!=",
"'action'",
")",
":",
"raise",
"SaltCloudSystemExit",
"(",
"'The ssm_describe_association action must be called with -a or --action.'",
")",
"if",
"(",
"not",
"kwargs",
")",
":",
"kwargs",
"=",
"{",
"}",
"if",
"(",
"'instance_id'",
"in",
"kwargs",
")",
":",
"instance_id",
"=",
"kwargs",
"[",
"'instance_id'",
"]",
"if",
"(",
"name",
"and",
"(",
"not",
"instance_id",
")",
")",
":",
"instance_id",
"=",
"_get_node",
"(",
"name",
")",
"[",
"'instanceId'",
"]",
"if",
"(",
"(",
"not",
"name",
")",
"and",
"(",
"not",
"instance_id",
")",
")",
":",
"log",
".",
"error",
"(",
"'Either a name or an instance_id is required.'",
")",
"return",
"False",
"if",
"(",
"'ssm_document'",
"not",
"in",
"kwargs",
")",
":",
"log",
".",
"error",
"(",
"'A ssm_document is required.'",
")",
"return",
"False",
"params",
"=",
"{",
"'Action'",
":",
"'DescribeAssociation'",
",",
"'InstanceId'",
":",
"instance_id",
",",
"'Name'",
":",
"kwargs",
"[",
"'ssm_document'",
"]",
"}",
"result",
"=",
"aws",
".",
"query",
"(",
"params",
",",
"return_root",
"=",
"True",
",",
"location",
"=",
"get_location",
"(",
")",
",",
"provider",
"=",
"get_provider",
"(",
")",
",",
"product",
"=",
"'ssm'",
",",
"opts",
"=",
"__opts__",
",",
"sigver",
"=",
"'4'",
")",
"log",
".",
"info",
"(",
"result",
")",
"return",
"result"
] | describes the associations for the specified ssm document or instance . | train | true |
1,927 | def createProfile(colorSpace, colorTemp=(-1)):
if (colorSpace not in ['LAB', 'XYZ', 'sRGB']):
raise PyCMSError(('Color space not supported for on-the-fly profile creation (%s)' % colorSpace))
if (colorSpace == 'LAB'):
try:
colorTemp = float(colorTemp)
except:
raise PyCMSError(('Color temperature must be numeric, "%s" not valid' % colorTemp))
try:
return core.createProfile(colorSpace, colorTemp)
except (TypeError, ValueError) as v:
raise PyCMSError(v)
| [
"def",
"createProfile",
"(",
"colorSpace",
",",
"colorTemp",
"=",
"(",
"-",
"1",
")",
")",
":",
"if",
"(",
"colorSpace",
"not",
"in",
"[",
"'LAB'",
",",
"'XYZ'",
",",
"'sRGB'",
"]",
")",
":",
"raise",
"PyCMSError",
"(",
"(",
"'Color space not supported for on-the-fly profile creation (%s)'",
"%",
"colorSpace",
")",
")",
"if",
"(",
"colorSpace",
"==",
"'LAB'",
")",
":",
"try",
":",
"colorTemp",
"=",
"float",
"(",
"colorTemp",
")",
"except",
":",
"raise",
"PyCMSError",
"(",
"(",
"'Color temperature must be numeric, \"%s\" not valid'",
"%",
"colorTemp",
")",
")",
"try",
":",
"return",
"core",
".",
"createProfile",
"(",
"colorSpace",
",",
"colorTemp",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
"as",
"v",
":",
"raise",
"PyCMSError",
"(",
"v",
")"
] | creates a profile . | train | false |
1,928 | def find_change_in_general_direction(curvature):
curv_pos = (curvature > 0)
split = []
currently_pos = curv_pos[0]
for (c, is_pos) in zip(range(curvature.shape[0]), curv_pos):
if (is_pos != currently_pos):
currently_pos = is_pos
split.append(c)
return split
| [
"def",
"find_change_in_general_direction",
"(",
"curvature",
")",
":",
"curv_pos",
"=",
"(",
"curvature",
">",
"0",
")",
"split",
"=",
"[",
"]",
"currently_pos",
"=",
"curv_pos",
"[",
"0",
"]",
"for",
"(",
"c",
",",
"is_pos",
")",
"in",
"zip",
"(",
"range",
"(",
"curvature",
".",
"shape",
"[",
"0",
"]",
")",
",",
"curv_pos",
")",
":",
"if",
"(",
"is_pos",
"!=",
"currently_pos",
")",
":",
"currently_pos",
"=",
"is_pos",
"split",
".",
"append",
"(",
"c",
")",
"return",
"split"
] | return indecies of where the singn of curvature has flipped . | train | false |
1,929 | def makeFakeClient(clientProtocol):
return FakeTransport(clientProtocol, isServer=False)
| [
"def",
"makeFakeClient",
"(",
"clientProtocol",
")",
":",
"return",
"FakeTransport",
"(",
"clientProtocol",
",",
"isServer",
"=",
"False",
")"
] | create and return a new in-memory transport hooked up to the given protocol . | train | false |
1,932 | def get_pipeline_definition(pipeline_id, version='latest', region=None, key=None, keyid=None, profile=None):
client = _get_client(region, key, keyid, profile)
r = {}
try:
r['result'] = client.get_pipeline_definition(pipelineId=pipeline_id, version=version)
except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:
r['error'] = str(e)
return r
| [
"def",
"get_pipeline_definition",
"(",
"pipeline_id",
",",
"version",
"=",
"'latest'",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
")",
":",
"client",
"=",
"_get_client",
"(",
"region",
",",
"key",
",",
"keyid",
",",
"profile",
")",
"r",
"=",
"{",
"}",
"try",
":",
"r",
"[",
"'result'",
"]",
"=",
"client",
".",
"get_pipeline_definition",
"(",
"pipelineId",
"=",
"pipeline_id",
",",
"version",
"=",
"version",
")",
"except",
"(",
"botocore",
".",
"exceptions",
".",
"BotoCoreError",
",",
"botocore",
".",
"exceptions",
".",
"ClientError",
")",
"as",
"e",
":",
"r",
"[",
"'error'",
"]",
"=",
"str",
"(",
"e",
")",
"return",
"r"
] | get the definition of the specified pipeline . | train | false |
1,933 | def unload(*plugins):
observers = PluginImplementations(interfaces.IPluginObserver)
for plugin in plugins:
if (plugin in _PLUGINS):
_PLUGINS.remove(plugin)
if (plugin in _PLUGINS_SERVICE):
del _PLUGINS_SERVICE[plugin]
else:
raise Exception(('Cannot unload plugin `%s`' % plugin))
service = _get_service(plugin)
for observer_plugin in observers:
observer_plugin.before_unload(service)
service.deactivate()
_PLUGINS_CLASS.remove(service.__class__)
for observer_plugin in observers:
observer_plugin.after_unload(service)
plugins_update()
| [
"def",
"unload",
"(",
"*",
"plugins",
")",
":",
"observers",
"=",
"PluginImplementations",
"(",
"interfaces",
".",
"IPluginObserver",
")",
"for",
"plugin",
"in",
"plugins",
":",
"if",
"(",
"plugin",
"in",
"_PLUGINS",
")",
":",
"_PLUGINS",
".",
"remove",
"(",
"plugin",
")",
"if",
"(",
"plugin",
"in",
"_PLUGINS_SERVICE",
")",
":",
"del",
"_PLUGINS_SERVICE",
"[",
"plugin",
"]",
"else",
":",
"raise",
"Exception",
"(",
"(",
"'Cannot unload plugin `%s`'",
"%",
"plugin",
")",
")",
"service",
"=",
"_get_service",
"(",
"plugin",
")",
"for",
"observer_plugin",
"in",
"observers",
":",
"observer_plugin",
".",
"before_unload",
"(",
"service",
")",
"service",
".",
"deactivate",
"(",
")",
"_PLUGINS_CLASS",
".",
"remove",
"(",
"service",
".",
"__class__",
")",
"for",
"observer_plugin",
"in",
"observers",
":",
"observer_plugin",
".",
"after_unload",
"(",
"service",
")",
"plugins_update",
"(",
")"
] | unload specified fault manager module module: string module to unload cli example: . | train | false |
1,934 | def bbox_artist(artist, renderer, props=None, fill=True):
if (props is None):
props = {}
props = props.copy()
pad = props.pop('pad', 4)
pad = renderer.points_to_pixels(pad)
bbox = artist.get_window_extent(renderer)
(l, b, w, h) = bbox.bounds
l -= (pad / 2.0)
b -= (pad / 2.0)
w += pad
h += pad
r = Rectangle(xy=(l, b), width=w, height=h, fill=fill)
r.set_transform(transforms.IdentityTransform())
r.set_clip_on(False)
r.update(props)
r.draw(renderer)
| [
"def",
"bbox_artist",
"(",
"artist",
",",
"renderer",
",",
"props",
"=",
"None",
",",
"fill",
"=",
"True",
")",
":",
"if",
"(",
"props",
"is",
"None",
")",
":",
"props",
"=",
"{",
"}",
"props",
"=",
"props",
".",
"copy",
"(",
")",
"pad",
"=",
"props",
".",
"pop",
"(",
"'pad'",
",",
"4",
")",
"pad",
"=",
"renderer",
".",
"points_to_pixels",
"(",
"pad",
")",
"bbox",
"=",
"artist",
".",
"get_window_extent",
"(",
"renderer",
")",
"(",
"l",
",",
"b",
",",
"w",
",",
"h",
")",
"=",
"bbox",
".",
"bounds",
"l",
"-=",
"(",
"pad",
"/",
"2.0",
")",
"b",
"-=",
"(",
"pad",
"/",
"2.0",
")",
"w",
"+=",
"pad",
"h",
"+=",
"pad",
"r",
"=",
"Rectangle",
"(",
"xy",
"=",
"(",
"l",
",",
"b",
")",
",",
"width",
"=",
"w",
",",
"height",
"=",
"h",
",",
"fill",
"=",
"fill",
")",
"r",
".",
"set_transform",
"(",
"transforms",
".",
"IdentityTransform",
"(",
")",
")",
"r",
".",
"set_clip_on",
"(",
"False",
")",
"r",
".",
"update",
"(",
"props",
")",
"r",
".",
"draw",
"(",
"renderer",
")"
] | this is a debug function to draw a rectangle around the bounding box returned by :meth:~matplotlib . | train | false |
1,936 | def isvalid_identifier(s):
return ((s is not None) and (not iskeyword(s)) and (re.match('^[_a-zA-Z][_a-zA-Z0-9]*$', s) is not None))
| [
"def",
"isvalid_identifier",
"(",
"s",
")",
":",
"return",
"(",
"(",
"s",
"is",
"not",
"None",
")",
"and",
"(",
"not",
"iskeyword",
"(",
"s",
")",
")",
"and",
"(",
"re",
".",
"match",
"(",
"'^[_a-zA-Z][_a-zA-Z0-9]*$'",
",",
"s",
")",
"is",
"not",
"None",
")",
")"
] | check whether a string is a valid python identifier examples . | train | false |
1,938 | def back_tick(cmd, ret_err=False, as_str=True, shell=False):
proc = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=shell)
(out, err) = proc.communicate()
retcode = proc.returncode
if (retcode is None):
proc.terminate()
raise RuntimeError((cmd + ' process did not terminate'))
if (retcode != 0):
raise RuntimeError((cmd + (' process returned code %d' % retcode)))
out = out.strip()
if as_str:
out = out.decode('latin-1')
if (not ret_err):
return out
err = err.strip()
if as_str:
err = err.decode('latin-1')
return (out, err)
| [
"def",
"back_tick",
"(",
"cmd",
",",
"ret_err",
"=",
"False",
",",
"as_str",
"=",
"True",
",",
"shell",
"=",
"False",
")",
":",
"proc",
"=",
"Popen",
"(",
"cmd",
",",
"stdout",
"=",
"PIPE",
",",
"stderr",
"=",
"PIPE",
",",
"shell",
"=",
"shell",
")",
"(",
"out",
",",
"err",
")",
"=",
"proc",
".",
"communicate",
"(",
")",
"retcode",
"=",
"proc",
".",
"returncode",
"if",
"(",
"retcode",
"is",
"None",
")",
":",
"proc",
".",
"terminate",
"(",
")",
"raise",
"RuntimeError",
"(",
"(",
"cmd",
"+",
"' process did not terminate'",
")",
")",
"if",
"(",
"retcode",
"!=",
"0",
")",
":",
"raise",
"RuntimeError",
"(",
"(",
"cmd",
"+",
"(",
"' process returned code %d'",
"%",
"retcode",
")",
")",
")",
"out",
"=",
"out",
".",
"strip",
"(",
")",
"if",
"as_str",
":",
"out",
"=",
"out",
".",
"decode",
"(",
"'latin-1'",
")",
"if",
"(",
"not",
"ret_err",
")",
":",
"return",
"out",
"err",
"=",
"err",
".",
"strip",
"(",
")",
"if",
"as_str",
":",
"err",
"=",
"err",
".",
"decode",
"(",
"'latin-1'",
")",
"return",
"(",
"out",
",",
"err",
")"
] | run command cmd . | train | false |
1,939 | def _unevaluated_Mul(*args):
args = list(args)
newargs = []
ncargs = []
co = S.One
while args:
a = args.pop()
if a.is_Mul:
(c, nc) = a.args_cnc()
args.extend(c)
if nc:
ncargs.append(Mul._from_args(nc))
elif a.is_Number:
co *= a
else:
newargs.append(a)
_mulsort(newargs)
if (co is not S.One):
newargs.insert(0, co)
if ncargs:
newargs.append(Mul._from_args(ncargs))
return Mul._from_args(newargs)
| [
"def",
"_unevaluated_Mul",
"(",
"*",
"args",
")",
":",
"args",
"=",
"list",
"(",
"args",
")",
"newargs",
"=",
"[",
"]",
"ncargs",
"=",
"[",
"]",
"co",
"=",
"S",
".",
"One",
"while",
"args",
":",
"a",
"=",
"args",
".",
"pop",
"(",
")",
"if",
"a",
".",
"is_Mul",
":",
"(",
"c",
",",
"nc",
")",
"=",
"a",
".",
"args_cnc",
"(",
")",
"args",
".",
"extend",
"(",
"c",
")",
"if",
"nc",
":",
"ncargs",
".",
"append",
"(",
"Mul",
".",
"_from_args",
"(",
"nc",
")",
")",
"elif",
"a",
".",
"is_Number",
":",
"co",
"*=",
"a",
"else",
":",
"newargs",
".",
"append",
"(",
"a",
")",
"_mulsort",
"(",
"newargs",
")",
"if",
"(",
"co",
"is",
"not",
"S",
".",
"One",
")",
":",
"newargs",
".",
"insert",
"(",
"0",
",",
"co",
")",
"if",
"ncargs",
":",
"newargs",
".",
"append",
"(",
"Mul",
".",
"_from_args",
"(",
"ncargs",
")",
")",
"return",
"Mul",
".",
"_from_args",
"(",
"newargs",
")"
] | return a well-formed unevaluated mul: numbers are collected and put in slot 0 . | train | false |
1,940 | def delete_event_source_mapping(UUID=None, EventSourceArn=None, FunctionName=None, region=None, key=None, keyid=None, profile=None):
ids = _get_ids(UUID, EventSourceArn=EventSourceArn, FunctionName=FunctionName)
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
for id in ids:
conn.delete_event_source_mapping(UUID=id)
return {'deleted': True}
except ClientError as e:
return {'deleted': False, 'error': salt.utils.boto3.get_error(e)}
| [
"def",
"delete_event_source_mapping",
"(",
"UUID",
"=",
"None",
",",
"EventSourceArn",
"=",
"None",
",",
"FunctionName",
"=",
"None",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
")",
":",
"ids",
"=",
"_get_ids",
"(",
"UUID",
",",
"EventSourceArn",
"=",
"EventSourceArn",
",",
"FunctionName",
"=",
"FunctionName",
")",
"try",
":",
"conn",
"=",
"_get_conn",
"(",
"region",
"=",
"region",
",",
"key",
"=",
"key",
",",
"keyid",
"=",
"keyid",
",",
"profile",
"=",
"profile",
")",
"for",
"id",
"in",
"ids",
":",
"conn",
".",
"delete_event_source_mapping",
"(",
"UUID",
"=",
"id",
")",
"return",
"{",
"'deleted'",
":",
"True",
"}",
"except",
"ClientError",
"as",
"e",
":",
"return",
"{",
"'deleted'",
":",
"False",
",",
"'error'",
":",
"salt",
".",
"utils",
".",
"boto3",
".",
"get_error",
"(",
"e",
")",
"}"
] | given an event source mapping id or an event source arn and functionname . | train | false |
1,942 | def on_start(param=None, **kwargs):
def _on_start_hook(func):
hook = _get_hook(func, 'on_start')
if (hook is None):
hook = _Hook(func, 'on_start')
_add_hook(func, hook)
hook._add_hook(kwargs)
return func
if callable(param):
return _on_start_hook(param)
else:
return (lambda func: _on_start_hook(func))
| [
"def",
"on_start",
"(",
"param",
"=",
"None",
",",
"**",
"kwargs",
")",
":",
"def",
"_on_start_hook",
"(",
"func",
")",
":",
"hook",
"=",
"_get_hook",
"(",
"func",
",",
"'on_start'",
")",
"if",
"(",
"hook",
"is",
"None",
")",
":",
"hook",
"=",
"_Hook",
"(",
"func",
",",
"'on_start'",
")",
"_add_hook",
"(",
"func",
",",
"hook",
")",
"hook",
".",
"_add_hook",
"(",
"kwargs",
")",
"return",
"func",
"if",
"callable",
"(",
"param",
")",
":",
"return",
"_on_start_hook",
"(",
"param",
")",
"else",
":",
"return",
"(",
"lambda",
"func",
":",
"_on_start_hook",
"(",
"func",
")",
")"
] | loads api keys . | train | false |
1,944 | def is_palindrom(sequence):
return (str(sequence) == str(sequence.reverse_complement()))
| [
"def",
"is_palindrom",
"(",
"sequence",
")",
":",
"return",
"(",
"str",
"(",
"sequence",
")",
"==",
"str",
"(",
"sequence",
".",
"reverse_complement",
"(",
")",
")",
")"
] | is_palindrom -> bool . | train | false |
1,945 | def getouterframes(frame, context=1):
framelist = []
while frame:
framelist.append(((frame,) + getframeinfo(frame, context)))
frame = frame.f_back
return framelist
| [
"def",
"getouterframes",
"(",
"frame",
",",
"context",
"=",
"1",
")",
":",
"framelist",
"=",
"[",
"]",
"while",
"frame",
":",
"framelist",
".",
"append",
"(",
"(",
"(",
"frame",
",",
")",
"+",
"getframeinfo",
"(",
"frame",
",",
"context",
")",
")",
")",
"frame",
"=",
"frame",
".",
"f_back",
"return",
"framelist"
] | get a list of records for a frame and all higher frames . | train | true |
1,946 | def store_dump(result, file):
result['di'] = sblock(result['di'])
result['ci'] = sblock(result['ci'])
result['hr'] = sblock(result['hr'])
result['ir'] = sblock(result['ir'])
with open(file, 'w') as input:
pickle.dump(result, input)
| [
"def",
"store_dump",
"(",
"result",
",",
"file",
")",
":",
"result",
"[",
"'di'",
"]",
"=",
"sblock",
"(",
"result",
"[",
"'di'",
"]",
")",
"result",
"[",
"'ci'",
"]",
"=",
"sblock",
"(",
"result",
"[",
"'ci'",
"]",
")",
"result",
"[",
"'hr'",
"]",
"=",
"sblock",
"(",
"result",
"[",
"'hr'",
"]",
")",
"result",
"[",
"'ir'",
"]",
"=",
"sblock",
"(",
"result",
"[",
"'ir'",
"]",
")",
"with",
"open",
"(",
"file",
",",
"'w'",
")",
"as",
"input",
":",
"pickle",
".",
"dump",
"(",
"result",
",",
"input",
")"
] | quick function to dump a result to a pickle . | train | false |
1,947 | def create_loadbalancer(call=None, kwargs=None):
if (call != 'function'):
raise SaltCloudSystemExit('The create_address function must be called with -f or --function.')
if (kwargs is None):
kwargs = {}
conn = get_conn()
datacenter_id = get_datacenter_id()
loadbalancer = LoadBalancer(name=kwargs.get('name'), ip=kwargs.get('ip'), dhcp=kwargs.get('dhcp'))
response = conn.create_loadbalancer(datacenter_id, loadbalancer)
_wait_for_completion(conn, response, 60, 'loadbalancer')
return response
| [
"def",
"create_loadbalancer",
"(",
"call",
"=",
"None",
",",
"kwargs",
"=",
"None",
")",
":",
"if",
"(",
"call",
"!=",
"'function'",
")",
":",
"raise",
"SaltCloudSystemExit",
"(",
"'The create_address function must be called with -f or --function.'",
")",
"if",
"(",
"kwargs",
"is",
"None",
")",
":",
"kwargs",
"=",
"{",
"}",
"conn",
"=",
"get_conn",
"(",
")",
"datacenter_id",
"=",
"get_datacenter_id",
"(",
")",
"loadbalancer",
"=",
"LoadBalancer",
"(",
"name",
"=",
"kwargs",
".",
"get",
"(",
"'name'",
")",
",",
"ip",
"=",
"kwargs",
".",
"get",
"(",
"'ip'",
")",
",",
"dhcp",
"=",
"kwargs",
".",
"get",
"(",
"'dhcp'",
")",
")",
"response",
"=",
"conn",
".",
"create_loadbalancer",
"(",
"datacenter_id",
",",
"loadbalancer",
")",
"_wait_for_completion",
"(",
"conn",
",",
"response",
",",
"60",
",",
"'loadbalancer'",
")",
"return",
"response"
] | creates a loadbalancer within the datacenter from the provider config . | train | true |
1,948 | @conf.commands.register
def fuzz(p, _inplace=0):
if (not _inplace):
p = p.copy()
q = p
while (not isinstance(q, NoPayload)):
for f in q.fields_desc:
if isinstance(f, PacketListField):
for r in getattr(q, f.name):
print 'fuzzing', repr(r)
fuzz(r, _inplace=1)
elif (f.default is not None):
rnd = f.randval()
if (rnd is not None):
q.default_fields[f.name] = rnd
q = q.payload
return p
| [
"@",
"conf",
".",
"commands",
".",
"register",
"def",
"fuzz",
"(",
"p",
",",
"_inplace",
"=",
"0",
")",
":",
"if",
"(",
"not",
"_inplace",
")",
":",
"p",
"=",
"p",
".",
"copy",
"(",
")",
"q",
"=",
"p",
"while",
"(",
"not",
"isinstance",
"(",
"q",
",",
"NoPayload",
")",
")",
":",
"for",
"f",
"in",
"q",
".",
"fields_desc",
":",
"if",
"isinstance",
"(",
"f",
",",
"PacketListField",
")",
":",
"for",
"r",
"in",
"getattr",
"(",
"q",
",",
"f",
".",
"name",
")",
":",
"print",
"'fuzzing'",
",",
"repr",
"(",
"r",
")",
"fuzz",
"(",
"r",
",",
"_inplace",
"=",
"1",
")",
"elif",
"(",
"f",
".",
"default",
"is",
"not",
"None",
")",
":",
"rnd",
"=",
"f",
".",
"randval",
"(",
")",
"if",
"(",
"rnd",
"is",
"not",
"None",
")",
":",
"q",
".",
"default_fields",
"[",
"f",
".",
"name",
"]",
"=",
"rnd",
"q",
"=",
"q",
".",
"payload",
"return",
"p"
] | transform a layer into a fuzzy layer by replacing some default values by random objects . | train | true |
1,949 | def simplify(expr, ratio=1.7, measure=count_ops, fu=False):
expr = sympify(expr)
try:
return expr._eval_simplify(ratio=ratio, measure=measure)
except AttributeError:
pass
original_expr = expr = signsimp(expr)
from sympy.simplify.hyperexpand import hyperexpand
from sympy.functions.special.bessel import BesselBase
from sympy import Sum, Product
if ((not isinstance(expr, Basic)) or (not expr.args)):
return expr
if (not isinstance(expr, (Add, Mul, Pow, ExpBase))):
if (isinstance(expr, Function) and hasattr(expr, 'inverse')):
if ((len(expr.args) == 1) and (len(expr.args[0].args) == 1) and isinstance(expr.args[0], expr.inverse(argindex=1))):
return simplify(expr.args[0].args[0], ratio=ratio, measure=measure, fu=fu)
return expr.func(*[simplify(x, ratio=ratio, measure=measure, fu=fu) for x in expr.args])
def shorter(*choices):
'Return the choice that has the fewest ops. In case of a tie,\n the expression listed first is selected.'
if (not has_variety(choices)):
return choices[0]
return min(choices, key=measure)
expr = bottom_up(expr, (lambda w: w.normal()))
expr = Mul(*powsimp(expr).as_content_primitive())
_e = cancel(expr)
expr1 = shorter(_e, _mexpand(_e).cancel())
expr2 = shorter(together(expr, deep=True), together(expr1, deep=True))
if (ratio is S.Infinity):
expr = expr2
else:
expr = shorter(expr2, expr1, expr)
if (not isinstance(expr, Basic)):
return expr
expr = factor_terms(expr, sign=False)
expr = hyperexpand(expr)
expr = piecewise_fold(expr)
if expr.has(BesselBase):
expr = besselsimp(expr)
if ((expr.has(TrigonometricFunction) and (not fu)) or expr.has(HyperbolicFunction)):
expr = trigsimp(expr, deep=True)
if expr.has(log):
expr = shorter(expand_log(expr, deep=True), logcombine(expr))
if expr.has(CombinatorialFunction, gamma):
expr = combsimp(expr)
if expr.has(Sum):
expr = sum_simplify(expr)
if expr.has(Product):
expr = product_simplify(expr)
short = shorter(powsimp(expr, combine='exp', deep=True), powsimp(expr), expr)
short = shorter(short, factor_terms(short), expand_power_exp(expand_mul(short)))
if short.has(TrigonometricFunction, HyperbolicFunction, ExpBase):
short = exptrigsimp(short, simplify=False)
hollow_mul = Transform((lambda x: Mul(*x.args)), (lambda x: (x.is_Mul and (len(x.args) == 2) and x.args[0].is_Number and x.args[1].is_Add and x.is_commutative)))
expr = short.xreplace(hollow_mul)
(numer, denom) = expr.as_numer_denom()
if denom.is_Add:
(n, d) = fraction(radsimp((1 / denom), symbolic=False, max_terms=1))
if (n is not S.One):
expr = ((numer * n).expand() / d)
if expr.could_extract_minus_sign():
(n, d) = fraction(expr)
if (d != 0):
expr = signsimp(((- n) / (- d)))
if (measure(expr) > (ratio * measure(original_expr))):
expr = original_expr
return expr
| [
"def",
"simplify",
"(",
"expr",
",",
"ratio",
"=",
"1.7",
",",
"measure",
"=",
"count_ops",
",",
"fu",
"=",
"False",
")",
":",
"expr",
"=",
"sympify",
"(",
"expr",
")",
"try",
":",
"return",
"expr",
".",
"_eval_simplify",
"(",
"ratio",
"=",
"ratio",
",",
"measure",
"=",
"measure",
")",
"except",
"AttributeError",
":",
"pass",
"original_expr",
"=",
"expr",
"=",
"signsimp",
"(",
"expr",
")",
"from",
"sympy",
".",
"simplify",
".",
"hyperexpand",
"import",
"hyperexpand",
"from",
"sympy",
".",
"functions",
".",
"special",
".",
"bessel",
"import",
"BesselBase",
"from",
"sympy",
"import",
"Sum",
",",
"Product",
"if",
"(",
"(",
"not",
"isinstance",
"(",
"expr",
",",
"Basic",
")",
")",
"or",
"(",
"not",
"expr",
".",
"args",
")",
")",
":",
"return",
"expr",
"if",
"(",
"not",
"isinstance",
"(",
"expr",
",",
"(",
"Add",
",",
"Mul",
",",
"Pow",
",",
"ExpBase",
")",
")",
")",
":",
"if",
"(",
"isinstance",
"(",
"expr",
",",
"Function",
")",
"and",
"hasattr",
"(",
"expr",
",",
"'inverse'",
")",
")",
":",
"if",
"(",
"(",
"len",
"(",
"expr",
".",
"args",
")",
"==",
"1",
")",
"and",
"(",
"len",
"(",
"expr",
".",
"args",
"[",
"0",
"]",
".",
"args",
")",
"==",
"1",
")",
"and",
"isinstance",
"(",
"expr",
".",
"args",
"[",
"0",
"]",
",",
"expr",
".",
"inverse",
"(",
"argindex",
"=",
"1",
")",
")",
")",
":",
"return",
"simplify",
"(",
"expr",
".",
"args",
"[",
"0",
"]",
".",
"args",
"[",
"0",
"]",
",",
"ratio",
"=",
"ratio",
",",
"measure",
"=",
"measure",
",",
"fu",
"=",
"fu",
")",
"return",
"expr",
".",
"func",
"(",
"*",
"[",
"simplify",
"(",
"x",
",",
"ratio",
"=",
"ratio",
",",
"measure",
"=",
"measure",
",",
"fu",
"=",
"fu",
")",
"for",
"x",
"in",
"expr",
".",
"args",
"]",
")",
"def",
"shorter",
"(",
"*",
"choices",
")",
":",
"if",
"(",
"not",
"has_variety",
"(",
"choices",
")",
")",
":",
"return",
"choices",
"[",
"0",
"]",
"return",
"min",
"(",
"choices",
",",
"key",
"=",
"measure",
")",
"expr",
"=",
"bottom_up",
"(",
"expr",
",",
"(",
"lambda",
"w",
":",
"w",
".",
"normal",
"(",
")",
")",
")",
"expr",
"=",
"Mul",
"(",
"*",
"powsimp",
"(",
"expr",
")",
".",
"as_content_primitive",
"(",
")",
")",
"_e",
"=",
"cancel",
"(",
"expr",
")",
"expr1",
"=",
"shorter",
"(",
"_e",
",",
"_mexpand",
"(",
"_e",
")",
".",
"cancel",
"(",
")",
")",
"expr2",
"=",
"shorter",
"(",
"together",
"(",
"expr",
",",
"deep",
"=",
"True",
")",
",",
"together",
"(",
"expr1",
",",
"deep",
"=",
"True",
")",
")",
"if",
"(",
"ratio",
"is",
"S",
".",
"Infinity",
")",
":",
"expr",
"=",
"expr2",
"else",
":",
"expr",
"=",
"shorter",
"(",
"expr2",
",",
"expr1",
",",
"expr",
")",
"if",
"(",
"not",
"isinstance",
"(",
"expr",
",",
"Basic",
")",
")",
":",
"return",
"expr",
"expr",
"=",
"factor_terms",
"(",
"expr",
",",
"sign",
"=",
"False",
")",
"expr",
"=",
"hyperexpand",
"(",
"expr",
")",
"expr",
"=",
"piecewise_fold",
"(",
"expr",
")",
"if",
"expr",
".",
"has",
"(",
"BesselBase",
")",
":",
"expr",
"=",
"besselsimp",
"(",
"expr",
")",
"if",
"(",
"(",
"expr",
".",
"has",
"(",
"TrigonometricFunction",
")",
"and",
"(",
"not",
"fu",
")",
")",
"or",
"expr",
".",
"has",
"(",
"HyperbolicFunction",
")",
")",
":",
"expr",
"=",
"trigsimp",
"(",
"expr",
",",
"deep",
"=",
"True",
")",
"if",
"expr",
".",
"has",
"(",
"log",
")",
":",
"expr",
"=",
"shorter",
"(",
"expand_log",
"(",
"expr",
",",
"deep",
"=",
"True",
")",
",",
"logcombine",
"(",
"expr",
")",
")",
"if",
"expr",
".",
"has",
"(",
"CombinatorialFunction",
",",
"gamma",
")",
":",
"expr",
"=",
"combsimp",
"(",
"expr",
")",
"if",
"expr",
".",
"has",
"(",
"Sum",
")",
":",
"expr",
"=",
"sum_simplify",
"(",
"expr",
")",
"if",
"expr",
".",
"has",
"(",
"Product",
")",
":",
"expr",
"=",
"product_simplify",
"(",
"expr",
")",
"short",
"=",
"shorter",
"(",
"powsimp",
"(",
"expr",
",",
"combine",
"=",
"'exp'",
",",
"deep",
"=",
"True",
")",
",",
"powsimp",
"(",
"expr",
")",
",",
"expr",
")",
"short",
"=",
"shorter",
"(",
"short",
",",
"factor_terms",
"(",
"short",
")",
",",
"expand_power_exp",
"(",
"expand_mul",
"(",
"short",
")",
")",
")",
"if",
"short",
".",
"has",
"(",
"TrigonometricFunction",
",",
"HyperbolicFunction",
",",
"ExpBase",
")",
":",
"short",
"=",
"exptrigsimp",
"(",
"short",
",",
"simplify",
"=",
"False",
")",
"hollow_mul",
"=",
"Transform",
"(",
"(",
"lambda",
"x",
":",
"Mul",
"(",
"*",
"x",
".",
"args",
")",
")",
",",
"(",
"lambda",
"x",
":",
"(",
"x",
".",
"is_Mul",
"and",
"(",
"len",
"(",
"x",
".",
"args",
")",
"==",
"2",
")",
"and",
"x",
".",
"args",
"[",
"0",
"]",
".",
"is_Number",
"and",
"x",
".",
"args",
"[",
"1",
"]",
".",
"is_Add",
"and",
"x",
".",
"is_commutative",
")",
")",
")",
"expr",
"=",
"short",
".",
"xreplace",
"(",
"hollow_mul",
")",
"(",
"numer",
",",
"denom",
")",
"=",
"expr",
".",
"as_numer_denom",
"(",
")",
"if",
"denom",
".",
"is_Add",
":",
"(",
"n",
",",
"d",
")",
"=",
"fraction",
"(",
"radsimp",
"(",
"(",
"1",
"/",
"denom",
")",
",",
"symbolic",
"=",
"False",
",",
"max_terms",
"=",
"1",
")",
")",
"if",
"(",
"n",
"is",
"not",
"S",
".",
"One",
")",
":",
"expr",
"=",
"(",
"(",
"numer",
"*",
"n",
")",
".",
"expand",
"(",
")",
"/",
"d",
")",
"if",
"expr",
".",
"could_extract_minus_sign",
"(",
")",
":",
"(",
"n",
",",
"d",
")",
"=",
"fraction",
"(",
"expr",
")",
"if",
"(",
"d",
"!=",
"0",
")",
":",
"expr",
"=",
"signsimp",
"(",
"(",
"(",
"-",
"n",
")",
"/",
"(",
"-",
"d",
")",
")",
")",
"if",
"(",
"measure",
"(",
"expr",
")",
">",
"(",
"ratio",
"*",
"measure",
"(",
"original_expr",
")",
")",
")",
":",
"expr",
"=",
"original_expr",
"return",
"expr"
] | cancel units that appear in both numerator and denominator . | train | false |
1,951 | def s_update(name, value):
if (not blocks.CURRENT.names.has_key(name)):
raise sex.SullyRuntimeError(("NO OBJECT WITH NAME '%s' FOUND IN CURRENT REQUEST" % name))
blocks.CURRENT.names[name].value = value
| [
"def",
"s_update",
"(",
"name",
",",
"value",
")",
":",
"if",
"(",
"not",
"blocks",
".",
"CURRENT",
".",
"names",
".",
"has_key",
"(",
"name",
")",
")",
":",
"raise",
"sex",
".",
"SullyRuntimeError",
"(",
"(",
"\"NO OBJECT WITH NAME '%s' FOUND IN CURRENT REQUEST\"",
"%",
"name",
")",
")",
"blocks",
".",
"CURRENT",
".",
"names",
"[",
"name",
"]",
".",
"value",
"=",
"value"
] | update the value of the named primitive in the currently open request . | train | false |
1,952 | def run_pre_commit_script(component, translation, filename):
run_hook(component, translation, component.pre_commit_script, None, filename)
| [
"def",
"run_pre_commit_script",
"(",
"component",
",",
"translation",
",",
"filename",
")",
":",
"run_hook",
"(",
"component",
",",
"translation",
",",
"component",
".",
"pre_commit_script",
",",
"None",
",",
"filename",
")"
] | pre commit hook . | train | false |
1,954 | def _warn_if_unused_glob(log_printer, globs, used_globs, message):
unused_globs = (set(globs) - set(used_globs))
for glob in unused_globs:
log_printer.warn(message.format(glob))
| [
"def",
"_warn_if_unused_glob",
"(",
"log_printer",
",",
"globs",
",",
"used_globs",
",",
"message",
")",
":",
"unused_globs",
"=",
"(",
"set",
"(",
"globs",
")",
"-",
"set",
"(",
"used_globs",
")",
")",
"for",
"glob",
"in",
"unused_globs",
":",
"log_printer",
".",
"warn",
"(",
"message",
".",
"format",
"(",
"glob",
")",
")"
] | warn if a glob has not been used . | train | false |
1,955 | def libvlc_media_list_player_play(p_mlp):
f = (_Cfunctions.get('libvlc_media_list_player_play', None) or _Cfunction('libvlc_media_list_player_play', ((1,),), None, None, MediaListPlayer))
return f(p_mlp)
| [
"def",
"libvlc_media_list_player_play",
"(",
"p_mlp",
")",
":",
"f",
"=",
"(",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_media_list_player_play'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_media_list_player_play'",
",",
"(",
"(",
"1",
",",
")",
",",
")",
",",
"None",
",",
"None",
",",
"MediaListPlayer",
")",
")",
"return",
"f",
"(",
"p_mlp",
")"
] | play media list . | train | false |
1,956 | def get_collection_ids_subscribed_to(user_id):
subscriptions_model = user_models.UserSubscriptionsModel.get(user_id, strict=False)
return (subscriptions_model.collection_ids if subscriptions_model else [])
| [
"def",
"get_collection_ids_subscribed_to",
"(",
"user_id",
")",
":",
"subscriptions_model",
"=",
"user_models",
".",
"UserSubscriptionsModel",
".",
"get",
"(",
"user_id",
",",
"strict",
"=",
"False",
")",
"return",
"(",
"subscriptions_model",
".",
"collection_ids",
"if",
"subscriptions_model",
"else",
"[",
"]",
")"
] | returns a list with ids of all collections that the given user subscribes to . | train | false |
1,958 | def getAlterationFile(fileName):
settingsAlterationsDirectory = archive.getSettingsPath('alterations')
archive.makeDirectory(settingsAlterationsDirectory)
fileInSettingsAlterationsDirectory = getFileInGivenDirectory(settingsAlterationsDirectory, fileName)
if (fileInSettingsAlterationsDirectory != ''):
return fileInSettingsAlterationsDirectory
alterationsDirectory = archive.getSkeinforgePath('alterations')
return getFileInGivenDirectory(alterationsDirectory, fileName)
| [
"def",
"getAlterationFile",
"(",
"fileName",
")",
":",
"settingsAlterationsDirectory",
"=",
"archive",
".",
"getSettingsPath",
"(",
"'alterations'",
")",
"archive",
".",
"makeDirectory",
"(",
"settingsAlterationsDirectory",
")",
"fileInSettingsAlterationsDirectory",
"=",
"getFileInGivenDirectory",
"(",
"settingsAlterationsDirectory",
",",
"fileName",
")",
"if",
"(",
"fileInSettingsAlterationsDirectory",
"!=",
"''",
")",
":",
"return",
"fileInSettingsAlterationsDirectory",
"alterationsDirectory",
"=",
"archive",
".",
"getSkeinforgePath",
"(",
"'alterations'",
")",
"return",
"getFileInGivenDirectory",
"(",
"alterationsDirectory",
",",
"fileName",
")"
] | get the file from the filename or the lowercase filename in the alterations directories . | train | false |
1,959 | def read_sheets(archive):
xml_source = archive.read(ARC_WORKBOOK)
tree = fromstring(xml_source)
for element in safe_iterator(tree, ('{%s}sheet' % SHEET_MAIN_NS)):
attrib = element.attrib
attrib['id'] = attrib[('{%s}id' % REL_NS)]
del attrib[('{%s}id' % REL_NS)]
if attrib['id']:
(yield attrib)
| [
"def",
"read_sheets",
"(",
"archive",
")",
":",
"xml_source",
"=",
"archive",
".",
"read",
"(",
"ARC_WORKBOOK",
")",
"tree",
"=",
"fromstring",
"(",
"xml_source",
")",
"for",
"element",
"in",
"safe_iterator",
"(",
"tree",
",",
"(",
"'{%s}sheet'",
"%",
"SHEET_MAIN_NS",
")",
")",
":",
"attrib",
"=",
"element",
".",
"attrib",
"attrib",
"[",
"'id'",
"]",
"=",
"attrib",
"[",
"(",
"'{%s}id'",
"%",
"REL_NS",
")",
"]",
"del",
"attrib",
"[",
"(",
"'{%s}id'",
"%",
"REL_NS",
")",
"]",
"if",
"attrib",
"[",
"'id'",
"]",
":",
"(",
"yield",
"attrib",
")"
] | read worksheet titles and ids for a workbook . | train | true |
1,960 | def url_add_parameters(url, params):
if params:
fragments = list(urlparse(url))
fragments[4] = urlencode((parse_qsl(fragments[4]) + params.items()))
url = urlunparse(fragments)
return url
| [
"def",
"url_add_parameters",
"(",
"url",
",",
"params",
")",
":",
"if",
"params",
":",
"fragments",
"=",
"list",
"(",
"urlparse",
"(",
"url",
")",
")",
"fragments",
"[",
"4",
"]",
"=",
"urlencode",
"(",
"(",
"parse_qsl",
"(",
"fragments",
"[",
"4",
"]",
")",
"+",
"params",
".",
"items",
"(",
")",
")",
")",
"url",
"=",
"urlunparse",
"(",
"fragments",
")",
"return",
"url"
] | adds parameters to url . | train | false |
1,962 | def project_list(profile=None, **connection_args):
auth(profile, **connection_args)
if (_OS_IDENTITY_API_VERSION > 2):
return tenant_list(profile, **connection_args)
else:
return False
| [
"def",
"project_list",
"(",
"profile",
"=",
"None",
",",
"**",
"connection_args",
")",
":",
"auth",
"(",
"profile",
",",
"**",
"connection_args",
")",
"if",
"(",
"_OS_IDENTITY_API_VERSION",
">",
"2",
")",
":",
"return",
"tenant_list",
"(",
"profile",
",",
"**",
"connection_args",
")",
"else",
":",
"return",
"False"
] | return a list of available projects . | train | true |
1,963 | def ffmpeg_write_image(filename, image, logfile=False):
if (image.dtype != 'uint8'):
image = image.astype('uint8')
cmd = [get_setting('FFMPEG_BINARY'), '-y', '-s', ('%dx%d' % image.shape[:2][::(-1)]), '-f', 'rawvideo', '-pix_fmt', ('rgba' if (image.shape[2] == 4) else 'rgb24'), '-i', '-', filename]
if logfile:
log_file = open((filename + '.log'), 'w+')
else:
log_file = sp.PIPE
popen_params = {'stdout': DEVNULL, 'stderr': log_file, 'stdin': sp.PIPE}
if (os.name == 'nt'):
popen_params['creationflags'] = 134217728
proc = sp.Popen(cmd, **popen_params)
(out, err) = proc.communicate(image.tostring())
if proc.returncode:
err = '\n'.join([('[MoviePy] Running : %s\n' % cmd), 'WARNING: this command returned an error:', err.decode('utf8')])
raise IOError(err)
del proc
| [
"def",
"ffmpeg_write_image",
"(",
"filename",
",",
"image",
",",
"logfile",
"=",
"False",
")",
":",
"if",
"(",
"image",
".",
"dtype",
"!=",
"'uint8'",
")",
":",
"image",
"=",
"image",
".",
"astype",
"(",
"'uint8'",
")",
"cmd",
"=",
"[",
"get_setting",
"(",
"'FFMPEG_BINARY'",
")",
",",
"'-y'",
",",
"'-s'",
",",
"(",
"'%dx%d'",
"%",
"image",
".",
"shape",
"[",
":",
"2",
"]",
"[",
":",
":",
"(",
"-",
"1",
")",
"]",
")",
",",
"'-f'",
",",
"'rawvideo'",
",",
"'-pix_fmt'",
",",
"(",
"'rgba'",
"if",
"(",
"image",
".",
"shape",
"[",
"2",
"]",
"==",
"4",
")",
"else",
"'rgb24'",
")",
",",
"'-i'",
",",
"'-'",
",",
"filename",
"]",
"if",
"logfile",
":",
"log_file",
"=",
"open",
"(",
"(",
"filename",
"+",
"'.log'",
")",
",",
"'w+'",
")",
"else",
":",
"log_file",
"=",
"sp",
".",
"PIPE",
"popen_params",
"=",
"{",
"'stdout'",
":",
"DEVNULL",
",",
"'stderr'",
":",
"log_file",
",",
"'stdin'",
":",
"sp",
".",
"PIPE",
"}",
"if",
"(",
"os",
".",
"name",
"==",
"'nt'",
")",
":",
"popen_params",
"[",
"'creationflags'",
"]",
"=",
"134217728",
"proc",
"=",
"sp",
".",
"Popen",
"(",
"cmd",
",",
"**",
"popen_params",
")",
"(",
"out",
",",
"err",
")",
"=",
"proc",
".",
"communicate",
"(",
"image",
".",
"tostring",
"(",
")",
")",
"if",
"proc",
".",
"returncode",
":",
"err",
"=",
"'\\n'",
".",
"join",
"(",
"[",
"(",
"'[MoviePy] Running : %s\\n'",
"%",
"cmd",
")",
",",
"'WARNING: this command returned an error:'",
",",
"err",
".",
"decode",
"(",
"'utf8'",
")",
"]",
")",
"raise",
"IOError",
"(",
"err",
")",
"del",
"proc"
] | writes an image to a file . | train | false |
1,965 | def test_custom_model_parametrized_decorator():
def cosine(x, amplitude=1):
return [(amplitude * np.cos(x))]
@custom_model(fit_deriv=cosine)
def sine(x, amplitude=1):
return (amplitude * np.sin(x))
assert issubclass(sine, Model)
s = sine(2)
assert_allclose(s((np.pi / 2)), 2)
assert_allclose(s.fit_deriv(0, 2), 2)
| [
"def",
"test_custom_model_parametrized_decorator",
"(",
")",
":",
"def",
"cosine",
"(",
"x",
",",
"amplitude",
"=",
"1",
")",
":",
"return",
"[",
"(",
"amplitude",
"*",
"np",
".",
"cos",
"(",
"x",
")",
")",
"]",
"@",
"custom_model",
"(",
"fit_deriv",
"=",
"cosine",
")",
"def",
"sine",
"(",
"x",
",",
"amplitude",
"=",
"1",
")",
":",
"return",
"(",
"amplitude",
"*",
"np",
".",
"sin",
"(",
"x",
")",
")",
"assert",
"issubclass",
"(",
"sine",
",",
"Model",
")",
"s",
"=",
"sine",
"(",
"2",
")",
"assert_allclose",
"(",
"s",
"(",
"(",
"np",
".",
"pi",
"/",
"2",
")",
")",
",",
"2",
")",
"assert_allclose",
"(",
"s",
".",
"fit_deriv",
"(",
"0",
",",
"2",
")",
",",
"2",
")"
] | tests using custom_model as a decorator with parameters . | train | false |
1,966 | @register.as_tag
def blog_categories(*args):
posts = BlogPost.objects.published()
categories = BlogCategory.objects.filter(blogposts__in=posts)
return list(categories.annotate(post_count=Count(u'blogposts')))
| [
"@",
"register",
".",
"as_tag",
"def",
"blog_categories",
"(",
"*",
"args",
")",
":",
"posts",
"=",
"BlogPost",
".",
"objects",
".",
"published",
"(",
")",
"categories",
"=",
"BlogCategory",
".",
"objects",
".",
"filter",
"(",
"blogposts__in",
"=",
"posts",
")",
"return",
"list",
"(",
"categories",
".",
"annotate",
"(",
"post_count",
"=",
"Count",
"(",
"u'blogposts'",
")",
")",
")"
] | put a list of categories for blog posts into the template context . | train | true |
1,967 | def p_iteration_statement_3(t):
pass
| [
"def",
"p_iteration_statement_3",
"(",
"t",
")",
":",
"pass"
] | iteration_statement : do statement while lparen expression rparen semi . | train | false |
1,968 | def offline(zpool, *vdevs, **kwargs):
ret = {}
if (not exists(zpool)):
ret[zpool] = 'storage pool does not exist'
return ret
if ((not vdevs) or (len(vdevs) <= 0)):
ret[zpool] = 'no devices specified'
return ret
ret[zpool] = {}
devs = ' '.join(vdevs)
zpool_cmd = _check_zpool()
cmd = '{zpool_cmd} offline {temp}{zpool} {devs}'.format(zpool_cmd=zpool_cmd, temp=('-t ' if kwargs.get('temporary', False) else ''), zpool=zpool, devs=devs)
res = __salt__['cmd.run_all'](cmd, python_shell=False)
if (res['retcode'] != 0):
ret[zpool] = (res['stderr'] if ('stderr' in res) else res['stdout'])
else:
ret[zpool] = 'offlined {0}'.format(devs)
return ret
| [
"def",
"offline",
"(",
"zpool",
",",
"*",
"vdevs",
",",
"**",
"kwargs",
")",
":",
"ret",
"=",
"{",
"}",
"if",
"(",
"not",
"exists",
"(",
"zpool",
")",
")",
":",
"ret",
"[",
"zpool",
"]",
"=",
"'storage pool does not exist'",
"return",
"ret",
"if",
"(",
"(",
"not",
"vdevs",
")",
"or",
"(",
"len",
"(",
"vdevs",
")",
"<=",
"0",
")",
")",
":",
"ret",
"[",
"zpool",
"]",
"=",
"'no devices specified'",
"return",
"ret",
"ret",
"[",
"zpool",
"]",
"=",
"{",
"}",
"devs",
"=",
"' '",
".",
"join",
"(",
"vdevs",
")",
"zpool_cmd",
"=",
"_check_zpool",
"(",
")",
"cmd",
"=",
"'{zpool_cmd} offline {temp}{zpool} {devs}'",
".",
"format",
"(",
"zpool_cmd",
"=",
"zpool_cmd",
",",
"temp",
"=",
"(",
"'-t '",
"if",
"kwargs",
".",
"get",
"(",
"'temporary'",
",",
"False",
")",
"else",
"''",
")",
",",
"zpool",
"=",
"zpool",
",",
"devs",
"=",
"devs",
")",
"res",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
",",
"python_shell",
"=",
"False",
")",
"if",
"(",
"res",
"[",
"'retcode'",
"]",
"!=",
"0",
")",
":",
"ret",
"[",
"zpool",
"]",
"=",
"(",
"res",
"[",
"'stderr'",
"]",
"if",
"(",
"'stderr'",
"in",
"res",
")",
"else",
"res",
"[",
"'stdout'",
"]",
")",
"else",
":",
"ret",
"[",
"zpool",
"]",
"=",
"'offlined {0}'",
".",
"format",
"(",
"devs",
")",
"return",
"ret"
] | mark a cache storage device as offline . | train | false |
1,969 | def init(mpstate):
return SerialModule(mpstate)
| [
"def",
"init",
"(",
"mpstate",
")",
":",
"return",
"SerialModule",
"(",
"mpstate",
")"
] | initialize the module . | train | false |
1,970 | def as_one_hot(input_, n_indices):
shape = input_.get_shape().as_list()
n_elem = numpy.prod(shape)
indices = tf.range(n_elem)
indices = tf.cast(indices, tf.int64)
indices_input = tf.concat(0, [indices, tf.reshape(input_, [(-1)])])
indices_input = tf.reshape(indices_input, [2, (-1)])
indices_input = tf.transpose(indices_input)
res = tf.sparse_to_dense(indices_input, [n_elem, n_indices], 1.0, 0.0, name='flat_one_hot')
res = tf.reshape(res, ([elem for elem in shape] + [n_indices]))
return res
| [
"def",
"as_one_hot",
"(",
"input_",
",",
"n_indices",
")",
":",
"shape",
"=",
"input_",
".",
"get_shape",
"(",
")",
".",
"as_list",
"(",
")",
"n_elem",
"=",
"numpy",
".",
"prod",
"(",
"shape",
")",
"indices",
"=",
"tf",
".",
"range",
"(",
"n_elem",
")",
"indices",
"=",
"tf",
".",
"cast",
"(",
"indices",
",",
"tf",
".",
"int64",
")",
"indices_input",
"=",
"tf",
".",
"concat",
"(",
"0",
",",
"[",
"indices",
",",
"tf",
".",
"reshape",
"(",
"input_",
",",
"[",
"(",
"-",
"1",
")",
"]",
")",
"]",
")",
"indices_input",
"=",
"tf",
".",
"reshape",
"(",
"indices_input",
",",
"[",
"2",
",",
"(",
"-",
"1",
")",
"]",
")",
"indices_input",
"=",
"tf",
".",
"transpose",
"(",
"indices_input",
")",
"res",
"=",
"tf",
".",
"sparse_to_dense",
"(",
"indices_input",
",",
"[",
"n_elem",
",",
"n_indices",
"]",
",",
"1.0",
",",
"0.0",
",",
"name",
"=",
"'flat_one_hot'",
")",
"res",
"=",
"tf",
".",
"reshape",
"(",
"res",
",",
"(",
"[",
"elem",
"for",
"elem",
"in",
"shape",
"]",
"+",
"[",
"n_indices",
"]",
")",
")",
"return",
"res"
] | convert indices to one-hot . | train | false |
1,971 | def translate_text_with_model(target, text, model=translate.NMT):
translate_client = translate.Client()
result = translate_client.translate(text, target_language=target, model=model)
print u'Text: {}'.format(result['input'])
print u'Translation: {}'.format(result['translatedText'])
print u'Detected source language: {}'.format(result['detectedSourceLanguage'])
| [
"def",
"translate_text_with_model",
"(",
"target",
",",
"text",
",",
"model",
"=",
"translate",
".",
"NMT",
")",
":",
"translate_client",
"=",
"translate",
".",
"Client",
"(",
")",
"result",
"=",
"translate_client",
".",
"translate",
"(",
"text",
",",
"target_language",
"=",
"target",
",",
"model",
"=",
"model",
")",
"print",
"u'Text: {}'",
".",
"format",
"(",
"result",
"[",
"'input'",
"]",
")",
"print",
"u'Translation: {}'",
".",
"format",
"(",
"result",
"[",
"'translatedText'",
"]",
")",
"print",
"u'Detected source language: {}'",
".",
"format",
"(",
"result",
"[",
"'detectedSourceLanguage'",
"]",
")"
] | translates text into the target language . | train | false |
1,972 | def rewrite_internal_link(link):
if (not link.startswith('#/')):
return link
from staticpages.models import AbstractPage
virtual_path = link[2:]
url = u'#'
for page_model in AbstractPage.__subclasses__():
try:
page = page_model.objects.live().get(virtual_path=virtual_path)
url = page.get_absolute_url()
except ObjectDoesNotExist:
pass
return url
| [
"def",
"rewrite_internal_link",
"(",
"link",
")",
":",
"if",
"(",
"not",
"link",
".",
"startswith",
"(",
"'#/'",
")",
")",
":",
"return",
"link",
"from",
"staticpages",
".",
"models",
"import",
"AbstractPage",
"virtual_path",
"=",
"link",
"[",
"2",
":",
"]",
"url",
"=",
"u'#'",
"for",
"page_model",
"in",
"AbstractPage",
".",
"__subclasses__",
"(",
")",
":",
"try",
":",
"page",
"=",
"page_model",
".",
"objects",
".",
"live",
"(",
")",
".",
"get",
"(",
"virtual_path",
"=",
"virtual_path",
")",
"url",
"=",
"page",
".",
"get_absolute_url",
"(",
")",
"except",
"ObjectDoesNotExist",
":",
"pass",
"return",
"url"
] | converts link into an internal link . | train | false |
1,973 | def rgb2yiq(rgb):
return _convert(yiq_from_rgb, rgb)
| [
"def",
"rgb2yiq",
"(",
"rgb",
")",
":",
"return",
"_convert",
"(",
"yiq_from_rgb",
",",
"rgb",
")"
] | rgb to yiq color space conversion . | train | false |
1,974 | def query(params=None):
path = 'https://api.qingcloud.com/iaas/'
access_key_id = config.get_cloud_config_value('access_key_id', get_configured_provider(), __opts__, search_global=False)
access_key_secret = config.get_cloud_config_value('secret_access_key', get_configured_provider(), __opts__, search_global=False)
real_parameters = {'access_key_id': access_key_id, 'signature_version': DEFAULT_QINGCLOUD_SIGNATURE_VERSION, 'time_stamp': time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime()), 'version': DEFAULT_QINGCLOUD_API_VERSION}
if params:
for (key, value) in params.items():
if isinstance(value, list):
for i in range(1, (len(value) + 1)):
if isinstance(value[(i - 1)], dict):
for (sk, sv) in value[(i - 1)].items():
if (isinstance(sv, dict) or isinstance(sv, list)):
sv = json.dumps(sv, separators=(',', ':'))
real_parameters['{0}.{1}.{2}'.format(key, i, sk)] = sv
else:
real_parameters['{0}.{1}'.format(key, i)] = value[(i - 1)]
else:
real_parameters[key] = value
signature = _compute_signature(real_parameters, access_key_secret, 'GET', '/iaas/')
real_parameters['signature'] = signature
request = requests.get(path, params=real_parameters, verify=False)
if (request.status_code != 200):
raise SaltCloudSystemExit("An error occurred while querying QingCloud. HTTP Code: {0} Error: '{1}'".format(request.status_code, request.text))
log.debug(request.url)
content = request.text
result = json.loads(content, object_hook=salt.utils.decode_dict)
if (result['ret_code'] != 0):
raise SaltCloudSystemExit(pprint.pformat(result.get('message', {})))
return result
| [
"def",
"query",
"(",
"params",
"=",
"None",
")",
":",
"path",
"=",
"'https://api.qingcloud.com/iaas/'",
"access_key_id",
"=",
"config",
".",
"get_cloud_config_value",
"(",
"'access_key_id'",
",",
"get_configured_provider",
"(",
")",
",",
"__opts__",
",",
"search_global",
"=",
"False",
")",
"access_key_secret",
"=",
"config",
".",
"get_cloud_config_value",
"(",
"'secret_access_key'",
",",
"get_configured_provider",
"(",
")",
",",
"__opts__",
",",
"search_global",
"=",
"False",
")",
"real_parameters",
"=",
"{",
"'access_key_id'",
":",
"access_key_id",
",",
"'signature_version'",
":",
"DEFAULT_QINGCLOUD_SIGNATURE_VERSION",
",",
"'time_stamp'",
":",
"time",
".",
"strftime",
"(",
"'%Y-%m-%dT%H:%M:%SZ'",
",",
"time",
".",
"gmtime",
"(",
")",
")",
",",
"'version'",
":",
"DEFAULT_QINGCLOUD_API_VERSION",
"}",
"if",
"params",
":",
"for",
"(",
"key",
",",
"value",
")",
"in",
"params",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"list",
")",
":",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"(",
"len",
"(",
"value",
")",
"+",
"1",
")",
")",
":",
"if",
"isinstance",
"(",
"value",
"[",
"(",
"i",
"-",
"1",
")",
"]",
",",
"dict",
")",
":",
"for",
"(",
"sk",
",",
"sv",
")",
"in",
"value",
"[",
"(",
"i",
"-",
"1",
")",
"]",
".",
"items",
"(",
")",
":",
"if",
"(",
"isinstance",
"(",
"sv",
",",
"dict",
")",
"or",
"isinstance",
"(",
"sv",
",",
"list",
")",
")",
":",
"sv",
"=",
"json",
".",
"dumps",
"(",
"sv",
",",
"separators",
"=",
"(",
"','",
",",
"':'",
")",
")",
"real_parameters",
"[",
"'{0}.{1}.{2}'",
".",
"format",
"(",
"key",
",",
"i",
",",
"sk",
")",
"]",
"=",
"sv",
"else",
":",
"real_parameters",
"[",
"'{0}.{1}'",
".",
"format",
"(",
"key",
",",
"i",
")",
"]",
"=",
"value",
"[",
"(",
"i",
"-",
"1",
")",
"]",
"else",
":",
"real_parameters",
"[",
"key",
"]",
"=",
"value",
"signature",
"=",
"_compute_signature",
"(",
"real_parameters",
",",
"access_key_secret",
",",
"'GET'",
",",
"'/iaas/'",
")",
"real_parameters",
"[",
"'signature'",
"]",
"=",
"signature",
"request",
"=",
"requests",
".",
"get",
"(",
"path",
",",
"params",
"=",
"real_parameters",
",",
"verify",
"=",
"False",
")",
"if",
"(",
"request",
".",
"status_code",
"!=",
"200",
")",
":",
"raise",
"SaltCloudSystemExit",
"(",
"\"An error occurred while querying QingCloud. HTTP Code: {0} Error: '{1}'\"",
".",
"format",
"(",
"request",
".",
"status_code",
",",
"request",
".",
"text",
")",
")",
"log",
".",
"debug",
"(",
"request",
".",
"url",
")",
"content",
"=",
"request",
".",
"text",
"result",
"=",
"json",
".",
"loads",
"(",
"content",
",",
"object_hook",
"=",
"salt",
".",
"utils",
".",
"decode_dict",
")",
"if",
"(",
"result",
"[",
"'ret_code'",
"]",
"!=",
"0",
")",
":",
"raise",
"SaltCloudSystemExit",
"(",
"pprint",
".",
"pformat",
"(",
"result",
".",
"get",
"(",
"'message'",
",",
"{",
"}",
")",
")",
")",
"return",
"result"
] | querying data database the database to query query query to be executed time_precision time precision to use chunked whether is chunked or not user the user to connect as password the password of the user host the host to connect to port the port to connect to cli example: . | train | true |
1,975 | def decode_filename(filename):
if isinstance(filename, unicode):
return filename
else:
return filename.decode(sys.getfilesystemencoding())
| [
"def",
"decode_filename",
"(",
"filename",
")",
":",
"if",
"isinstance",
"(",
"filename",
",",
"unicode",
")",
":",
"return",
"filename",
"else",
":",
"return",
"filename",
".",
"decode",
"(",
"sys",
".",
"getfilesystemencoding",
"(",
")",
")"
] | return unicode filename . | train | true |
1,976 | def if_search_enabled(f):
@wraps(f)
def wrapper(*args, **kwargs):
'Wraps the decorated function.'
cls = args[0]
if cls.search_is_enabled():
return f(*args, **kwargs)
return wrapper
| [
"def",
"if_search_enabled",
"(",
"f",
")",
":",
"@",
"wraps",
"(",
"f",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"cls",
"=",
"args",
"[",
"0",
"]",
"if",
"cls",
".",
"search_is_enabled",
"(",
")",
":",
"return",
"f",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
"return",
"wrapper"
] | only call f if search is enabled for the courseteamindexer . | train | false |
1,977 | def exception_str(self, ex):
return ('in %s\n:: %s' % (ex.file, ', '.join(ex.args)))
| [
"def",
"exception_str",
"(",
"self",
",",
"ex",
")",
":",
"return",
"(",
"'in %s\\n:: %s'",
"%",
"(",
"ex",
".",
"file",
",",
"', '",
".",
"join",
"(",
"ex",
".",
"args",
")",
")",
")"
] | function used to replace default __str__ method of exception instances . | train | false |
1,978 | def downcaseTokens(s, l, t):
return [tt.lower() for tt in map(_ustr, t)]
| [
"def",
"downcaseTokens",
"(",
"s",
",",
"l",
",",
"t",
")",
":",
"return",
"[",
"tt",
".",
"lower",
"(",
")",
"for",
"tt",
"in",
"map",
"(",
"_ustr",
",",
"t",
")",
"]"
] | helper parse action to convert tokens to lower case . | train | true |
1,980 | def remove_like(obj, user):
obj_type = apps.get_model('contenttypes', 'ContentType').objects.get_for_model(obj)
with atomic():
qs = Like.objects.filter(content_type=obj_type, object_id=obj.id, user=user)
if (not qs.exists()):
return
like = qs.first()
project = like.project
qs.delete()
if (project is not None):
project.refresh_totals()
| [
"def",
"remove_like",
"(",
"obj",
",",
"user",
")",
":",
"obj_type",
"=",
"apps",
".",
"get_model",
"(",
"'contenttypes'",
",",
"'ContentType'",
")",
".",
"objects",
".",
"get_for_model",
"(",
"obj",
")",
"with",
"atomic",
"(",
")",
":",
"qs",
"=",
"Like",
".",
"objects",
".",
"filter",
"(",
"content_type",
"=",
"obj_type",
",",
"object_id",
"=",
"obj",
".",
"id",
",",
"user",
"=",
"user",
")",
"if",
"(",
"not",
"qs",
".",
"exists",
"(",
")",
")",
":",
"return",
"like",
"=",
"qs",
".",
"first",
"(",
")",
"project",
"=",
"like",
".",
"project",
"qs",
".",
"delete",
"(",
")",
"if",
"(",
"project",
"is",
"not",
"None",
")",
":",
"project",
".",
"refresh_totals",
"(",
")"
] | remove an user like from an object . | train | false |
1,982 | @register.simple_tag
def bootstrap_formset(*args, **kwargs):
return render_formset(*args, **kwargs)
| [
"@",
"register",
".",
"simple_tag",
"def",
"bootstrap_formset",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"return",
"render_formset",
"(",
"*",
"args",
",",
"**",
"kwargs",
")"
] | render a formset **tag name**:: bootstrap_formset **parameters**: formset the formset that is being rendered see bootstrap_field_ for other arguments **usage**:: {% bootstrap_formset formset %} **example**:: {% bootstrap_formset formset layout=horizontal %} . | train | false |
1,985 | def final_repset_from_iteration_repsets(repset_fasta_fs):
observed = {}
for repset_fasta_f in repset_fasta_fs:
for (otu_id, seq) in parse_fasta(repset_fasta_f):
o = otu_id.split()[0]
if (not (o in observed)):
(yield (otu_id, seq))
observed[o] = None
else:
pass
| [
"def",
"final_repset_from_iteration_repsets",
"(",
"repset_fasta_fs",
")",
":",
"observed",
"=",
"{",
"}",
"for",
"repset_fasta_f",
"in",
"repset_fasta_fs",
":",
"for",
"(",
"otu_id",
",",
"seq",
")",
"in",
"parse_fasta",
"(",
"repset_fasta_f",
")",
":",
"o",
"=",
"otu_id",
".",
"split",
"(",
")",
"[",
"0",
"]",
"if",
"(",
"not",
"(",
"o",
"in",
"observed",
")",
")",
":",
"(",
"yield",
"(",
"otu_id",
",",
"seq",
")",
")",
"observed",
"[",
"o",
"]",
"=",
"None",
"else",
":",
"pass"
] | the first observation of each otu is chosen as the representative - this ensures that the representative sequence is the centroid of the cluster . | train | false |
1,987 | def SplitGeneratedFileName(fname):
return tuple(fname.split('x', 4))
| [
"def",
"SplitGeneratedFileName",
"(",
"fname",
")",
":",
"return",
"tuple",
"(",
"fname",
".",
"split",
"(",
"'x'",
",",
"4",
")",
")"
] | reverse of getgeneratedfilename() . | train | false |
1,988 | def linked_data(prefix, ignore_channels=False):
recs = linked_data_.get(prefix)
if (recs is None):
recs = linked_data_[prefix] = odict()
meta_dir = join(prefix, u'conda-meta')
if isdir(meta_dir):
for fn in listdir(meta_dir):
if fn.endswith(u'.json'):
dist_name = fn[:(-5)]
load_linked_data(prefix, dist_name, ignore_channels=ignore_channels)
return recs
| [
"def",
"linked_data",
"(",
"prefix",
",",
"ignore_channels",
"=",
"False",
")",
":",
"recs",
"=",
"linked_data_",
".",
"get",
"(",
"prefix",
")",
"if",
"(",
"recs",
"is",
"None",
")",
":",
"recs",
"=",
"linked_data_",
"[",
"prefix",
"]",
"=",
"odict",
"(",
")",
"meta_dir",
"=",
"join",
"(",
"prefix",
",",
"u'conda-meta'",
")",
"if",
"isdir",
"(",
"meta_dir",
")",
":",
"for",
"fn",
"in",
"listdir",
"(",
"meta_dir",
")",
":",
"if",
"fn",
".",
"endswith",
"(",
"u'.json'",
")",
":",
"dist_name",
"=",
"fn",
"[",
":",
"(",
"-",
"5",
")",
"]",
"load_linked_data",
"(",
"prefix",
",",
"dist_name",
",",
"ignore_channels",
"=",
"ignore_channels",
")",
"return",
"recs"
] | return a dictionary of the linked packages in prefix . | train | false |
1,989 | def read_plain_byte_array(file_obj, count):
return [file_obj.read(struct.unpack('<i', file_obj.read(4))[0]) for i in range(count)]
| [
"def",
"read_plain_byte_array",
"(",
"file_obj",
",",
"count",
")",
":",
"return",
"[",
"file_obj",
".",
"read",
"(",
"struct",
".",
"unpack",
"(",
"'<i'",
",",
"file_obj",
".",
"read",
"(",
"4",
")",
")",
"[",
"0",
"]",
")",
"for",
"i",
"in",
"range",
"(",
"count",
")",
"]"
] | read count byte arrays using the plain encoding . | train | true |
1,990 | def _send_instant_emails(recipient_list, feedback_message_reference, exploration_id, has_suggestion):
can_users_receive_email = email_manager.can_users_receive_thread_email(recipient_list, exploration_id, has_suggestion)
for (index, recipient_id) in enumerate(recipient_list):
if can_users_receive_email[index]:
transaction_services.run_in_transaction(enqueue_feedback_message_instant_email_task, recipient_id, feedback_message_reference)
| [
"def",
"_send_instant_emails",
"(",
"recipient_list",
",",
"feedback_message_reference",
",",
"exploration_id",
",",
"has_suggestion",
")",
":",
"can_users_receive_email",
"=",
"email_manager",
".",
"can_users_receive_thread_email",
"(",
"recipient_list",
",",
"exploration_id",
",",
"has_suggestion",
")",
"for",
"(",
"index",
",",
"recipient_id",
")",
"in",
"enumerate",
"(",
"recipient_list",
")",
":",
"if",
"can_users_receive_email",
"[",
"index",
"]",
":",
"transaction_services",
".",
"run_in_transaction",
"(",
"enqueue_feedback_message_instant_email_task",
",",
"recipient_id",
",",
"feedback_message_reference",
")"
] | adds the given feedbackmessagereference to each of the recipients email buffers . | train | false |
1,991 | def all_links(html):
ans = set()
for match in re.finditer(u'<\\s*[Aa]\\s+.*?[hH][Rr][Ee][Ff]\\s*=\\s*([\'"])(.+?)\\1', html, (re.MULTILINE | re.DOTALL)):
ans.add(replace_entities(match.group(2)))
return ans
| [
"def",
"all_links",
"(",
"html",
")",
":",
"ans",
"=",
"set",
"(",
")",
"for",
"match",
"in",
"re",
".",
"finditer",
"(",
"u'<\\\\s*[Aa]\\\\s+.*?[hH][Rr][Ee][Ff]\\\\s*=\\\\s*([\\'\"])(.+?)\\\\1'",
",",
"html",
",",
"(",
"re",
".",
"MULTILINE",
"|",
"re",
".",
"DOTALL",
")",
")",
":",
"ans",
".",
"add",
"(",
"replace_entities",
"(",
"match",
".",
"group",
"(",
"2",
")",
")",
")",
"return",
"ans"
] | return set of all links in the file . | train | false |
1,996 | @login_required
@ensure_csrf_cookie
def run_python(request):
if (not request.user.is_staff):
raise Http404
c = {}
c['code'] = ''
c['results'] = None
if (request.method == 'POST'):
py_code = c['code'] = request.POST.get('code')
g = {}
try:
safe_exec(py_code, g)
except Exception:
c['results'] = traceback.format_exc()
else:
c['results'] = pprint.pformat(g)
return render_to_response('debug/run_python_form.html', c)
| [
"@",
"login_required",
"@",
"ensure_csrf_cookie",
"def",
"run_python",
"(",
"request",
")",
":",
"if",
"(",
"not",
"request",
".",
"user",
".",
"is_staff",
")",
":",
"raise",
"Http404",
"c",
"=",
"{",
"}",
"c",
"[",
"'code'",
"]",
"=",
"''",
"c",
"[",
"'results'",
"]",
"=",
"None",
"if",
"(",
"request",
".",
"method",
"==",
"'POST'",
")",
":",
"py_code",
"=",
"c",
"[",
"'code'",
"]",
"=",
"request",
".",
"POST",
".",
"get",
"(",
"'code'",
")",
"g",
"=",
"{",
"}",
"try",
":",
"safe_exec",
"(",
"py_code",
",",
"g",
")",
"except",
"Exception",
":",
"c",
"[",
"'results'",
"]",
"=",
"traceback",
".",
"format_exc",
"(",
")",
"else",
":",
"c",
"[",
"'results'",
"]",
"=",
"pprint",
".",
"pformat",
"(",
"g",
")",
"return",
"render_to_response",
"(",
"'debug/run_python_form.html'",
",",
"c",
")"
] | a page to allow testing the python sandbox on a production server . | train | false |
1,997 | def validate_value(tokens):
for token in tokens:
type_ = token.type
if (type_ == u'{'):
validate_block(token.content, u'property value')
else:
validate_any(token, u'property value')
| [
"def",
"validate_value",
"(",
"tokens",
")",
":",
"for",
"token",
"in",
"tokens",
":",
"type_",
"=",
"token",
".",
"type",
"if",
"(",
"type_",
"==",
"u'{'",
")",
":",
"validate_block",
"(",
"token",
".",
"content",
",",
"u'property value'",
")",
"else",
":",
"validate_any",
"(",
"token",
",",
"u'property value'",
")"
] | validate a property value . | train | false |
1,998 | def assoc_in(d, keys, value, factory=dict):
return update_in(d, keys, (lambda x: value), value, factory)
| [
"def",
"assoc_in",
"(",
"d",
",",
"keys",
",",
"value",
",",
"factory",
"=",
"dict",
")",
":",
"return",
"update_in",
"(",
"d",
",",
"keys",
",",
"(",
"lambda",
"x",
":",
"value",
")",
",",
"value",
",",
"factory",
")"
] | return a new dict with new . | train | false |
1,999 | def admin():
series_id = None
get_vars_new = Storage()
try:
series_id = int(request.args[0])
except:
try:
(dummy, series_id) = get_vars['viewing'].split('.')
series_id = int(series_id)
except:
pass
if series_id:
get_vars_new.viewing = ('survey_complete.%s' % series_id)
return dict(series_id=series_id, vars=get_vars_new)
| [
"def",
"admin",
"(",
")",
":",
"series_id",
"=",
"None",
"get_vars_new",
"=",
"Storage",
"(",
")",
"try",
":",
"series_id",
"=",
"int",
"(",
"request",
".",
"args",
"[",
"0",
"]",
")",
"except",
":",
"try",
":",
"(",
"dummy",
",",
"series_id",
")",
"=",
"get_vars",
"[",
"'viewing'",
"]",
".",
"split",
"(",
"'.'",
")",
"series_id",
"=",
"int",
"(",
"series_id",
")",
"except",
":",
"pass",
"if",
"series_id",
":",
"get_vars_new",
".",
"viewing",
"=",
"(",
"'survey_complete.%s'",
"%",
"series_id",
")",
"return",
"dict",
"(",
"series_id",
"=",
"series_id",
",",
"vars",
"=",
"get_vars_new",
")"
] | require the admin user . | train | false |
2,001 | @open_file(0, mode='rb')
def read_gpickle(path):
return pickle.load(path)
| [
"@",
"open_file",
"(",
"0",
",",
"mode",
"=",
"'rb'",
")",
"def",
"read_gpickle",
"(",
"path",
")",
":",
"return",
"pickle",
".",
"load",
"(",
"path",
")"
] | read graph object in python pickle format . | train | false |
2,002 | def rtuple(reldict, lcon=False, rcon=False):
items = [class_abbrev(reldict['subjclass']), reldict['subjtext'], reldict['filler'], class_abbrev(reldict['objclass']), reldict['objtext']]
format = '[%s: %r] %r [%s: %r]'
if lcon:
items = ([reldict['lcon']] + items)
format = ('...%r)' + format)
if rcon:
items.append(reldict['rcon'])
format = (format + '(%r...')
printargs = tuple(items)
return (format % printargs)
| [
"def",
"rtuple",
"(",
"reldict",
",",
"lcon",
"=",
"False",
",",
"rcon",
"=",
"False",
")",
":",
"items",
"=",
"[",
"class_abbrev",
"(",
"reldict",
"[",
"'subjclass'",
"]",
")",
",",
"reldict",
"[",
"'subjtext'",
"]",
",",
"reldict",
"[",
"'filler'",
"]",
",",
"class_abbrev",
"(",
"reldict",
"[",
"'objclass'",
"]",
")",
",",
"reldict",
"[",
"'objtext'",
"]",
"]",
"format",
"=",
"'[%s: %r] %r [%s: %r]'",
"if",
"lcon",
":",
"items",
"=",
"(",
"[",
"reldict",
"[",
"'lcon'",
"]",
"]",
"+",
"items",
")",
"format",
"=",
"(",
"'...%r)'",
"+",
"format",
")",
"if",
"rcon",
":",
"items",
".",
"append",
"(",
"reldict",
"[",
"'rcon'",
"]",
")",
"format",
"=",
"(",
"format",
"+",
"'(%r...'",
")",
"printargs",
"=",
"tuple",
"(",
"items",
")",
"return",
"(",
"format",
"%",
"printargs",
")"
] | pretty print the reldict as an rtuple . | train | false |
2,003 | def functest_builder(method, func):
def do_test(self):
method(self, func)
return do_test
| [
"def",
"functest_builder",
"(",
"method",
",",
"func",
")",
":",
"def",
"do_test",
"(",
"self",
")",
":",
"method",
"(",
"self",
",",
"func",
")",
"return",
"do_test"
] | generate a test method that tests the given function . | train | false |
2,004 | def testBackgroundAndImage(src='test-background.html', dest='test-background.pdf'):
pdf = pisa.CreatePDF(file(src, 'r'), file(dest, 'wb'), log_warn=1, log_err=1, path=os.path.join(os.getcwd(), src))
dumpErrors(pdf)
if (not pdf.err):
pisa.startViewer(dest)
| [
"def",
"testBackgroundAndImage",
"(",
"src",
"=",
"'test-background.html'",
",",
"dest",
"=",
"'test-background.pdf'",
")",
":",
"pdf",
"=",
"pisa",
".",
"CreatePDF",
"(",
"file",
"(",
"src",
",",
"'r'",
")",
",",
"file",
"(",
"dest",
",",
"'wb'",
")",
",",
"log_warn",
"=",
"1",
",",
"log_err",
"=",
"1",
",",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"getcwd",
"(",
")",
",",
"src",
")",
")",
"dumpErrors",
"(",
"pdf",
")",
"if",
"(",
"not",
"pdf",
".",
"err",
")",
":",
"pisa",
".",
"startViewer",
"(",
"dest",
")"
] | simple test showing how to create a pdf file from pml source string . | train | false |
2,005 | def write_pickle(obj, dest, tmp=None, pickle_protocol=0):
if (tmp is None):
tmp = os.path.dirname(dest)
(fd, tmppath) = mkstemp(dir=tmp, suffix='.tmp')
with os.fdopen(fd, 'wb') as fo:
pickle.dump(obj, fo, pickle_protocol)
fo.flush()
os.fsync(fd)
renamer(tmppath, dest)
| [
"def",
"write_pickle",
"(",
"obj",
",",
"dest",
",",
"tmp",
"=",
"None",
",",
"pickle_protocol",
"=",
"0",
")",
":",
"if",
"(",
"tmp",
"is",
"None",
")",
":",
"tmp",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"dest",
")",
"(",
"fd",
",",
"tmppath",
")",
"=",
"mkstemp",
"(",
"dir",
"=",
"tmp",
",",
"suffix",
"=",
"'.tmp'",
")",
"with",
"os",
".",
"fdopen",
"(",
"fd",
",",
"'wb'",
")",
"as",
"fo",
":",
"pickle",
".",
"dump",
"(",
"obj",
",",
"fo",
",",
"pickle_protocol",
")",
"fo",
".",
"flush",
"(",
")",
"os",
".",
"fsync",
"(",
"fd",
")",
"renamer",
"(",
"tmppath",
",",
"dest",
")"
] | ensure that a pickle file gets written to disk . | train | false |
2,006 | def unpack_argument(session, service_name, operation_name, cli_argument, value):
param_name = getattr(cli_argument, 'name', 'anonymous')
value_override = session.emit_first_non_none_response(('load-cli-arg.%s.%s.%s' % (service_name, operation_name, param_name)), param=cli_argument, value=value, service_name=service_name, operation_name=operation_name)
if (value_override is not None):
value = value_override
return value
| [
"def",
"unpack_argument",
"(",
"session",
",",
"service_name",
",",
"operation_name",
",",
"cli_argument",
",",
"value",
")",
":",
"param_name",
"=",
"getattr",
"(",
"cli_argument",
",",
"'name'",
",",
"'anonymous'",
")",
"value_override",
"=",
"session",
".",
"emit_first_non_none_response",
"(",
"(",
"'load-cli-arg.%s.%s.%s'",
"%",
"(",
"service_name",
",",
"operation_name",
",",
"param_name",
")",
")",
",",
"param",
"=",
"cli_argument",
",",
"value",
"=",
"value",
",",
"service_name",
"=",
"service_name",
",",
"operation_name",
"=",
"operation_name",
")",
"if",
"(",
"value_override",
"is",
"not",
"None",
")",
":",
"value",
"=",
"value_override",
"return",
"value"
] | unpack an arguments value from the commandline . | train | false |
2,007 | def _yaml_result_unicode_to_utf8(data):
if six.PY3:
return data
if isinstance(data, OrderedDict):
for (key, elt) in six.iteritems(data):
data[key] = _yaml_result_unicode_to_utf8(elt)
elif isinstance(data, list):
for i in range(len(data)):
data[i] = _yaml_result_unicode_to_utf8(data[i])
elif isinstance(data, six.text_type):
data = data.encode('utf-8')
return data
| [
"def",
"_yaml_result_unicode_to_utf8",
"(",
"data",
")",
":",
"if",
"six",
".",
"PY3",
":",
"return",
"data",
"if",
"isinstance",
"(",
"data",
",",
"OrderedDict",
")",
":",
"for",
"(",
"key",
",",
"elt",
")",
"in",
"six",
".",
"iteritems",
"(",
"data",
")",
":",
"data",
"[",
"key",
"]",
"=",
"_yaml_result_unicode_to_utf8",
"(",
"elt",
")",
"elif",
"isinstance",
"(",
"data",
",",
"list",
")",
":",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"data",
")",
")",
":",
"data",
"[",
"i",
"]",
"=",
"_yaml_result_unicode_to_utf8",
"(",
"data",
"[",
"i",
"]",
")",
"elif",
"isinstance",
"(",
"data",
",",
"six",
".",
"text_type",
")",
":",
"data",
"=",
"data",
".",
"encode",
"(",
"'utf-8'",
")",
"return",
"data"
] | replace unicode strings by utf-8 str in final yaml result this is a recursive function . | train | false |
2,009 | def filter_services(services):
filtered_services = services[:]
for service_name in services:
mod = __import__(('services.' + service_name), globals=globals(), locals=locals(), fromlist=['Service'], level=(-1))
service = mod.Service
if ((service.required_features is not None) and (bs4.builder_registry.lookup(*service.required_features) is None)):
logger.warning((u'Service %s not available: none of available features could be used. One of %r required' % (service_name, service.required_features)))
filtered_services.remove(service_name)
return filtered_services
| [
"def",
"filter_services",
"(",
"services",
")",
":",
"filtered_services",
"=",
"services",
"[",
":",
"]",
"for",
"service_name",
"in",
"services",
":",
"mod",
"=",
"__import__",
"(",
"(",
"'services.'",
"+",
"service_name",
")",
",",
"globals",
"=",
"globals",
"(",
")",
",",
"locals",
"=",
"locals",
"(",
")",
",",
"fromlist",
"=",
"[",
"'Service'",
"]",
",",
"level",
"=",
"(",
"-",
"1",
")",
")",
"service",
"=",
"mod",
".",
"Service",
"if",
"(",
"(",
"service",
".",
"required_features",
"is",
"not",
"None",
")",
"and",
"(",
"bs4",
".",
"builder_registry",
".",
"lookup",
"(",
"*",
"service",
".",
"required_features",
")",
"is",
"None",
")",
")",
":",
"logger",
".",
"warning",
"(",
"(",
"u'Service %s not available: none of available features could be used. One of %r required'",
"%",
"(",
"service_name",
",",
"service",
".",
"required_features",
")",
")",
")",
"filtered_services",
".",
"remove",
"(",
"service_name",
")",
"return",
"filtered_services"
] | filter out services that are not available because of a missing feature . | train | false |
2,010 | def fmt_highlights(raw_value, value, unit):
if (unit is None):
return value
highlights = highlight_string(raw_value, unit)
start_search = 0
for highlight in highlights:
htext = escape(force_text(highlight[2]))
find_highlight = value.find(htext, start_search)
if (find_highlight >= 0):
newpart = HL_CHECK.format(htext)
next_part = value[(find_highlight + len(htext)):]
value = ((value[:find_highlight] + newpart) + next_part)
start_search = (find_highlight + len(newpart))
return value
| [
"def",
"fmt_highlights",
"(",
"raw_value",
",",
"value",
",",
"unit",
")",
":",
"if",
"(",
"unit",
"is",
"None",
")",
":",
"return",
"value",
"highlights",
"=",
"highlight_string",
"(",
"raw_value",
",",
"unit",
")",
"start_search",
"=",
"0",
"for",
"highlight",
"in",
"highlights",
":",
"htext",
"=",
"escape",
"(",
"force_text",
"(",
"highlight",
"[",
"2",
"]",
")",
")",
"find_highlight",
"=",
"value",
".",
"find",
"(",
"htext",
",",
"start_search",
")",
"if",
"(",
"find_highlight",
">=",
"0",
")",
":",
"newpart",
"=",
"HL_CHECK",
".",
"format",
"(",
"htext",
")",
"next_part",
"=",
"value",
"[",
"(",
"find_highlight",
"+",
"len",
"(",
"htext",
")",
")",
":",
"]",
"value",
"=",
"(",
"(",
"value",
"[",
":",
"find_highlight",
"]",
"+",
"newpart",
")",
"+",
"next_part",
")",
"start_search",
"=",
"(",
"find_highlight",
"+",
"len",
"(",
"newpart",
")",
")",
"return",
"value"
] | formats check highlights . | train | false |
2,011 | @task
@use_master
@set_modified_on
def resize_preview(src, pk, **kw):
instance = Preview.objects.get(pk=pk)
(thumb_dst, full_dst) = (instance.thumbnail_path, instance.image_path)
sizes = (instance.sizes or {})
log.info(('[1@None] Resizing preview and storing size: %s' % thumb_dst))
try:
thumbnail_size = APP_PREVIEW_SIZES[0][:2]
image_size = APP_PREVIEW_SIZES[1][:2]
with private_storage.open(src, 'rb') as fp:
size = Image.open(fp).size
if (size[0] > size[1]):
thumbnail_size = thumbnail_size[::(-1)]
image_size = image_size[::(-1)]
if kw.get('generate_thumbnail', True):
sizes['thumbnail'] = resize_image(src, thumb_dst, thumbnail_size, remove_src=False)
if kw.get('generate_image', True):
sizes['image'] = resize_image(src, full_dst, image_size, remove_src=False)
instance.sizes = sizes
instance.save()
log.info(('Preview resized to: %s' % thumb_dst))
private_storage.delete(src)
return True
except Exception as e:
log.error(('Error saving preview: %s; %s' % (e, thumb_dst)))
| [
"@",
"task",
"@",
"use_master",
"@",
"set_modified_on",
"def",
"resize_preview",
"(",
"src",
",",
"pk",
",",
"**",
"kw",
")",
":",
"instance",
"=",
"Preview",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"pk",
")",
"(",
"thumb_dst",
",",
"full_dst",
")",
"=",
"(",
"instance",
".",
"thumbnail_path",
",",
"instance",
".",
"image_path",
")",
"sizes",
"=",
"(",
"instance",
".",
"sizes",
"or",
"{",
"}",
")",
"log",
".",
"info",
"(",
"(",
"'[1@None] Resizing preview and storing size: %s'",
"%",
"thumb_dst",
")",
")",
"try",
":",
"thumbnail_size",
"=",
"APP_PREVIEW_SIZES",
"[",
"0",
"]",
"[",
":",
"2",
"]",
"image_size",
"=",
"APP_PREVIEW_SIZES",
"[",
"1",
"]",
"[",
":",
"2",
"]",
"with",
"private_storage",
".",
"open",
"(",
"src",
",",
"'rb'",
")",
"as",
"fp",
":",
"size",
"=",
"Image",
".",
"open",
"(",
"fp",
")",
".",
"size",
"if",
"(",
"size",
"[",
"0",
"]",
">",
"size",
"[",
"1",
"]",
")",
":",
"thumbnail_size",
"=",
"thumbnail_size",
"[",
":",
":",
"(",
"-",
"1",
")",
"]",
"image_size",
"=",
"image_size",
"[",
":",
":",
"(",
"-",
"1",
")",
"]",
"if",
"kw",
".",
"get",
"(",
"'generate_thumbnail'",
",",
"True",
")",
":",
"sizes",
"[",
"'thumbnail'",
"]",
"=",
"resize_image",
"(",
"src",
",",
"thumb_dst",
",",
"thumbnail_size",
",",
"remove_src",
"=",
"False",
")",
"if",
"kw",
".",
"get",
"(",
"'generate_image'",
",",
"True",
")",
":",
"sizes",
"[",
"'image'",
"]",
"=",
"resize_image",
"(",
"src",
",",
"full_dst",
",",
"image_size",
",",
"remove_src",
"=",
"False",
")",
"instance",
".",
"sizes",
"=",
"sizes",
"instance",
".",
"save",
"(",
")",
"log",
".",
"info",
"(",
"(",
"'Preview resized to: %s'",
"%",
"thumb_dst",
")",
")",
"private_storage",
".",
"delete",
"(",
"src",
")",
"return",
"True",
"except",
"Exception",
"as",
"e",
":",
"log",
".",
"error",
"(",
"(",
"'Error saving preview: %s; %s'",
"%",
"(",
"e",
",",
"thumb_dst",
")",
")",
")"
] | resizes preview images and stores the sizes on the preview . | train | false |
2,012 | def fileSize(file):
size = None
if hasattr(file, 'fileno'):
fileno = file.fileno()
try:
stat_ = os.fstat(fileno)
size = stat_[stat.ST_SIZE]
except:
pass
else:
return size
if (hasattr(file, 'name') and path.exists(file.name)):
try:
size = path.getsize(file.name)
except:
pass
else:
return size
if (hasattr(file, 'seek') and hasattr(file, 'tell')):
try:
try:
file.seek(0, 2)
size = file.tell()
finally:
file.seek(0, 0)
except:
pass
else:
return size
return size
| [
"def",
"fileSize",
"(",
"file",
")",
":",
"size",
"=",
"None",
"if",
"hasattr",
"(",
"file",
",",
"'fileno'",
")",
":",
"fileno",
"=",
"file",
".",
"fileno",
"(",
")",
"try",
":",
"stat_",
"=",
"os",
".",
"fstat",
"(",
"fileno",
")",
"size",
"=",
"stat_",
"[",
"stat",
".",
"ST_SIZE",
"]",
"except",
":",
"pass",
"else",
":",
"return",
"size",
"if",
"(",
"hasattr",
"(",
"file",
",",
"'name'",
")",
"and",
"path",
".",
"exists",
"(",
"file",
".",
"name",
")",
")",
":",
"try",
":",
"size",
"=",
"path",
".",
"getsize",
"(",
"file",
".",
"name",
")",
"except",
":",
"pass",
"else",
":",
"return",
"size",
"if",
"(",
"hasattr",
"(",
"file",
",",
"'seek'",
")",
"and",
"hasattr",
"(",
"file",
",",
"'tell'",
")",
")",
":",
"try",
":",
"try",
":",
"file",
".",
"seek",
"(",
"0",
",",
"2",
")",
"size",
"=",
"file",
".",
"tell",
"(",
")",
"finally",
":",
"file",
".",
"seek",
"(",
"0",
",",
"0",
")",
"except",
":",
"pass",
"else",
":",
"return",
"size",
"return",
"size"
] | ill try my damndest to determine the size of this file object . | train | false |
2,013 | def add_qos(tenant_id, qos_name, qos_desc):
LOG.debug(_('add_qos() called'))
session = db.get_session()
try:
qos = session.query(network_models_v2.QoS).filter_by(tenant_id=tenant_id).filter_by(qos_name=qos_name).one()
raise c_exc.QosNameAlreadyExists(qos_name=qos_name, tenant_id=tenant_id)
except exc.NoResultFound:
qos = network_models_v2.QoS(tenant_id, qos_name, qos_desc)
session.add(qos)
session.flush()
return qos
| [
"def",
"add_qos",
"(",
"tenant_id",
",",
"qos_name",
",",
"qos_desc",
")",
":",
"LOG",
".",
"debug",
"(",
"_",
"(",
"'add_qos() called'",
")",
")",
"session",
"=",
"db",
".",
"get_session",
"(",
")",
"try",
":",
"qos",
"=",
"session",
".",
"query",
"(",
"network_models_v2",
".",
"QoS",
")",
".",
"filter_by",
"(",
"tenant_id",
"=",
"tenant_id",
")",
".",
"filter_by",
"(",
"qos_name",
"=",
"qos_name",
")",
".",
"one",
"(",
")",
"raise",
"c_exc",
".",
"QosNameAlreadyExists",
"(",
"qos_name",
"=",
"qos_name",
",",
"tenant_id",
"=",
"tenant_id",
")",
"except",
"exc",
".",
"NoResultFound",
":",
"qos",
"=",
"network_models_v2",
".",
"QoS",
"(",
"tenant_id",
",",
"qos_name",
",",
"qos_desc",
")",
"session",
".",
"add",
"(",
"qos",
")",
"session",
".",
"flush",
"(",
")",
"return",
"qos"
] | adds a qos to tenant association . | train | false |
2,014 | def grover_iteration(qstate, oracle):
wgate = WGate(oracle.nqubits)
return ((wgate * oracle) * qstate)
| [
"def",
"grover_iteration",
"(",
"qstate",
",",
"oracle",
")",
":",
"wgate",
"=",
"WGate",
"(",
"oracle",
".",
"nqubits",
")",
"return",
"(",
"(",
"wgate",
"*",
"oracle",
")",
"*",
"qstate",
")"
] | applies one application of the oracle and w gate . | train | false |
2,015 | def candlestick2_ochl(ax, opens, closes, highs, lows, width=4, colorup=u'k', colordown=u'r', alpha=0.75):
candlestick2_ohlc(ax, opens, highs, lows, closes, width=width, colorup=colorup, colordown=colordown, alpha=alpha)
| [
"def",
"candlestick2_ochl",
"(",
"ax",
",",
"opens",
",",
"closes",
",",
"highs",
",",
"lows",
",",
"width",
"=",
"4",
",",
"colorup",
"=",
"u'k'",
",",
"colordown",
"=",
"u'r'",
",",
"alpha",
"=",
"0.75",
")",
":",
"candlestick2_ohlc",
"(",
"ax",
",",
"opens",
",",
"highs",
",",
"lows",
",",
"closes",
",",
"width",
"=",
"width",
",",
"colorup",
"=",
"colorup",
",",
"colordown",
"=",
"colordown",
",",
"alpha",
"=",
"alpha",
")"
] | represent the open . | train | false |
2,016 | def ClassDoc(path):
try:
from com.sun.tools.javadoc import JavadocTool, Messager, ModifierFilter
from com.sun.tools.javac.util import List, Context
from com.sun.tools.javac.code.Flags import PUBLIC
except ImportError:
raise DataError("Creating documentation from Java source files requires 'tools.jar' to be in CLASSPATH.")
context = Context()
Messager.preRegister(context, 'libdoc')
jdoctool = JavadocTool.make0(context)
filter = ModifierFilter(PUBLIC)
java_names = List.of(path)
if (sys.platform[4:7] < '1.8'):
root = jdoctool.getRootDocImpl('en', 'utf-8', filter, java_names, List.nil(), False, List.nil(), List.nil(), False, False, True)
else:
root = jdoctool.getRootDocImpl('en', 'utf-8', filter, java_names, List.nil(), List.nil(), False, List.nil(), List.nil(), False, False, True)
return root.classes()[0]
| [
"def",
"ClassDoc",
"(",
"path",
")",
":",
"try",
":",
"from",
"com",
".",
"sun",
".",
"tools",
".",
"javadoc",
"import",
"JavadocTool",
",",
"Messager",
",",
"ModifierFilter",
"from",
"com",
".",
"sun",
".",
"tools",
".",
"javac",
".",
"util",
"import",
"List",
",",
"Context",
"from",
"com",
".",
"sun",
".",
"tools",
".",
"javac",
".",
"code",
".",
"Flags",
"import",
"PUBLIC",
"except",
"ImportError",
":",
"raise",
"DataError",
"(",
"\"Creating documentation from Java source files requires 'tools.jar' to be in CLASSPATH.\"",
")",
"context",
"=",
"Context",
"(",
")",
"Messager",
".",
"preRegister",
"(",
"context",
",",
"'libdoc'",
")",
"jdoctool",
"=",
"JavadocTool",
".",
"make0",
"(",
"context",
")",
"filter",
"=",
"ModifierFilter",
"(",
"PUBLIC",
")",
"java_names",
"=",
"List",
".",
"of",
"(",
"path",
")",
"if",
"(",
"sys",
".",
"platform",
"[",
"4",
":",
"7",
"]",
"<",
"'1.8'",
")",
":",
"root",
"=",
"jdoctool",
".",
"getRootDocImpl",
"(",
"'en'",
",",
"'utf-8'",
",",
"filter",
",",
"java_names",
",",
"List",
".",
"nil",
"(",
")",
",",
"False",
",",
"List",
".",
"nil",
"(",
")",
",",
"List",
".",
"nil",
"(",
")",
",",
"False",
",",
"False",
",",
"True",
")",
"else",
":",
"root",
"=",
"jdoctool",
".",
"getRootDocImpl",
"(",
"'en'",
",",
"'utf-8'",
",",
"filter",
",",
"java_names",
",",
"List",
".",
"nil",
"(",
")",
",",
"List",
".",
"nil",
"(",
")",
",",
"False",
",",
"List",
".",
"nil",
"(",
")",
",",
"List",
".",
"nil",
"(",
")",
",",
"False",
",",
"False",
",",
"True",
")",
"return",
"root",
".",
"classes",
"(",
")",
"[",
"0",
"]"
] | process the given java source file and return classdoc instance . | train | false |
2,018 | def split_pre_existing_dir(dirname):
(head, tail) = os.path.split(dirname)
b_head = to_bytes(head, errors='surrogate_or_strict')
if (not os.path.exists(b_head)):
(pre_existing_dir, new_directory_list) = split_pre_existing_dir(head)
else:
return (head, [tail])
new_directory_list.append(tail)
return (pre_existing_dir, new_directory_list)
| [
"def",
"split_pre_existing_dir",
"(",
"dirname",
")",
":",
"(",
"head",
",",
"tail",
")",
"=",
"os",
".",
"path",
".",
"split",
"(",
"dirname",
")",
"b_head",
"=",
"to_bytes",
"(",
"head",
",",
"errors",
"=",
"'surrogate_or_strict'",
")",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"b_head",
")",
")",
":",
"(",
"pre_existing_dir",
",",
"new_directory_list",
")",
"=",
"split_pre_existing_dir",
"(",
"head",
")",
"else",
":",
"return",
"(",
"head",
",",
"[",
"tail",
"]",
")",
"new_directory_list",
".",
"append",
"(",
"tail",
")",
"return",
"(",
"pre_existing_dir",
",",
"new_directory_list",
")"
] | return the first pre-existing directory and a list of the new directories that will be created . | train | false |
2,020 | def bzr_wc_target_exists_local_mods_force():
test = 'bzr_wc_target_exists_local_mods_force'
wt = ('%s-test-%s' % (DIR, test))
puts(magenta(('Executing test: %s' % test)))
from fabric.api import cd, run
from fabtools.files import is_dir
from fabtools import require
require.bazaar.working_copy(REMOTE_URL, wt)
assert is_dir(wt)
with cd(wt):
assert (run('bzr status') == '')
run('echo "# a new comment" >> __init__.py')
assert (run('bzr status') != '')
require.bazaar.working_copy(REMOTE_URL, wt, force=True)
assert (run(('bzr status %s' % wt)) == '')
| [
"def",
"bzr_wc_target_exists_local_mods_force",
"(",
")",
":",
"test",
"=",
"'bzr_wc_target_exists_local_mods_force'",
"wt",
"=",
"(",
"'%s-test-%s'",
"%",
"(",
"DIR",
",",
"test",
")",
")",
"puts",
"(",
"magenta",
"(",
"(",
"'Executing test: %s'",
"%",
"test",
")",
")",
")",
"from",
"fabric",
".",
"api",
"import",
"cd",
",",
"run",
"from",
"fabtools",
".",
"files",
"import",
"is_dir",
"from",
"fabtools",
"import",
"require",
"require",
".",
"bazaar",
".",
"working_copy",
"(",
"REMOTE_URL",
",",
"wt",
")",
"assert",
"is_dir",
"(",
"wt",
")",
"with",
"cd",
"(",
"wt",
")",
":",
"assert",
"(",
"run",
"(",
"'bzr status'",
")",
"==",
"''",
")",
"run",
"(",
"'echo \"# a new comment\" >> __init__.py'",
")",
"assert",
"(",
"run",
"(",
"'bzr status'",
")",
"!=",
"''",
")",
"require",
".",
"bazaar",
".",
"working_copy",
"(",
"REMOTE_URL",
",",
"wt",
",",
"force",
"=",
"True",
")",
"assert",
"(",
"run",
"(",
"(",
"'bzr status %s'",
"%",
"wt",
")",
")",
"==",
"''",
")"
] | test working copy when a target already exists and has local modifications and force was specified . | train | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.