id_within_dataset
int64 1
55.5k
| snippet
stringlengths 19
14.2k
| tokens
sequencelengths 6
1.63k
| nl
stringlengths 6
352
| split_within_dataset
stringclasses 1
value | is_duplicated
bool 2
classes |
---|---|---|---|---|---|
800 | def run_server(system, fast=False, settings=None, asset_settings=None, port=None, contracts=False):
if (system not in ['lms', 'studio']):
print('System must be either lms or studio', file=sys.stderr)
exit(1)
if (not settings):
settings = DEFAULT_SETTINGS
if ((not fast) and asset_settings):
args = [system, '--settings={}'.format(asset_settings), '--watch']
if (settings == DEFAULT_SETTINGS):
args.append('--skip-collect')
call_task('pavelib.assets.update_assets', args=args)
if (port is None):
port = DEFAULT_PORT[system]
args = [settings, 'runserver', '--traceback', '--pythonpath=.', '0.0.0.0:{}'.format(port)]
if contracts:
args.append('--contracts')
run_process(django_cmd(system, *args))
| [
"def",
"run_server",
"(",
"system",
",",
"fast",
"=",
"False",
",",
"settings",
"=",
"None",
",",
"asset_settings",
"=",
"None",
",",
"port",
"=",
"None",
",",
"contracts",
"=",
"False",
")",
":",
"if",
"(",
"system",
"not",
"in",
"[",
"'lms'",
",",
"'studio'",
"]",
")",
":",
"print",
"(",
"'System must be either lms or studio'",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"exit",
"(",
"1",
")",
"if",
"(",
"not",
"settings",
")",
":",
"settings",
"=",
"DEFAULT_SETTINGS",
"if",
"(",
"(",
"not",
"fast",
")",
"and",
"asset_settings",
")",
":",
"args",
"=",
"[",
"system",
",",
"'--settings={}'",
".",
"format",
"(",
"asset_settings",
")",
",",
"'--watch'",
"]",
"if",
"(",
"settings",
"==",
"DEFAULT_SETTINGS",
")",
":",
"args",
".",
"append",
"(",
"'--skip-collect'",
")",
"call_task",
"(",
"'pavelib.assets.update_assets'",
",",
"args",
"=",
"args",
")",
"if",
"(",
"port",
"is",
"None",
")",
":",
"port",
"=",
"DEFAULT_PORT",
"[",
"system",
"]",
"args",
"=",
"[",
"settings",
",",
"'runserver'",
",",
"'--traceback'",
",",
"'--pythonpath=.'",
",",
"'0.0.0.0:{}'",
".",
"format",
"(",
"port",
")",
"]",
"if",
"contracts",
":",
"args",
".",
"append",
"(",
"'--contracts'",
")",
"run_process",
"(",
"django_cmd",
"(",
"system",
",",
"*",
"args",
")",
")"
] | run a wsgi server with the given application . | train | false |
802 | def fpart(x):
return math.modf(x)[0]
| [
"def",
"fpart",
"(",
"x",
")",
":",
"return",
"math",
".",
"modf",
"(",
"x",
")",
"[",
"0",
"]"
] | return fractional part of given number . | train | false |
804 | def call_lights(*args, **kwargs):
res = dict()
lights = _get_lights()
for dev_id in ((('id' in kwargs) and _get_devices(kwargs)) or sorted(lights.keys())):
if lights.get(str(dev_id)):
res[dev_id] = lights[str(dev_id)]
return (res or False)
| [
"def",
"call_lights",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"res",
"=",
"dict",
"(",
")",
"lights",
"=",
"_get_lights",
"(",
")",
"for",
"dev_id",
"in",
"(",
"(",
"(",
"'id'",
"in",
"kwargs",
")",
"and",
"_get_devices",
"(",
"kwargs",
")",
")",
"or",
"sorted",
"(",
"lights",
".",
"keys",
"(",
")",
")",
")",
":",
"if",
"lights",
".",
"get",
"(",
"str",
"(",
"dev_id",
")",
")",
":",
"res",
"[",
"dev_id",
"]",
"=",
"lights",
"[",
"str",
"(",
"dev_id",
")",
"]",
"return",
"(",
"res",
"or",
"False",
")"
] | get info about all available lamps . | train | true |
806 | def get_items(xml):
try:
from bs4 import BeautifulSoup
except ImportError:
error = u'Missing dependency "BeautifulSoup4" and "lxml" required to import WordPress XML files.'
sys.exit(error)
with open(xml, encoding=u'utf-8') as infile:
xmlfile = infile.read()
soup = BeautifulSoup(xmlfile, u'xml')
items = soup.rss.channel.findAll(u'item')
return items
| [
"def",
"get_items",
"(",
"xml",
")",
":",
"try",
":",
"from",
"bs4",
"import",
"BeautifulSoup",
"except",
"ImportError",
":",
"error",
"=",
"u'Missing dependency \"BeautifulSoup4\" and \"lxml\" required to import WordPress XML files.'",
"sys",
".",
"exit",
"(",
"error",
")",
"with",
"open",
"(",
"xml",
",",
"encoding",
"=",
"u'utf-8'",
")",
"as",
"infile",
":",
"xmlfile",
"=",
"infile",
".",
"read",
"(",
")",
"soup",
"=",
"BeautifulSoup",
"(",
"xmlfile",
",",
"u'xml'",
")",
"items",
"=",
"soup",
".",
"rss",
".",
"channel",
".",
"findAll",
"(",
"u'item'",
")",
"return",
"items"
] | get a list of alfred . | train | false |
807 | def sequence(seq, limits=None):
seq = sympify(seq)
if is_sequence(seq, Tuple):
return SeqPer(seq, limits)
else:
return SeqFormula(seq, limits)
| [
"def",
"sequence",
"(",
"seq",
",",
"limits",
"=",
"None",
")",
":",
"seq",
"=",
"sympify",
"(",
"seq",
")",
"if",
"is_sequence",
"(",
"seq",
",",
"Tuple",
")",
":",
"return",
"SeqPer",
"(",
"seq",
",",
"limits",
")",
"else",
":",
"return",
"SeqFormula",
"(",
"seq",
",",
"limits",
")"
] | yields an infinite sequence . | train | false |
809 | def exec_code(lang, code, cwd=None):
return exec_code_all(lang, code, cwd)['stdout']
| [
"def",
"exec_code",
"(",
"lang",
",",
"code",
",",
"cwd",
"=",
"None",
")",
":",
"return",
"exec_code_all",
"(",
"lang",
",",
"code",
",",
"cwd",
")",
"[",
"'stdout'",
"]"
] | pass in two strings . | train | false |
810 | def python_console(namespace=None):
if (namespace is None):
import inspect
frame = inspect.currentframe()
caller = frame.f_back
if (not caller):
logging.error("can't find caller who start this console.")
caller = frame
namespace = dict(caller.f_globals)
namespace.update(caller.f_locals)
return get_python_console(namespace=namespace).interact()
| [
"def",
"python_console",
"(",
"namespace",
"=",
"None",
")",
":",
"if",
"(",
"namespace",
"is",
"None",
")",
":",
"import",
"inspect",
"frame",
"=",
"inspect",
".",
"currentframe",
"(",
")",
"caller",
"=",
"frame",
".",
"f_back",
"if",
"(",
"not",
"caller",
")",
":",
"logging",
".",
"error",
"(",
"\"can't find caller who start this console.\"",
")",
"caller",
"=",
"frame",
"namespace",
"=",
"dict",
"(",
"caller",
".",
"f_globals",
")",
"namespace",
".",
"update",
"(",
"caller",
".",
"f_locals",
")",
"return",
"get_python_console",
"(",
"namespace",
"=",
"namespace",
")",
".",
"interact",
"(",
")"
] | start a interactive python console with callers stack . | train | true |
811 | def split_txt(txt, epub_split_size_kb=0):
if (epub_split_size_kb > 0):
if isinstance(txt, unicode):
txt = txt.encode('utf-8')
length_byte = len(txt)
chunk_size = long((length_byte / (int((length_byte / (epub_split_size_kb * 1024))) + 2)))
if len(filter((lambda x: (len(x) > chunk_size)), txt.split('\n\n'))):
txt = '\n\n'.join([split_string_separator(line, chunk_size) for line in txt.split('\n\n')])
if isbytestring(txt):
txt = txt.decode('utf-8')
return txt
| [
"def",
"split_txt",
"(",
"txt",
",",
"epub_split_size_kb",
"=",
"0",
")",
":",
"if",
"(",
"epub_split_size_kb",
">",
"0",
")",
":",
"if",
"isinstance",
"(",
"txt",
",",
"unicode",
")",
":",
"txt",
"=",
"txt",
".",
"encode",
"(",
"'utf-8'",
")",
"length_byte",
"=",
"len",
"(",
"txt",
")",
"chunk_size",
"=",
"long",
"(",
"(",
"length_byte",
"/",
"(",
"int",
"(",
"(",
"length_byte",
"/",
"(",
"epub_split_size_kb",
"*",
"1024",
")",
")",
")",
"+",
"2",
")",
")",
")",
"if",
"len",
"(",
"filter",
"(",
"(",
"lambda",
"x",
":",
"(",
"len",
"(",
"x",
")",
">",
"chunk_size",
")",
")",
",",
"txt",
".",
"split",
"(",
"'\\n\\n'",
")",
")",
")",
":",
"txt",
"=",
"'\\n\\n'",
".",
"join",
"(",
"[",
"split_string_separator",
"(",
"line",
",",
"chunk_size",
")",
"for",
"line",
"in",
"txt",
".",
"split",
"(",
"'\\n\\n'",
")",
"]",
")",
"if",
"isbytestring",
"(",
"txt",
")",
":",
"txt",
"=",
"txt",
".",
"decode",
"(",
"'utf-8'",
")",
"return",
"txt"
] | ensure there are split points for converting to epub . | train | false |
812 | def curdoc():
return _state.document
| [
"def",
"curdoc",
"(",
")",
":",
"return",
"_state",
".",
"document"
] | return the document for the current default state . | train | false |
813 | def _instance_overrides_method(base, instance, method_name):
bound_method = getattr(instance, method_name)
unbound_method = getattr(base, method_name)
return (six.get_unbound_function(unbound_method) != six.get_method_function(bound_method))
| [
"def",
"_instance_overrides_method",
"(",
"base",
",",
"instance",
",",
"method_name",
")",
":",
"bound_method",
"=",
"getattr",
"(",
"instance",
",",
"method_name",
")",
"unbound_method",
"=",
"getattr",
"(",
"base",
",",
"method_name",
")",
"return",
"(",
"six",
".",
"get_unbound_function",
"(",
"unbound_method",
")",
"!=",
"six",
".",
"get_method_function",
"(",
"bound_method",
")",
")"
] | returns true if instance overrides a method inherited from base . | train | false |
814 | def _invalid_attribute(attributes):
invalid_attributes = []
for attribute in attributes:
if ('namespace' not in attribute):
msg = u"'namespace' not in enrollment attribute"
log.warn(msg)
invalid_attributes.append('namespace')
raise InvalidEnrollmentAttribute(msg)
if ('name' not in attribute):
msg = u"'name' not in enrollment attribute"
log.warn(msg)
invalid_attributes.append('name')
raise InvalidEnrollmentAttribute(msg)
if ('value' not in attribute):
msg = u"'value' not in enrollment attribute"
log.warn(msg)
invalid_attributes.append('value')
raise InvalidEnrollmentAttribute(msg)
return invalid_attributes
| [
"def",
"_invalid_attribute",
"(",
"attributes",
")",
":",
"invalid_attributes",
"=",
"[",
"]",
"for",
"attribute",
"in",
"attributes",
":",
"if",
"(",
"'namespace'",
"not",
"in",
"attribute",
")",
":",
"msg",
"=",
"u\"'namespace' not in enrollment attribute\"",
"log",
".",
"warn",
"(",
"msg",
")",
"invalid_attributes",
".",
"append",
"(",
"'namespace'",
")",
"raise",
"InvalidEnrollmentAttribute",
"(",
"msg",
")",
"if",
"(",
"'name'",
"not",
"in",
"attribute",
")",
":",
"msg",
"=",
"u\"'name' not in enrollment attribute\"",
"log",
".",
"warn",
"(",
"msg",
")",
"invalid_attributes",
".",
"append",
"(",
"'name'",
")",
"raise",
"InvalidEnrollmentAttribute",
"(",
"msg",
")",
"if",
"(",
"'value'",
"not",
"in",
"attribute",
")",
":",
"msg",
"=",
"u\"'value' not in enrollment attribute\"",
"log",
".",
"warn",
"(",
"msg",
")",
"invalid_attributes",
".",
"append",
"(",
"'value'",
")",
"raise",
"InvalidEnrollmentAttribute",
"(",
"msg",
")",
"return",
"invalid_attributes"
] | validate enrollment attribute args: attributes: dict of attribute return: list of invalid attributes . | train | false |
815 | def formatListLines(msgs):
i = 0
for size in msgs:
i += 1
(yield ('%d %d\r\n' % (i, size)))
| [
"def",
"formatListLines",
"(",
"msgs",
")",
":",
"i",
"=",
"0",
"for",
"size",
"in",
"msgs",
":",
"i",
"+=",
"1",
"(",
"yield",
"(",
"'%d %d\\r\\n'",
"%",
"(",
"i",
",",
"size",
")",
")",
")"
] | format a list of message sizes appropriately for the lines of a list response . | train | false |
817 | def read_rle(file_obj, header, bit_width, debug_logging):
count = (header >> 1)
zero_data = '\x00\x00\x00\x00'
width = ((bit_width + 7) // 8)
data = file_obj.read(width)
data = (data + zero_data[len(data):])
value = struct.unpack('<i', data)[0]
if debug_logging:
logger.debug(u'Read RLE group with value %s of byte-width %s and count %s', value, width, count)
for _ in range(count):
(yield value)
| [
"def",
"read_rle",
"(",
"file_obj",
",",
"header",
",",
"bit_width",
",",
"debug_logging",
")",
":",
"count",
"=",
"(",
"header",
">>",
"1",
")",
"zero_data",
"=",
"'\\x00\\x00\\x00\\x00'",
"width",
"=",
"(",
"(",
"bit_width",
"+",
"7",
")",
"//",
"8",
")",
"data",
"=",
"file_obj",
".",
"read",
"(",
"width",
")",
"data",
"=",
"(",
"data",
"+",
"zero_data",
"[",
"len",
"(",
"data",
")",
":",
"]",
")",
"value",
"=",
"struct",
".",
"unpack",
"(",
"'<i'",
",",
"data",
")",
"[",
"0",
"]",
"if",
"debug_logging",
":",
"logger",
".",
"debug",
"(",
"u'Read RLE group with value %s of byte-width %s and count %s'",
",",
"value",
",",
"width",
",",
"count",
")",
"for",
"_",
"in",
"range",
"(",
"count",
")",
":",
"(",
"yield",
"value",
")"
] | read a run-length encoded run from the given fo with the given header and bit_width . | train | true |
818 | def get_svc_alias():
ret = {}
for d in AVAIL_SVR_DIRS:
for el in glob.glob(os.path.join(d, '*')):
if (not os.path.islink(el)):
continue
psvc = os.readlink(el)
if (not os.path.isabs(psvc)):
psvc = os.path.join(d, psvc)
nsvc = os.path.basename(psvc)
if (nsvc not in ret):
ret[nsvc] = []
ret[nsvc].append(el)
return ret
| [
"def",
"get_svc_alias",
"(",
")",
":",
"ret",
"=",
"{",
"}",
"for",
"d",
"in",
"AVAIL_SVR_DIRS",
":",
"for",
"el",
"in",
"glob",
".",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"d",
",",
"'*'",
")",
")",
":",
"if",
"(",
"not",
"os",
".",
"path",
".",
"islink",
"(",
"el",
")",
")",
":",
"continue",
"psvc",
"=",
"os",
".",
"readlink",
"(",
"el",
")",
"if",
"(",
"not",
"os",
".",
"path",
".",
"isabs",
"(",
"psvc",
")",
")",
":",
"psvc",
"=",
"os",
".",
"path",
".",
"join",
"(",
"d",
",",
"psvc",
")",
"nsvc",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"psvc",
")",
"if",
"(",
"nsvc",
"not",
"in",
"ret",
")",
":",
"ret",
"[",
"nsvc",
"]",
"=",
"[",
"]",
"ret",
"[",
"nsvc",
"]",
".",
"append",
"(",
"el",
")",
"return",
"ret"
] | returns the list of services name that are aliased and their alias path(s) . | train | true |
819 | def normprob(z, direction='two-sided', mean=0, std=1):
if (direction == 'two-sided'):
if (z >= 0):
return (2 * (1.0 - norm.cdf(z, mean, std)))
else:
return (2 * norm.cdf(z, mean, std))
elif (direction == 'high'):
return (1 - norm.cdf(z, mean, std))
elif (direction == 'low'):
return norm.cdf(z, mean, std)
else:
raise ValueError('Unknown direction.')
| [
"def",
"normprob",
"(",
"z",
",",
"direction",
"=",
"'two-sided'",
",",
"mean",
"=",
"0",
",",
"std",
"=",
"1",
")",
":",
"if",
"(",
"direction",
"==",
"'two-sided'",
")",
":",
"if",
"(",
"z",
">=",
"0",
")",
":",
"return",
"(",
"2",
"*",
"(",
"1.0",
"-",
"norm",
".",
"cdf",
"(",
"z",
",",
"mean",
",",
"std",
")",
")",
")",
"else",
":",
"return",
"(",
"2",
"*",
"norm",
".",
"cdf",
"(",
"z",
",",
"mean",
",",
"std",
")",
")",
"elif",
"(",
"direction",
"==",
"'high'",
")",
":",
"return",
"(",
"1",
"-",
"norm",
".",
"cdf",
"(",
"z",
",",
"mean",
",",
"std",
")",
")",
"elif",
"(",
"direction",
"==",
"'low'",
")",
":",
"return",
"norm",
".",
"cdf",
"(",
"z",
",",
"mean",
",",
"std",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Unknown direction.'",
")"
] | calculate probability from normal distribution paramaters z : float value of z statistic direction : str one of low . | train | false |
820 | def _pair_iter(it):
it = iter(it)
prev = next(it)
for el in it:
(yield (prev, el))
prev = el
(yield (prev, None))
| [
"def",
"_pair_iter",
"(",
"it",
")",
":",
"it",
"=",
"iter",
"(",
"it",
")",
"prev",
"=",
"next",
"(",
"it",
")",
"for",
"el",
"in",
"it",
":",
"(",
"yield",
"(",
"prev",
",",
"el",
")",
")",
"prev",
"=",
"el",
"(",
"yield",
"(",
"prev",
",",
"None",
")",
")"
] | yields pairs of tokens from the given iterator such that each input token will appear as the first element in a yielded tuple . | train | false |
822 | def classmarkChange(MobileStationClassmark3_presence=0):
a = TpPd(pd=6)
b = MessageType(mesType=22)
c = MobileStationClassmark2()
packet = ((a / b) / c)
if (MobileStationClassmark3_presence is 1):
e = MobileStationClassmark3(ieiMSC3=32)
packet = (packet / e)
return packet
| [
"def",
"classmarkChange",
"(",
"MobileStationClassmark3_presence",
"=",
"0",
")",
":",
"a",
"=",
"TpPd",
"(",
"pd",
"=",
"6",
")",
"b",
"=",
"MessageType",
"(",
"mesType",
"=",
"22",
")",
"c",
"=",
"MobileStationClassmark2",
"(",
")",
"packet",
"=",
"(",
"(",
"a",
"/",
"b",
")",
"/",
"c",
")",
"if",
"(",
"MobileStationClassmark3_presence",
"is",
"1",
")",
":",
"e",
"=",
"MobileStationClassmark3",
"(",
"ieiMSC3",
"=",
"32",
")",
"packet",
"=",
"(",
"packet",
"/",
"e",
")",
"return",
"packet"
] | classmark change section 9 . | train | true |
823 | def remount(name, device, mkmnt=False, fstype='', opts='defaults', user=None):
force_mount = False
if (__grains__['os'] in ['MacOS', 'Darwin']):
if (opts == 'defaults'):
opts = 'noowners'
if (fstype == 'smbfs'):
force_mount = True
if isinstance(opts, six.string_types):
opts = opts.split(',')
mnts = active()
if (name in mnts):
if (('remount' not in opts) and (__grains__['os'] not in ['OpenBSD', 'MacOS', 'Darwin'])):
opts.append('remount')
if force_mount:
umount(name, device, user=user)
lopts = ','.join(opts)
args = '-o {0}'.format(lopts)
if fstype:
args += ' -t {0}'.format(fstype)
if ((__grains__['os'] not in ['OpenBSD', 'MacOS', 'Darwin']) or force_mount):
cmd = 'mount {0} {1} {2} '.format(args, device, name)
else:
cmd = 'mount -u {0} {1} {2} '.format(args, device, name)
out = __salt__['cmd.run_all'](cmd, runas=user, python_shell=False)
if out['retcode']:
return out['stderr']
return True
return mount(name, device, mkmnt, fstype, opts, user=user)
| [
"def",
"remount",
"(",
"name",
",",
"device",
",",
"mkmnt",
"=",
"False",
",",
"fstype",
"=",
"''",
",",
"opts",
"=",
"'defaults'",
",",
"user",
"=",
"None",
")",
":",
"force_mount",
"=",
"False",
"if",
"(",
"__grains__",
"[",
"'os'",
"]",
"in",
"[",
"'MacOS'",
",",
"'Darwin'",
"]",
")",
":",
"if",
"(",
"opts",
"==",
"'defaults'",
")",
":",
"opts",
"=",
"'noowners'",
"if",
"(",
"fstype",
"==",
"'smbfs'",
")",
":",
"force_mount",
"=",
"True",
"if",
"isinstance",
"(",
"opts",
",",
"six",
".",
"string_types",
")",
":",
"opts",
"=",
"opts",
".",
"split",
"(",
"','",
")",
"mnts",
"=",
"active",
"(",
")",
"if",
"(",
"name",
"in",
"mnts",
")",
":",
"if",
"(",
"(",
"'remount'",
"not",
"in",
"opts",
")",
"and",
"(",
"__grains__",
"[",
"'os'",
"]",
"not",
"in",
"[",
"'OpenBSD'",
",",
"'MacOS'",
",",
"'Darwin'",
"]",
")",
")",
":",
"opts",
".",
"append",
"(",
"'remount'",
")",
"if",
"force_mount",
":",
"umount",
"(",
"name",
",",
"device",
",",
"user",
"=",
"user",
")",
"lopts",
"=",
"','",
".",
"join",
"(",
"opts",
")",
"args",
"=",
"'-o {0}'",
".",
"format",
"(",
"lopts",
")",
"if",
"fstype",
":",
"args",
"+=",
"' -t {0}'",
".",
"format",
"(",
"fstype",
")",
"if",
"(",
"(",
"__grains__",
"[",
"'os'",
"]",
"not",
"in",
"[",
"'OpenBSD'",
",",
"'MacOS'",
",",
"'Darwin'",
"]",
")",
"or",
"force_mount",
")",
":",
"cmd",
"=",
"'mount {0} {1} {2} '",
".",
"format",
"(",
"args",
",",
"device",
",",
"name",
")",
"else",
":",
"cmd",
"=",
"'mount -u {0} {1} {2} '",
".",
"format",
"(",
"args",
",",
"device",
",",
"name",
")",
"out",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
",",
"runas",
"=",
"user",
",",
"python_shell",
"=",
"False",
")",
"if",
"out",
"[",
"'retcode'",
"]",
":",
"return",
"out",
"[",
"'stderr'",
"]",
"return",
"True",
"return",
"mount",
"(",
"name",
",",
"device",
",",
"mkmnt",
",",
"fstype",
",",
"opts",
",",
"user",
"=",
"user",
")"
] | remounts the filesystem as writable . | train | true |
824 | def parse_vars(args):
result = {}
for arg in args:
if ('=' not in arg):
raise ValueError(('Variable assignment %r invalid (no "=")' % arg))
(name, value) = arg.split('=', 1)
result[name] = value
return result
| [
"def",
"parse_vars",
"(",
"args",
")",
":",
"result",
"=",
"{",
"}",
"for",
"arg",
"in",
"args",
":",
"if",
"(",
"'='",
"not",
"in",
"arg",
")",
":",
"raise",
"ValueError",
"(",
"(",
"'Variable assignment %r invalid (no \"=\")'",
"%",
"arg",
")",
")",
"(",
"name",
",",
"value",
")",
"=",
"arg",
".",
"split",
"(",
"'='",
",",
"1",
")",
"result",
"[",
"name",
"]",
"=",
"value",
"return",
"result"
] | given variables like [a=b . | train | true |
826 | def _export_spreadsheet(client, spreadsheet_key, worksheet_id, headers):
cleaned_headers = _get_cleaned_headers(headers)
spreadsheet_lines = [headers]
rows_feed = client.GetListFeed(spreadsheet_key, worksheet_id, visibility='public', projection='values')
while True:
found_data = False
for row in rows_feed.entry:
line = []
for (header_idx, (header, cleaned_header)) in enumerate(zip(headers, cleaned_headers)):
try:
cell_data = row.custom[cleaned_header].text
except KeyError:
raise GoogleSpreadsheetError(("Could not map header '%s' to Google Spreadsheet's internal representation of the header. We suggest changing the name of the header in your Google Spreadsheet to be alphanumeric if possible, as this will likely solve the issue. Note that the name isn't *required* to be alphanumeric, but it may fix issues with converting to Google Spreadsheet's internal format in some cases." % header))
if ((not found_data) and (header_idx == 0) and cell_data.lstrip().startswith('#')):
line.append(cell_data)
break
else:
line.append(cell_data)
found_data = True
spreadsheet_lines.append(line)
next_link = rows_feed.GetNextLink()
if next_link:
rows_feed = client.Get(next_link.href, converter=SpreadsheetsListFeedFromString)
else:
break
return spreadsheet_lines
| [
"def",
"_export_spreadsheet",
"(",
"client",
",",
"spreadsheet_key",
",",
"worksheet_id",
",",
"headers",
")",
":",
"cleaned_headers",
"=",
"_get_cleaned_headers",
"(",
"headers",
")",
"spreadsheet_lines",
"=",
"[",
"headers",
"]",
"rows_feed",
"=",
"client",
".",
"GetListFeed",
"(",
"spreadsheet_key",
",",
"worksheet_id",
",",
"visibility",
"=",
"'public'",
",",
"projection",
"=",
"'values'",
")",
"while",
"True",
":",
"found_data",
"=",
"False",
"for",
"row",
"in",
"rows_feed",
".",
"entry",
":",
"line",
"=",
"[",
"]",
"for",
"(",
"header_idx",
",",
"(",
"header",
",",
"cleaned_header",
")",
")",
"in",
"enumerate",
"(",
"zip",
"(",
"headers",
",",
"cleaned_headers",
")",
")",
":",
"try",
":",
"cell_data",
"=",
"row",
".",
"custom",
"[",
"cleaned_header",
"]",
".",
"text",
"except",
"KeyError",
":",
"raise",
"GoogleSpreadsheetError",
"(",
"(",
"\"Could not map header '%s' to Google Spreadsheet's internal representation of the header. We suggest changing the name of the header in your Google Spreadsheet to be alphanumeric if possible, as this will likely solve the issue. Note that the name isn't *required* to be alphanumeric, but it may fix issues with converting to Google Spreadsheet's internal format in some cases.\"",
"%",
"header",
")",
")",
"if",
"(",
"(",
"not",
"found_data",
")",
"and",
"(",
"header_idx",
"==",
"0",
")",
"and",
"cell_data",
".",
"lstrip",
"(",
")",
".",
"startswith",
"(",
"'#'",
")",
")",
":",
"line",
".",
"append",
"(",
"cell_data",
")",
"break",
"else",
":",
"line",
".",
"append",
"(",
"cell_data",
")",
"found_data",
"=",
"True",
"spreadsheet_lines",
".",
"append",
"(",
"line",
")",
"next_link",
"=",
"rows_feed",
".",
"GetNextLink",
"(",
")",
"if",
"next_link",
":",
"rows_feed",
"=",
"client",
".",
"Get",
"(",
"next_link",
".",
"href",
",",
"converter",
"=",
"SpreadsheetsListFeedFromString",
")",
"else",
":",
"break",
"return",
"spreadsheet_lines"
] | returns a list of lists containing the entire spreadsheet . | train | false |
827 | def test_help_command_should_exit_status_ok_when_command_exists(script):
result = script.pip('help', 'freeze')
assert (result.returncode == SUCCESS)
| [
"def",
"test_help_command_should_exit_status_ok_when_command_exists",
"(",
"script",
")",
":",
"result",
"=",
"script",
".",
"pip",
"(",
"'help'",
",",
"'freeze'",
")",
"assert",
"(",
"result",
".",
"returncode",
"==",
"SUCCESS",
")"
] | test help command for existing command . | train | false |
828 | def is_vlanid_used(vlan_id):
LOG.debug(_('is_vlanid_used() called'))
session = db.get_session()
try:
vlanid = session.query(network_models_v2.VlanID).filter_by(vlan_id=vlan_id).one()
return vlanid['vlan_used']
except exc.NoResultFound:
raise c_exc.VlanIDNotFound(vlan_id=vlan_id)
| [
"def",
"is_vlanid_used",
"(",
"vlan_id",
")",
":",
"LOG",
".",
"debug",
"(",
"_",
"(",
"'is_vlanid_used() called'",
")",
")",
"session",
"=",
"db",
".",
"get_session",
"(",
")",
"try",
":",
"vlanid",
"=",
"session",
".",
"query",
"(",
"network_models_v2",
".",
"VlanID",
")",
".",
"filter_by",
"(",
"vlan_id",
"=",
"vlan_id",
")",
".",
"one",
"(",
")",
"return",
"vlanid",
"[",
"'vlan_used'",
"]",
"except",
"exc",
".",
"NoResultFound",
":",
"raise",
"c_exc",
".",
"VlanIDNotFound",
"(",
"vlan_id",
"=",
"vlan_id",
")"
] | checks if a vlanid is in use . | train | false |
829 | def getservbyname(servicename, protocolname=None):
return _ResolveService(servicename, protocolname)[1]
| [
"def",
"getservbyname",
"(",
"servicename",
",",
"protocolname",
"=",
"None",
")",
":",
"return",
"_ResolveService",
"(",
"servicename",
",",
"protocolname",
")",
"[",
"1",
"]"
] | getservbyname -> integer return a port number from a service name and protocol name . | train | false |
830 | def task_label(task):
func = task[0]
if hasattr(func, 'funcs'):
if (len(func.funcs) > 1):
return '{0}(...)'.format(funcname(func.funcs[0]))
else:
head = funcname(func.funcs[0])
else:
head = funcname(task[0])
if any((has_sub_tasks(i) for i in task[1:])):
return '{0}(...)'.format(head)
else:
return head
| [
"def",
"task_label",
"(",
"task",
")",
":",
"func",
"=",
"task",
"[",
"0",
"]",
"if",
"hasattr",
"(",
"func",
",",
"'funcs'",
")",
":",
"if",
"(",
"len",
"(",
"func",
".",
"funcs",
")",
">",
"1",
")",
":",
"return",
"'{0}(...)'",
".",
"format",
"(",
"funcname",
"(",
"func",
".",
"funcs",
"[",
"0",
"]",
")",
")",
"else",
":",
"head",
"=",
"funcname",
"(",
"func",
".",
"funcs",
"[",
"0",
"]",
")",
"else",
":",
"head",
"=",
"funcname",
"(",
"task",
"[",
"0",
"]",
")",
"if",
"any",
"(",
"(",
"has_sub_tasks",
"(",
"i",
")",
"for",
"i",
"in",
"task",
"[",
"1",
":",
"]",
")",
")",
":",
"return",
"'{0}(...)'",
".",
"format",
"(",
"head",
")",
"else",
":",
"return",
"head"
] | label for a task on a dot graph . | train | false |
833 | def _update_doc(doc):
from textwrap import wrap
info_table = [(p, plugin_info(p).get('description', 'no description')) for p in available_plugins if (not (p == 'test'))]
if (len(info_table) > 0):
name_length = max([len(n) for (n, _) in info_table])
else:
name_length = 0
description_length = ((WRAP_LEN - 1) - name_length)
column_lengths = [name_length, description_length]
_format_plugin_info_table(info_table, column_lengths)
for (name, plugin_description) in info_table:
description_lines = wrap(plugin_description, description_length)
name_column = [name]
name_column.extend(['' for _ in range((len(description_lines) - 1))])
for (name, description) in zip(name_column, description_lines):
doc += ('%s %s\n' % (name.ljust(name_length), description))
doc = doc.strip()
return doc
| [
"def",
"_update_doc",
"(",
"doc",
")",
":",
"from",
"textwrap",
"import",
"wrap",
"info_table",
"=",
"[",
"(",
"p",
",",
"plugin_info",
"(",
"p",
")",
".",
"get",
"(",
"'description'",
",",
"'no description'",
")",
")",
"for",
"p",
"in",
"available_plugins",
"if",
"(",
"not",
"(",
"p",
"==",
"'test'",
")",
")",
"]",
"if",
"(",
"len",
"(",
"info_table",
")",
">",
"0",
")",
":",
"name_length",
"=",
"max",
"(",
"[",
"len",
"(",
"n",
")",
"for",
"(",
"n",
",",
"_",
")",
"in",
"info_table",
"]",
")",
"else",
":",
"name_length",
"=",
"0",
"description_length",
"=",
"(",
"(",
"WRAP_LEN",
"-",
"1",
")",
"-",
"name_length",
")",
"column_lengths",
"=",
"[",
"name_length",
",",
"description_length",
"]",
"_format_plugin_info_table",
"(",
"info_table",
",",
"column_lengths",
")",
"for",
"(",
"name",
",",
"plugin_description",
")",
"in",
"info_table",
":",
"description_lines",
"=",
"wrap",
"(",
"plugin_description",
",",
"description_length",
")",
"name_column",
"=",
"[",
"name",
"]",
"name_column",
".",
"extend",
"(",
"[",
"''",
"for",
"_",
"in",
"range",
"(",
"(",
"len",
"(",
"description_lines",
")",
"-",
"1",
")",
")",
"]",
")",
"for",
"(",
"name",
",",
"description",
")",
"in",
"zip",
"(",
"name_column",
",",
"description_lines",
")",
":",
"doc",
"+=",
"(",
"'%s %s\\n'",
"%",
"(",
"name",
".",
"ljust",
"(",
"name_length",
")",
",",
"description",
")",
")",
"doc",
"=",
"doc",
".",
"strip",
"(",
")",
"return",
"doc"
] | add a list of plugins to the module docstring . | train | false |
836 | def _dup_ff_trivial_gcd(f, g, K):
if (not (f or g)):
return ([], [], [])
elif (not f):
return (dup_monic(g, K), [], [dup_LC(g, K)])
elif (not g):
return (dup_monic(f, K), [dup_LC(f, K)], [])
else:
return None
| [
"def",
"_dup_ff_trivial_gcd",
"(",
"f",
",",
"g",
",",
"K",
")",
":",
"if",
"(",
"not",
"(",
"f",
"or",
"g",
")",
")",
":",
"return",
"(",
"[",
"]",
",",
"[",
"]",
",",
"[",
"]",
")",
"elif",
"(",
"not",
"f",
")",
":",
"return",
"(",
"dup_monic",
"(",
"g",
",",
"K",
")",
",",
"[",
"]",
",",
"[",
"dup_LC",
"(",
"g",
",",
"K",
")",
"]",
")",
"elif",
"(",
"not",
"g",
")",
":",
"return",
"(",
"dup_monic",
"(",
"f",
",",
"K",
")",
",",
"[",
"dup_LC",
"(",
"f",
",",
"K",
")",
"]",
",",
"[",
"]",
")",
"else",
":",
"return",
"None"
] | handle trivial cases in gcd algorithm over a field . | train | false |
837 | def subdocuments(fields_chain, resource, document):
if (len(fields_chain) == 0):
(yield document)
elif (isinstance(document, dict) and (fields_chain[0] in document)):
subdocument = document[fields_chain[0]]
docs = (subdocument if isinstance(subdocument, list) else [subdocument])
try:
resource = field_definition(resource, fields_chain[0])['data_relation']['resource']
except KeyError:
resource = resource
for doc in docs:
for result in subdocuments(fields_chain[1:], resource, doc):
(yield result)
else:
(yield document)
| [
"def",
"subdocuments",
"(",
"fields_chain",
",",
"resource",
",",
"document",
")",
":",
"if",
"(",
"len",
"(",
"fields_chain",
")",
"==",
"0",
")",
":",
"(",
"yield",
"document",
")",
"elif",
"(",
"isinstance",
"(",
"document",
",",
"dict",
")",
"and",
"(",
"fields_chain",
"[",
"0",
"]",
"in",
"document",
")",
")",
":",
"subdocument",
"=",
"document",
"[",
"fields_chain",
"[",
"0",
"]",
"]",
"docs",
"=",
"(",
"subdocument",
"if",
"isinstance",
"(",
"subdocument",
",",
"list",
")",
"else",
"[",
"subdocument",
"]",
")",
"try",
":",
"resource",
"=",
"field_definition",
"(",
"resource",
",",
"fields_chain",
"[",
"0",
"]",
")",
"[",
"'data_relation'",
"]",
"[",
"'resource'",
"]",
"except",
"KeyError",
":",
"resource",
"=",
"resource",
"for",
"doc",
"in",
"docs",
":",
"for",
"result",
"in",
"subdocuments",
"(",
"fields_chain",
"[",
"1",
":",
"]",
",",
"resource",
",",
"doc",
")",
":",
"(",
"yield",
"result",
")",
"else",
":",
"(",
"yield",
"document",
")"
] | traverses the given document and yields subdocuments which correspond to the given fields_chain . | train | false |
840 | def S_IFMT(mode):
return (mode & 61440)
| [
"def",
"S_IFMT",
"(",
"mode",
")",
":",
"return",
"(",
"mode",
"&",
"61440",
")"
] | return the portion of the files mode that describes the file type . | train | false |
841 | @public
def refine_root(f, s, t, eps=None, steps=None, fast=False, check_sqf=False):
try:
F = Poly(f)
except GeneratorsNeeded:
raise PolynomialError(("can't refine a root of %s, not a polynomial" % f))
return F.refine_root(s, t, eps=eps, steps=steps, fast=fast, check_sqf=check_sqf)
| [
"@",
"public",
"def",
"refine_root",
"(",
"f",
",",
"s",
",",
"t",
",",
"eps",
"=",
"None",
",",
"steps",
"=",
"None",
",",
"fast",
"=",
"False",
",",
"check_sqf",
"=",
"False",
")",
":",
"try",
":",
"F",
"=",
"Poly",
"(",
"f",
")",
"except",
"GeneratorsNeeded",
":",
"raise",
"PolynomialError",
"(",
"(",
"\"can't refine a root of %s, not a polynomial\"",
"%",
"f",
")",
")",
"return",
"F",
".",
"refine_root",
"(",
"s",
",",
"t",
",",
"eps",
"=",
"eps",
",",
"steps",
"=",
"steps",
",",
"fast",
"=",
"fast",
",",
"check_sqf",
"=",
"check_sqf",
")"
] | refine an isolating interval of a root to the given precision . | train | false |
842 | def _get_candidate_names():
global _name_sequence
if (_name_sequence is None):
_once_lock.acquire()
try:
if (_name_sequence is None):
_name_sequence = _RandomNameSequence()
finally:
_once_lock.release()
return _name_sequence
| [
"def",
"_get_candidate_names",
"(",
")",
":",
"global",
"_name_sequence",
"if",
"(",
"_name_sequence",
"is",
"None",
")",
":",
"_once_lock",
".",
"acquire",
"(",
")",
"try",
":",
"if",
"(",
"_name_sequence",
"is",
"None",
")",
":",
"_name_sequence",
"=",
"_RandomNameSequence",
"(",
")",
"finally",
":",
"_once_lock",
".",
"release",
"(",
")",
"return",
"_name_sequence"
] | common setup sequence for all user-callable interfaces . | train | true |
844 | def color_dict_to_objects(d, colorspace='hsv'):
result = {}
for (k, v) in d.items():
result[k] = Color(k, v, colorspace)
return result
| [
"def",
"color_dict_to_objects",
"(",
"d",
",",
"colorspace",
"=",
"'hsv'",
")",
":",
"result",
"=",
"{",
"}",
"for",
"(",
"k",
",",
"v",
")",
"in",
"d",
".",
"items",
"(",
")",
":",
"result",
"[",
"k",
"]",
"=",
"Color",
"(",
"k",
",",
"v",
",",
"colorspace",
")",
"return",
"result"
] | converts color dict to dict of color objects . | train | false |
845 | def monomial_key(order=None, gens=None):
if (order is None):
order = lex
if isinstance(order, Symbol):
order = str(order)
if isinstance(order, str):
try:
order = _monomial_key[order]
except KeyError:
raise ValueError(("supported monomial orderings are 'lex', 'grlex' and 'grevlex', got %r" % order))
if hasattr(order, '__call__'):
if (gens is not None):
def _order(expr):
return order(expr.as_poly(*gens).degree_list())
return _order
return order
else:
raise ValueError(('monomial ordering specification must be a string or a callable, got %s' % order))
| [
"def",
"monomial_key",
"(",
"order",
"=",
"None",
",",
"gens",
"=",
"None",
")",
":",
"if",
"(",
"order",
"is",
"None",
")",
":",
"order",
"=",
"lex",
"if",
"isinstance",
"(",
"order",
",",
"Symbol",
")",
":",
"order",
"=",
"str",
"(",
"order",
")",
"if",
"isinstance",
"(",
"order",
",",
"str",
")",
":",
"try",
":",
"order",
"=",
"_monomial_key",
"[",
"order",
"]",
"except",
"KeyError",
":",
"raise",
"ValueError",
"(",
"(",
"\"supported monomial orderings are 'lex', 'grlex' and 'grevlex', got %r\"",
"%",
"order",
")",
")",
"if",
"hasattr",
"(",
"order",
",",
"'__call__'",
")",
":",
"if",
"(",
"gens",
"is",
"not",
"None",
")",
":",
"def",
"_order",
"(",
"expr",
")",
":",
"return",
"order",
"(",
"expr",
".",
"as_poly",
"(",
"*",
"gens",
")",
".",
"degree_list",
"(",
")",
")",
"return",
"_order",
"return",
"order",
"else",
":",
"raise",
"ValueError",
"(",
"(",
"'monomial ordering specification must be a string or a callable, got %s'",
"%",
"order",
")",
")"
] | return a function defining admissible order on monomials . | train | false |
846 | def mac_only(request):
context = {'title': 'Supports OSX Only'}
template = 'general/ios.html'
return render(request, template, context)
| [
"def",
"mac_only",
"(",
"request",
")",
":",
"context",
"=",
"{",
"'title'",
":",
"'Supports OSX Only'",
"}",
"template",
"=",
"'general/ios.html'",
"return",
"render",
"(",
"request",
",",
"template",
",",
"context",
")"
] | mac ony message route . | train | false |
847 | def ensure_valid_usage_key(view_func):
@wraps(view_func)
def inner(request, *args, **kwargs):
usage_key = kwargs.get('usage_key_string')
if (usage_key is not None):
try:
UsageKey.from_string(usage_key)
except InvalidKeyError:
raise Http404
response = view_func(request, *args, **kwargs)
return response
return inner
| [
"def",
"ensure_valid_usage_key",
"(",
"view_func",
")",
":",
"@",
"wraps",
"(",
"view_func",
")",
"def",
"inner",
"(",
"request",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"usage_key",
"=",
"kwargs",
".",
"get",
"(",
"'usage_key_string'",
")",
"if",
"(",
"usage_key",
"is",
"not",
"None",
")",
":",
"try",
":",
"UsageKey",
".",
"from_string",
"(",
"usage_key",
")",
"except",
"InvalidKeyError",
":",
"raise",
"Http404",
"response",
"=",
"view_func",
"(",
"request",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
"return",
"response",
"return",
"inner"
] | this decorator should only be used with views which have argument usage_key_string . | train | false |
848 | def validate_colors_dict(colors, colortype='tuple'):
for key in colors:
if ('rgb' in colors[key]):
colors[key] = color_parser(colors[key], unlabel_rgb)
for value in colors[key]:
if (value > 255.0):
raise exceptions.PlotlyError('Whoops! The elements in your rgb colors tuples cannot exceed 255.0.')
colors[key] = color_parser(colors[key], unconvert_from_RGB_255)
if ('#' in colors[key]):
colors[key] = color_parser(colors[key], hex_to_rgb)
colors[key] = color_parser(colors[key], unconvert_from_RGB_255)
if isinstance(colors[key], tuple):
for value in colors[key]:
if (value > 1.0):
raise exceptions.PlotlyError('Whoops! The elements in your colors tuples cannot exceed 1.0.')
if (colortype == 'rgb'):
for key in colors:
colors[key] = color_parser(colors[key], convert_to_RGB_255)
colors[key] = color_parser(colors[key], label_rgb)
return colors
| [
"def",
"validate_colors_dict",
"(",
"colors",
",",
"colortype",
"=",
"'tuple'",
")",
":",
"for",
"key",
"in",
"colors",
":",
"if",
"(",
"'rgb'",
"in",
"colors",
"[",
"key",
"]",
")",
":",
"colors",
"[",
"key",
"]",
"=",
"color_parser",
"(",
"colors",
"[",
"key",
"]",
",",
"unlabel_rgb",
")",
"for",
"value",
"in",
"colors",
"[",
"key",
"]",
":",
"if",
"(",
"value",
">",
"255.0",
")",
":",
"raise",
"exceptions",
".",
"PlotlyError",
"(",
"'Whoops! The elements in your rgb colors tuples cannot exceed 255.0.'",
")",
"colors",
"[",
"key",
"]",
"=",
"color_parser",
"(",
"colors",
"[",
"key",
"]",
",",
"unconvert_from_RGB_255",
")",
"if",
"(",
"'#'",
"in",
"colors",
"[",
"key",
"]",
")",
":",
"colors",
"[",
"key",
"]",
"=",
"color_parser",
"(",
"colors",
"[",
"key",
"]",
",",
"hex_to_rgb",
")",
"colors",
"[",
"key",
"]",
"=",
"color_parser",
"(",
"colors",
"[",
"key",
"]",
",",
"unconvert_from_RGB_255",
")",
"if",
"isinstance",
"(",
"colors",
"[",
"key",
"]",
",",
"tuple",
")",
":",
"for",
"value",
"in",
"colors",
"[",
"key",
"]",
":",
"if",
"(",
"value",
">",
"1.0",
")",
":",
"raise",
"exceptions",
".",
"PlotlyError",
"(",
"'Whoops! The elements in your colors tuples cannot exceed 1.0.'",
")",
"if",
"(",
"colortype",
"==",
"'rgb'",
")",
":",
"for",
"key",
"in",
"colors",
":",
"colors",
"[",
"key",
"]",
"=",
"color_parser",
"(",
"colors",
"[",
"key",
"]",
",",
"convert_to_RGB_255",
")",
"colors",
"[",
"key",
"]",
"=",
"color_parser",
"(",
"colors",
"[",
"key",
"]",
",",
"label_rgb",
")",
"return",
"colors"
] | validates dictioanry of color(s) . | train | false |
849 | def encode_thumbnail(thumbnail):
from calibre.utils.imghdr import identify
if (thumbnail is None):
return None
if (not isinstance(thumbnail, (tuple, list))):
try:
(width, height) = identify(bytes(thumbnail))[1:]
if ((width < 0) or (height < 0)):
return None
thumbnail = (width, height, thumbnail)
except Exception:
return None
return (thumbnail[0], thumbnail[1], b64encode(str(thumbnail[2])))
| [
"def",
"encode_thumbnail",
"(",
"thumbnail",
")",
":",
"from",
"calibre",
".",
"utils",
".",
"imghdr",
"import",
"identify",
"if",
"(",
"thumbnail",
"is",
"None",
")",
":",
"return",
"None",
"if",
"(",
"not",
"isinstance",
"(",
"thumbnail",
",",
"(",
"tuple",
",",
"list",
")",
")",
")",
":",
"try",
":",
"(",
"width",
",",
"height",
")",
"=",
"identify",
"(",
"bytes",
"(",
"thumbnail",
")",
")",
"[",
"1",
":",
"]",
"if",
"(",
"(",
"width",
"<",
"0",
")",
"or",
"(",
"height",
"<",
"0",
")",
")",
":",
"return",
"None",
"thumbnail",
"=",
"(",
"width",
",",
"height",
",",
"thumbnail",
")",
"except",
"Exception",
":",
"return",
"None",
"return",
"(",
"thumbnail",
"[",
"0",
"]",
",",
"thumbnail",
"[",
"1",
"]",
",",
"b64encode",
"(",
"str",
"(",
"thumbnail",
"[",
"2",
"]",
")",
")",
")"
] | encode the image part of a thumbnail . | train | false |
850 | def append_domain():
grain = {}
if salt.utils.is_proxy():
return grain
if ('append_domain' in __opts__):
grain['append_domain'] = __opts__['append_domain']
return grain
| [
"def",
"append_domain",
"(",
")",
":",
"grain",
"=",
"{",
"}",
"if",
"salt",
".",
"utils",
".",
"is_proxy",
"(",
")",
":",
"return",
"grain",
"if",
"(",
"'append_domain'",
"in",
"__opts__",
")",
":",
"grain",
"[",
"'append_domain'",
"]",
"=",
"__opts__",
"[",
"'append_domain'",
"]",
"return",
"grain"
] | return append_domain if set . | train | true |
851 | def print_file(fileName, printFunction):
printer = QPrinter(QPrinter.HighResolution)
printer.setPageSize(QPrinter.A4)
printer.setOutputFileName(fileName)
printer.setDocName(fileName)
preview = QPrintPreviewDialog(printer)
preview.paintRequested[QPrinter].connect(printFunction)
size = QApplication.instance().desktop().screenGeometry()
width = (size.width() - 100)
height = (size.height() - 100)
preview.setMinimumSize(width, height)
preview.exec_()
| [
"def",
"print_file",
"(",
"fileName",
",",
"printFunction",
")",
":",
"printer",
"=",
"QPrinter",
"(",
"QPrinter",
".",
"HighResolution",
")",
"printer",
".",
"setPageSize",
"(",
"QPrinter",
".",
"A4",
")",
"printer",
".",
"setOutputFileName",
"(",
"fileName",
")",
"printer",
".",
"setDocName",
"(",
"fileName",
")",
"preview",
"=",
"QPrintPreviewDialog",
"(",
"printer",
")",
"preview",
".",
"paintRequested",
"[",
"QPrinter",
"]",
".",
"connect",
"(",
"printFunction",
")",
"size",
"=",
"QApplication",
".",
"instance",
"(",
")",
".",
"desktop",
"(",
")",
".",
"screenGeometry",
"(",
")",
"width",
"=",
"(",
"size",
".",
"width",
"(",
")",
"-",
"100",
")",
"height",
"=",
"(",
"size",
".",
"height",
"(",
")",
"-",
"100",
")",
"preview",
".",
"setMinimumSize",
"(",
"width",
",",
"height",
")",
"preview",
".",
"exec_",
"(",
")"
] | this method print a file this method print a file . | train | false |
852 | def open_project_with_extensions(path, extensions):
if (not os.path.exists(path)):
raise NinjaIOException(u'The folder does not exist')
valid_extensions = [ext.lower() for ext in extensions if (not ext.startswith(u'-'))]
d = {}
for (root, dirs, files) in os.walk(path, followlinks=True):
for f in files:
ext = os.path.splitext(f.lower())[(-1)]
if ((ext in valid_extensions) or (u'.*' in valid_extensions)):
d[root] = [f, dirs]
elif ((ext == u'') and (u'*' in valid_extensions)):
d[root] = [f, dirs]
return d
| [
"def",
"open_project_with_extensions",
"(",
"path",
",",
"extensions",
")",
":",
"if",
"(",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
")",
":",
"raise",
"NinjaIOException",
"(",
"u'The folder does not exist'",
")",
"valid_extensions",
"=",
"[",
"ext",
".",
"lower",
"(",
")",
"for",
"ext",
"in",
"extensions",
"if",
"(",
"not",
"ext",
".",
"startswith",
"(",
"u'-'",
")",
")",
"]",
"d",
"=",
"{",
"}",
"for",
"(",
"root",
",",
"dirs",
",",
"files",
")",
"in",
"os",
".",
"walk",
"(",
"path",
",",
"followlinks",
"=",
"True",
")",
":",
"for",
"f",
"in",
"files",
":",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"f",
".",
"lower",
"(",
")",
")",
"[",
"(",
"-",
"1",
")",
"]",
"if",
"(",
"(",
"ext",
"in",
"valid_extensions",
")",
"or",
"(",
"u'.*'",
"in",
"valid_extensions",
")",
")",
":",
"d",
"[",
"root",
"]",
"=",
"[",
"f",
",",
"dirs",
"]",
"elif",
"(",
"(",
"ext",
"==",
"u''",
")",
"and",
"(",
"u'*'",
"in",
"valid_extensions",
")",
")",
":",
"d",
"[",
"root",
"]",
"=",
"[",
"f",
",",
"dirs",
"]",
"return",
"d"
] | return a dict structure containing the info inside a folder . | train | false |
853 | def run_job(tasks):
log.debug(u'executing tasks: %s', tasks)
finished_events = manager.execute(options={u'tasks': tasks, u'cron': True, u'allow_manual': False}, priority=5)
for (_, task_name, event_) in finished_events:
log.debug(u'task finished executing: %s', task_name)
event_.wait()
log.debug(u'all tasks in schedule finished executing')
| [
"def",
"run_job",
"(",
"tasks",
")",
":",
"log",
".",
"debug",
"(",
"u'executing tasks: %s'",
",",
"tasks",
")",
"finished_events",
"=",
"manager",
".",
"execute",
"(",
"options",
"=",
"{",
"u'tasks'",
":",
"tasks",
",",
"u'cron'",
":",
"True",
",",
"u'allow_manual'",
":",
"False",
"}",
",",
"priority",
"=",
"5",
")",
"for",
"(",
"_",
",",
"task_name",
",",
"event_",
")",
"in",
"finished_events",
":",
"log",
".",
"debug",
"(",
"u'task finished executing: %s'",
",",
"task_name",
")",
"event_",
".",
"wait",
"(",
")",
"log",
".",
"debug",
"(",
"u'all tasks in schedule finished executing'",
")"
] | run a scheduled job on the minion immediately cli example: . | train | false |
854 | def places_photo(client, photo_reference, max_width=None, max_height=None):
if (not (max_width or max_height)):
raise ValueError('a max_width or max_height arg is required')
params = {'photoreference': photo_reference}
if max_width:
params['maxwidth'] = max_width
if max_height:
params['maxheight'] = max_height
response = client._get('/maps/api/place/photo', params, extract_body=(lambda response: response), requests_kwargs={'stream': True})
return response.iter_content()
| [
"def",
"places_photo",
"(",
"client",
",",
"photo_reference",
",",
"max_width",
"=",
"None",
",",
"max_height",
"=",
"None",
")",
":",
"if",
"(",
"not",
"(",
"max_width",
"or",
"max_height",
")",
")",
":",
"raise",
"ValueError",
"(",
"'a max_width or max_height arg is required'",
")",
"params",
"=",
"{",
"'photoreference'",
":",
"photo_reference",
"}",
"if",
"max_width",
":",
"params",
"[",
"'maxwidth'",
"]",
"=",
"max_width",
"if",
"max_height",
":",
"params",
"[",
"'maxheight'",
"]",
"=",
"max_height",
"response",
"=",
"client",
".",
"_get",
"(",
"'/maps/api/place/photo'",
",",
"params",
",",
"extract_body",
"=",
"(",
"lambda",
"response",
":",
"response",
")",
",",
"requests_kwargs",
"=",
"{",
"'stream'",
":",
"True",
"}",
")",
"return",
"response",
".",
"iter_content",
"(",
")"
] | downloads a photo from the places api . | train | true |
855 | def format_args(args=None, kwargs=None):
if (args is not None):
arglist = [utils.compact_text(repr(arg), 200) for arg in args]
else:
arglist = []
if (kwargs is not None):
for (k, v) in kwargs.items():
arglist.append('{}={}'.format(k, utils.compact_text(repr(v), 200)))
return ', '.join(arglist)
| [
"def",
"format_args",
"(",
"args",
"=",
"None",
",",
"kwargs",
"=",
"None",
")",
":",
"if",
"(",
"args",
"is",
"not",
"None",
")",
":",
"arglist",
"=",
"[",
"utils",
".",
"compact_text",
"(",
"repr",
"(",
"arg",
")",
",",
"200",
")",
"for",
"arg",
"in",
"args",
"]",
"else",
":",
"arglist",
"=",
"[",
"]",
"if",
"(",
"kwargs",
"is",
"not",
"None",
")",
":",
"for",
"(",
"k",
",",
"v",
")",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"arglist",
".",
"append",
"(",
"'{}={}'",
".",
"format",
"(",
"k",
",",
"utils",
".",
"compact_text",
"(",
"repr",
"(",
"v",
")",
",",
"200",
")",
")",
")",
"return",
"', '",
".",
"join",
"(",
"arglist",
")"
] | format a list of arguments/kwargs to a function-call like string . | train | false |
858 | def getFileText(fileName, printWarning=True, readMode='r'):
try:
file = open(fileName, readMode)
fileText = file.read()
file.close()
return fileText
except IOError:
if printWarning:
print (('The file ' + fileName) + ' does not exist.')
return ''
| [
"def",
"getFileText",
"(",
"fileName",
",",
"printWarning",
"=",
"True",
",",
"readMode",
"=",
"'r'",
")",
":",
"try",
":",
"file",
"=",
"open",
"(",
"fileName",
",",
"readMode",
")",
"fileText",
"=",
"file",
".",
"read",
"(",
")",
"file",
".",
"close",
"(",
")",
"return",
"fileText",
"except",
"IOError",
":",
"if",
"printWarning",
":",
"print",
"(",
"(",
"'The file '",
"+",
"fileName",
")",
"+",
"' does not exist.'",
")",
"return",
"''"
] | get the entire text of a file . | train | false |
859 | def get_rng_state():
return default_generator.get_state()
| [
"def",
"get_rng_state",
"(",
")",
":",
"return",
"default_generator",
".",
"get_state",
"(",
")"
] | returns the random number generator state as a bytetensor . | train | false |
860 | def _date_now():
now = time.time()
date_arr = np.array([np.floor(now), (1000000.0 * (now - np.floor(now)))], dtype='int32')
return date_arr
| [
"def",
"_date_now",
"(",
")",
":",
"now",
"=",
"time",
".",
"time",
"(",
")",
"date_arr",
"=",
"np",
".",
"array",
"(",
"[",
"np",
".",
"floor",
"(",
"now",
")",
",",
"(",
"1000000.0",
"*",
"(",
"now",
"-",
"np",
".",
"floor",
"(",
"now",
")",
")",
")",
"]",
",",
"dtype",
"=",
"'int32'",
")",
"return",
"date_arr"
] | get date in secs . | train | false |
861 | @profiler.trace
def flavor_get_extras(request, flavor_id, raw=False, flavor=None):
if (flavor is None):
flavor = novaclient(request).flavors.get(flavor_id)
extras = flavor.get_keys()
if raw:
return extras
return [FlavorExtraSpec(flavor_id, key, value) for (key, value) in extras.items()]
| [
"@",
"profiler",
".",
"trace",
"def",
"flavor_get_extras",
"(",
"request",
",",
"flavor_id",
",",
"raw",
"=",
"False",
",",
"flavor",
"=",
"None",
")",
":",
"if",
"(",
"flavor",
"is",
"None",
")",
":",
"flavor",
"=",
"novaclient",
"(",
"request",
")",
".",
"flavors",
".",
"get",
"(",
"flavor_id",
")",
"extras",
"=",
"flavor",
".",
"get_keys",
"(",
")",
"if",
"raw",
":",
"return",
"extras",
"return",
"[",
"FlavorExtraSpec",
"(",
"flavor_id",
",",
"key",
",",
"value",
")",
"for",
"(",
"key",
",",
"value",
")",
"in",
"extras",
".",
"items",
"(",
")",
"]"
] | get flavor extra specs . | train | true |
862 | def _dict_to_name_value(data):
if isinstance(data, dict):
sorted_data = sorted(data.items(), key=(lambda s: s[0]))
result = []
for (name, value) in sorted_data:
if isinstance(value, dict):
result.append({name: _dict_to_name_value(value)})
else:
result.append({name: value})
else:
result = data
return result
| [
"def",
"_dict_to_name_value",
"(",
"data",
")",
":",
"if",
"isinstance",
"(",
"data",
",",
"dict",
")",
":",
"sorted_data",
"=",
"sorted",
"(",
"data",
".",
"items",
"(",
")",
",",
"key",
"=",
"(",
"lambda",
"s",
":",
"s",
"[",
"0",
"]",
")",
")",
"result",
"=",
"[",
"]",
"for",
"(",
"name",
",",
"value",
")",
"in",
"sorted_data",
":",
"if",
"isinstance",
"(",
"value",
",",
"dict",
")",
":",
"result",
".",
"append",
"(",
"{",
"name",
":",
"_dict_to_name_value",
"(",
"value",
")",
"}",
")",
"else",
":",
"result",
".",
"append",
"(",
"{",
"name",
":",
"value",
"}",
")",
"else",
":",
"result",
"=",
"data",
"return",
"result"
] | convert a dictionary to a list of dictionaries to facilitate ordering . | train | true |
864 | def _SetVerboseLevel(level):
return _cpplint_state.SetVerboseLevel(level)
| [
"def",
"_SetVerboseLevel",
"(",
"level",
")",
":",
"return",
"_cpplint_state",
".",
"SetVerboseLevel",
"(",
"level",
")"
] | sets the modules verbosity . | train | false |
865 | def get_provider_user_states(user):
states = []
found_user_auths = list(models.DjangoStorage.user.get_social_auth_for_user(user))
for enabled_provider in provider.Registry.enabled():
association = None
for auth in found_user_auths:
if enabled_provider.match_social_auth(auth):
association = auth
break
if (enabled_provider.accepts_logins or association):
states.append(ProviderUserState(enabled_provider, user, association))
return states
| [
"def",
"get_provider_user_states",
"(",
"user",
")",
":",
"states",
"=",
"[",
"]",
"found_user_auths",
"=",
"list",
"(",
"models",
".",
"DjangoStorage",
".",
"user",
".",
"get_social_auth_for_user",
"(",
"user",
")",
")",
"for",
"enabled_provider",
"in",
"provider",
".",
"Registry",
".",
"enabled",
"(",
")",
":",
"association",
"=",
"None",
"for",
"auth",
"in",
"found_user_auths",
":",
"if",
"enabled_provider",
".",
"match_social_auth",
"(",
"auth",
")",
":",
"association",
"=",
"auth",
"break",
"if",
"(",
"enabled_provider",
".",
"accepts_logins",
"or",
"association",
")",
":",
"states",
".",
"append",
"(",
"ProviderUserState",
"(",
"enabled_provider",
",",
"user",
",",
"association",
")",
")",
"return",
"states"
] | gets list of states of provider-user combinations . | train | false |
867 | def nullable(property_schema):
new_schema = property_schema.copy()
new_schema['type'] = [property_schema['type'], 'null']
return new_schema
| [
"def",
"nullable",
"(",
"property_schema",
")",
":",
"new_schema",
"=",
"property_schema",
".",
"copy",
"(",
")",
"new_schema",
"[",
"'type'",
"]",
"=",
"[",
"property_schema",
"[",
"'type'",
"]",
",",
"'null'",
"]",
"return",
"new_schema"
] | clone a property schema into one that is nullable . | train | false |
868 | def make_async(func):
def make_me_async(*args, **kwargs):
async_func = AsyncRunner(func)
async_func(*args, **kwargs)
return async_func
return make_me_async
| [
"def",
"make_async",
"(",
"func",
")",
":",
"def",
"make_me_async",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"async_func",
"=",
"AsyncRunner",
"(",
"func",
")",
"async_func",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
"return",
"async_func",
"return",
"make_me_async"
] | decorate methods to be run as qthreads . | train | false |
870 | @then(u'we see database created')
def step_see_db_created(context):
_expect_exact(context, u'CREATE DATABASE', timeout=2)
| [
"@",
"then",
"(",
"u'we see database created'",
")",
"def",
"step_see_db_created",
"(",
"context",
")",
":",
"_expect_exact",
"(",
"context",
",",
"u'CREATE DATABASE'",
",",
"timeout",
"=",
"2",
")"
] | wait to see create database output . | train | false |
871 | def getPathByPrefix(path, prefix, xmlElement):
if (len(path) < 2):
print 'Warning, bug, path is too small in evaluate in setPathByPrefix.'
return
pathByKey = getPathByKey((prefix + 'path'), xmlElement)
if (len(pathByKey) < len(path)):
for pointIndex in xrange(len(pathByKey)):
path[pointIndex] = pathByKey[pointIndex]
else:
path = pathByKey
path[0] = getVector3ByPrefix(path[0], (prefix + 'pathStart'), xmlElement)
path[(-1)] = getVector3ByPrefix(path[(-1)], (prefix + 'pathEnd'), xmlElement)
return path
| [
"def",
"getPathByPrefix",
"(",
"path",
",",
"prefix",
",",
"xmlElement",
")",
":",
"if",
"(",
"len",
"(",
"path",
")",
"<",
"2",
")",
":",
"print",
"'Warning, bug, path is too small in evaluate in setPathByPrefix.'",
"return",
"pathByKey",
"=",
"getPathByKey",
"(",
"(",
"prefix",
"+",
"'path'",
")",
",",
"xmlElement",
")",
"if",
"(",
"len",
"(",
"pathByKey",
")",
"<",
"len",
"(",
"path",
")",
")",
":",
"for",
"pointIndex",
"in",
"xrange",
"(",
"len",
"(",
"pathByKey",
")",
")",
":",
"path",
"[",
"pointIndex",
"]",
"=",
"pathByKey",
"[",
"pointIndex",
"]",
"else",
":",
"path",
"=",
"pathByKey",
"path",
"[",
"0",
"]",
"=",
"getVector3ByPrefix",
"(",
"path",
"[",
"0",
"]",
",",
"(",
"prefix",
"+",
"'pathStart'",
")",
",",
"xmlElement",
")",
"path",
"[",
"(",
"-",
"1",
")",
"]",
"=",
"getVector3ByPrefix",
"(",
"path",
"[",
"(",
"-",
"1",
")",
"]",
",",
"(",
"prefix",
"+",
"'pathEnd'",
")",
",",
"xmlElement",
")",
"return",
"path"
] | get path from prefix and xml element . | train | false |
872 | def _ValidateAFF4Type(aff4_type):
if (aff4_type is None):
return None
if (not isinstance(aff4_type, type)):
raise TypeError(('aff4_type=%s must be a type' % aff4_type))
if (not issubclass(aff4_type, AFF4Object)):
raise TypeError(('aff4_type=%s must be a subclass of AFF4Object.' % aff4_type))
return aff4_type
| [
"def",
"_ValidateAFF4Type",
"(",
"aff4_type",
")",
":",
"if",
"(",
"aff4_type",
"is",
"None",
")",
":",
"return",
"None",
"if",
"(",
"not",
"isinstance",
"(",
"aff4_type",
",",
"type",
")",
")",
":",
"raise",
"TypeError",
"(",
"(",
"'aff4_type=%s must be a type'",
"%",
"aff4_type",
")",
")",
"if",
"(",
"not",
"issubclass",
"(",
"aff4_type",
",",
"AFF4Object",
")",
")",
":",
"raise",
"TypeError",
"(",
"(",
"'aff4_type=%s must be a subclass of AFF4Object.'",
"%",
"aff4_type",
")",
")",
"return",
"aff4_type"
] | validates and normalizes aff4_type to class object . | train | true |
874 | def get_trailing_data(record, extra_data_flags):
data = OrderedDict()
flags = (extra_data_flags >> 1)
num = 0
while flags:
num += 1
if (flags & 1):
(sz, consumed) = decint(record, forward=False)
if (sz > consumed):
data[num] = record[(- sz):(- consumed)]
record = record[:(- sz)]
flags >>= 1
if (extra_data_flags & 1):
sz = ((ord(record[(-1)]) & 3) + 1)
consumed = 1
if (sz > consumed):
data[0] = record[(- sz):(- consumed)]
record = record[:(- sz)]
return (data, record)
| [
"def",
"get_trailing_data",
"(",
"record",
",",
"extra_data_flags",
")",
":",
"data",
"=",
"OrderedDict",
"(",
")",
"flags",
"=",
"(",
"extra_data_flags",
">>",
"1",
")",
"num",
"=",
"0",
"while",
"flags",
":",
"num",
"+=",
"1",
"if",
"(",
"flags",
"&",
"1",
")",
":",
"(",
"sz",
",",
"consumed",
")",
"=",
"decint",
"(",
"record",
",",
"forward",
"=",
"False",
")",
"if",
"(",
"sz",
">",
"consumed",
")",
":",
"data",
"[",
"num",
"]",
"=",
"record",
"[",
"(",
"-",
"sz",
")",
":",
"(",
"-",
"consumed",
")",
"]",
"record",
"=",
"record",
"[",
":",
"(",
"-",
"sz",
")",
"]",
"flags",
">>=",
"1",
"if",
"(",
"extra_data_flags",
"&",
"1",
")",
":",
"sz",
"=",
"(",
"(",
"ord",
"(",
"record",
"[",
"(",
"-",
"1",
")",
"]",
")",
"&",
"3",
")",
"+",
"1",
")",
"consumed",
"=",
"1",
"if",
"(",
"sz",
">",
"consumed",
")",
":",
"data",
"[",
"0",
"]",
"=",
"record",
"[",
"(",
"-",
"sz",
")",
":",
"(",
"-",
"consumed",
")",
"]",
"record",
"=",
"record",
"[",
":",
"(",
"-",
"sz",
")",
"]",
"return",
"(",
"data",
",",
"record",
")"
] | given a text record as a bytestring and the extra data flags from the mobi header . | train | false |
875 | def is_installable_dir(path):
if (not os.path.isdir(path)):
return False
setup_py = os.path.join(path, 'setup.py')
if os.path.isfile(setup_py):
return True
return False
| [
"def",
"is_installable_dir",
"(",
"path",
")",
":",
"if",
"(",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
")",
":",
"return",
"False",
"setup_py",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"'setup.py'",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"setup_py",
")",
":",
"return",
"True",
"return",
"False"
] | return true if path is a directory containing a setup . | train | true |
877 | def color_validator(optdict, name, value):
return optik_ext.check_color(None, name, value)
| [
"def",
"color_validator",
"(",
"optdict",
",",
"name",
",",
"value",
")",
":",
"return",
"optik_ext",
".",
"check_color",
"(",
"None",
",",
"name",
",",
"value",
")"
] | validate and return a valid color for option of type color . | train | false |
878 | def datetime_to_long(dt):
return timedelta_to_usecs((dt - dt.min))
| [
"def",
"datetime_to_long",
"(",
"dt",
")",
":",
"return",
"timedelta_to_usecs",
"(",
"(",
"dt",
"-",
"dt",
".",
"min",
")",
")"
] | converts a datetime object to a long integer representing the number of microseconds since datetime . | train | false |
879 | def staff_only_view(request):
if request.user.is_staff:
return HttpResponse('')
else:
raise SuspiciousOperation()
| [
"def",
"staff_only_view",
"(",
"request",
")",
":",
"if",
"request",
".",
"user",
".",
"is_staff",
":",
"return",
"HttpResponse",
"(",
"''",
")",
"else",
":",
"raise",
"SuspiciousOperation",
"(",
")"
] | a view that can only be visited by staff . | train | false |
880 | def test_gaussian_mssim_vs_author_ref():
mssim_matlab = 0.327314295673357
mssim = ssim(cam, cam_noisy, gaussian_weights=True, use_sample_covariance=False)
assert_almost_equal(mssim, mssim_matlab, decimal=3)
| [
"def",
"test_gaussian_mssim_vs_author_ref",
"(",
")",
":",
"mssim_matlab",
"=",
"0.327314295673357",
"mssim",
"=",
"ssim",
"(",
"cam",
",",
"cam_noisy",
",",
"gaussian_weights",
"=",
"True",
",",
"use_sample_covariance",
"=",
"False",
")",
"assert_almost_equal",
"(",
"mssim",
",",
"mssim_matlab",
",",
"decimal",
"=",
"3",
")"
] | test vs . | train | false |
883 | def _delete_current_allocs(conn, allocs):
for alloc in allocs:
rp_id = alloc.resource_provider.id
consumer_id = alloc.consumer_id
del_sql = _ALLOC_TBL.delete().where(sa.and_((_ALLOC_TBL.c.resource_provider_id == rp_id), (_ALLOC_TBL.c.consumer_id == consumer_id)))
conn.execute(del_sql)
| [
"def",
"_delete_current_allocs",
"(",
"conn",
",",
"allocs",
")",
":",
"for",
"alloc",
"in",
"allocs",
":",
"rp_id",
"=",
"alloc",
".",
"resource_provider",
".",
"id",
"consumer_id",
"=",
"alloc",
".",
"consumer_id",
"del_sql",
"=",
"_ALLOC_TBL",
".",
"delete",
"(",
")",
".",
"where",
"(",
"sa",
".",
"and_",
"(",
"(",
"_ALLOC_TBL",
".",
"c",
".",
"resource_provider_id",
"==",
"rp_id",
")",
",",
"(",
"_ALLOC_TBL",
".",
"c",
".",
"consumer_id",
"==",
"consumer_id",
")",
")",
")",
"conn",
".",
"execute",
"(",
"del_sql",
")"
] | deletes any existing allocations that correspond to the allocations to be written . | train | false |
884 | def akasLanguages(movie):
lang_and_aka = []
akas = set(((movie.get('akas') or []) + (movie.get('akas from release info') or [])))
for aka in akas:
aka = aka.encode('utf8').split('::')
if (len(aka) == 2):
language = _re_akas_lang.search(aka[1])
if language:
language = language.groups()[0]
else:
country = aka[1].split(',')[0]
country = _re_akas_country.sub('', country).strip()
language = COUNTRY_LANG.get(country)
else:
language = None
lang_and_aka.append((language, aka[0].decode('utf8')))
return lang_and_aka
| [
"def",
"akasLanguages",
"(",
"movie",
")",
":",
"lang_and_aka",
"=",
"[",
"]",
"akas",
"=",
"set",
"(",
"(",
"(",
"movie",
".",
"get",
"(",
"'akas'",
")",
"or",
"[",
"]",
")",
"+",
"(",
"movie",
".",
"get",
"(",
"'akas from release info'",
")",
"or",
"[",
"]",
")",
")",
")",
"for",
"aka",
"in",
"akas",
":",
"aka",
"=",
"aka",
".",
"encode",
"(",
"'utf8'",
")",
".",
"split",
"(",
"'::'",
")",
"if",
"(",
"len",
"(",
"aka",
")",
"==",
"2",
")",
":",
"language",
"=",
"_re_akas_lang",
".",
"search",
"(",
"aka",
"[",
"1",
"]",
")",
"if",
"language",
":",
"language",
"=",
"language",
".",
"groups",
"(",
")",
"[",
"0",
"]",
"else",
":",
"country",
"=",
"aka",
"[",
"1",
"]",
".",
"split",
"(",
"','",
")",
"[",
"0",
"]",
"country",
"=",
"_re_akas_country",
".",
"sub",
"(",
"''",
",",
"country",
")",
".",
"strip",
"(",
")",
"language",
"=",
"COUNTRY_LANG",
".",
"get",
"(",
"country",
")",
"else",
":",
"language",
"=",
"None",
"lang_and_aka",
".",
"append",
"(",
"(",
"language",
",",
"aka",
"[",
"0",
"]",
".",
"decode",
"(",
"'utf8'",
")",
")",
")",
"return",
"lang_and_aka"
] | given a movie . | train | false |
885 | def boto_fix_security_token_in_profile(conn, profile_name):
profile = ('profile ' + profile_name)
if boto.config.has_option(profile, 'aws_security_token'):
conn.provider.set_security_token(boto.config.get(profile, 'aws_security_token'))
return conn
| [
"def",
"boto_fix_security_token_in_profile",
"(",
"conn",
",",
"profile_name",
")",
":",
"profile",
"=",
"(",
"'profile '",
"+",
"profile_name",
")",
"if",
"boto",
".",
"config",
".",
"has_option",
"(",
"profile",
",",
"'aws_security_token'",
")",
":",
"conn",
".",
"provider",
".",
"set_security_token",
"(",
"boto",
".",
"config",
".",
"get",
"(",
"profile",
",",
"'aws_security_token'",
")",
")",
"return",
"conn"
] | monkey patch for boto issue boto/boto#2100 . | train | false |
886 | def _validate_regex(pattern, flags):
with warnings.catch_warnings(record=True) as recorded_warnings:
warnings.simplefilter('always')
try:
re.compile(pattern, flags)
except re.error as e:
raise configexc.ValidationError(pattern, ('must be a valid regex - ' + str(e)))
except RuntimeError:
raise configexc.ValidationError(pattern, 'must be a valid regex - recursion depth exceeded')
for w in recorded_warnings:
if (issubclass(w.category, DeprecationWarning) and str(w.message).startswith('bad escape')):
raise configexc.ValidationError(pattern, ('must be a valid regex - ' + str(w.message)))
else:
warnings.warn(w.message)
| [
"def",
"_validate_regex",
"(",
"pattern",
",",
"flags",
")",
":",
"with",
"warnings",
".",
"catch_warnings",
"(",
"record",
"=",
"True",
")",
"as",
"recorded_warnings",
":",
"warnings",
".",
"simplefilter",
"(",
"'always'",
")",
"try",
":",
"re",
".",
"compile",
"(",
"pattern",
",",
"flags",
")",
"except",
"re",
".",
"error",
"as",
"e",
":",
"raise",
"configexc",
".",
"ValidationError",
"(",
"pattern",
",",
"(",
"'must be a valid regex - '",
"+",
"str",
"(",
"e",
")",
")",
")",
"except",
"RuntimeError",
":",
"raise",
"configexc",
".",
"ValidationError",
"(",
"pattern",
",",
"'must be a valid regex - recursion depth exceeded'",
")",
"for",
"w",
"in",
"recorded_warnings",
":",
"if",
"(",
"issubclass",
"(",
"w",
".",
"category",
",",
"DeprecationWarning",
")",
"and",
"str",
"(",
"w",
".",
"message",
")",
".",
"startswith",
"(",
"'bad escape'",
")",
")",
":",
"raise",
"configexc",
".",
"ValidationError",
"(",
"pattern",
",",
"(",
"'must be a valid regex - '",
"+",
"str",
"(",
"w",
".",
"message",
")",
")",
")",
"else",
":",
"warnings",
".",
"warn",
"(",
"w",
".",
"message",
")"
] | check if the given regex is valid . | train | false |
887 | @bp.route('/<int:uid>/reply', methods=['POST', 'DELETE'])
@limit_request(5, redirect_url=(lambda uid: url_for('.view', uid=uid)))
@require_user
def reply(uid):
if (request.method == 'DELETE'):
reply_id = force_int(request.args.get('reply', 0), 0)
if (not reply_id):
return abort(404)
reply = Reply.query.get_or_404(reply_id)
if (not reply):
return abort(404)
if (reply.topic_id != uid):
return abort(404)
if (g.user.is_staff or (g.user.id == reply.account_id)):
reply.delete()
return jsonify(status='success')
return abort(403)
topic = Topic.query.get_or_404(uid)
form = ReplyForm()
if form.validate_on_submit():
form.save(g.user, topic)
else:
flash(_('Missing content'), 'error')
return redirect(url_for('.view', uid=uid))
| [
"@",
"bp",
".",
"route",
"(",
"'/<int:uid>/reply'",
",",
"methods",
"=",
"[",
"'POST'",
",",
"'DELETE'",
"]",
")",
"@",
"limit_request",
"(",
"5",
",",
"redirect_url",
"=",
"(",
"lambda",
"uid",
":",
"url_for",
"(",
"'.view'",
",",
"uid",
"=",
"uid",
")",
")",
")",
"@",
"require_user",
"def",
"reply",
"(",
"uid",
")",
":",
"if",
"(",
"request",
".",
"method",
"==",
"'DELETE'",
")",
":",
"reply_id",
"=",
"force_int",
"(",
"request",
".",
"args",
".",
"get",
"(",
"'reply'",
",",
"0",
")",
",",
"0",
")",
"if",
"(",
"not",
"reply_id",
")",
":",
"return",
"abort",
"(",
"404",
")",
"reply",
"=",
"Reply",
".",
"query",
".",
"get_or_404",
"(",
"reply_id",
")",
"if",
"(",
"not",
"reply",
")",
":",
"return",
"abort",
"(",
"404",
")",
"if",
"(",
"reply",
".",
"topic_id",
"!=",
"uid",
")",
":",
"return",
"abort",
"(",
"404",
")",
"if",
"(",
"g",
".",
"user",
".",
"is_staff",
"or",
"(",
"g",
".",
"user",
".",
"id",
"==",
"reply",
".",
"account_id",
")",
")",
":",
"reply",
".",
"delete",
"(",
")",
"return",
"jsonify",
"(",
"status",
"=",
"'success'",
")",
"return",
"abort",
"(",
"403",
")",
"topic",
"=",
"Topic",
".",
"query",
".",
"get_or_404",
"(",
"uid",
")",
"form",
"=",
"ReplyForm",
"(",
")",
"if",
"form",
".",
"validate_on_submit",
"(",
")",
":",
"form",
".",
"save",
"(",
"g",
".",
"user",
",",
"topic",
")",
"else",
":",
"flash",
"(",
"_",
"(",
"'Missing content'",
")",
",",
"'error'",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"'.view'",
",",
"uid",
"=",
"uid",
")",
")"
] | reply to a thread . | train | false |
888 | def throw(exception):
raise exception
| [
"def",
"throw",
"(",
"exception",
")",
":",
"raise",
"exception"
] | throw execption and show message . | train | false |
889 | def items(*args, **kwargs):
if args:
return item(*args)
pillarenv = kwargs.get('pillarenv')
if (pillarenv is None):
if __opts__.get('pillarenv_from_saltenv', False):
pillarenv = (kwargs.get('saltenv') or __opts__['environment'])
else:
pillarenv = __opts__.get('pillarenv')
opts = copy.copy(__opts__)
opts['pillarenv'] = pillarenv
pillar = salt.pillar.get_pillar(opts, __grains__, opts['id'], saltenv=pillarenv, pillar=kwargs.get('pillar'), pillarenv=(kwargs.get('pillarenv') or __opts__['pillarenv']))
return pillar.compile_pillar()
| [
"def",
"items",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"if",
"args",
":",
"return",
"item",
"(",
"*",
"args",
")",
"pillarenv",
"=",
"kwargs",
".",
"get",
"(",
"'pillarenv'",
")",
"if",
"(",
"pillarenv",
"is",
"None",
")",
":",
"if",
"__opts__",
".",
"get",
"(",
"'pillarenv_from_saltenv'",
",",
"False",
")",
":",
"pillarenv",
"=",
"(",
"kwargs",
".",
"get",
"(",
"'saltenv'",
")",
"or",
"__opts__",
"[",
"'environment'",
"]",
")",
"else",
":",
"pillarenv",
"=",
"__opts__",
".",
"get",
"(",
"'pillarenv'",
")",
"opts",
"=",
"copy",
".",
"copy",
"(",
"__opts__",
")",
"opts",
"[",
"'pillarenv'",
"]",
"=",
"pillarenv",
"pillar",
"=",
"salt",
".",
"pillar",
".",
"get_pillar",
"(",
"opts",
",",
"__grains__",
",",
"opts",
"[",
"'id'",
"]",
",",
"saltenv",
"=",
"pillarenv",
",",
"pillar",
"=",
"kwargs",
".",
"get",
"(",
"'pillar'",
")",
",",
"pillarenv",
"=",
"(",
"kwargs",
".",
"get",
"(",
"'pillarenv'",
")",
"or",
"__opts__",
"[",
"'pillarenv'",
"]",
")",
")",
"return",
"pillar",
".",
"compile_pillar",
"(",
")"
] | get items from the minion datastore . | train | false |
890 | def test_reader_macro():
entry = tokenize('#^()')
assert (entry[0][0] == HySymbol('dispatch_reader_macro'))
assert (entry[0][1] == HyString('^'))
assert (len(entry[0]) == 3)
| [
"def",
"test_reader_macro",
"(",
")",
":",
"entry",
"=",
"tokenize",
"(",
"'#^()'",
")",
"assert",
"(",
"entry",
"[",
"0",
"]",
"[",
"0",
"]",
"==",
"HySymbol",
"(",
"'dispatch_reader_macro'",
")",
")",
"assert",
"(",
"entry",
"[",
"0",
"]",
"[",
"1",
"]",
"==",
"HyString",
"(",
"'^'",
")",
")",
"assert",
"(",
"len",
"(",
"entry",
"[",
"0",
"]",
")",
"==",
"3",
")"
] | ensure reader macros are handles properly . | train | false |
891 | @cronjobs.register
def exclude_new_region(regions):
region_ids = [r.id for r in regions]
excluded = set(AddonExcludedRegion.objects.filter(region__in=region_ids).values_list('addon', flat=True))
ids = Webapp.objects.exclude(id__in=excluded).filter(enable_new_regions=False).values_list('id', flat=True)
_region_exclude(ids, region_ids)
| [
"@",
"cronjobs",
".",
"register",
"def",
"exclude_new_region",
"(",
"regions",
")",
":",
"region_ids",
"=",
"[",
"r",
".",
"id",
"for",
"r",
"in",
"regions",
"]",
"excluded",
"=",
"set",
"(",
"AddonExcludedRegion",
".",
"objects",
".",
"filter",
"(",
"region__in",
"=",
"region_ids",
")",
".",
"values_list",
"(",
"'addon'",
",",
"flat",
"=",
"True",
")",
")",
"ids",
"=",
"Webapp",
".",
"objects",
".",
"exclude",
"(",
"id__in",
"=",
"excluded",
")",
".",
"filter",
"(",
"enable_new_regions",
"=",
"False",
")",
".",
"values_list",
"(",
"'id'",
",",
"flat",
"=",
"True",
")",
"_region_exclude",
"(",
"ids",
",",
"region_ids",
")"
] | update blocked regions based on a list of regions to exclude . | train | false |
892 | def logistic_function(value):
return (1.0 / (1.0 + math.exp((- value))))
| [
"def",
"logistic_function",
"(",
"value",
")",
":",
"return",
"(",
"1.0",
"/",
"(",
"1.0",
"+",
"math",
".",
"exp",
"(",
"(",
"-",
"value",
")",
")",
")",
")"
] | transform the value with the logistic function . | train | false |
893 | def get_ranges(headervalue, content_length):
if (not headervalue):
return None
result = []
(bytesunit, byteranges) = headervalue.split('=', 1)
for brange in byteranges.split(','):
(start, stop) = [x.strip() for x in brange.split('-', 1)]
if start:
if (not stop):
stop = (content_length - 1)
(start, stop) = (int(start), int(stop))
if (start >= content_length):
continue
if (stop < start):
return None
result.append((start, (stop + 1)))
else:
if (not stop):
return None
result.append(((content_length - int(stop)), content_length))
return result
| [
"def",
"get_ranges",
"(",
"headervalue",
",",
"content_length",
")",
":",
"if",
"(",
"not",
"headervalue",
")",
":",
"return",
"None",
"result",
"=",
"[",
"]",
"(",
"bytesunit",
",",
"byteranges",
")",
"=",
"headervalue",
".",
"split",
"(",
"'='",
",",
"1",
")",
"for",
"brange",
"in",
"byteranges",
".",
"split",
"(",
"','",
")",
":",
"(",
"start",
",",
"stop",
")",
"=",
"[",
"x",
".",
"strip",
"(",
")",
"for",
"x",
"in",
"brange",
".",
"split",
"(",
"'-'",
",",
"1",
")",
"]",
"if",
"start",
":",
"if",
"(",
"not",
"stop",
")",
":",
"stop",
"=",
"(",
"content_length",
"-",
"1",
")",
"(",
"start",
",",
"stop",
")",
"=",
"(",
"int",
"(",
"start",
")",
",",
"int",
"(",
"stop",
")",
")",
"if",
"(",
"start",
">=",
"content_length",
")",
":",
"continue",
"if",
"(",
"stop",
"<",
"start",
")",
":",
"return",
"None",
"result",
".",
"append",
"(",
"(",
"start",
",",
"(",
"stop",
"+",
"1",
")",
")",
")",
"else",
":",
"if",
"(",
"not",
"stop",
")",
":",
"return",
"None",
"result",
".",
"append",
"(",
"(",
"(",
"content_length",
"-",
"int",
"(",
"stop",
")",
")",
",",
"content_length",
")",
")",
"return",
"result"
] | return a list of indices from a range header . | train | false |
894 | def sort_unicode(choices, key):
if (not HAS_PYUCA):
return sorted(choices, key=(lambda tup: remove_accents(key(tup)).lower()))
else:
collator = pyuca.Collator()
return sorted(choices, key=(lambda tup: collator.sort_key(force_text(key(tup)))))
| [
"def",
"sort_unicode",
"(",
"choices",
",",
"key",
")",
":",
"if",
"(",
"not",
"HAS_PYUCA",
")",
":",
"return",
"sorted",
"(",
"choices",
",",
"key",
"=",
"(",
"lambda",
"tup",
":",
"remove_accents",
"(",
"key",
"(",
"tup",
")",
")",
".",
"lower",
"(",
")",
")",
")",
"else",
":",
"collator",
"=",
"pyuca",
".",
"Collator",
"(",
")",
"return",
"sorted",
"(",
"choices",
",",
"key",
"=",
"(",
"lambda",
"tup",
":",
"collator",
".",
"sort_key",
"(",
"force_text",
"(",
"key",
"(",
"tup",
")",
")",
")",
")",
")"
] | unicode aware sorting if available . | train | false |
895 | def onDBMgrReady():
INFO_MSG(('onDBMgrReady: bootstrapGroupIndex=%s, bootstrapGlobalIndex=%s' % (os.getenv('KBE_BOOTIDX_GROUP'), os.getenv('KBE_BOOTIDX_GLOBAL'))))
| [
"def",
"onDBMgrReady",
"(",
")",
":",
"INFO_MSG",
"(",
"(",
"'onDBMgrReady: bootstrapGroupIndex=%s, bootstrapGlobalIndex=%s'",
"%",
"(",
"os",
".",
"getenv",
"(",
"'KBE_BOOTIDX_GROUP'",
")",
",",
"os",
".",
"getenv",
"(",
"'KBE_BOOTIDX_GLOBAL'",
")",
")",
")",
")"
] | kbengine method . | train | false |
896 | def create_port(name, network, device_id=None, admin_state_up=True, profile=None):
conn = _auth(profile)
return conn.create_port(name, network, device_id, admin_state_up)
| [
"def",
"create_port",
"(",
"name",
",",
"network",
",",
"device_id",
"=",
"None",
",",
"admin_state_up",
"=",
"True",
",",
"profile",
"=",
"None",
")",
":",
"conn",
"=",
"_auth",
"(",
"profile",
")",
"return",
"conn",
".",
"create_port",
"(",
"name",
",",
"network",
",",
"device_id",
",",
"admin_state_up",
")"
] | create a brocade specific port . | train | true |
897 | def _root_krylov_doc():
pass
| [
"def",
"_root_krylov_doc",
"(",
")",
":",
"pass"
] | options nit : int . | train | false |
899 | def _get_container_inspect(c_id):
for (co, _, _, _, _) in TestServiceDiscovery.container_inspects:
if (co.get('Id') == c_id):
return co
return None
| [
"def",
"_get_container_inspect",
"(",
"c_id",
")",
":",
"for",
"(",
"co",
",",
"_",
",",
"_",
",",
"_",
",",
"_",
")",
"in",
"TestServiceDiscovery",
".",
"container_inspects",
":",
"if",
"(",
"co",
".",
"get",
"(",
"'Id'",
")",
"==",
"c_id",
")",
":",
"return",
"co",
"return",
"None"
] | return a mocked container inspect dict from self . | train | false |
900 | def getdocumenttext(document):
paratextlist = []
paralist = []
for element in document.iter():
if (element.tag == (('{' + nsprefixes['w']) + '}p')):
paralist.append(element)
for para in paralist:
paratext = u''
for element in para.iter():
if (element.tag == (('{' + nsprefixes['w']) + '}t')):
if element.text:
paratext = (paratext + element.text)
elif (element.tag == (('{' + nsprefixes['w']) + '}tab')):
paratext = (paratext + ' DCTB ')
if (not (len(paratext) == 0)):
paratextlist.append(paratext)
return paratextlist
| [
"def",
"getdocumenttext",
"(",
"document",
")",
":",
"paratextlist",
"=",
"[",
"]",
"paralist",
"=",
"[",
"]",
"for",
"element",
"in",
"document",
".",
"iter",
"(",
")",
":",
"if",
"(",
"element",
".",
"tag",
"==",
"(",
"(",
"'{'",
"+",
"nsprefixes",
"[",
"'w'",
"]",
")",
"+",
"'}p'",
")",
")",
":",
"paralist",
".",
"append",
"(",
"element",
")",
"for",
"para",
"in",
"paralist",
":",
"paratext",
"=",
"u''",
"for",
"element",
"in",
"para",
".",
"iter",
"(",
")",
":",
"if",
"(",
"element",
".",
"tag",
"==",
"(",
"(",
"'{'",
"+",
"nsprefixes",
"[",
"'w'",
"]",
")",
"+",
"'}t'",
")",
")",
":",
"if",
"element",
".",
"text",
":",
"paratext",
"=",
"(",
"paratext",
"+",
"element",
".",
"text",
")",
"elif",
"(",
"element",
".",
"tag",
"==",
"(",
"(",
"'{'",
"+",
"nsprefixes",
"[",
"'w'",
"]",
")",
"+",
"'}tab'",
")",
")",
":",
"paratext",
"=",
"(",
"paratext",
"+",
"' DCTB '",
")",
"if",
"(",
"not",
"(",
"len",
"(",
"paratext",
")",
"==",
"0",
")",
")",
":",
"paratextlist",
".",
"append",
"(",
"paratext",
")",
"return",
"paratextlist"
] | return the raw text of a document . | train | true |
902 | def is_static_method(method, klass):
for c in klass.mro():
if (name(method) in c.__dict__):
return isinstance(c.__dict__[name(method)], staticmethod)
else:
return False
| [
"def",
"is_static_method",
"(",
"method",
",",
"klass",
")",
":",
"for",
"c",
"in",
"klass",
".",
"mro",
"(",
")",
":",
"if",
"(",
"name",
"(",
"method",
")",
"in",
"c",
".",
"__dict__",
")",
":",
"return",
"isinstance",
"(",
"c",
".",
"__dict__",
"[",
"name",
"(",
"method",
")",
"]",
",",
"staticmethod",
")",
"else",
":",
"return",
"False"
] | returns true if method is an instance method of klass . | train | false |
904 | def gettext_noop(message):
return message
| [
"def",
"gettext_noop",
"(",
"message",
")",
":",
"return",
"message"
] | marks strings for translation but doesnt translate them now . | train | false |
905 | def _reset_drivers():
global _drivers
_drivers = None
| [
"def",
"_reset_drivers",
"(",
")",
":",
"global",
"_drivers",
"_drivers",
"=",
"None"
] | used by unit tests to reset the drivers . | train | false |
906 | def active_tcp():
if (__grains__['kernel'] == 'Linux'):
return salt.utils.network.active_tcp()
elif (__grains__['kernel'] == 'SunOS'):
ret = {}
for connection in _netstat_sunos():
if (not connection['proto'].startswith('tcp')):
continue
if (connection['state'] != 'ESTABLISHED'):
continue
ret[(len(ret) + 1)] = {'local_addr': '.'.join(connection['local-address'].split('.')[:(-1)]), 'local_port': '.'.join(connection['local-address'].split('.')[(-1):]), 'remote_addr': '.'.join(connection['remote-address'].split('.')[:(-1)]), 'remote_port': '.'.join(connection['remote-address'].split('.')[(-1):])}
return ret
else:
return {}
| [
"def",
"active_tcp",
"(",
")",
":",
"if",
"(",
"__grains__",
"[",
"'kernel'",
"]",
"==",
"'Linux'",
")",
":",
"return",
"salt",
".",
"utils",
".",
"network",
".",
"active_tcp",
"(",
")",
"elif",
"(",
"__grains__",
"[",
"'kernel'",
"]",
"==",
"'SunOS'",
")",
":",
"ret",
"=",
"{",
"}",
"for",
"connection",
"in",
"_netstat_sunos",
"(",
")",
":",
"if",
"(",
"not",
"connection",
"[",
"'proto'",
"]",
".",
"startswith",
"(",
"'tcp'",
")",
")",
":",
"continue",
"if",
"(",
"connection",
"[",
"'state'",
"]",
"!=",
"'ESTABLISHED'",
")",
":",
"continue",
"ret",
"[",
"(",
"len",
"(",
"ret",
")",
"+",
"1",
")",
"]",
"=",
"{",
"'local_addr'",
":",
"'.'",
".",
"join",
"(",
"connection",
"[",
"'local-address'",
"]",
".",
"split",
"(",
"'.'",
")",
"[",
":",
"(",
"-",
"1",
")",
"]",
")",
",",
"'local_port'",
":",
"'.'",
".",
"join",
"(",
"connection",
"[",
"'local-address'",
"]",
".",
"split",
"(",
"'.'",
")",
"[",
"(",
"-",
"1",
")",
":",
"]",
")",
",",
"'remote_addr'",
":",
"'.'",
".",
"join",
"(",
"connection",
"[",
"'remote-address'",
"]",
".",
"split",
"(",
"'.'",
")",
"[",
":",
"(",
"-",
"1",
")",
"]",
")",
",",
"'remote_port'",
":",
"'.'",
".",
"join",
"(",
"connection",
"[",
"'remote-address'",
"]",
".",
"split",
"(",
"'.'",
")",
"[",
"(",
"-",
"1",
")",
":",
"]",
")",
"}",
"return",
"ret",
"else",
":",
"return",
"{",
"}"
] | return a dict describing all active tcp connections as quickly as possible . | train | false |
907 | def record_user_edited_an_exploration(user_id):
user_settings = get_user_settings(user_id)
if user_settings:
user_settings.last_edited_an_exploration = datetime.datetime.utcnow()
_save_user_settings(user_settings)
| [
"def",
"record_user_edited_an_exploration",
"(",
"user_id",
")",
":",
"user_settings",
"=",
"get_user_settings",
"(",
"user_id",
")",
"if",
"user_settings",
":",
"user_settings",
".",
"last_edited_an_exploration",
"=",
"datetime",
".",
"datetime",
".",
"utcnow",
"(",
")",
"_save_user_settings",
"(",
"user_settings",
")"
] | updates last_edited_an_exploration to the current datetime for the user with given user_id . | train | false |
910 | def _xblock_type_and_display_name(xblock):
return _('{section_or_subsection} "{display_name}"').format(section_or_subsection=xblock_type_display_name(xblock), display_name=xblock.display_name_with_default)
| [
"def",
"_xblock_type_and_display_name",
"(",
"xblock",
")",
":",
"return",
"_",
"(",
"'{section_or_subsection} \"{display_name}\"'",
")",
".",
"format",
"(",
"section_or_subsection",
"=",
"xblock_type_display_name",
"(",
"xblock",
")",
",",
"display_name",
"=",
"xblock",
".",
"display_name_with_default",
")"
] | returns a string representation of the xblocks type and display name . | train | false |
911 | def format_guess(guess):
for (prop, value) in guess.items():
if (prop in (u'season', u'episodeNumber', u'year', u'cdNumber', u'cdNumberTotal', u'bonusNumber', u'filmNumber')):
guess[prop] = int(guess[prop])
elif isinstance(value, base_text_type):
if (prop in (u'edition',)):
value = clean_string(value)
guess[prop] = canonical_form(value).replace(u'\\', u'')
return guess
| [
"def",
"format_guess",
"(",
"guess",
")",
":",
"for",
"(",
"prop",
",",
"value",
")",
"in",
"guess",
".",
"items",
"(",
")",
":",
"if",
"(",
"prop",
"in",
"(",
"u'season'",
",",
"u'episodeNumber'",
",",
"u'year'",
",",
"u'cdNumber'",
",",
"u'cdNumberTotal'",
",",
"u'bonusNumber'",
",",
"u'filmNumber'",
")",
")",
":",
"guess",
"[",
"prop",
"]",
"=",
"int",
"(",
"guess",
"[",
"prop",
"]",
")",
"elif",
"isinstance",
"(",
"value",
",",
"base_text_type",
")",
":",
"if",
"(",
"prop",
"in",
"(",
"u'edition'",
",",
")",
")",
":",
"value",
"=",
"clean_string",
"(",
"value",
")",
"guess",
"[",
"prop",
"]",
"=",
"canonical_form",
"(",
"value",
")",
".",
"replace",
"(",
"u'\\\\'",
",",
"u''",
")",
"return",
"guess"
] | format all the found values to their natural type . | train | false |
912 | def test_cache_full(config_stub, tmpdir):
config_stub.data = {'storage': {'cache-size': 1024}, 'general': {'private-browsing': False}}
disk_cache = QNetworkDiskCache()
disk_cache.setCacheDirectory(str(tmpdir))
url = 'http://qutebrowser.org'
content = 'cutebowser'
preload_cache(disk_cache, url, content)
url2 = 'https://qutebrowser.org'
content2 = 'ohmycert'
preload_cache(disk_cache, url2, content2)
metadata = QNetworkCacheMetaData()
metadata.setUrl(QUrl(url))
soon = QDateTime.currentDateTime().addMonths(4)
assert soon.isValid()
metadata.setLastModified(soon)
assert metadata.isValid()
disk_cache.updateMetaData(metadata)
disk_cache.remove(QUrl(url2))
assert (disk_cache.metaData(QUrl(url)).lastModified() == soon)
assert (disk_cache.data(QUrl(url)).readAll() == content)
| [
"def",
"test_cache_full",
"(",
"config_stub",
",",
"tmpdir",
")",
":",
"config_stub",
".",
"data",
"=",
"{",
"'storage'",
":",
"{",
"'cache-size'",
":",
"1024",
"}",
",",
"'general'",
":",
"{",
"'private-browsing'",
":",
"False",
"}",
"}",
"disk_cache",
"=",
"QNetworkDiskCache",
"(",
")",
"disk_cache",
".",
"setCacheDirectory",
"(",
"str",
"(",
"tmpdir",
")",
")",
"url",
"=",
"'http://qutebrowser.org'",
"content",
"=",
"'cutebowser'",
"preload_cache",
"(",
"disk_cache",
",",
"url",
",",
"content",
")",
"url2",
"=",
"'https://qutebrowser.org'",
"content2",
"=",
"'ohmycert'",
"preload_cache",
"(",
"disk_cache",
",",
"url2",
",",
"content2",
")",
"metadata",
"=",
"QNetworkCacheMetaData",
"(",
")",
"metadata",
".",
"setUrl",
"(",
"QUrl",
"(",
"url",
")",
")",
"soon",
"=",
"QDateTime",
".",
"currentDateTime",
"(",
")",
".",
"addMonths",
"(",
"4",
")",
"assert",
"soon",
".",
"isValid",
"(",
")",
"metadata",
".",
"setLastModified",
"(",
"soon",
")",
"assert",
"metadata",
".",
"isValid",
"(",
")",
"disk_cache",
".",
"updateMetaData",
"(",
"metadata",
")",
"disk_cache",
".",
"remove",
"(",
"QUrl",
"(",
"url2",
")",
")",
"assert",
"(",
"disk_cache",
".",
"metaData",
"(",
"QUrl",
"(",
"url",
")",
")",
".",
"lastModified",
"(",
")",
"==",
"soon",
")",
"assert",
"(",
"disk_cache",
".",
"data",
"(",
"QUrl",
"(",
"url",
")",
")",
".",
"readAll",
"(",
")",
"==",
"content",
")"
] | do a sanity test involving everything . | train | false |
916 | def conditionally_trigger(context, dag_run_obj):
c_p = context['params']['condition_param']
print 'Controller DAG : conditionally_trigger = {}'.format(c_p)
if context['params']['condition_param']:
dag_run_obj.payload = {'message': context['params']['message']}
pp.pprint(dag_run_obj.payload)
return dag_run_obj
| [
"def",
"conditionally_trigger",
"(",
"context",
",",
"dag_run_obj",
")",
":",
"c_p",
"=",
"context",
"[",
"'params'",
"]",
"[",
"'condition_param'",
"]",
"print",
"'Controller DAG : conditionally_trigger = {}'",
".",
"format",
"(",
"c_p",
")",
"if",
"context",
"[",
"'params'",
"]",
"[",
"'condition_param'",
"]",
":",
"dag_run_obj",
".",
"payload",
"=",
"{",
"'message'",
":",
"context",
"[",
"'params'",
"]",
"[",
"'message'",
"]",
"}",
"pp",
".",
"pprint",
"(",
"dag_run_obj",
".",
"payload",
")",
"return",
"dag_run_obj"
] | this function decides whether or not to trigger the remote dag . | train | true |
918 | @loader_option()
def undefer_group(loadopt, name):
return loadopt.set_column_strategy('*', None, {('undefer_group_%s' % name): True}, opts_only=True)
| [
"@",
"loader_option",
"(",
")",
"def",
"undefer_group",
"(",
"loadopt",
",",
"name",
")",
":",
"return",
"loadopt",
".",
"set_column_strategy",
"(",
"'*'",
",",
"None",
",",
"{",
"(",
"'undefer_group_%s'",
"%",
"name",
")",
":",
"True",
"}",
",",
"opts_only",
"=",
"True",
")"
] | indicate that columns within the given deferred group name should be undeferred . | train | false |
919 | def p_expression_group(t):
t[0] = t[2]
| [
"def",
"p_expression_group",
"(",
"t",
")",
":",
"t",
"[",
"0",
"]",
"=",
"t",
"[",
"2",
"]"
] | expression : . | train | false |
920 | def lookup_loc(location, country):
corrected = location_names.get(location)
if corrected:
return get_loc_from_db(corrected, country)
if (location[(-6):] == 'County'):
return get_loc_from_db(location[:(-6)].strip(), 'Liberia')
if location.startswith('Western Area'):
return get_loc_from_db(location[12:].strip(), country)
if (location in rejected_loc):
rejected_loc[location] += 1
else:
rejected_loc[location] = 1
| [
"def",
"lookup_loc",
"(",
"location",
",",
"country",
")",
":",
"corrected",
"=",
"location_names",
".",
"get",
"(",
"location",
")",
"if",
"corrected",
":",
"return",
"get_loc_from_db",
"(",
"corrected",
",",
"country",
")",
"if",
"(",
"location",
"[",
"(",
"-",
"6",
")",
":",
"]",
"==",
"'County'",
")",
":",
"return",
"get_loc_from_db",
"(",
"location",
"[",
":",
"(",
"-",
"6",
")",
"]",
".",
"strip",
"(",
")",
",",
"'Liberia'",
")",
"if",
"location",
".",
"startswith",
"(",
"'Western Area'",
")",
":",
"return",
"get_loc_from_db",
"(",
"location",
"[",
"12",
":",
"]",
".",
"strip",
"(",
")",
",",
"country",
")",
"if",
"(",
"location",
"in",
"rejected_loc",
")",
":",
"rejected_loc",
"[",
"location",
"]",
"+=",
"1",
"else",
":",
"rejected_loc",
"[",
"location",
"]",
"=",
"1"
] | location names need to match what we have already . | train | false |
922 | def _do_install_one(reg, app_loc, relative_path):
LOG.info(('=== Installing app at %s' % (app_loc,)))
try:
app_loc = os.path.realpath(app_loc)
(app_name, version, desc, author) = get_app_info(app_loc)
except (ValueError, OSError) as ex:
LOG.error(ex)
return False
app = registry.HueApp(app_name, version, app_loc, desc, author)
if relative_path:
app.use_rel_path()
else:
app.use_abs_path()
if reg.contains(app):
LOG.warn(('=== %s is already installed' % (app,)))
return True
return (reg.register(app) and build.make_app(app) and app.install_conf())
| [
"def",
"_do_install_one",
"(",
"reg",
",",
"app_loc",
",",
"relative_path",
")",
":",
"LOG",
".",
"info",
"(",
"(",
"'=== Installing app at %s'",
"%",
"(",
"app_loc",
",",
")",
")",
")",
"try",
":",
"app_loc",
"=",
"os",
".",
"path",
".",
"realpath",
"(",
"app_loc",
")",
"(",
"app_name",
",",
"version",
",",
"desc",
",",
"author",
")",
"=",
"get_app_info",
"(",
"app_loc",
")",
"except",
"(",
"ValueError",
",",
"OSError",
")",
"as",
"ex",
":",
"LOG",
".",
"error",
"(",
"ex",
")",
"return",
"False",
"app",
"=",
"registry",
".",
"HueApp",
"(",
"app_name",
",",
"version",
",",
"app_loc",
",",
"desc",
",",
"author",
")",
"if",
"relative_path",
":",
"app",
".",
"use_rel_path",
"(",
")",
"else",
":",
"app",
".",
"use_abs_path",
"(",
")",
"if",
"reg",
".",
"contains",
"(",
"app",
")",
":",
"LOG",
".",
"warn",
"(",
"(",
"'=== %s is already installed'",
"%",
"(",
"app",
",",
")",
")",
")",
"return",
"True",
"return",
"(",
"reg",
".",
"register",
"(",
"app",
")",
"and",
"build",
".",
"make_app",
"(",
"app",
")",
"and",
"app",
".",
"install_conf",
"(",
")",
")"
] | install one app . | train | false |
923 | def get_users_email_preferences_for_exploration(user_ids, exploration_id):
exploration_user_models = user_models.ExplorationUserDataModel.get_multi(user_ids, exploration_id)
result = []
for exploration_user_model in exploration_user_models:
if (exploration_user_model is None):
result.append(user_domain.UserExplorationPrefs.create_default_prefs())
else:
result.append(user_domain.UserExplorationPrefs(exploration_user_model.mute_feedback_notifications, exploration_user_model.mute_suggestion_notifications))
return result
| [
"def",
"get_users_email_preferences_for_exploration",
"(",
"user_ids",
",",
"exploration_id",
")",
":",
"exploration_user_models",
"=",
"user_models",
".",
"ExplorationUserDataModel",
".",
"get_multi",
"(",
"user_ids",
",",
"exploration_id",
")",
"result",
"=",
"[",
"]",
"for",
"exploration_user_model",
"in",
"exploration_user_models",
":",
"if",
"(",
"exploration_user_model",
"is",
"None",
")",
":",
"result",
".",
"append",
"(",
"user_domain",
".",
"UserExplorationPrefs",
".",
"create_default_prefs",
"(",
")",
")",
"else",
":",
"result",
".",
"append",
"(",
"user_domain",
".",
"UserExplorationPrefs",
"(",
"exploration_user_model",
".",
"mute_feedback_notifications",
",",
"exploration_user_model",
".",
"mute_suggestion_notifications",
")",
")",
"return",
"result"
] | gives mute preferences for exploration with given exploration_id of user with given user_id . | train | false |
924 | def deprecated_conditional(predicate, removal_version, entity_description, hint_message=None, stacklevel=4):
validate_removal_semver(removal_version)
if predicate():
warn_or_error(removal_version, entity_description, hint_message, stacklevel=stacklevel)
| [
"def",
"deprecated_conditional",
"(",
"predicate",
",",
"removal_version",
",",
"entity_description",
",",
"hint_message",
"=",
"None",
",",
"stacklevel",
"=",
"4",
")",
":",
"validate_removal_semver",
"(",
"removal_version",
")",
"if",
"predicate",
"(",
")",
":",
"warn_or_error",
"(",
"removal_version",
",",
"entity_description",
",",
"hint_message",
",",
"stacklevel",
"=",
"stacklevel",
")"
] | marks a certain configuration as deprecated . | train | false |
925 | def DFS(gr, s):
path = set([])
depth_first_search(gr, s, path)
return path
| [
"def",
"DFS",
"(",
"gr",
",",
"s",
")",
":",
"path",
"=",
"set",
"(",
"[",
"]",
")",
"depth_first_search",
"(",
"gr",
",",
"s",
",",
"path",
")",
"return",
"path"
] | depth first search wrapper . | train | false |
926 | def calculate_cache_path(cache_location, url):
thumb = md5(url).hexdigest()
header = os.path.join(cache_location, (thumb + '.headers'))
body = os.path.join(cache_location, (thumb + '.body'))
return (header, body)
| [
"def",
"calculate_cache_path",
"(",
"cache_location",
",",
"url",
")",
":",
"thumb",
"=",
"md5",
"(",
"url",
")",
".",
"hexdigest",
"(",
")",
"header",
"=",
"os",
".",
"path",
".",
"join",
"(",
"cache_location",
",",
"(",
"thumb",
"+",
"'.headers'",
")",
")",
"body",
"=",
"os",
".",
"path",
".",
"join",
"(",
"cache_location",
",",
"(",
"thumb",
"+",
"'.body'",
")",
")",
"return",
"(",
"header",
",",
"body",
")"
] | checks if [cache_location]/[hash_of_url] . | train | false |
927 | def list_designs(request):
DEFAULT_PAGE_SIZE = 20
app_name = get_app_name(request)
prefix = 'q-'
querydict_query = _copy_prefix(prefix, request.GET)
querydict_query[(prefix + 'type')] = app_name
search_filter = request.GET.get('text', None)
if (search_filter is not None):
querydict_query[(prefix + 'text')] = search_filter
(page, filter_params) = _list_designs(request.user, querydict_query, DEFAULT_PAGE_SIZE, prefix)
return render('list_designs.mako', request, {'page': page, 'filter_params': filter_params, 'prefix': prefix, 'user': request.user, 'designs_json': json.dumps([query.id for query in page.object_list])})
| [
"def",
"list_designs",
"(",
"request",
")",
":",
"DEFAULT_PAGE_SIZE",
"=",
"20",
"app_name",
"=",
"get_app_name",
"(",
"request",
")",
"prefix",
"=",
"'q-'",
"querydict_query",
"=",
"_copy_prefix",
"(",
"prefix",
",",
"request",
".",
"GET",
")",
"querydict_query",
"[",
"(",
"prefix",
"+",
"'type'",
")",
"]",
"=",
"app_name",
"search_filter",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'text'",
",",
"None",
")",
"if",
"(",
"search_filter",
"is",
"not",
"None",
")",
":",
"querydict_query",
"[",
"(",
"prefix",
"+",
"'text'",
")",
"]",
"=",
"search_filter",
"(",
"page",
",",
"filter_params",
")",
"=",
"_list_designs",
"(",
"request",
".",
"user",
",",
"querydict_query",
",",
"DEFAULT_PAGE_SIZE",
",",
"prefix",
")",
"return",
"render",
"(",
"'list_designs.mako'",
",",
"request",
",",
"{",
"'page'",
":",
"page",
",",
"'filter_params'",
":",
"filter_params",
",",
"'prefix'",
":",
"prefix",
",",
"'user'",
":",
"request",
".",
"user",
",",
"'designs_json'",
":",
"json",
".",
"dumps",
"(",
"[",
"query",
".",
"id",
"for",
"query",
"in",
"page",
".",
"object_list",
"]",
")",
"}",
")"
] | list all workflow designs . | train | false |
929 | def real_ip(request):
return request.META.get('HTTP_X_REAL_IP')
| [
"def",
"real_ip",
"(",
"request",
")",
":",
"return",
"request",
".",
"META",
".",
"get",
"(",
"'HTTP_X_REAL_IP'",
")"
] | returns the ip address contained in the http_x_real_ip headers . | train | false |
931 | def write_all_rst_pages():
infos = get_infos(screenshots_dir)
s = make_gallery_page(infos)
write_file(gallery_filename, s)
for info in infos:
s = make_detail_page(info)
detail_name = slash(generation_dir, 'gen__{}.rst'.format(info['dunder']))
write_file(detail_name, s)
s = make_index(infos)
index_name = slash(generation_dir, 'index.rst')
write_file(index_name, s)
Logger.info('gallery.py: Created gallery rst documentation pages.')
| [
"def",
"write_all_rst_pages",
"(",
")",
":",
"infos",
"=",
"get_infos",
"(",
"screenshots_dir",
")",
"s",
"=",
"make_gallery_page",
"(",
"infos",
")",
"write_file",
"(",
"gallery_filename",
",",
"s",
")",
"for",
"info",
"in",
"infos",
":",
"s",
"=",
"make_detail_page",
"(",
"info",
")",
"detail_name",
"=",
"slash",
"(",
"generation_dir",
",",
"'gen__{}.rst'",
".",
"format",
"(",
"info",
"[",
"'dunder'",
"]",
")",
")",
"write_file",
"(",
"detail_name",
",",
"s",
")",
"s",
"=",
"make_index",
"(",
"infos",
")",
"index_name",
"=",
"slash",
"(",
"generation_dir",
",",
"'index.rst'",
")",
"write_file",
"(",
"index_name",
",",
"s",
")",
"Logger",
".",
"info",
"(",
"'gallery.py: Created gallery rst documentation pages.'",
")"
] | do the main task of writing the gallery . | train | false |
933 | def create_function_from_source(function_source, imports=None):
    """Return a function object built by exec-ing ``function_source``.

    ``function_source`` is a string containing a standalone function
    definition; ``imports`` is an optional list of import statements
    (as strings) executed first so the function body can resolve them
    in an otherwise empty namespace.

    NOTE(review): this execs arbitrary source — callers must only pass
    trusted input.
    """
    # Fresh namespace the imports and the function are exec'd into.
    ns = {}
    import_keys = []
    try:
        if (imports is not None):
            for statement in imports:
                # Python 2 exec-statement form: run each import inside ``ns``.
                exec statement in ns
        # Snapshot names added by the imports so they can be excluded below.
        import_keys = list(ns.keys())
        exec function_source in ns
    except Exception as e:
        msg = (u'\nError executing function:\n %s\n' % function_source)
        msg += u'\n'.join([u'Functions in connection strings have to be standalone.', u'They cannot be declared either interactively or inside', u'another function or inline in the connect string. Any', u'imports should be done inside the function'])
        # Chain the original exception as the cause (py2/py3-compatible helper).
        raise_from(RuntimeError(msg), e)
    # Whatever name remains after removing imports/builtins must be the function.
    ns_funcs = list((set(ns) - set((import_keys + [u'__builtins__']))))
    assert (len(ns_funcs) == 1), u'Function or inputs are ill-defined'
    funcname = ns_funcs[0]
    func = ns[funcname]
    return func
| [
"def",
"create_function_from_source",
"(",
"function_source",
",",
"imports",
"=",
"None",
")",
":",
"ns",
"=",
"{",
"}",
"import_keys",
"=",
"[",
"]",
"try",
":",
"if",
"(",
"imports",
"is",
"not",
"None",
")",
":",
"for",
"statement",
"in",
"imports",
":",
"exec",
"statement",
"in",
"ns",
"import_keys",
"=",
"list",
"(",
"ns",
".",
"keys",
"(",
")",
")",
"exec",
"function_source",
"in",
"ns",
"except",
"Exception",
"as",
"e",
":",
"msg",
"=",
"(",
"u'\\nError executing function:\\n %s\\n'",
"%",
"function_source",
")",
"msg",
"+=",
"u'\\n'",
".",
"join",
"(",
"[",
"u'Functions in connection strings have to be standalone.'",
",",
"u'They cannot be declared either interactively or inside'",
",",
"u'another function or inline in the connect string. Any'",
",",
"u'imports should be done inside the function'",
"]",
")",
"raise_from",
"(",
"RuntimeError",
"(",
"msg",
")",
",",
"e",
")",
"ns_funcs",
"=",
"list",
"(",
"(",
"set",
"(",
"ns",
")",
"-",
"set",
"(",
"(",
"import_keys",
"+",
"[",
"u'__builtins__'",
"]",
")",
")",
")",
")",
"assert",
"(",
"len",
"(",
"ns_funcs",
")",
"==",
"1",
")",
",",
"u'Function or inputs are ill-defined'",
"funcname",
"=",
"ns_funcs",
"[",
"0",
"]",
"func",
"=",
"ns",
"[",
"funcname",
"]",
"return",
"func"
] | Return a function object from a function source. Parameters: function_source (string) -- source code, in string form, defining a standalone function; imports (list of strings) -- import statements, in string form, that allow the function to be executed in an otherwise empty namespace. | train | false |
def build_agg_tooltip(hover_text=None, agg_text=None, aggregated_col=None):
    """Produce a consistent ``(label, field)`` tooltip tuple.

    An explicit ``hover_text`` wins outright. Otherwise the label is
    derived from ``agg_text`` (optionally combined as "<agg> of <col>"
    when the aggregated column is a string), falling back to the bare
    column name or the generic 'value'. The label is title-cased and
    paired with the '@values' hover field.
    """
    if hover_text is None:
        if agg_text is not None:
            # Aggregation label, qualified by the column when it is a name.
            hover_text = agg_text
            if isinstance(aggregated_col, str):
                hover_text = '%s of %s' % (hover_text, aggregated_col)
        elif isinstance(aggregated_col, str):
            hover_text = aggregated_col
        else:
            hover_text = 'value'
    return hover_text.title(), '@values'
| [
"def",
"build_agg_tooltip",
"(",
"hover_text",
"=",
"None",
",",
"agg_text",
"=",
"None",
",",
"aggregated_col",
"=",
"None",
")",
":",
"if",
"(",
"hover_text",
"is",
"None",
")",
":",
"if",
"(",
"agg_text",
"is",
"None",
")",
":",
"if",
"isinstance",
"(",
"aggregated_col",
",",
"str",
")",
":",
"hover_text",
"=",
"aggregated_col",
"else",
":",
"hover_text",
"=",
"'value'",
"else",
":",
"hover_text",
"=",
"agg_text",
"if",
"isinstance",
"(",
"aggregated_col",
",",
"str",
")",
":",
"hover_text",
"=",
"(",
"'%s of %s'",
"%",
"(",
"hover_text",
",",
"aggregated_col",
")",
")",
"return",
"(",
"hover_text",
".",
"title",
"(",
")",
",",
"'@values'",
")"
] | Produce a consistent tooltip label based on the available chart configuration. | train | false |
def autolinks_simple(url):
    """Convert a bare URL (or email address) into an HTML element string.

    - an email address ('@' present, no '://' scheme) -> mailto anchor
    - image extensions (.jpg/.jpeg/.gif/.png)         -> <img> tag
    - video extensions (.mp4/.mpeg/.mov/.ogv)         -> <video> with controls
    - audio extensions (.mp3/.wav/.ogg)               -> <audio> with controls
    - anything else                                   -> plain anchor

    The extension check is case-insensitive; the URL itself is embedded
    verbatim in the generated markup.
    """
    u_url = url.lower()
    if ('@' in url) and ('://' not in url):
        return '<a href="mailto:%s">%s</a>' % (url, url)
    elif u_url.endswith(('.jpg', '.jpeg', '.gif', '.png')):
        # Fixed: 'controls' is a media-element attribute (video/audio) and
        # is invalid on <img>, so it is dropped from the image markup.
        return '<img src="%s" />' % url
    elif u_url.endswith(('.mp4', '.mpeg', '.mov', '.ogv')):
        return '<video src="%s" controls></video>' % url
    elif u_url.endswith(('.mp3', '.wav', '.ogg')):
        return '<audio src="%s" controls></audio>' % url
    return '<a href="%s">%s</a>' % (url, url)
| [
"def",
"autolinks_simple",
"(",
"url",
")",
":",
"u_url",
"=",
"url",
".",
"lower",
"(",
")",
"if",
"(",
"(",
"'@'",
"in",
"url",
")",
"and",
"(",
"'://'",
"not",
"in",
"url",
")",
")",
":",
"return",
"(",
"'<a href=\"mailto:%s\">%s</a>'",
"%",
"(",
"url",
",",
"url",
")",
")",
"elif",
"u_url",
".",
"endswith",
"(",
"(",
"'.jpg'",
",",
"'.jpeg'",
",",
"'.gif'",
",",
"'.png'",
")",
")",
":",
"return",
"(",
"'<img src=\"%s\" controls />'",
"%",
"url",
")",
"elif",
"u_url",
".",
"endswith",
"(",
"(",
"'.mp4'",
",",
"'.mpeg'",
",",
"'.mov'",
",",
"'.ogv'",
")",
")",
":",
"return",
"(",
"'<video src=\"%s\" controls></video>'",
"%",
"url",
")",
"elif",
"u_url",
".",
"endswith",
"(",
"(",
"'.mp3'",
",",
"'.wav'",
",",
"'.ogg'",
")",
")",
":",
"return",
"(",
"'<audio src=\"%s\" controls></audio>'",
"%",
"url",
")",
"return",
"(",
"'<a href=\"%s\">%s</a>'",
"%",
"(",
"url",
",",
"url",
")",
")"
] | Automatically convert the URL into an HTML link (mailto, image, video, audio, or plain anchor). | train | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.