id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
sequencelengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
1,601
def _RPCCategory(rpcstatsproto):
    """Categorize a datastore RPC by entity kind (queries also note ancestor use).

    Returns '' when the RPC has no datastore details or is an unrecognized call.
    """
    if not rpcstatsproto.has_datastore_details():
        return ''
    details = rpcstatsproto.datastore_details()
    call = rpcstatsproto.service_call_name()
    if call == 'datastore_v3.Put':
        return entity.EntityListKind(details.keys_written_list())
    if call in ('datastore_v3.Get', 'datastore_v3.Next'):
        # Both calls categorize by the kinds of the keys that were read.
        return entity.EntityListKind(details.keys_read_list())
    if call == 'datastore_v3.RunQuery':
        kind = details.query_kind() if details.has_query_kind() else 'NoKind'
        ancestor = '_ANC' if details.has_query_ancestor() else ''
        return '%s%s' % (kind, ancestor)
    return ''
[ "def", "_RPCCategory", "(", "rpcstatsproto", ")", ":", "category", "=", "''", "if", "(", "not", "rpcstatsproto", ".", "has_datastore_details", "(", ")", ")", ":", "return", "category", "servicecallname", "=", "rpcstatsproto", ".", "service_call_name", "(", ")", "if", "(", "servicecallname", "==", "'datastore_v3.Put'", ")", ":", "category", "=", "entity", ".", "EntityListKind", "(", "rpcstatsproto", ".", "datastore_details", "(", ")", ".", "keys_written_list", "(", ")", ")", "elif", "(", "servicecallname", "==", "'datastore_v3.Get'", ")", ":", "category", "=", "entity", ".", "EntityListKind", "(", "rpcstatsproto", ".", "datastore_details", "(", ")", ".", "keys_read_list", "(", ")", ")", "elif", "(", "servicecallname", "==", "'datastore_v3.Next'", ")", ":", "category", "=", "entity", ".", "EntityListKind", "(", "rpcstatsproto", ".", "datastore_details", "(", ")", ".", "keys_read_list", "(", ")", ")", "elif", "(", "servicecallname", "==", "'datastore_v3.RunQuery'", ")", ":", "if", "rpcstatsproto", ".", "datastore_details", "(", ")", ".", "has_query_kind", "(", ")", ":", "kind", "=", "rpcstatsproto", ".", "datastore_details", "(", ")", ".", "query_kind", "(", ")", "else", ":", "kind", "=", "'NoKind'", "if", "rpcstatsproto", ".", "datastore_details", "(", ")", ".", "has_query_ancestor", "(", ")", ":", "ancestor", "=", "'_ANC'", "else", ":", "ancestor", "=", "''", "category", "=", "(", "'%s%s'", "%", "(", "kind", ",", "ancestor", ")", ")", "return", "category" ]
categorize datastore rpcs by entity kind and other information .
train
false
1,605
def get_project_name(project_id, projects):
    """Return the name of the project whose id equals *project_id*.

    Returns None when no project in *projects* matches.
    """
    names = (candidate.name for candidate in projects if candidate.id == project_id)
    return next(names, None)
[ "def", "get_project_name", "(", "project_id", ",", "projects", ")", ":", "for", "project", "in", "projects", ":", "if", "(", "project_id", "==", "project", ".", "id", ")", ":", "return", "project", ".", "name" ]
retrieves project name for given project id args: projects: list of projects project_id: project id returns: project name or none if there is no match .
train
true
1,606
def removePixelTableFromPixelTable(pixelDictionaryToBeRemoved, pixelDictionaryToBeRemovedFrom):
    """Remove every pixel keyed in the first table from the second table."""
    doomedKeys = pixelDictionaryToBeRemoved.keys()
    removeListFromDictionary(pixelDictionaryToBeRemovedFrom, doomedKeys)
[ "def", "removePixelTableFromPixelTable", "(", "pixelDictionaryToBeRemoved", ",", "pixelDictionaryToBeRemovedFrom", ")", ":", "removeListFromDictionary", "(", "pixelDictionaryToBeRemovedFrom", ",", "pixelDictionaryToBeRemoved", ".", "keys", "(", ")", ")" ]
remove pixel from the pixel table .
train
false
1,608
@click.command(u'reload-doctype')
@click.argument(u'doctype')
@pass_context
def reload_doctype(context, doctype):
    """Reload a DocType from its model files on every site in the context."""
    for site in context.sites:
        try:
            # Each site gets its own init/connect/destroy cycle.
            frappe.init(site=site)
            frappe.connect()
            frappe.reload_doctype(doctype, force=context.force)
            frappe.db.commit()
        finally:
            # Always tear down the frappe environment, even if reload or commit fails.
            frappe.destroy()
[ "@", "click", ".", "command", "(", "u'reload-doctype'", ")", "@", "click", ".", "argument", "(", "u'doctype'", ")", "@", "pass_context", "def", "reload_doctype", "(", "context", ",", "doctype", ")", ":", "for", "site", "in", "context", ".", "sites", ":", "try", ":", "frappe", ".", "init", "(", "site", "=", "site", ")", "frappe", ".", "connect", "(", ")", "frappe", ".", "reload_doctype", "(", "doctype", ",", "force", "=", "context", ".", "force", ")", "frappe", ".", "db", ".", "commit", "(", ")", "finally", ":", "frappe", ".", "destroy", "(", ")" ]
reload doctype from model files .
train
false
1,609
def library_blocks_view(library, user, response_format):
    """The main view of a course's content library.

    Returns a JSON summary when response_format == 'json'; otherwise renders
    the 'library.html' page, with edit affordances gated on studio write access.
    """
    assert isinstance(library.location.library_key, LibraryLocator)
    assert isinstance(library.location, LibraryUsageLocator)
    children = library.children
    if (response_format == 'json'):
        # prev_version may be falsy (e.g. for the first version); serialized as null then.
        prev_version = library.runtime.course_entry.structure['previous_version']
        return JsonResponse({'display_name': library.display_name, 'library_id': unicode(library.location.library_key), 'version': unicode(library.runtime.course_entry.course_key.version), 'previous_version': (unicode(prev_version) if prev_version else None), 'blocks': [unicode(x) for x in children]})
    can_edit = has_studio_write_access(user, library.location.library_key)
    xblock_info = create_xblock_info(library, include_ancestor_info=False, graders=[])
    # Read-only viewers get no component templates (no "add component" UI).
    component_templates = (get_component_templates(library, library=True) if can_edit else [])
    return render_to_response('library.html', {'can_edit': can_edit, 'context_library': library, 'component_templates': component_templates, 'xblock_info': xblock_info, 'templates': CONTAINER_TEMPLATES})
[ "def", "library_blocks_view", "(", "library", ",", "user", ",", "response_format", ")", ":", "assert", "isinstance", "(", "library", ".", "location", ".", "library_key", ",", "LibraryLocator", ")", "assert", "isinstance", "(", "library", ".", "location", ",", "LibraryUsageLocator", ")", "children", "=", "library", ".", "children", "if", "(", "response_format", "==", "'json'", ")", ":", "prev_version", "=", "library", ".", "runtime", ".", "course_entry", ".", "structure", "[", "'previous_version'", "]", "return", "JsonResponse", "(", "{", "'display_name'", ":", "library", ".", "display_name", ",", "'library_id'", ":", "unicode", "(", "library", ".", "location", ".", "library_key", ")", ",", "'version'", ":", "unicode", "(", "library", ".", "runtime", ".", "course_entry", ".", "course_key", ".", "version", ")", ",", "'previous_version'", ":", "(", "unicode", "(", "prev_version", ")", "if", "prev_version", "else", "None", ")", ",", "'blocks'", ":", "[", "unicode", "(", "x", ")", "for", "x", "in", "children", "]", "}", ")", "can_edit", "=", "has_studio_write_access", "(", "user", ",", "library", ".", "location", ".", "library_key", ")", "xblock_info", "=", "create_xblock_info", "(", "library", ",", "include_ancestor_info", "=", "False", ",", "graders", "=", "[", "]", ")", "component_templates", "=", "(", "get_component_templates", "(", "library", ",", "library", "=", "True", ")", "if", "can_edit", "else", "[", "]", ")", "return", "render_to_response", "(", "'library.html'", ",", "{", "'can_edit'", ":", "can_edit", ",", "'context_library'", ":", "library", ",", "'component_templates'", ":", "component_templates", ",", "'xblock_info'", ":", "xblock_info", ",", "'templates'", ":", "CONTAINER_TEMPLATES", "}", ")" ]
the main view of a courses content library .
train
false
1,610
def _needs_document_lock(func):
    """Decorator adding document locking and post-write flushing around *func*.

    While the lock is held, writes are queued on self._pending_writes; they are
    flushed (awaited) after *func* completes, even if it raised.
    """
    @gen.coroutine
    def _needs_document_lock_wrapper(self, *args, **kwargs):
        # Prevent the session from expiring while we hold (or wait on) the lock.
        self.block_expiration()
        try:
            with (yield self._lock.acquire()):
                # Invariant: _pending_writes is None whenever the lock is free.
                if (self._pending_writes is not None):
                    raise RuntimeError(('internal class invariant violated: _pending_writes ' + 'should be None if lock is not held'))
                self._pending_writes = []
                try:
                    result = (yield yield_for_all_futures(func(self, *args, **kwargs)))
                finally:
                    # Flush any writes queued during func, even on failure.
                    pending_writes = self._pending_writes
                    self._pending_writes = None
                    for p in pending_writes:
                        (yield p)
                raise gen.Return(result)
        finally:
            self.unblock_expiration()
    return _needs_document_lock_wrapper
[ "def", "_needs_document_lock", "(", "func", ")", ":", "@", "gen", ".", "coroutine", "def", "_needs_document_lock_wrapper", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ":", "self", ".", "block_expiration", "(", ")", "try", ":", "with", "(", "yield", "self", ".", "_lock", ".", "acquire", "(", ")", ")", ":", "if", "(", "self", ".", "_pending_writes", "is", "not", "None", ")", ":", "raise", "RuntimeError", "(", "(", "'internal class invariant violated: _pending_writes '", "+", "'should be None if lock is not held'", ")", ")", "self", ".", "_pending_writes", "=", "[", "]", "try", ":", "result", "=", "(", "yield", "yield_for_all_futures", "(", "func", "(", "self", ",", "*", "args", ",", "**", "kwargs", ")", ")", ")", "finally", ":", "pending_writes", "=", "self", ".", "_pending_writes", "self", ".", "_pending_writes", "=", "None", "for", "p", "in", "pending_writes", ":", "(", "yield", "p", ")", "raise", "gen", ".", "Return", "(", "result", ")", "finally", ":", "self", ".", "unblock_expiration", "(", ")", "return", "_needs_document_lock_wrapper" ]
decorator that adds the necessary locking and post-processing to manipulate the sessions document .
train
true
1,611
def get_portable_base():
    """Return the directory containing calibre-portable, or None when not portable."""
    if not isportable:
        return None
    build_dir = os.environ['CALIBRE_PORTABLE_BUILD']
    return os.path.dirname(os.path.dirname(build_dir))
[ "def", "get_portable_base", "(", ")", ":", "if", "isportable", ":", "return", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "environ", "[", "'CALIBRE_PORTABLE_BUILD'", "]", ")", ")" ]
return path to the directory that contains calibre-portable .
train
false
1,612
def loadRandomForestModel(RFmodelName, isRegression=False): try: fo = open((RFmodelName + 'MEANS'), 'rb') except IOError: print "Load Random Forest Model: Didn't find file" return try: MEAN = cPickle.load(fo) STD = cPickle.load(fo) if (not isRegression): classNames = cPickle.load(fo) mtWin = cPickle.load(fo) mtStep = cPickle.load(fo) stWin = cPickle.load(fo) stStep = cPickle.load(fo) computeBEAT = cPickle.load(fo) except: fo.close() fo.close() MEAN = numpy.array(MEAN) STD = numpy.array(STD) COEFF = [] with open(RFmodelName, 'rb') as fid: RF = cPickle.load(fid) if isRegression: return (RF, MEAN, STD, mtWin, mtStep, stWin, stStep, computeBEAT) else: return (RF, MEAN, STD, classNames, mtWin, mtStep, stWin, stStep, computeBEAT)
[ "def", "loadRandomForestModel", "(", "RFmodelName", ",", "isRegression", "=", "False", ")", ":", "try", ":", "fo", "=", "open", "(", "(", "RFmodelName", "+", "'MEANS'", ")", ",", "'rb'", ")", "except", "IOError", ":", "print", "\"Load Random Forest Model: Didn't find file\"", "return", "try", ":", "MEAN", "=", "cPickle", ".", "load", "(", "fo", ")", "STD", "=", "cPickle", ".", "load", "(", "fo", ")", "if", "(", "not", "isRegression", ")", ":", "classNames", "=", "cPickle", ".", "load", "(", "fo", ")", "mtWin", "=", "cPickle", ".", "load", "(", "fo", ")", "mtStep", "=", "cPickle", ".", "load", "(", "fo", ")", "stWin", "=", "cPickle", ".", "load", "(", "fo", ")", "stStep", "=", "cPickle", ".", "load", "(", "fo", ")", "computeBEAT", "=", "cPickle", ".", "load", "(", "fo", ")", "except", ":", "fo", ".", "close", "(", ")", "fo", ".", "close", "(", ")", "MEAN", "=", "numpy", ".", "array", "(", "MEAN", ")", "STD", "=", "numpy", ".", "array", "(", "STD", ")", "COEFF", "=", "[", "]", "with", "open", "(", "RFmodelName", ",", "'rb'", ")", "as", "fid", ":", "RF", "=", "cPickle", ".", "load", "(", "fid", ")", "if", "isRegression", ":", "return", "(", "RF", ",", "MEAN", ",", "STD", ",", "mtWin", ",", "mtStep", ",", "stWin", ",", "stStep", ",", "computeBEAT", ")", "else", ":", "return", "(", "RF", ",", "MEAN", ",", "STD", ",", "classNames", ",", "mtWin", ",", "mtStep", ",", "stWin", ",", "stStep", ",", "computeBEAT", ")" ]
this function loads an svm model either for classification or training .
train
false
1,614
def test_numpy_piecewise_regression():
    """Regression test: NumPyPrinter must print a Piecewise choicelist as a
    list to stay compatible with numpy 1.x `select`."""
    expr = Piecewise((1, (x < 0)), (0, True))
    printed = NumPyPrinter().doprint(expr)
    assert printed == 'select([less(x, 0),True], [1,0], default=nan)'
[ "def", "test_numpy_piecewise_regression", "(", ")", ":", "p", "=", "Piecewise", "(", "(", "1", ",", "(", "x", "<", "0", ")", ")", ",", "(", "0", ",", "True", ")", ")", "assert", "(", "NumPyPrinter", "(", ")", ".", "doprint", "(", "p", ")", "==", "'select([less(x, 0),True], [1,0], default=nan)'", ")" ]
numpyprinter needs to print piecewise()s choicelist as a list to avoid breaking compatibility with numpy 1 .
train
false
1,615
def _join(value):
    """Join the stringified items of *value* into one space-separated string."""
    return ' '.join(_stringify(item) for item in value)
[ "def", "_join", "(", "value", ")", ":", "return", "' '", ".", "join", "(", "map", "(", "_stringify", ",", "value", ")", ")" ]
join a list into a string .
train
false
1,616
def test_find_eog():
    """Smoke-test EOG peak detection on the sample raw file (expects 4 events)."""
    raw = read_raw_fif(raw_fname)
    eog_events = find_eog_events(raw)
    assert_true(len(eog_events) == 4)
[ "def", "test_find_eog", "(", ")", ":", "raw", "=", "read_raw_fif", "(", "raw_fname", ")", "events", "=", "find_eog_events", "(", "raw", ")", "n_events", "=", "len", "(", "events", ")", "assert_true", "(", "(", "n_events", "==", "4", ")", ")" ]
test find eog peaks .
train
false
1,617
def _validateChecksum(sentence):
    """Validate the checksum of an NMEA sentence.

    Only sentences that actually carry a '*hh' checksum suffix are checked.
    Raises base.InvalidChecksum when the XOR of the payload bytes does not
    match the transmitted value.
    """
    if sentence[-3:-2] == '*':
        expected = int(sentence[-2:], 16)
        # Payload is everything between the leading '$' and the '*'.
        payload = sentence[1:-3]
        actual = reduce(operator.xor, [ord(c) for c in iterbytes(payload)])
        if actual != expected:
            raise base.InvalidChecksum(('%02x != %02x' % (actual, expected)))
[ "def", "_validateChecksum", "(", "sentence", ")", ":", "if", "(", "sentence", "[", "(", "-", "3", ")", ":", "(", "-", "2", ")", "]", "==", "'*'", ")", ":", "(", "reference", ",", "source", ")", "=", "(", "int", "(", "sentence", "[", "(", "-", "2", ")", ":", "]", ",", "16", ")", ",", "sentence", "[", "1", ":", "(", "-", "3", ")", "]", ")", "computed", "=", "reduce", "(", "operator", ".", "xor", ",", "[", "ord", "(", "x", ")", "for", "x", "in", "iterbytes", "(", "source", ")", "]", ")", "if", "(", "computed", "!=", "reference", ")", ":", "raise", "base", ".", "InvalidChecksum", "(", "(", "'%02x != %02x'", "%", "(", "computed", ",", "reference", ")", ")", ")" ]
validates the checksum of an nmea sentence .
train
false
1,619
def exactly_n(l, n=1):
    """Return True iff exactly *n* items in iterable *l* are truthy.

    Consumes the iterable lazily and short-circuits as soon as more than *n*
    truthy items are seen.
    """
    it = iter(l)
    # Each any() call advances past one truthy item (or exhausts the iterator).
    for _ in range(n):
        if not any(it):
            return False
    # Success only if no further truthy item remains.
    return not any(it)
[ "def", "exactly_n", "(", "l", ",", "n", "=", "1", ")", ":", "i", "=", "iter", "(", "l", ")", "return", "(", "all", "(", "(", "any", "(", "i", ")", "for", "j", "in", "range", "(", "n", ")", ")", ")", "and", "(", "not", "any", "(", "i", ")", ")", ")" ]
tests that exactly n items in an iterable are "truthy" .
train
true
1,620
def iter_fields(fields):
    """Yield (key, value) pairs from *fields*, a dict or an iterable of pairs."""
    source = six.iteritems(fields) if isinstance(fields, dict) else fields
    return ((key, value) for (key, value) in source)
[ "def", "iter_fields", "(", "fields", ")", ":", "if", "isinstance", "(", "fields", ",", "dict", ")", ":", "return", "(", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "six", ".", "iteritems", "(", "fields", ")", ")", "return", "(", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "fields", ")" ]
yield a tuple of for each field in node .
train
true
1,621
def htmldecode(text, use_repr=False):
    """Decode HTML character references (&#nnn;, &#xhh;, and named entities) in *text*.

    Unknown or malformed references are left untouched. Byte strings are first
    decoded with the module's default encoding.
    """
    def _decode_ref(match):
        ref = match.group(1)
        try:
            if ref.startswith('#x'):
                return unichr(int(ref[2:], 16))
            if ref.startswith('#'):
                return unichr(int(ref[1:]))
            if ref in name2codepoint:
                return unichr(name2codepoint[ref])
            return match.group(0)
        except:
            # Bad numeric value / out-of-range codepoint: keep the raw reference.
            return match.group(0)
    if not isinstance(text, unicode):
        text = text.decode(DEFAULT_ENCODING, errors=HTML_ENCODE)
    return CHAR_REF_PATT.sub(_decode_ref, text)
[ "def", "htmldecode", "(", "text", ",", "use_repr", "=", "False", ")", ":", "def", "entitydecode", "(", "match", ")", ":", "entity", "=", "match", ".", "group", "(", "1", ")", "try", ":", "if", "entity", ".", "startswith", "(", "'#x'", ")", ":", "return", "unichr", "(", "int", "(", "entity", "[", "2", ":", "]", ",", "16", ")", ")", "elif", "entity", ".", "startswith", "(", "'#'", ")", ":", "return", "unichr", "(", "int", "(", "entity", "[", "1", ":", "]", ")", ")", "elif", "(", "entity", "in", "name2codepoint", ")", ":", "return", "unichr", "(", "name2codepoint", "[", "entity", "]", ")", "else", ":", "return", "match", ".", "group", "(", "0", ")", "except", ":", "return", "match", ".", "group", "(", "0", ")", "if", "(", "not", "isinstance", "(", "text", ",", "unicode", ")", ")", ":", "text", "=", "text", ".", "decode", "(", "DEFAULT_ENCODING", ",", "errors", "=", "HTML_ENCODE", ")", "return", "CHAR_REF_PATT", ".", "sub", "(", "entitydecode", ",", "text", ")" ]
decode html entities in the given text .
train
false
1,622
def validipaddr(address):
    """Return True if *address* is a valid dotted-quad IPv4 address.

    Fix: the previous version used `assert` for validation, which is stripped
    under `python -O`, silently making every address "valid". Explicit checks
    are used instead; non-numeric octets return False.
    """
    octets = address.split('.')
    if len(octets) != 4:
        return False
    try:
        return all(0 <= int(octet) <= 255 for octet in octets)
    except ValueError:
        # An octet that is not an integer (e.g. 'a', '1e1', '').
        return False
[ "def", "validipaddr", "(", "address", ")", ":", "try", ":", "octets", "=", "address", ".", "split", "(", "'.'", ")", "assert", "(", "len", "(", "octets", ")", "==", "4", ")", "for", "x", "in", "octets", ":", "assert", "(", "0", "<=", "int", "(", "x", ")", "<=", "255", ")", "except", "(", "AssertionError", ",", "ValueError", ")", ":", "return", "False", "return", "True" ]
returns true if address is a valid ipv4 address .
train
true
1,623
def where(condition, x, y):
    """Select elements from *x* where *condition* holds, else from *y*."""
    selector = Where()
    return selector(condition, x, y)
[ "def", "where", "(", "condition", ",", "x", ",", "y", ")", ":", "return", "Where", "(", ")", "(", "condition", ",", "x", ",", "y", ")" ]
return elements .
train
false
1,624
def _net_lock(network_id):
    """Return a context-manager lock scoped to the given network id."""
    return lockutils.lock('dhcp-agent-network-lock-%s' % network_id,
                          utils.SYNCHRONIZED_PREFIX)
[ "def", "_net_lock", "(", "network_id", ")", ":", "lock_name", "=", "(", "'dhcp-agent-network-lock-%s'", "%", "network_id", ")", "return", "lockutils", ".", "lock", "(", "lock_name", ",", "utils", ".", "SYNCHRONIZED_PREFIX", ")" ]
returns a context manager lock based on network_id .
train
false
1,625
def places_radar(client, location, radius, keyword=None, min_price=None,
                 max_price=None, name=None, open_now=False, type=None):
    """Perform a radar search for places.

    At least one of *keyword*, *name* or *type* must be truthy; otherwise a
    ValueError is raised before any request is made.
    """
    if not any((keyword, name, type)):
        raise ValueError('either a keyword, name, or type arg is required')
    return _places(client, 'radar',
                   location=location, radius=radius, keyword=keyword,
                   min_price=min_price, max_price=max_price, name=name,
                   open_now=open_now, type=type)
[ "def", "places_radar", "(", "client", ",", "location", ",", "radius", ",", "keyword", "=", "None", ",", "min_price", "=", "None", ",", "max_price", "=", "None", ",", "name", "=", "None", ",", "open_now", "=", "False", ",", "type", "=", "None", ")", ":", "if", "(", "not", "(", "keyword", "or", "name", "or", "type", ")", ")", ":", "raise", "ValueError", "(", "'either a keyword, name, or type arg is required'", ")", "return", "_places", "(", "client", ",", "'radar'", ",", "location", "=", "location", ",", "radius", "=", "radius", ",", "keyword", "=", "keyword", ",", "min_price", "=", "min_price", ",", "max_price", "=", "max_price", ",", "name", "=", "name", ",", "open_now", "=", "open_now", ",", "type", "=", "type", ")" ]
performs radar search for places .
train
true
1,626
def make_pretty_name(method):
    """Return a readable name for a callable: 'ClassName.method' when bound,
    plain function name otherwise."""
    pieces = [method.__name__]
    bound_to = getattr(method, '__self__', None)
    if bound_to is not None:
        try:
            pieces.insert(0, bound_to.__class__.__name__)
        except AttributeError:
            # Exotic objects without a usable __class__.__name__: skip prefix.
            pass
    return '.'.join(pieces)
[ "def", "make_pretty_name", "(", "method", ")", ":", "meth_pieces", "=", "[", "method", ".", "__name__", "]", "if", "(", "hasattr", "(", "method", ",", "'__self__'", ")", "and", "(", "method", ".", "__self__", "is", "not", "None", ")", ")", ":", "try", ":", "meth_pieces", ".", "insert", "(", "0", ",", "method", ".", "__self__", ".", "__class__", ".", "__name__", ")", "except", "AttributeError", ":", "pass", "return", "'.'", ".", "join", "(", "meth_pieces", ")" ]
makes a pretty name for a function/method .
train
false
1,628
@task(ignore_result=False)
def check_celery():
    """Dummy Celery task used to check that the worker pipeline runs; does nothing.

    ignore_result=False so callers can wait on the task result as the health signal.
    """
    pass
[ "@", "task", "(", "ignore_result", "=", "False", ")", "def", "check_celery", "(", ")", ":", "pass" ]
dummy celery task to check that everything runs smoothly .
train
false
1,629
def stts2universal(token, tag):
    """Convert an STTS (token, tag) pair to the universal tagset.

    German-specific conjunction, particle and pronoun tags map directly;
    everything else falls through to the Penn Treebank conversion chain.
    """
    direct_map = (
        (('KON', 'KOUI', 'KOUS', 'KOKOM'), CONJ),
        (('PTKZU', 'PTKNEG', 'PTKVZ', 'PTKANT'), PRT),
        (('PDF', 'PDAT', 'PIS', 'PIAT', 'PIDAT', 'PPER', 'PPOS', 'PPOSAT'), PRON),
        (('PRELS', 'PRELAT', 'PRF', 'PWS', 'PWAT', 'PWAV', 'PAV'), PRON),
    )
    for stts_tags, universal in direct_map:
        if tag in stts_tags:
            return (token, universal)
    return penntreebank2universal(*stts2penntreebank(token, tag))
[ "def", "stts2universal", "(", "token", ",", "tag", ")", ":", "if", "(", "tag", "in", "(", "'KON'", ",", "'KOUI'", ",", "'KOUS'", ",", "'KOKOM'", ")", ")", ":", "return", "(", "token", ",", "CONJ", ")", "if", "(", "tag", "in", "(", "'PTKZU'", ",", "'PTKNEG'", ",", "'PTKVZ'", ",", "'PTKANT'", ")", ")", ":", "return", "(", "token", ",", "PRT", ")", "if", "(", "tag", "in", "(", "'PDF'", ",", "'PDAT'", ",", "'PIS'", ",", "'PIAT'", ",", "'PIDAT'", ",", "'PPER'", ",", "'PPOS'", ",", "'PPOSAT'", ")", ")", ":", "return", "(", "token", ",", "PRON", ")", "if", "(", "tag", "in", "(", "'PRELS'", ",", "'PRELAT'", ",", "'PRF'", ",", "'PWS'", ",", "'PWAT'", ",", "'PWAV'", ",", "'PAV'", ")", ")", ":", "return", "(", "token", ",", "PRON", ")", "return", "penntreebank2universal", "(", "*", "stts2penntreebank", "(", "token", ",", "tag", ")", ")" ]
converts an stts tag to a universal tag .
train
true
1,632
def add_array_type(property_schema):
    """Return a copy of *property_schema* whose 'type' also accepts arrays.

    The input schema is not modified.
    """
    widened = property_schema.copy()
    widened['type'] = [widened['type'], 'array']
    return widened
[ "def", "add_array_type", "(", "property_schema", ")", ":", "new_schema", "=", "property_schema", ".", "copy", "(", ")", "new_schema", "[", "'type'", "]", "=", "[", "property_schema", "[", "'type'", "]", ",", "'array'", "]", "return", "new_schema" ]
convert the parameter schema to be of type list .
train
false
1,633
def spearman_rho(worder, normalize=True):
    """Spearman's rho correlation for a *worder* word-rank alignment list.

    When *normalize* is true the result is rescaled from [-1, 1] to [0, 1].
    """
    length = len(worder)
    squared_diffs = sum((rank - position) ** 2
                        for position, rank in enumerate(worder))
    rho = 1 - squared_diffs / choose(length + 1, 3)
    return (rho + 1) / 2 if normalize else rho
[ "def", "spearman_rho", "(", "worder", ",", "normalize", "=", "True", ")", ":", "worder_len", "=", "len", "(", "worder", ")", "sum_d_square", "=", "sum", "(", "(", "(", "(", "wi", "-", "i", ")", "**", "2", ")", "for", "(", "wi", ",", "i", ")", "in", "zip", "(", "worder", ",", "range", "(", "worder_len", ")", ")", ")", ")", "rho", "=", "(", "1", "-", "(", "sum_d_square", "/", "choose", "(", "(", "worder_len", "+", "1", ")", ",", "3", ")", ")", ")", "if", "normalize", ":", "return", "(", "(", "rho", "+", "1", ")", "/", "2", ")", "else", ":", "return", "rho" ]
calculates the spearmans rho correlation coefficient given the *worder* list of word alignment from word_rank_alignment() .
train
false
1,636
def crelu(x, axis=1):
    """Apply the Concatenated ReLU activation along *axis*."""
    activation = CReLU(axis=axis)
    return activation(x)
[ "def", "crelu", "(", "x", ",", "axis", "=", "1", ")", ":", "return", "CReLU", "(", "axis", "=", "axis", ")", "(", "x", ")" ]
concatenated rectified linear unit function .
train
false
1,638
def TemporaryFile(mode='w+b', bufsize=(-1), suffix='', prefix=template, dir=None):
    """Stub of tempfile.TemporaryFile that returns an in-memory StringIO.

    All arguments are accepted for API compatibility but ignored; nothing
    touches the filesystem.
    """
    return StringIO()
[ "def", "TemporaryFile", "(", "mode", "=", "'w+b'", ",", "bufsize", "=", "(", "-", "1", ")", ",", "suffix", "=", "''", ",", "prefix", "=", "template", ",", "dir", "=", "None", ")", ":", "return", "StringIO", "(", ")" ]
create and return a temporary file .
train
false
1,639
def libvlc_media_discoverer_stop(p_mdis):
    """Stop media discovery on *p_mdis*.

    Resolves the native libvlc entry point lazily, preferring a previously
    cached binding when available.
    """
    f = _Cfunctions.get('libvlc_media_discoverer_stop', None)
    if not f:
        f = _Cfunction('libvlc_media_discoverer_stop', ((1,),), None,
                       None, MediaDiscoverer)
    return f(p_mdis)
[ "def", "libvlc_media_discoverer_stop", "(", "p_mdis", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_discoverer_stop'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_discoverer_stop'", ",", "(", "(", "1", ",", ")", ",", ")", ",", "None", ",", "None", ",", "MediaDiscoverer", ")", ")", "return", "f", "(", "p_mdis", ")" ]
stop media discovery .
train
false
1,641
@retry_on_failure
def test_gethostbyname_ex():
    """Check socket.gethostbyname_ex resolves loopback names and rejects bad hosts."""
    for host in ('localhost', '127.0.0.1'):
        addresses = socket.gethostbyname_ex(host)[2]
        Assert('127.0.0.1' in addresses)
    AssertError(socket.gaierror, socket.gethostbyname_ex, 'should never work')
[ "@", "retry_on_failure", "def", "test_gethostbyname_ex", "(", ")", ":", "joe", "=", "socket", ".", "gethostbyname_ex", "(", "'localhost'", ")", "[", "2", "]", "Assert", "(", "(", "'127.0.0.1'", "in", "joe", ")", ")", "joe", "=", "socket", ".", "gethostbyname_ex", "(", "'127.0.0.1'", ")", "[", "2", "]", "Assert", "(", "(", "'127.0.0.1'", "in", "joe", ")", ")", "AssertError", "(", "socket", ".", "gaierror", ",", "socket", ".", "gethostbyname_ex", ",", "'should never work'", ")" ]
tests socket .
train
false
1,642
@login_required
@mobile_template('messages/{mobile/}new.html')
def new_message(request, template):
    """Compose and send a private message; GET renders the form, POST sends.

    Recipients may be preset via ?to=user1,user2 and the body via ?message=.
    """
    to = request.GET.get('to')
    if to:
        try:
            # Validate every comma-separated recipient before showing the form.
            for username in to.split(','):
                User.objects.get(username=username)
        except User.DoesNotExist:
            contrib_messages.add_message(request, contrib_messages.ERROR, _('Invalid username provided. Enter a new username below.'))
            return HttpResponseRedirect(reverse('messages.new'))
    message = request.GET.get('message')
    form = MessageForm((request.POST or None), initial={'to': to, 'message': message})
    # Rate-limited to 50 sends per user per day.
    if ((request.method == 'POST') and form.is_valid() and (not is_ratelimited(request, 'primate-message-day', '50/d'))):
        send_message(form.cleaned_data['to'], form.cleaned_data['message'], request.user)
        if form.cleaned_data['in_reply_to']:
            irt = form.cleaned_data['in_reply_to']
            try:
                # Mark the message being replied to; ignore if it vanished.
                m = InboxMessage.objects.get(pk=irt, to=request.user)
                m.update(replied=True)
            except InboxMessage.DoesNotExist:
                pass
        contrib_messages.add_message(request, contrib_messages.SUCCESS, _('Your message was sent!'))
        return HttpResponseRedirect(reverse('messages.inbox'))
    return render(request, template, {'form': form})
[ "@", "login_required", "@", "mobile_template", "(", "'messages/{mobile/}new.html'", ")", "def", "new_message", "(", "request", ",", "template", ")", ":", "to", "=", "request", ".", "GET", ".", "get", "(", "'to'", ")", "if", "to", ":", "try", ":", "for", "username", "in", "to", ".", "split", "(", "','", ")", ":", "User", ".", "objects", ".", "get", "(", "username", "=", "username", ")", "except", "User", ".", "DoesNotExist", ":", "contrib_messages", ".", "add_message", "(", "request", ",", "contrib_messages", ".", "ERROR", ",", "_", "(", "'Invalid username provided. Enter a new username below.'", ")", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'messages.new'", ")", ")", "message", "=", "request", ".", "GET", ".", "get", "(", "'message'", ")", "form", "=", "MessageForm", "(", "(", "request", ".", "POST", "or", "None", ")", ",", "initial", "=", "{", "'to'", ":", "to", ",", "'message'", ":", "message", "}", ")", "if", "(", "(", "request", ".", "method", "==", "'POST'", ")", "and", "form", ".", "is_valid", "(", ")", "and", "(", "not", "is_ratelimited", "(", "request", ",", "'primate-message-day'", ",", "'50/d'", ")", ")", ")", ":", "send_message", "(", "form", ".", "cleaned_data", "[", "'to'", "]", ",", "form", ".", "cleaned_data", "[", "'message'", "]", ",", "request", ".", "user", ")", "if", "form", ".", "cleaned_data", "[", "'in_reply_to'", "]", ":", "irt", "=", "form", ".", "cleaned_data", "[", "'in_reply_to'", "]", "try", ":", "m", "=", "InboxMessage", ".", "objects", ".", "get", "(", "pk", "=", "irt", ",", "to", "=", "request", ".", "user", ")", "m", ".", "update", "(", "replied", "=", "True", ")", "except", "InboxMessage", ".", "DoesNotExist", ":", "pass", "contrib_messages", ".", "add_message", "(", "request", ",", "contrib_messages", ".", "SUCCESS", ",", "_", "(", "'Your message was sent!'", ")", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'messages.inbox'", ")", ")", "return", "render", "(", "request", ",", "template", ",", 
"{", "'form'", ":", "form", "}", ")" ]
creates an email with a random subject .
train
false
1,643
def VBox(*args, **kwargs):
    """Lay out child components in a single vertical column (alias for Column)."""
    return Column(*args, **kwargs)
[ "def", "VBox", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "Column", "(", "*", "args", ",", "**", "kwargs", ")" ]
lay out child components in a single vertical row .
train
false
1,644
@contextmanager
def random_seed(seed):
    """Temporarily seed the global random generator, restoring its prior state on exit."""
    saved_state = random.getstate()
    random.seed(seed)
    try:
        yield
    finally:
        random.setstate(saved_state)
[ "@", "contextmanager", "def", "random_seed", "(", "seed", ")", ":", "state", "=", "random", ".", "getstate", "(", ")", "random", ".", "seed", "(", "seed", ")", "try", ":", "(", "yield", ")", "finally", ":", "random", ".", "setstate", "(", "state", ")" ]
temporarily change the seed of the random number generator .
train
false
1,645
def instrumented_test_render(self, context):
    """An instrumented template render method.

    Fires the template_rendered signal (so e.g. test clients can observe the
    template and context) before delegating to the normal nodelist render.
    """
    signals.template_rendered.send(sender=self, template=self, context=context)
    return self.nodelist.render(context)
[ "def", "instrumented_test_render", "(", "self", ",", "context", ")", ":", "signals", ".", "template_rendered", ".", "send", "(", "sender", "=", "self", ",", "template", "=", "self", ",", "context", "=", "context", ")", "return", "self", ".", "nodelist", ".", "render", "(", "context", ")" ]
an instrumented template render method .
train
false
1,646
def _hash_of_file(path, algorithm):
    """Return the hex digest of the file at *path* using hash *algorithm*."""
    with open(path, 'rb') as archive:
        digest = hashlib.new(algorithm)
        for chunk in read_chunks(archive):
            digest.update(chunk)
    return digest.hexdigest()
[ "def", "_hash_of_file", "(", "path", ",", "algorithm", ")", ":", "with", "open", "(", "path", ",", "'rb'", ")", "as", "archive", ":", "hash", "=", "hashlib", ".", "new", "(", "algorithm", ")", "for", "chunk", "in", "read_chunks", "(", "archive", ")", ":", "hash", ".", "update", "(", "chunk", ")", "return", "hash", ".", "hexdigest", "(", ")" ]
return the hash digest of a file .
train
true
1,647
def longToString(l):
    """Convert a non-negative integer into its big-endian byte/character string.

    Returns '' for 0. Fix: the previous version used Python-2-only long
    literals (0L, 256L) and classic `/` division; `//` with plain int literals
    behaves identically on Python 2 longs and also works on Python 3.
    """
    result = ''
    while l > 0:
        # Prepend the lowest-order byte, then shift right by one byte.
        result = chr(l % 256) + result
        l //= 256
    return result
[ "def", "longToString", "(", "l", ")", ":", "result", "=", "''", "while", "(", "l", ">", "0", "L", ")", ":", "result", "=", "(", "chr", "(", "(", "l", "%", "256", ")", ")", "+", "result", ")", "l", "=", "(", "l", "/", "256", "L", ")", "return", "result" ]
convert long to digest .
train
false
1,648
def typeof(val, purpose=Purpose.argument):
    """Get the Numba type of a Python value for the given purpose.

    Raises ValueError when no Numba type can be determined.
    """
    ctx = _TypeofContext(purpose)
    ty = typeof_impl(val, ctx)
    if ty is None:
        raise ValueError('cannot determine Numba type of %r' % (type(val),))
    return ty
[ "def", "typeof", "(", "val", ",", "purpose", "=", "Purpose", ".", "argument", ")", ":", "c", "=", "_TypeofContext", "(", "purpose", ")", "ty", "=", "typeof_impl", "(", "val", ",", "c", ")", "if", "(", "ty", "is", "None", ")", ":", "msg", "=", "(", "'cannot determine Numba type of %r'", "%", "(", "type", "(", "val", ")", ",", ")", ")", "raise", "ValueError", "(", "msg", ")", "return", "ty" ]
get the numba type of a python value for the given purpose .
train
false
1,650
def testSimple(data='Hello <b>World</b><br/><img src="img/test.jpg"/>', dest='test.pdf'):
    """Simple test: create a PDF file from a PML source string and open a viewer on success."""
    # NOTE(review): Python 2 `file()` builtin; the output handle is never
    # explicitly closed — relies on GC. Confirm before porting.
    pdf = pisa.CreatePDF(cStringIO.StringIO(data), file(dest, 'wb'))
    if pdf.err:
        dumpErrors(pdf)
    else:
        pisa.startViewer(dest)
[ "def", "testSimple", "(", "data", "=", "'Hello <b>World</b><br/><img src=\"img/test.jpg\"/>'", ",", "dest", "=", "'test.pdf'", ")", ":", "pdf", "=", "pisa", ".", "CreatePDF", "(", "cStringIO", ".", "StringIO", "(", "data", ")", ",", "file", "(", "dest", ",", "'wb'", ")", ")", "if", "pdf", ".", "err", ":", "dumpErrors", "(", "pdf", ")", "else", ":", "pisa", ".", "startViewer", "(", "dest", ")" ]
simple test showing how to create a pdf file from pml source string .
train
false
1,653
def broadcastable_to_str(b): named_broadcastable = {(): 'scalar', (False,): 'vector', (False, True): 'col', (True, False): 'row', (False, False): 'matrix'} if (b in named_broadcastable): bcast = named_broadcastable[b] else: bcast = '' return bcast
[ "def", "broadcastable_to_str", "(", "b", ")", ":", "named_broadcastable", "=", "{", "(", ")", ":", "'scalar'", ",", "(", "False", ",", ")", ":", "'vector'", ",", "(", "False", ",", "True", ")", ":", "'col'", ",", "(", "True", ",", "False", ")", ":", "'row'", ",", "(", "False", ",", "False", ")", ":", "'matrix'", "}", "if", "(", "b", "in", "named_broadcastable", ")", ":", "bcast", "=", "named_broadcastable", "[", "b", "]", "else", ":", "bcast", "=", "''", "return", "bcast" ]
return string representation of broadcastable .
train
false
1,654
def decode_integer(data, prefix_bits): multiple = (lambda index: (128 ** (index - 1))) max_number = ((2 ** prefix_bits) - 1) mask = (255 >> (8 - prefix_bits)) index = 0 number = (to_byte(data[index]) & mask) if (number == max_number): while True: index += 1 next_byte = to_byte(data[index]) if (next_byte >= 128): number += ((next_byte - 128) * multiple(index)) else: number += (next_byte * multiple(index)) break return (number, (index + 1))
[ "def", "decode_integer", "(", "data", ",", "prefix_bits", ")", ":", "multiple", "=", "(", "lambda", "index", ":", "(", "128", "**", "(", "index", "-", "1", ")", ")", ")", "max_number", "=", "(", "(", "2", "**", "prefix_bits", ")", "-", "1", ")", "mask", "=", "(", "255", ">>", "(", "8", "-", "prefix_bits", ")", ")", "index", "=", "0", "number", "=", "(", "to_byte", "(", "data", "[", "index", "]", ")", "&", "mask", ")", "if", "(", "number", "==", "max_number", ")", ":", "while", "True", ":", "index", "+=", "1", "next_byte", "=", "to_byte", "(", "data", "[", "index", "]", ")", "if", "(", "next_byte", ">=", "128", ")", ":", "number", "+=", "(", "(", "next_byte", "-", "128", ")", "*", "multiple", "(", "index", ")", ")", "else", ":", "number", "+=", "(", "next_byte", "*", "multiple", "(", "index", ")", ")", "break", "return", "(", "number", ",", "(", "index", "+", "1", ")", ")" ]
this decodes an integer according to the wacky integer encoding rules defined in the hpack spec .
train
false
1,655
def partition_data(X, y): (Xs, ys) = shuffle_array(X, y) mapping = {} for i in xrange(len(y)): yi = ys[i] try: mapping[yi].append(i) except KeyError: mapping[yi] = [i] (Xtrain, ytrain) = ([], []) (Xtest, ytest) = ([], []) for (key, indices) in mapping.iteritems(): Xtrain.extend([Xs[i] for i in indices[:1]]) ytrain.extend([ys[i] for i in indices[:1]]) Xtest.extend([Xs[i] for i in indices[1:20]]) ytest.extend([ys[i] for i in indices[1:20]]) return (Xtrain, ytrain, Xtest, ytest)
[ "def", "partition_data", "(", "X", ",", "y", ")", ":", "(", "Xs", ",", "ys", ")", "=", "shuffle_array", "(", "X", ",", "y", ")", "mapping", "=", "{", "}", "for", "i", "in", "xrange", "(", "len", "(", "y", ")", ")", ":", "yi", "=", "ys", "[", "i", "]", "try", ":", "mapping", "[", "yi", "]", ".", "append", "(", "i", ")", "except", "KeyError", ":", "mapping", "[", "yi", "]", "=", "[", "i", "]", "(", "Xtrain", ",", "ytrain", ")", "=", "(", "[", "]", ",", "[", "]", ")", "(", "Xtest", ",", "ytest", ")", "=", "(", "[", "]", ",", "[", "]", ")", "for", "(", "key", ",", "indices", ")", "in", "mapping", ".", "iteritems", "(", ")", ":", "Xtrain", ".", "extend", "(", "[", "Xs", "[", "i", "]", "for", "i", "in", "indices", "[", ":", "1", "]", "]", ")", "ytrain", ".", "extend", "(", "[", "ys", "[", "i", "]", "for", "i", "in", "indices", "[", ":", "1", "]", "]", ")", "Xtest", ".", "extend", "(", "[", "Xs", "[", "i", "]", "for", "i", "in", "indices", "[", "1", ":", "20", "]", "]", ")", "ytest", ".", "extend", "(", "[", "ys", "[", "i", "]", "for", "i", "in", "indices", "[", "1", ":", "20", "]", "]", ")", "return", "(", "Xtrain", ",", "ytrain", ",", "Xtest", ",", "ytest", ")" ]
shuffles the input data and splits it into a new set of images .
train
false
1,656
def get_info_extractor(ie_name): return globals()[(ie_name + u'IE')]
[ "def", "get_info_extractor", "(", "ie_name", ")", ":", "return", "globals", "(", ")", "[", "(", "ie_name", "+", "u'IE'", ")", "]" ]
returns the info extractor class with the given ie_name .
train
false
1,657
def rotate_token(request): request.META.update({'CSRF_COOKIE_USED': True, 'CSRF_COOKIE': _get_new_csrf_token()}) request.csrf_cookie_needs_reset = True
[ "def", "rotate_token", "(", "request", ")", ":", "request", ".", "META", ".", "update", "(", "{", "'CSRF_COOKIE_USED'", ":", "True", ",", "'CSRF_COOKIE'", ":", "_get_new_csrf_token", "(", ")", "}", ")", "request", ".", "csrf_cookie_needs_reset", "=", "True" ]
changes the csrf token in use for a request - should be done on login for security purposes .
train
false
1,658
def select_commits(title, revs, summaries, multiselect=True): model = Model(revs, summaries) parent = qtutils.active_window() dialog = SelectCommitsDialog(model, parent, title, multiselect=multiselect) return dialog.select_commits()
[ "def", "select_commits", "(", "title", ",", "revs", ",", "summaries", ",", "multiselect", "=", "True", ")", ":", "model", "=", "Model", "(", "revs", ",", "summaries", ")", "parent", "=", "qtutils", ".", "active_window", "(", ")", "dialog", "=", "SelectCommitsDialog", "(", "model", ",", "parent", ",", "title", ",", "multiselect", "=", "multiselect", ")", "return", "dialog", ".", "select_commits", "(", ")" ]
use the selectcommitsdialog to select commits from a list .
train
false
1,659
def Ttemplate(txt): global _SKIN_CACHE if (txt in _SKIN_CACHE): return _SKIN_CACHE[txt] else: tra = html_escape(Tx(SKIN_TEXT.get(txt, txt))) _SKIN_CACHE[txt] = tra return tra
[ "def", "Ttemplate", "(", "txt", ")", ":", "global", "_SKIN_CACHE", "if", "(", "txt", "in", "_SKIN_CACHE", ")", ":", "return", "_SKIN_CACHE", "[", "txt", "]", "else", ":", "tra", "=", "html_escape", "(", "Tx", "(", "SKIN_TEXT", ".", "get", "(", "txt", ",", "txt", ")", ")", ")", "_SKIN_CACHE", "[", "txt", "]", "=", "tra", "return", "tra" ]
translation function for skin texts .
train
false
1,660
@pytest.mark.skipif(str(u'not six.PY2')) def test_color_print_no_default_encoding(): orig_func = locale.getpreferredencoding locale.getpreferredencoding = (lambda : u'') try: stream = io.StringIO() console.color_print('\xe2\x98\x83', u'white', file=stream) assert (stream.getvalue() == u'\u2603\n') stream = io.StringIO() console.color_print('\xcd\xef', u'red', file=stream) assert (stream.getvalue() == u'\xcd\xef\n') finally: locale.getpreferredencoding = orig_func
[ "@", "pytest", ".", "mark", ".", "skipif", "(", "str", "(", "u'not six.PY2'", ")", ")", "def", "test_color_print_no_default_encoding", "(", ")", ":", "orig_func", "=", "locale", ".", "getpreferredencoding", "locale", ".", "getpreferredencoding", "=", "(", "lambda", ":", "u''", ")", "try", ":", "stream", "=", "io", ".", "StringIO", "(", ")", "console", ".", "color_print", "(", "'\\xe2\\x98\\x83'", ",", "u'white'", ",", "file", "=", "stream", ")", "assert", "(", "stream", ".", "getvalue", "(", ")", "==", "u'\\u2603\\n'", ")", "stream", "=", "io", ".", "StringIO", "(", ")", "console", ".", "color_print", "(", "'\\xcd\\xef'", ",", "u'red'", ",", "file", "=", "stream", ")", "assert", "(", "stream", ".", "getvalue", "(", ")", "==", "u'\\xcd\\xef\\n'", ")", "finally", ":", "locale", ".", "getpreferredencoding", "=", "orig_func" ]
regression test for #1244 in some environments locale .
train
false
1,661
def abort_import(handler, host=None, core_name=None, verbose=False): if ((not _is_master()) and (_get_none_or_value(host) is None)): err = ['solr.abort_import can only be called on "master" minions'] return _get_return_dict(False, errors=err) if ((_get_none_or_value(core_name) is None) and _check_for_cores()): err = ['No core specified when minion is configured as "multi-core".'] return _get_return_dict(False, err) params = ['command=abort'] if verbose: params.append('verbose=true') url = _format_url(handler, host=host, core_name=core_name, extra=params) return _http_request(url)
[ "def", "abort_import", "(", "handler", ",", "host", "=", "None", ",", "core_name", "=", "None", ",", "verbose", "=", "False", ")", ":", "if", "(", "(", "not", "_is_master", "(", ")", ")", "and", "(", "_get_none_or_value", "(", "host", ")", "is", "None", ")", ")", ":", "err", "=", "[", "'solr.abort_import can only be called on \"master\" minions'", "]", "return", "_get_return_dict", "(", "False", ",", "errors", "=", "err", ")", "if", "(", "(", "_get_none_or_value", "(", "core_name", ")", "is", "None", ")", "and", "_check_for_cores", "(", ")", ")", ":", "err", "=", "[", "'No core specified when minion is configured as \"multi-core\".'", "]", "return", "_get_return_dict", "(", "False", ",", "err", ")", "params", "=", "[", "'command=abort'", "]", "if", "verbose", ":", "params", ".", "append", "(", "'verbose=true'", ")", "url", "=", "_format_url", "(", "handler", ",", "host", "=", "host", ",", "core_name", "=", "core_name", ",", "extra", "=", "params", ")", "return", "_http_request", "(", "url", ")" ]
master only aborts an existing import command to the specified handler .
train
true
1,662
def _is_known_retryable(exception): if isinstance(exception, APIError): if (exception.response.status_code == INTERNAL_SERVER_ERROR): error_text = exception.response.text return any(((known in error_text) for known in [u'Unknown device', u'no such device'])) if isinstance(exception, ConnectionError): if ((len(exception.args) > 0) and isinstance(exception.args[0], ProtocolError)): if ((len(exception.args[0].args) > 1) and isinstance(exception.args[0].args[1], socket_error)): return (exception.args[0].args[1].errno in {ECONNREFUSED}) return False
[ "def", "_is_known_retryable", "(", "exception", ")", ":", "if", "isinstance", "(", "exception", ",", "APIError", ")", ":", "if", "(", "exception", ".", "response", ".", "status_code", "==", "INTERNAL_SERVER_ERROR", ")", ":", "error_text", "=", "exception", ".", "response", ".", "text", "return", "any", "(", "(", "(", "known", "in", "error_text", ")", "for", "known", "in", "[", "u'Unknown device'", ",", "u'no such device'", "]", ")", ")", "if", "isinstance", "(", "exception", ",", "ConnectionError", ")", ":", "if", "(", "(", "len", "(", "exception", ".", "args", ")", ">", "0", ")", "and", "isinstance", "(", "exception", ".", "args", "[", "0", "]", ",", "ProtocolError", ")", ")", ":", "if", "(", "(", "len", "(", "exception", ".", "args", "[", "0", "]", ".", "args", ")", ">", "1", ")", "and", "isinstance", "(", "exception", ".", "args", "[", "0", "]", ".", "args", "[", "1", "]", ",", "socket_error", ")", ")", ":", "return", "(", "exception", ".", "args", "[", "0", "]", ".", "args", "[", "1", "]", ".", "errno", "in", "{", "ECONNREFUSED", "}", ")", "return", "False" ]
determine if the text of a docker 500 error represents a case which warrants an automatic retry .
train
false
1,664
def list_imdbs(): return __sets.keys()
[ "def", "list_imdbs", "(", ")", ":", "return", "__sets", ".", "keys", "(", ")" ]
list all registered imdbs .
train
false
1,668
def log_buffer_contents(): return logs_buffer().contents()
[ "def", "log_buffer_contents", "(", ")", ":", "return", "logs_buffer", "(", ")", ".", "contents", "(", ")" ]
returns the contents of the logs buffer .
train
false
1,669
def get_windows_dir(): try: import win32api except ImportError: windir = compat.getenv('SystemRoot', compat.getenv('WINDIR')) else: windir = win32api.GetWindowsDirectory() if (not windir): raise SystemExit('Error: Can not determine your Windows directory') return windir
[ "def", "get_windows_dir", "(", ")", ":", "try", ":", "import", "win32api", "except", "ImportError", ":", "windir", "=", "compat", ".", "getenv", "(", "'SystemRoot'", ",", "compat", ".", "getenv", "(", "'WINDIR'", ")", ")", "else", ":", "windir", "=", "win32api", ".", "GetWindowsDirectory", "(", ")", "if", "(", "not", "windir", ")", ":", "raise", "SystemExit", "(", "'Error: Can not determine your Windows directory'", ")", "return", "windir" ]
return the windows directory e .
train
true
1,670
def getMaximumByPathsComplex(paths): maximum = complex((-999999999.0), (-999999999.0)) for path in paths: maximum = getMaximum(maximum, getMaximumByPathComplex(path)) return maximum
[ "def", "getMaximumByPathsComplex", "(", "paths", ")", ":", "maximum", "=", "complex", "(", "(", "-", "999999999.0", ")", ",", "(", "-", "999999999.0", ")", ")", "for", "path", "in", "paths", ":", "maximum", "=", "getMaximum", "(", "maximum", ",", "getMaximumByPathComplex", "(", "path", ")", ")", "return", "maximum" ]
get a complex with each component the maximum of the respective components of lists of complex points .
train
false
1,671
def connections_support_transactions(): return all((conn.settings_dict['SUPPORTS_TRANSACTIONS'] for conn in connections.all()))
[ "def", "connections_support_transactions", "(", ")", ":", "return", "all", "(", "(", "conn", ".", "settings_dict", "[", "'SUPPORTS_TRANSACTIONS'", "]", "for", "conn", "in", "connections", ".", "all", "(", ")", ")", ")" ]
returns true if all connections support transactions .
train
false
1,672
def test_type_extensibility(): from sympy.core import Basic class MyType(Basic, ): pass class MyAskHandler(AskHandler, ): @staticmethod def MyType(expr, assumptions): return True a = MyType() register_handler(Q.prime, MyAskHandler) assert (ask(Q.prime(a)) is True)
[ "def", "test_type_extensibility", "(", ")", ":", "from", "sympy", ".", "core", "import", "Basic", "class", "MyType", "(", "Basic", ",", ")", ":", "pass", "class", "MyAskHandler", "(", "AskHandler", ",", ")", ":", "@", "staticmethod", "def", "MyType", "(", "expr", ",", "assumptions", ")", ":", "return", "True", "a", "=", "MyType", "(", ")", "register_handler", "(", "Q", ".", "prime", ",", "MyAskHandler", ")", "assert", "(", "ask", "(", "Q", ".", "prime", "(", "a", ")", ")", "is", "True", ")" ]
test that new types can be added to the ask system at runtime we create a custom type mytype .
train
false
1,673
def iter_sha1(iter): sha = sha1() for name in iter: sha.update(name) return sha.hexdigest().encode('ascii')
[ "def", "iter_sha1", "(", "iter", ")", ":", "sha", "=", "sha1", "(", ")", "for", "name", "in", "iter", ":", "sha", ".", "update", "(", "name", ")", "return", "sha", ".", "hexdigest", "(", ")", ".", "encode", "(", "'ascii'", ")" ]
return the hexdigest of the sha1 over a set of names .
train
false
1,674
def exit_gracefully(code=0): global RUN_CONFIG if os.path.exists(RUN_CONFIG.temp): for f in os.listdir(RUN_CONFIG.temp): os.remove((RUN_CONFIG.temp + f)) os.rmdir(RUN_CONFIG.temp) disable_monitor_mode() mac_change_back() print (((GR + ' [+]') + W) + ' quitting') print '' exit(code)
[ "def", "exit_gracefully", "(", "code", "=", "0", ")", ":", "global", "RUN_CONFIG", "if", "os", ".", "path", ".", "exists", "(", "RUN_CONFIG", ".", "temp", ")", ":", "for", "f", "in", "os", ".", "listdir", "(", "RUN_CONFIG", ".", "temp", ")", ":", "os", ".", "remove", "(", "(", "RUN_CONFIG", ".", "temp", "+", "f", ")", ")", "os", ".", "rmdir", "(", "RUN_CONFIG", ".", "temp", ")", "disable_monitor_mode", "(", ")", "mac_change_back", "(", ")", "print", "(", "(", "(", "GR", "+", "' [+]'", ")", "+", "W", ")", "+", "' quitting'", ")", "print", "''", "exit", "(", "code", ")" ]
we may exit the program at any time .
train
false
1,675
def test_css(Chart): css = '{{ id }}text { fill: #bedead; }\n' with NamedTemporaryFile('w') as f: f.write(css) f.flush() config = Config() config.css.append(('file://' + f.name)) chart = Chart(config) chart.add('/', [10, 1, 5]) svg = chart.render().decode('utf-8') assert ('#bedead' in svg) chart = Chart(css=(_ellipsis, ('file://' + f.name))) chart.add('/', [10, 1, 5]) svg = chart.render().decode('utf-8') assert ('#bedead' in svg)
[ "def", "test_css", "(", "Chart", ")", ":", "css", "=", "'{{ id }}text { fill: #bedead; }\\n'", "with", "NamedTemporaryFile", "(", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "css", ")", "f", ".", "flush", "(", ")", "config", "=", "Config", "(", ")", "config", ".", "css", ".", "append", "(", "(", "'file://'", "+", "f", ".", "name", ")", ")", "chart", "=", "Chart", "(", "config", ")", "chart", ".", "add", "(", "'/'", ",", "[", "10", ",", "1", ",", "5", "]", ")", "svg", "=", "chart", ".", "render", "(", ")", ".", "decode", "(", "'utf-8'", ")", "assert", "(", "'#bedead'", "in", "svg", ")", "chart", "=", "Chart", "(", "css", "=", "(", "_ellipsis", ",", "(", "'file://'", "+", "f", ".", "name", ")", ")", ")", "chart", ".", "add", "(", "'/'", ",", "[", "10", ",", "1", ",", "5", "]", ")", "svg", "=", "chart", ".", "render", "(", ")", ".", "decode", "(", "'utf-8'", ")", "assert", "(", "'#bedead'", "in", "svg", ")" ]
test css file option .
train
false
1,677
def generate_dummy_image(_unused): return ContentFile(ImageField()._make_data({'color': 'blue', 'width': 50, 'height': 50, 'format': 'PNG'}), 'test.png')
[ "def", "generate_dummy_image", "(", "_unused", ")", ":", "return", "ContentFile", "(", "ImageField", "(", ")", ".", "_make_data", "(", "{", "'color'", ":", "'blue'", ",", "'width'", ":", "50", ",", "'height'", ":", "50", ",", "'format'", ":", "'PNG'", "}", ")", ",", "'test.png'", ")" ]
used for image fields to create a sane default .
train
false
1,679
def get_interface_version(): return INTERFACE_VERSION
[ "def", "get_interface_version", "(", ")", ":", "return", "INTERFACE_VERSION" ]
return interface version .
train
false
1,680
@addon_view @non_atomic_requests def downloads_series(request, addon, group, start, end, format): date_range = check_series_params_or_404(group, start, end, format) check_stats_permission(request, addon) series = get_series(DownloadCount, addon=addon.id, date__range=date_range) if (format == 'csv'): return render_csv(request, addon, series, ['date', 'count']) elif (format == 'json'): return render_json(request, addon, series)
[ "@", "addon_view", "@", "non_atomic_requests", "def", "downloads_series", "(", "request", ",", "addon", ",", "group", ",", "start", ",", "end", ",", "format", ")", ":", "date_range", "=", "check_series_params_or_404", "(", "group", ",", "start", ",", "end", ",", "format", ")", "check_stats_permission", "(", "request", ",", "addon", ")", "series", "=", "get_series", "(", "DownloadCount", ",", "addon", "=", "addon", ".", "id", ",", "date__range", "=", "date_range", ")", "if", "(", "format", "==", "'csv'", ")", ":", "return", "render_csv", "(", "request", ",", "addon", ",", "series", ",", "[", "'date'", ",", "'count'", "]", ")", "elif", "(", "format", "==", "'json'", ")", ":", "return", "render_json", "(", "request", ",", "addon", ",", "series", ")" ]
generate download counts grouped by group in format .
train
false
1,682
def color_parser(colors, function): from numbers import Number if isinstance(colors, str): return function(colors) if (isinstance(colors, tuple) and isinstance(colors[0], Number)): return function(colors) if hasattr(colors, '__iter__'): if isinstance(colors, tuple): new_color_tuple = tuple((function(item) for item in colors)) return new_color_tuple else: new_color_list = [function(item) for item in colors] return new_color_list
[ "def", "color_parser", "(", "colors", ",", "function", ")", ":", "from", "numbers", "import", "Number", "if", "isinstance", "(", "colors", ",", "str", ")", ":", "return", "function", "(", "colors", ")", "if", "(", "isinstance", "(", "colors", ",", "tuple", ")", "and", "isinstance", "(", "colors", "[", "0", "]", ",", "Number", ")", ")", ":", "return", "function", "(", "colors", ")", "if", "hasattr", "(", "colors", ",", "'__iter__'", ")", ":", "if", "isinstance", "(", "colors", ",", "tuple", ")", ":", "new_color_tuple", "=", "tuple", "(", "(", "function", "(", "item", ")", "for", "item", "in", "colors", ")", ")", "return", "new_color_tuple", "else", ":", "new_color_list", "=", "[", "function", "(", "item", ")", "for", "item", "in", "colors", "]", "return", "new_color_list" ]
takes color(s) and a function and applies the function on the color(s) in particular .
train
false
1,683
def html_strip(text): mark = 0 markstart = 0 markend = 0 index = 0 occur = 0 for i in text: if (i == '<'): try: if (text[(index + 1)] != ' '): mark = 1 markstart = index except IndexError: pass elif (i == '>'): if (mark == 1): mark = 0 markend = index text = ('%s%s' % (text[:markstart], text[(markend + 1):])) occur = 1 break index += 1 if (occur == 1): text = html_strip(text) return text
[ "def", "html_strip", "(", "text", ")", ":", "mark", "=", "0", "markstart", "=", "0", "markend", "=", "0", "index", "=", "0", "occur", "=", "0", "for", "i", "in", "text", ":", "if", "(", "i", "==", "'<'", ")", ":", "try", ":", "if", "(", "text", "[", "(", "index", "+", "1", ")", "]", "!=", "' '", ")", ":", "mark", "=", "1", "markstart", "=", "index", "except", "IndexError", ":", "pass", "elif", "(", "i", "==", "'>'", ")", ":", "if", "(", "mark", "==", "1", ")", ":", "mark", "=", "0", "markend", "=", "index", "text", "=", "(", "'%s%s'", "%", "(", "text", "[", ":", "markstart", "]", ",", "text", "[", "(", "markend", "+", "1", ")", ":", "]", ")", ")", "occur", "=", "1", "break", "index", "+=", "1", "if", "(", "occur", "==", "1", ")", ":", "text", "=", "html_strip", "(", "text", ")", "return", "text" ]
strips html markup from text .
train
false
1,684
def b64w(b): return string.translate(b, B64W_TRANSLATE, B64C_STRIP)
[ "def", "b64w", "(", "b", ")", ":", "return", "string", ".", "translate", "(", "b", ",", "B64W_TRANSLATE", ",", "B64C_STRIP", ")" ]
rewrite a base64 string by replacing "+" by "-" .
train
false
1,685
def loadSVModel(SVMmodelName, isRegression=False): try: fo = open((SVMmodelName + 'MEANS'), 'rb') except IOError: print "Load SVM Model: Didn't find file" return try: MEAN = cPickle.load(fo) STD = cPickle.load(fo) if (not isRegression): classNames = cPickle.load(fo) mtWin = cPickle.load(fo) mtStep = cPickle.load(fo) stWin = cPickle.load(fo) stStep = cPickle.load(fo) computeBEAT = cPickle.load(fo) except: fo.close() fo.close() MEAN = numpy.array(MEAN) STD = numpy.array(STD) COEFF = [] with open(SVMmodelName, 'rb') as fid: SVM = cPickle.load(fid) if isRegression: return (SVM, MEAN, STD, mtWin, mtStep, stWin, stStep, computeBEAT) else: return (SVM, MEAN, STD, classNames, mtWin, mtStep, stWin, stStep, computeBEAT)
[ "def", "loadSVModel", "(", "SVMmodelName", ",", "isRegression", "=", "False", ")", ":", "try", ":", "fo", "=", "open", "(", "(", "SVMmodelName", "+", "'MEANS'", ")", ",", "'rb'", ")", "except", "IOError", ":", "print", "\"Load SVM Model: Didn't find file\"", "return", "try", ":", "MEAN", "=", "cPickle", ".", "load", "(", "fo", ")", "STD", "=", "cPickle", ".", "load", "(", "fo", ")", "if", "(", "not", "isRegression", ")", ":", "classNames", "=", "cPickle", ".", "load", "(", "fo", ")", "mtWin", "=", "cPickle", ".", "load", "(", "fo", ")", "mtStep", "=", "cPickle", ".", "load", "(", "fo", ")", "stWin", "=", "cPickle", ".", "load", "(", "fo", ")", "stStep", "=", "cPickle", ".", "load", "(", "fo", ")", "computeBEAT", "=", "cPickle", ".", "load", "(", "fo", ")", "except", ":", "fo", ".", "close", "(", ")", "fo", ".", "close", "(", ")", "MEAN", "=", "numpy", ".", "array", "(", "MEAN", ")", "STD", "=", "numpy", ".", "array", "(", "STD", ")", "COEFF", "=", "[", "]", "with", "open", "(", "SVMmodelName", ",", "'rb'", ")", "as", "fid", ":", "SVM", "=", "cPickle", ".", "load", "(", "fid", ")", "if", "isRegression", ":", "return", "(", "SVM", ",", "MEAN", ",", "STD", ",", "mtWin", ",", "mtStep", ",", "stWin", ",", "stStep", ",", "computeBEAT", ")", "else", ":", "return", "(", "SVM", ",", "MEAN", ",", "STD", ",", "classNames", ",", "mtWin", ",", "mtStep", ",", "stWin", ",", "stStep", ",", "computeBEAT", ")" ]
this function loads an svm model either for classification or training .
train
false
1,686
def put(dct, entry): id = int(entry['id']) if (id in dct): if (entry == dct[id]): pass else: print entry print dct[id] assert False else: dct[id] = entry
[ "def", "put", "(", "dct", ",", "entry", ")", ":", "id", "=", "int", "(", "entry", "[", "'id'", "]", ")", "if", "(", "id", "in", "dct", ")", ":", "if", "(", "entry", "==", "dct", "[", "id", "]", ")", ":", "pass", "else", ":", "print", "entry", "print", "dct", "[", "id", "]", "assert", "False", "else", ":", "dct", "[", "id", "]", "=", "entry" ]
default function for handling put requests .
train
false
1,687
def _key_error(neighbors, strict): if strict: raise ValueError((("no thermodynamic data for neighbors '" + neighbors) + "' available")) else: warnings.warn((("no themodynamic data for neighbors '" + neighbors) + "' available. Calculation will be wrong"), BiopythonWarning) return
[ "def", "_key_error", "(", "neighbors", ",", "strict", ")", ":", "if", "strict", ":", "raise", "ValueError", "(", "(", "(", "\"no thermodynamic data for neighbors '\"", "+", "neighbors", ")", "+", "\"' available\"", ")", ")", "else", ":", "warnings", ".", "warn", "(", "(", "(", "\"no themodynamic data for neighbors '\"", "+", "neighbors", ")", "+", "\"' available. Calculation will be wrong\"", ")", ",", "BiopythonWarning", ")", "return" ]
throw an error or a warning if there is no data for the neighbors .
train
false
1,688
def getAbs(value): return abs(value)
[ "def", "getAbs", "(", "value", ")", ":", "return", "abs", "(", "value", ")" ]
get the abs .
train
false
1,689
@facebook_required(scope='publish_actions') @csrf_protect def open_graph_beta(request): message = request.POST.get('message') if message: fb = get_persistent_graph(request) entity_url = 'http://www.fashiolista.com/item/2081202/' fb.set('me/fashiolista:love', item=entity_url, message=message) messages.info(request, ('Frictionless sharing to open graph beta action fashiolista:love with item_url %s, this url contains open graph data which Facebook scrapes' % entity_url))
[ "@", "facebook_required", "(", "scope", "=", "'publish_actions'", ")", "@", "csrf_protect", "def", "open_graph_beta", "(", "request", ")", ":", "message", "=", "request", ".", "POST", ".", "get", "(", "'message'", ")", "if", "message", ":", "fb", "=", "get_persistent_graph", "(", "request", ")", "entity_url", "=", "'http://www.fashiolista.com/item/2081202/'", "fb", ".", "set", "(", "'me/fashiolista:love'", ",", "item", "=", "entity_url", ",", "message", "=", "message", ")", "messages", ".", "info", "(", "request", ",", "(", "'Frictionless sharing to open graph beta action fashiolista:love with item_url %s, this url contains open graph data which Facebook scrapes'", "%", "entity_url", ")", ")" ]
simple example on how to do open graph postings .
train
false
1,690
def PackInteger(name, value, pbvalue): pbvalue.set_int64value(value)
[ "def", "PackInteger", "(", "name", ",", "value", ",", "pbvalue", ")", ":", "pbvalue", ".", "set_int64value", "(", "value", ")" ]
packs an integer property into a entity_pb .
train
false
1,692
def getConcatenatedList(originalLists): concatenatedList = [] for originalList in originalLists: concatenatedList += originalList return concatenatedList
[ "def", "getConcatenatedList", "(", "originalLists", ")", ":", "concatenatedList", "=", "[", "]", "for", "originalList", "in", "originalLists", ":", "concatenatedList", "+=", "originalList", "return", "concatenatedList" ]
get the lists as one concatenated list .
train
false
1,693
def check_hexsha(hex, error_msg): if (not valid_hexsha(hex)): raise ObjectFormatException(('%s %s' % (error_msg, hex)))
[ "def", "check_hexsha", "(", "hex", ",", "error_msg", ")", ":", "if", "(", "not", "valid_hexsha", "(", "hex", ")", ")", ":", "raise", "ObjectFormatException", "(", "(", "'%s %s'", "%", "(", "error_msg", ",", "hex", ")", ")", ")" ]
check if a string is a valid hex sha string .
train
false
1,694
def arp_cache(attrs=None, where=None): return _osquery_cmd(table='arp_cache', attrs=attrs, where=where)
[ "def", "arp_cache", "(", "attrs", "=", "None", ",", "where", "=", "None", ")", ":", "return", "_osquery_cmd", "(", "table", "=", "'arp_cache'", ",", "attrs", "=", "attrs", ",", "where", "=", "where", ")" ]
return arp_cache information from osquery cli example: .
train
false
1,695
def __global_logging_exception_handler(exc_type, exc_value, exc_traceback): if (exc_type.__name__ == 'KeyboardInterrupt'): if is_mp_logging_listener_configured(): shutdown_multiprocessing_logging_listener() else: logging.getLogger(__name__).error("An un-handled exception was caught by salt's global exception handler:\n{0}: {1}\n{2}".format(exc_type.__name__, exc_value, ''.join(traceback.format_exception(exc_type, exc_value, exc_traceback)).strip())) sys.__excepthook__(exc_type, exc_value, exc_traceback)
[ "def", "__global_logging_exception_handler", "(", "exc_type", ",", "exc_value", ",", "exc_traceback", ")", ":", "if", "(", "exc_type", ".", "__name__", "==", "'KeyboardInterrupt'", ")", ":", "if", "is_mp_logging_listener_configured", "(", ")", ":", "shutdown_multiprocessing_logging_listener", "(", ")", "else", ":", "logging", ".", "getLogger", "(", "__name__", ")", ".", "error", "(", "\"An un-handled exception was caught by salt's global exception handler:\\n{0}: {1}\\n{2}\"", ".", "format", "(", "exc_type", ".", "__name__", ",", "exc_value", ",", "''", ".", "join", "(", "traceback", ".", "format_exception", "(", "exc_type", ",", "exc_value", ",", "exc_traceback", ")", ")", ".", "strip", "(", ")", ")", ")", "sys", ".", "__excepthook__", "(", "exc_type", ",", "exc_value", ",", "exc_traceback", ")" ]
this function will log all un-handled python exceptions .
train
true
1,696
def is_public_volume_type(context, volume_type_id): volume_type = db.volume_type_get(context, volume_type_id) return volume_type['is_public']
[ "def", "is_public_volume_type", "(", "context", ",", "volume_type_id", ")", ":", "volume_type", "=", "db", ".", "volume_type_get", "(", "context", ",", "volume_type_id", ")", "return", "volume_type", "[", "'is_public'", "]" ]
return is_public boolean value of volume type .
train
false
1,697
def indeterminate(seg): if seg._uploaded: if seg.explicit: assert (seg._marked is False) else: assert (seg._marked is True) else: assert (seg._marked is False) return True
[ "def", "indeterminate", "(", "seg", ")", ":", "if", "seg", ".", "_uploaded", ":", "if", "seg", ".", "explicit", ":", "assert", "(", "seg", ".", "_marked", "is", "False", ")", "else", ":", "assert", "(", "seg", ".", "_marked", "is", "True", ")", "else", ":", "assert", "(", "seg", ".", "_marked", "is", "False", ")", "return", "True" ]
returns true as long as the segment is internally consistent .
train
false
1,700
def boto_supports_profile_name_arg(ec2): run_instances_method = getattr(ec2, 'run_instances') return ('instance_profile_name' in get_function_code(run_instances_method).co_varnames)
[ "def", "boto_supports_profile_name_arg", "(", "ec2", ")", ":", "run_instances_method", "=", "getattr", "(", "ec2", ",", "'run_instances'", ")", "return", "(", "'instance_profile_name'", "in", "get_function_code", "(", "run_instances_method", ")", ".", "co_varnames", ")" ]
check if boto library has instance_profile_name argument .
train
false
1,701
def fast_exponentiation(a, p, n): result = (a % n) remainders = [] while (p != 1): remainders.append((p & 1)) p = (p >> 1) while remainders: rem = remainders.pop() result = (((a ** rem) * (result ** 2)) % n) return result
[ "def", "fast_exponentiation", "(", "a", ",", "p", ",", "n", ")", ":", "result", "=", "(", "a", "%", "n", ")", "remainders", "=", "[", "]", "while", "(", "p", "!=", "1", ")", ":", "remainders", ".", "append", "(", "(", "p", "&", "1", ")", ")", "p", "=", "(", "p", ">>", "1", ")", "while", "remainders", ":", "rem", "=", "remainders", ".", "pop", "(", ")", "result", "=", "(", "(", "(", "a", "**", "rem", ")", "*", "(", "result", "**", "2", ")", ")", "%", "n", ")", "return", "result" ]
calculates r = a^p mod n .
train
false
1,703
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
disable insecure ssl ciphers on old qt versions .
train
false
1,707
def scale_by_constant(builder, val, factor): return builder.mul(val, Constant.int(TIMEDELTA64, factor))
[ "def", "scale_by_constant", "(", "builder", ",", "val", ",", "factor", ")", ":", "return", "builder", ".", "mul", "(", "val", ",", "Constant", ".", "int", "(", "TIMEDELTA64", ",", "factor", ")", ")" ]
multiply *val* by the constant *factor* .
train
false
1,708
def hierarchy(): s3db.gis_hierarchy_form_setup() return s3_rest_controller()
[ "def", "hierarchy", "(", ")", ":", "s3db", ".", "gis_hierarchy_form_setup", "(", ")", "return", "s3_rest_controller", "(", ")" ]
restful crud controller .
train
false
1,709
def service_get_all_bmc_by_host(context, host): return IMPL.service_get_all_bmc_by_host(context, host)
[ "def", "service_get_all_bmc_by_host", "(", "context", ",", "host", ")", ":", "return", "IMPL", ".", "service_get_all_bmc_by_host", "(", "context", ",", "host", ")" ]
get all compute services for a given host .
train
false
1,710
def category_list(request, template_name='blog/category_list.html', **kwargs): return list_detail.object_list(request, queryset=Category.objects.all(), template_name=template_name, **kwargs)
[ "def", "category_list", "(", "request", ",", "template_name", "=", "'blog/category_list.html'", ",", "**", "kwargs", ")", ":", "return", "list_detail", ".", "object_list", "(", "request", ",", "queryset", "=", "Category", ".", "objects", ".", "all", "(", ")", ",", "template_name", "=", "template_name", ",", "**", "kwargs", ")" ]
category list template: blog/category_list .
train
false
1,711
def libvlc_media_new_path(p_instance, path): f = (_Cfunctions.get('libvlc_media_new_path', None) or _Cfunction('libvlc_media_new_path', ((1,), (1,)), class_result(Media), ctypes.c_void_p, Instance, ctypes.c_char_p)) return f(p_instance, path)
[ "def", "libvlc_media_new_path", "(", "p_instance", ",", "path", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_media_new_path'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_media_new_path'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "class_result", "(", "Media", ")", ",", "ctypes", ".", "c_void_p", ",", "Instance", ",", "ctypes", ".", "c_char_p", ")", ")", "return", "f", "(", "p_instance", ",", "path", ")" ]
create a media for a certain file path .
train
true
1,712
def _pretty_any(obj): outstr = u'' for k in obj: if (isinstance(obj[k], string_types) and (len(obj[k]) > 65)): outstr += u'[{0}]\n'.format(k) outstr += u'{0}'.format(_pretty_longstring(obj[k], prefix=u' ')) outstr += u'\n' else: outstr += u'[{0}] {1}\n'.format(k, obj[k]) return outstr
[ "def", "_pretty_any", "(", "obj", ")", ":", "outstr", "=", "u''", "for", "k", "in", "obj", ":", "if", "(", "isinstance", "(", "obj", "[", "k", "]", ",", "string_types", ")", "and", "(", "len", "(", "obj", "[", "k", "]", ")", ">", "65", ")", ")", ":", "outstr", "+=", "u'[{0}]\\n'", ".", "format", "(", "k", ")", "outstr", "+=", "u'{0}'", ".", "format", "(", "_pretty_longstring", "(", "obj", "[", "k", "]", ",", "prefix", "=", "u' '", ")", ")", "outstr", "+=", "u'\\n'", "else", ":", "outstr", "+=", "u'[{0}] {1}\\n'", ".", "format", "(", "k", ",", "obj", "[", "k", "]", ")", "return", "outstr" ]
helper function for pretty-printing any attrdict object .
train
false
1,713
def test_finder_installs_dev_releases(data): req = InstallRequirement.from_line('bar', None) finder = PackageFinder([], [data.index_url('dev')], allow_all_prereleases=True, session=PipSession()) link = finder.find_requirement(req, False) assert link.url.endswith('bar-2.0.dev1.tar.gz'), link.url
[ "def", "test_finder_installs_dev_releases", "(", "data", ")", ":", "req", "=", "InstallRequirement", ".", "from_line", "(", "'bar'", ",", "None", ")", "finder", "=", "PackageFinder", "(", "[", "]", ",", "[", "data", ".", "index_url", "(", "'dev'", ")", "]", ",", "allow_all_prereleases", "=", "True", ",", "session", "=", "PipSession", "(", ")", ")", "link", "=", "finder", ".", "find_requirement", "(", "req", ",", "False", ")", "assert", "link", ".", "url", ".", "endswith", "(", "'bar-2.0.dev1.tar.gz'", ")", ",", "link", ".", "url" ]
test packagefinder finds dev releases if asked to .
train
false
1,714
def _get_qpoint_pos(pos): return (pos.x(), pos.y())
[ "def", "_get_qpoint_pos", "(", "pos", ")", ":", "return", "(", "pos", ".", "x", "(", ")", ",", "pos", ".", "y", "(", ")", ")" ]
return the coordinates of a qpointf object .
train
false
1,716
def p_abstract_declarator_2(t): pass
[ "def", "p_abstract_declarator_2", "(", "t", ")", ":", "pass" ]
abstract_declarator : pointer direct_abstract_declarator .
train
false
1,717
def getFloatByPrefixBeginEnd(prefixBegin, prefixEnd, valueFloat, xmlElement): valueFloat = evaluate.getEvaluatedFloatDefault(valueFloat, prefixBegin, xmlElement) if (prefixEnd in xmlElement.attributeDictionary): return (0.5 * evaluate.getEvaluatedFloatDefault((valueFloat + valueFloat), prefixEnd, xmlElement)) else: return valueFloat
[ "def", "getFloatByPrefixBeginEnd", "(", "prefixBegin", ",", "prefixEnd", ",", "valueFloat", ",", "xmlElement", ")", ":", "valueFloat", "=", "evaluate", ".", "getEvaluatedFloatDefault", "(", "valueFloat", ",", "prefixBegin", ",", "xmlElement", ")", "if", "(", "prefixEnd", "in", "xmlElement", ".", "attributeDictionary", ")", ":", "return", "(", "0.5", "*", "evaluate", ".", "getEvaluatedFloatDefault", "(", "(", "valueFloat", "+", "valueFloat", ")", ",", "prefixEnd", ",", "xmlElement", ")", ")", "else", ":", "return", "valueFloat" ]
get float from prefixbegin .
train
false
1,718
def install_default_handler(): logger = logging.getLogger('pwnlib') if (console not in logger.handlers): logger.addHandler(console) logger.addHandler(log_file) logger.setLevel(1)
[ "def", "install_default_handler", "(", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "'pwnlib'", ")", "if", "(", "console", "not", "in", "logger", ".", "handlers", ")", ":", "logger", ".", "addHandler", "(", "console", ")", "logger", ".", "addHandler", "(", "log_file", ")", "logger", ".", "setLevel", "(", "1", ")" ]
install_default_handler() instantiates a :class:`Handler` and :class:`Formatter` and installs them for the pwnlib root logger .
train
false
1,719
def copy(src, dst, createpath=0, copydates=1, forcetype=None): src = File.pathname(src) dst = File.pathname(dst) if createpath: mkdirs(os.path.split(dst)[0]) ifp = open(src, 'rb') ofp = open(dst, 'wb') d = ifp.read(BUFSIZ) while d: ofp.write(d) d = ifp.read(BUFSIZ) ifp.close() ofp.close() ifp = openrf(src, '*rb') ofp = openrf(dst, '*wb') d = ifp.read(BUFSIZ) while d: ofp.write(d) d = ifp.read(BUFSIZ) ifp.close() ofp.close() srcfss = File.FSSpec(src) dstfss = File.FSSpec(dst) sf = srcfss.FSpGetFInfo() df = dstfss.FSpGetFInfo() (df.Creator, df.Type) = (sf.Creator, sf.Type) if (forcetype is not None): df.Type = forcetype df.Flags = (sf.Flags & COPY_FLAGS) dstfss.FSpSetFInfo(df) if copydates: srcfsr = File.FSRef(src) dstfsr = File.FSRef(dst) (catinfo, _, _, _) = srcfsr.FSGetCatalogInfo(Files.kFSCatInfoAllDates) dstfsr.FSSetCatalogInfo(Files.kFSCatInfoAllDates, catinfo)
[ "def", "copy", "(", "src", ",", "dst", ",", "createpath", "=", "0", ",", "copydates", "=", "1", ",", "forcetype", "=", "None", ")", ":", "src", "=", "File", ".", "pathname", "(", "src", ")", "dst", "=", "File", ".", "pathname", "(", "dst", ")", "if", "createpath", ":", "mkdirs", "(", "os", ".", "path", ".", "split", "(", "dst", ")", "[", "0", "]", ")", "ifp", "=", "open", "(", "src", ",", "'rb'", ")", "ofp", "=", "open", "(", "dst", ",", "'wb'", ")", "d", "=", "ifp", ".", "read", "(", "BUFSIZ", ")", "while", "d", ":", "ofp", ".", "write", "(", "d", ")", "d", "=", "ifp", ".", "read", "(", "BUFSIZ", ")", "ifp", ".", "close", "(", ")", "ofp", ".", "close", "(", ")", "ifp", "=", "openrf", "(", "src", ",", "'*rb'", ")", "ofp", "=", "openrf", "(", "dst", ",", "'*wb'", ")", "d", "=", "ifp", ".", "read", "(", "BUFSIZ", ")", "while", "d", ":", "ofp", ".", "write", "(", "d", ")", "d", "=", "ifp", ".", "read", "(", "BUFSIZ", ")", "ifp", ".", "close", "(", ")", "ofp", ".", "close", "(", ")", "srcfss", "=", "File", ".", "FSSpec", "(", "src", ")", "dstfss", "=", "File", ".", "FSSpec", "(", "dst", ")", "sf", "=", "srcfss", ".", "FSpGetFInfo", "(", ")", "df", "=", "dstfss", ".", "FSpGetFInfo", "(", ")", "(", "df", ".", "Creator", ",", "df", ".", "Type", ")", "=", "(", "sf", ".", "Creator", ",", "sf", ".", "Type", ")", "if", "(", "forcetype", "is", "not", "None", ")", ":", "df", ".", "Type", "=", "forcetype", "df", ".", "Flags", "=", "(", "sf", ".", "Flags", "&", "COPY_FLAGS", ")", "dstfss", ".", "FSpSetFInfo", "(", "df", ")", "if", "copydates", ":", "srcfsr", "=", "File", ".", "FSRef", "(", "src", ")", "dstfsr", "=", "File", ".", "FSRef", "(", "dst", ")", "(", "catinfo", ",", "_", ",", "_", ",", "_", ")", "=", "srcfsr", ".", "FSGetCatalogInfo", "(", "Files", ".", "kFSCatInfoAllDates", ")", "dstfsr", ".", "FSSetCatalogInfo", "(", "Files", ".", "kFSCatInfoAllDates", ",", "catinfo", ")" ]
copy files from one location to another .
train
false
1,720
def need(): return s3_rest_controller()
[ "def", "need", "(", ")", ":", "return", "s3_rest_controller", "(", ")" ]
needs: restful crud controller .
train
false
1,721
def GenInit(svc, desc, start=('2', '3', '4', '5'), stop='1'): insserv = '\n $local_fs +umountfs\n $network +networking\n $remote_fs $local_fs +umountnfs +sendsigs\n $syslog +rsyslog +sysklogd +syslog-ng +dsyslog +inetutils-syslogd\n ' tmpl = ('\n ### BEGIN INIT INFO\n # Provides: %s\n # Required-Start: $remote_fs $syslog\n # Required-Stop: $syslog\n # Default-Start: %s\n # Default-Stop: %s\n # Short-Description: %s\n ### END INIT INFO\n ' % (svc, ' '.join(start), ' '.join(stop), desc)) return {'/etc/insserv.conf': insserv, ('/etc/init.d/%s' % svc): tmpl}
[ "def", "GenInit", "(", "svc", ",", "desc", ",", "start", "=", "(", "'2'", ",", "'3'", ",", "'4'", ",", "'5'", ")", ",", "stop", "=", "'1'", ")", ":", "insserv", "=", "'\\n $local_fs +umountfs\\n $network +networking\\n $remote_fs $local_fs +umountnfs +sendsigs\\n $syslog +rsyslog +sysklogd +syslog-ng +dsyslog +inetutils-syslogd\\n '", "tmpl", "=", "(", "'\\n ### BEGIN INIT INFO\\n # Provides: %s\\n # Required-Start: $remote_fs $syslog\\n # Required-Stop: $syslog\\n # Default-Start: %s\\n # Default-Stop: %s\\n # Short-Description: %s\\n ### END INIT INFO\\n '", "%", "(", "svc", ",", "' '", ".", "join", "(", "start", ")", ",", "' '", ".", "join", "(", "stop", ")", ",", "desc", ")", ")", "return", "{", "'/etc/insserv.conf'", ":", "insserv", ",", "(", "'/etc/init.d/%s'", "%", "svc", ")", ":", "tmpl", "}" ]
generate init file .
train
false
1,723
def fail_with_changes(name): ret = {'name': name, 'changes': {}, 'result': False, 'comment': 'Failure!'} ret['changes'] = {'testing': {'old': 'Unchanged', 'new': 'Something pretended to change'}} if __opts__['test']: ret['result'] = None ret['comment'] = "If we weren't testing, this would be failed with changes" return ret
[ "def", "fail_with_changes", "(", "name", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "False", ",", "'comment'", ":", "'Failure!'", "}", "ret", "[", "'changes'", "]", "=", "{", "'testing'", ":", "{", "'old'", ":", "'Unchanged'", ",", "'new'", ":", "'Something pretended to change'", "}", "}", "if", "__opts__", "[", "'test'", "]", ":", "ret", "[", "'result'", "]", "=", "None", "ret", "[", "'comment'", "]", "=", "\"If we weren't testing, this would be failed with changes\"", "return", "ret" ]
returns failure and changes is not empty .
train
false
1,725
def p_comparison(p): if (len(p) == 4): p[0] = binary_ops[p[2]]((p[1], p[3])) elif (len(p) == 3): p[0] = unary_ops[p[1]](p[2]) else: p[0] = p[1]
[ "def", "p_comparison", "(", "p", ")", ":", "if", "(", "len", "(", "p", ")", "==", "4", ")", ":", "p", "[", "0", "]", "=", "binary_ops", "[", "p", "[", "2", "]", "]", "(", "(", "p", "[", "1", "]", ",", "p", "[", "3", "]", ")", ")", "elif", "(", "len", "(", "p", ")", "==", "3", ")", ":", "p", "[", "0", "]", "=", "unary_ops", "[", "p", "[", "1", "]", "]", "(", "p", "[", "2", "]", ")", "else", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]" ]
comparison : comparison plus comparison | comparison minus comparison | comparison mult comparison | comparison div comparison | comparison lt comparison | comparison eq comparison | comparison gt comparison | plus comparison | minus comparison | power .
train
false
1,726
def mutualinfo_kde(y, x, normed=True): nobs = len(x) if (not (len(y) == nobs)): raise ValueError('both data arrays need to have the same size') x = np.asarray(x, float) y = np.asarray(y, float) yx = np.vstack((y, x)) kde_x = gaussian_kde(x)(x) kde_y = gaussian_kde(y)(y) kde_yx = gaussian_kde(yx)(yx) mi_obs = ((np.log(kde_yx) - np.log(kde_x)) - np.log(kde_y)) mi = (mi_obs.sum() / nobs) if normed: mi_normed = np.sqrt((1.0 - np.exp(((-2) * mi)))) return mi_normed else: return mi
[ "def", "mutualinfo_kde", "(", "y", ",", "x", ",", "normed", "=", "True", ")", ":", "nobs", "=", "len", "(", "x", ")", "if", "(", "not", "(", "len", "(", "y", ")", "==", "nobs", ")", ")", ":", "raise", "ValueError", "(", "'both data arrays need to have the same size'", ")", "x", "=", "np", ".", "asarray", "(", "x", ",", "float", ")", "y", "=", "np", ".", "asarray", "(", "y", ",", "float", ")", "yx", "=", "np", ".", "vstack", "(", "(", "y", ",", "x", ")", ")", "kde_x", "=", "gaussian_kde", "(", "x", ")", "(", "x", ")", "kde_y", "=", "gaussian_kde", "(", "y", ")", "(", "y", ")", "kde_yx", "=", "gaussian_kde", "(", "yx", ")", "(", "yx", ")", "mi_obs", "=", "(", "(", "np", ".", "log", "(", "kde_yx", ")", "-", "np", ".", "log", "(", "kde_x", ")", ")", "-", "np", ".", "log", "(", "kde_y", ")", ")", "mi", "=", "(", "mi_obs", ".", "sum", "(", ")", "/", "nobs", ")", "if", "normed", ":", "mi_normed", "=", "np", ".", "sqrt", "(", "(", "1.0", "-", "np", ".", "exp", "(", "(", "(", "-", "2", ")", "*", "mi", ")", ")", ")", ")", "return", "mi_normed", "else", ":", "return", "mi" ]
mutual information of two random variables estimated with kde .
train
false
1,727
def _tofloat(value): if isiterable(value): try: value = np.array(value, dtype=np.float) except (TypeError, ValueError): raise InputParameterError(u'Parameter of {0} could not be converted to float'.format(type(value))) elif isinstance(value, np.ndarray): value = float(value.item()) elif isinstance(value, (numbers.Number, np.number)): value = float(value) elif isinstance(value, bool): raise InputParameterError(u'Expected parameter to be of numerical type, not boolean') else: raise InputParameterError(u"Don't know how to convert parameter of {0} to float".format(type(value))) return value
[ "def", "_tofloat", "(", "value", ")", ":", "if", "isiterable", "(", "value", ")", ":", "try", ":", "value", "=", "np", ".", "array", "(", "value", ",", "dtype", "=", "np", ".", "float", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "InputParameterError", "(", "u'Parameter of {0} could not be converted to float'", ".", "format", "(", "type", "(", "value", ")", ")", ")", "elif", "isinstance", "(", "value", ",", "np", ".", "ndarray", ")", ":", "value", "=", "float", "(", "value", ".", "item", "(", ")", ")", "elif", "isinstance", "(", "value", ",", "(", "numbers", ".", "Number", ",", "np", ".", "number", ")", ")", ":", "value", "=", "float", "(", "value", ")", "elif", "isinstance", "(", "value", ",", "bool", ")", ":", "raise", "InputParameterError", "(", "u'Expected parameter to be of numerical type, not boolean'", ")", "else", ":", "raise", "InputParameterError", "(", "u\"Don't know how to convert parameter of {0} to float\"", ".", "format", "(", "type", "(", "value", ")", ")", ")", "return", "value" ]
convert a parameter to float or float array .
train
false
1,728
def _coercedUnicode(s): if isinstance(s, bytes): if _PY3: raise TypeError(('Expected str not %r (bytes)' % (s,))) else: return s.decode('ascii') else: return s
[ "def", "_coercedUnicode", "(", "s", ")", ":", "if", "isinstance", "(", "s", ",", "bytes", ")", ":", "if", "_PY3", ":", "raise", "TypeError", "(", "(", "'Expected str not %r (bytes)'", "%", "(", "s", ",", ")", ")", ")", "else", ":", "return", "s", ".", "decode", "(", "'ascii'", ")", "else", ":", "return", "s" ]
coerce ascii-only byte strings into unicode for python 2 .
train
false
1,730
def OnMismatch(function, matcher): return _OnMismatch(_function=function, _matcher=matcher)
[ "def", "OnMismatch", "(", "function", ",", "matcher", ")", ":", "return", "_OnMismatch", "(", "_function", "=", "function", ",", "_matcher", "=", "matcher", ")" ]
decorate matcher such that function is called on mismatches .
train
false
1,732
@require_POST @login_required @ensure_csrf_cookie def change_email_settings(request): user = request.user course_id = request.POST.get('course_id') course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) receive_emails = request.POST.get('receive_emails') if receive_emails: optout_object = Optout.objects.filter(user=user, course_id=course_key) if optout_object: optout_object.delete() log.info(u'User %s (%s) opted in to receive emails from course %s', user.username, user.email, course_id) track.views.server_track(request, 'change-email-settings', {'receive_emails': 'yes', 'course': course_id}, page='dashboard') else: Optout.objects.get_or_create(user=user, course_id=course_key) log.info(u'User %s (%s) opted out of receiving emails from course %s', user.username, user.email, course_id) track.views.server_track(request, 'change-email-settings', {'receive_emails': 'no', 'course': course_id}, page='dashboard') return JsonResponse({'success': True})
[ "@", "require_POST", "@", "login_required", "@", "ensure_csrf_cookie", "def", "change_email_settings", "(", "request", ")", ":", "user", "=", "request", ".", "user", "course_id", "=", "request", ".", "POST", ".", "get", "(", "'course_id'", ")", "course_key", "=", "SlashSeparatedCourseKey", ".", "from_deprecated_string", "(", "course_id", ")", "receive_emails", "=", "request", ".", "POST", ".", "get", "(", "'receive_emails'", ")", "if", "receive_emails", ":", "optout_object", "=", "Optout", ".", "objects", ".", "filter", "(", "user", "=", "user", ",", "course_id", "=", "course_key", ")", "if", "optout_object", ":", "optout_object", ".", "delete", "(", ")", "log", ".", "info", "(", "u'User %s (%s) opted in to receive emails from course %s'", ",", "user", ".", "username", ",", "user", ".", "email", ",", "course_id", ")", "track", ".", "views", ".", "server_track", "(", "request", ",", "'change-email-settings'", ",", "{", "'receive_emails'", ":", "'yes'", ",", "'course'", ":", "course_id", "}", ",", "page", "=", "'dashboard'", ")", "else", ":", "Optout", ".", "objects", ".", "get_or_create", "(", "user", "=", "user", ",", "course_id", "=", "course_key", ")", "log", ".", "info", "(", "u'User %s (%s) opted out of receiving emails from course %s'", ",", "user", ".", "username", ",", "user", ".", "email", ",", "course_id", ")", "track", ".", "views", ".", "server_track", "(", "request", ",", "'change-email-settings'", ",", "{", "'receive_emails'", ":", "'no'", ",", "'course'", ":", "course_id", "}", ",", "page", "=", "'dashboard'", ")", "return", "JsonResponse", "(", "{", "'success'", ":", "True", "}", ")" ]
modify logged-in users setting for receiving emails from a course .
train
false
1,733
def fake_get_vmdk_size_and_properties(context, image_id, instance): props = {'vmware_ostype': 'otherGuest', 'vmware_adaptertype': 'lsiLogic'} return (_FAKE_FILE_SIZE, props)
[ "def", "fake_get_vmdk_size_and_properties", "(", "context", ",", "image_id", ",", "instance", ")", ":", "props", "=", "{", "'vmware_ostype'", ":", "'otherGuest'", ",", "'vmware_adaptertype'", ":", "'lsiLogic'", "}", "return", "(", "_FAKE_FILE_SIZE", ",", "props", ")" ]
fakes the file size and properties fetch for the image file .
train
false
1,734
def get_error_details(exclude_robot_traces=EXCLUDE_ROBOT_TRACES): details = ErrorDetails(exclude_robot_traces=exclude_robot_traces) return (details.message, details.traceback)
[ "def", "get_error_details", "(", "exclude_robot_traces", "=", "EXCLUDE_ROBOT_TRACES", ")", ":", "details", "=", "ErrorDetails", "(", "exclude_robot_traces", "=", "exclude_robot_traces", ")", "return", "(", "details", ".", "message", ",", "details", ".", "traceback", ")" ]
returns error message and details of the last occurred exception .
train
false
1,736
def Filter(l, item): res = {} return [res.setdefault(e, e) for e in l if (e != item)]
[ "def", "Filter", "(", "l", ",", "item", ")", ":", "res", "=", "{", "}", "return", "[", "res", ".", "setdefault", "(", "e", ",", "e", ")", "for", "e", "in", "l", "if", "(", "e", "!=", "item", ")", "]" ]
removes item from l .
train
false