Schema (field name, type, and observed value range):

  id_within_dataset     int64      1 to 55.5k
  snippet               string     lengths 19 to 14.2k
  tokens                sequence   lengths 6 to 1.63k
  nl                    string     lengths 6 to 352
  split_within_dataset  string     1 class
  is_duplicated         bool       2 classes
939
def is_association_exist(openid_url):
    is_exist = True
    try:
        uassoc = UserAssociation.objects.get(openid_url__exact=openid_url)
    except:  # bare except kept from the original: any lookup failure counts as absent
        is_exist = False
    return is_exist
Test whether an OpenID is already in the database.
train
false
940
def sdb_delete(uri, opts):
    if not isinstance(uri, string_types):
        return False
    if not uri.startswith('sdb://'):
        return False
    sdlen = len('sdb://')
    indx = uri.find('/', sdlen)
    if indx == -1 or len(uri[indx + 1:]) == 0:
        return False
    profile = opts.get(uri[sdlen:indx], {})
    if not profile:
        profile = opts.get('pillar', {}).get(uri[sdlen:indx], {})
    if 'driver' not in profile:
        return False
    fun = '{0}.delete'.format(profile['driver'])
    query = uri[indx + 1:]
    loaded_db = salt.loader.sdb(opts, fun)
    return loaded_db[fun](query, profile=profile)
Delete a value from a database, given an sdb:// URI.
train
true
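The return value above hinges on how the sdb:// URI splits into a profile name and a key. A self-contained sketch of just that slicing step (the URI is invented for illustration):

# Hypothetical URI, mirroring the slicing in sdb_delete above.
uri = 'sdb://mystore/secret/key'
sdlen = len('sdb://')            # 6
indx = uri.find('/', sdlen)      # first '/' after the scheme
profile_name = uri[sdlen:indx]   # 'mystore' -> looked up in opts
query = uri[indx + 1:]           # 'secret/key' -> passed to the driver
assert (profile_name, query) == ('mystore', 'secret/key')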
941
def _dict_with_qos_specs(rows):
    result = []
    for row in rows:
        if row['key'] == 'QoS_Specs_Name':
            member = {'name': row['value'], 'id': row['id']}
            if row.specs:
                spec_dict = _dict_with_children_specs(row.specs)
                member['consumer'] = spec_dict.pop('consumer')
                member.update(dict(specs=spec_dict))
            result.append(member)
    return result
Convert QoS specs query results to a list.
train
false
945
def filename_search_replace(sr_pairs, filename, backup=False):
    in_txt = open(filename, u'rt').read(-1)
    out_txt = in_txt[:]
    for in_exp, out_exp in sr_pairs:
        in_exp = re.compile(in_exp)
        out_txt = in_exp.sub(out_exp, out_txt)
    if in_txt == out_txt:
        return False
    open(filename, u'wt').write(out_txt)
    if backup:
        open(filename + u'.bak', u'wt').write(in_txt)
    return True
Search and replace expressions in a file.
train
false
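A minimal usage sketch (the file name and patterns are invented; the function needs re in scope, rewrites the file in place, and returns whether anything changed):

import re  # required by filename_search_replace

with open('example.txt', 'wt') as fh:
    fh.write('colour and flavour')
changed = filename_search_replace([('colour', 'color'), ('flavour', 'flavor')],
                                  'example.txt', backup=True)
# changed is True; example.txt now reads 'color and flavor' and
# example.txt.bak keeps the original text.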
946
def getFrameInfo(frame):
    f_locals = frame.f_locals
    f_globals = frame.f_globals
    sameNamespace = f_locals is f_globals
    hasModule = '__module__' in f_locals
    hasName = '__name__' in f_globals
    sameName = hasModule and hasName
    sameName = sameName and (f_globals['__name__'] == f_locals['__module__'])
    module = (hasName and sys.modules.get(f_globals['__name__'])) or None
    namespaceIsModule = module and (module.__dict__ is f_globals)
    if not namespaceIsModule:
        kind = 'exec'
    elif sameNamespace and not hasModule:
        kind = 'module'
    elif sameName and not sameNamespace:
        kind = 'class'
    elif not sameNamespace:
        kind = 'function call'
    else:
        kind = 'unknown'
    return kind, module, f_locals, f_globals
Return (kind, module, f_locals, f_globals) for a frame; kind is one of 'exec', 'module', 'class', 'function call', or 'unknown'.
train
false
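A quick sketch of the classification at module level (sys must be importable, as the function itself assumes; at the top level of a module the frame's locals are its globals and there is no __module__ key, so the kind comes back as 'module'):

import sys

kind, module, f_locals, f_globals = getFrameInfo(sys._getframe())
assert kind == 'module'  # when executed at module top level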
947
def test_all_costs():
    cases = [[SigmoidConvNonlinearity(), Sigmoid, True],
             [IdentityConvNonlinearity(), Linear, True],
             [TanhConvNonlinearity(), Tanh, False],
             [RectifierConvNonlinearity(), RectifiedLinear, False]]
    for conv_nonlinearity, mlp_nonlinearity, cost_implemented in cases:
        check_case(conv_nonlinearity, mlp_nonlinearity, cost_implemented)
Check all instances of ConvNonlinearity.
train
false
948
def getlines(filename, module_globals=None):
    if filename in cache:
        return cache[filename][2]
    try:
        return updatecache(filename, module_globals)
    except MemoryError:
        clearcache()
        return []
Get the lines for a file from the cache; deprecated since IPython 6.
train
true
949
def implicit_multiplication(result, local_dict, global_dict):
    for step in (_group_parentheses(implicit_multiplication),
                 _apply_functions,
                 _implicit_multiplication):
        result = step(result, local_dict, global_dict)
    result = _flatten(result)
    return result
Makes the multiplication operator optional in most cases.
train
false
950
def getCentersFromCircleNodes(circleNodes, radius):
    if len(circleNodes) < 2:
        return []
    circleIntersections = getCircleIntersectionsFromCircleNodes(circleNodes)
    circleIntersectionLoops = getCircleIntersectionLoops(circleIntersections)
    return getCentersFromIntersectionLoops(circleIntersectionLoops, radius)
Get the complex centers of the circle intersection loops from circle nodes.
train
false
951
def invert_colors(clip):
    maxi = 1.0 if clip.ismask else 255
    return clip.fl_image(lambda f: maxi - f)
Returns the color-inverted clip.
train
false
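A usage sketch with moviepy, which this helper is written against (the input path is hypothetical):

from moviepy.editor import VideoFileClip

clip = VideoFileClip('input.mp4')  # hypothetical file
negative = invert_colors(clip)     # each pixel v becomes 255 - v (1 - v for masks)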
952
def service_client_config(service_client_name=None):
    _parameters = {
        'disable_ssl_certificate_validation':
            CONF.identity.disable_ssl_certificate_validation,
        'ca_certs': CONF.identity.ca_certificates_file,
        'trace_requests': CONF.debug.trace_requests,
        'http_timeout': CONF.service_clients.http_timeout,
    }
    if service_client_name is None:
        return _parameters
    config_group = service_client_name.replace('-', '_')
    try:
        options = getattr(CONF, config_group)
    except cfg.NoSuchOptError:
        raise exceptions.UnknownServiceClient(services=service_client_name)
    if service_client_name != 'identity':
        _parameters['endpoint_type'] = getattr(options, 'endpoint_type')
    for setting in ['build_timeout', 'build_interval']:
        if not hasattr(options, setting) or not getattr(options, setting):
            _parameters[setting] = getattr(CONF.compute, setting)
        else:
            _parameters[setting] = getattr(options, setting)
    if not hasattr(options, 'region') or not getattr(options, 'region'):
        _parameters['region'] = CONF.identity.region
    else:
        _parameters['region'] = getattr(options, 'region')
    _parameters['service'] = getattr(options, 'catalog_type')
    return _parameters
Return a dict with the parameters to initialise service clients, extracted from the CONF settings specific to the given service_client_name.
train
false
953
def channelRelease(BaRange_presence=0, GroupChannelDescription_presence=0,
                   GroupCipherKeyNumber_presence=0, GprsResumption_presence=0,
                   BaListPref_presence=0):
    a = TpPd(pd=6)
    b = MessageType(mesType=13)
    c = RrCause()
    packet = a / b / c
    if BaRange_presence == 1:  # the original compared flags with 'is 1'
        d = BaRangeHdr(ieiBR=115, eightBitBR=0)
        packet = packet / d
    if GroupChannelDescription_presence == 1:
        e = GroupChannelDescriptionHdr(ieiGCD=116, eightBitGCD=0)
        packet = packet / e
    if GroupCipherKeyNumber_presence == 1:
        f = GroupCipherKeyNumber(ieiGCKN=8)
        packet = packet / f
    if GprsResumption_presence == 1:
        g = GprsResumptionHdr(ieiGR=12, eightBitGR=0)
        packet = packet / g
    if BaListPref_presence == 1:
        h = BaListPrefHdr(ieiBLP=117, eightBitBLP=0)
        packet = packet / h
    return packet
Build a Channel Release message (Section 9).
train
true
954
def test_conv_str_type():
    param = inspect.Parameter('foo', inspect.Parameter.POSITIONAL_ONLY)
    with pytest.raises(TypeError) as excinfo:
        argparser.type_conv(param, 'val', None)
    assert str(excinfo.value) == 'foo: Legacy string type!'
Using a str literal as a type used to mean that exactly that string is a valid value.
train
false
957
def pcapname(dev):
    if type(dev) is NetworkInterface:
        if dev.is_invalid():
            return None
        return dev.pcap_name
    try:
        return IFACES.dev_from_name(dev).pcap_name
    except ValueError:
        if conf.use_pcap:
            return None
        raise
Return the pypcap device name for the given interface, or the libdnet/Scapy device name.
train
false
958
def str2dicts(data):
    list_data = []
    list_data.append({})
    d = list_data[-1]
    lines = data.split('\n')
    for line in lines:
        line = line.strip()
        if not line:
            d = {}
            list_data.append(d)
            d = list_data[-1]
            continue
        whitespace = line.find(' ')
        if not whitespace:
            continue
        key = line[0:whitespace]
        value = line[whitespace + 1:]
        d.update({key: value})
    list_data = [val for val in list_data if val != {}]
    return list_data
Create a list of dictionaries from whitespace- and newline-delimited text.
train
false
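A worked example; blank lines in the input separate the output dictionaries:

data = 'cpu 8\nram 16\n\nname web1'
result = str2dicts(data)
assert result == [{'cpu': '8', 'ram': '16'}, {'name': 'web1'}]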
959
def bottomhat(image, selem, out=None, mask=None, shift_x=False, shift_y=False):
    return _apply_scalar_per_pixel(generic_cy._bottomhat, image, selem,
                                   out=out, mask=mask,
                                   shift_x=shift_x, shift_y=shift_y)
Local bottom-hat of an image.
train
false
960
def enable_sig_handler(signal_name, handler):
    if hasattr(signal, signal_name):
        signal.signal(getattr(signal, signal_name), handler)
Add a signal handler for the named signal if it exists on the given platform.
train
true
962
@frappe.whitelist()
def update_event(args, field_map):
    args = frappe._dict(json.loads(args))
    field_map = frappe._dict(json.loads(field_map))
    w = frappe.get_doc(args.doctype, args.name)
    w.set(field_map.start, args[field_map.start])
    w.set(field_map.end, args.get(field_map.end))
    w.save()
Updates an event based on the passed field_map.
train
false
963
def re(s):
    return REParser(s).parse_re()
Convert the traditional string representation of regular expression |s| into a Plex representation.
train
false
964
def paginate_query(query, model, limit, sort_keys, marker=None,
                   sort_dir=None, sort_dirs=None):
    if 'id' not in sort_keys:
        LOG.warn(_('Id not in sort_keys; is sort_keys unique?'))
    assert not (sort_dir and sort_dirs)
    if sort_dirs is None and sort_dir is None:
        sort_dir = 'asc'
    if sort_dirs is None:
        sort_dirs = [sort_dir for _sort_key in sort_keys]
    assert len(sort_dirs) == len(sort_keys)
    for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs):
        sort_dir_func = {'asc': sqlalchemy.asc,
                         'desc': sqlalchemy.desc}[current_sort_dir]
        try:
            sort_key_attr = getattr(model, current_sort_key)
        except AttributeError:
            raise InvalidSortKey()
        query = query.order_by(sort_dir_func(sort_key_attr))
    if marker is not None:
        marker_values = []
        for sort_key in sort_keys:
            v = getattr(marker, sort_key)
            marker_values.append(v)
        criteria_list = []
        for i in xrange(0, len(sort_keys)):
            crit_attrs = []
            for j in xrange(0, i):
                model_attr = getattr(model, sort_keys[j])
                crit_attrs.append(model_attr == marker_values[j])
            model_attr = getattr(model, sort_keys[i])
            if sort_dirs[i] == 'desc':
                crit_attrs.append(model_attr < marker_values[i])
            elif sort_dirs[i] == 'asc':
                crit_attrs.append(model_attr > marker_values[i])
            else:
                raise ValueError(_("Unknown sort direction, "
                                   "must be 'desc' or 'asc'"))
            criteria = sqlalchemy.sql.and_(*crit_attrs)
            criteria_list.append(criteria)
        f = sqlalchemy.sql.or_(*criteria_list)
        query = query.filter(f)
    if limit is not None:
        query = query.limit(limit)
    return query
Returns a paginated query, applying the sort keys and the optional marker and limit.
train
false
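The marker block implements keyset pagination: for ascending sort keys (k1, k2) and marker values (v1, v2), the generated filter is (k1 > v1) OR (k1 = v1 AND k2 > v2). The same predicate in miniature over plain tuples:

rows = [(1, 'a'), (1, 'b'), (2, 'a'), (2, 'b')]
marker = (1, 'b')
after = [r for r in rows
         if r[0] > marker[0] or (r[0] == marker[0] and r[1] > marker[1])]
assert after == [(2, 'a'), (2, 'b')]  # rows strictly after the marker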
965
def fill_settings(sections, acquire_settings, log_printer):
    local_bears = {}
    global_bears = {}
    for section_name, section in sections.items():
        bear_dirs = section.bear_dirs()
        bears = list(section.get('bears', ''))
        section_local_bears, section_global_bears = collect_bears(
            bear_dirs, bears, [BEAR_KIND.LOCAL, BEAR_KIND.GLOBAL], log_printer)
        section_local_bears = Dependencies.resolve(section_local_bears)
        section_global_bears = Dependencies.resolve(section_global_bears)
        all_bears = copy.deepcopy(section_local_bears)
        all_bears.extend(section_global_bears)
        fill_section(section, acquire_settings, log_printer, all_bears)
        local_bears[section_name] = section_local_bears
        global_bears[section_name] = section_global_bears
    return local_bears, global_bears
Retrieves all bears and requests missing settings via the given acquire_settings method.
train
false
966
def test_dim_exception_1d():
    def f(x):
        return x ** 2
    with pytest.raises(ValueError) as exc:
        discretize_model(f, (-10, 11), (-10, 11))
    assert exc.value.args[0] == u'y range specified, but model is only 1-d.'
Test the dimension exception in the 1-d case.
train
false
967
def make_csr(key_str, domains, must_staple=False):
    assert domains, 'Must provide one or more hostnames for the CSR.'
    pkey = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, key_str)
    req = OpenSSL.crypto.X509Req()
    req.get_subject().CN = domains[0]
    extensions = [OpenSSL.crypto.X509Extension(
        'subjectAltName', critical=False,
        value=', '.join('DNS:%s' % d for d in domains).encode('ascii'))]
    if must_staple:
        extensions.append(OpenSSL.crypto.X509Extension(
            '1.3.6.1.5.5.7.1.24', critical=False, value='DER:30:03:02:01:05'))
    req.add_extensions(extensions)
    req.set_version(2)
    req.set_pubkey(pkey)
    req.sign(pkey, 'sha256')
    return tuple(OpenSSL.crypto.dump_certificate_request(method, req)
                 for method in (OpenSSL.crypto.FILETYPE_PEM,
                                OpenSSL.crypto.FILETYPE_ASN1))
Generate a CSR.
train
false
969
@then(u'we see table created')
def step_see_table_created(context):
    _expect_exact(context, u'CREATE TABLE', timeout=2)
Wait to see CREATE TABLE output.
train
false
970
def is_unicode(obj):
    if PY2:
        return isinstance(obj, unicode)
    else:
        return isinstance(obj, str)
Return True if obj is a unicode string.
train
false
973
def filter_thing2(x):
    return x._thing2
A filter to apply to the results of a relationship query; returns the object of the relationship.
train
false
975
def create_formatter(type_, *args, **kwargs):
    return ext.formatter(type_, *args, **kwargs)
Creates a formatter of the given type.
train
false
976
def _vpc_peering_conn_id_for_name(name, conn):
    log.debug('Retrieving VPC peering connection id')
    ids = _get_peering_connection_ids(name, conn)
    if not ids:
        ids = [None]
    elif len(ids) > 1:
        raise SaltInvocationError(
            'Found multiple VPC peering connections with the same name!! '
            'Please make sure you have only one VPC peering connection named '
            '{0} or invoke this function with a VPC peering connection '
            'ID'.format(name))
    return ids[0]
Get the ID associated with this name.
train
true
977
def GetStdout(cmdlist):
    job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
    out = job.communicate()[0]
    if job.returncode != 0:
        sys.stderr.write(out + '\n')
        raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
    return out.rstrip('\n')
Returns the standard output produced by invoking |cmdlist|.
train
false
978
def _smart_pad(x, n_pad):
    if (n_pad == 0).all():
        return x
    elif (n_pad < 0).any():
        raise RuntimeError('n_pad must be non-negative')
    l_z_pad = np.zeros(max(n_pad[0] - len(x) + 1, 0), dtype=x.dtype)
    # the flattened original sized both pads from n_pad[0]; the right-hand
    # zero pad should be sized from n_pad[1]
    r_z_pad = np.zeros(max(n_pad[1] - len(x) + 1, 0), dtype=x.dtype)
    return np.concatenate([l_z_pad,
                           2 * x[0] - x[n_pad[0]:0:-1],
                           x,
                           2 * x[-1] - x[-2:-n_pad[1] - 2:-1],
                           r_z_pad])
Pad vector x.
train
false
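A small numeric check (this relies on the corrected right-pad sizing above; the values follow from odd reflection about the end points):

import numpy as np

x = np.array([1., 2., 3.])
padded = _smart_pad(x, np.array([2, 2]))
# left: 2*1 - [3, 2] = [-1, 0]; right: 2*3 - [2, 1] = [4, 5]
assert np.allclose(padded, [-1., 0., 1., 2., 3., 4., 5.])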
980
def laplace(f, g_inv, g_det, X):
    r = 0
    for i in range(len(X)):
        for j in range(len(X)):
            r += g_inv[i, j] * f.diff(X[i]).diff(X[j])
    for sigma in range(len(X)):
        for alpha in range(len(X)):
            r += (g_det.diff(X[sigma]) * g_inv[sigma, alpha] *
                  f.diff(X[alpha]) / (2 * g_det))
    return r
N-dimensional Laplace filter based on approximate second derivatives.
train
false
981
def hash_resource(resource):
    md5 = hashlib.md5()
    md5.update(repr(resource))  # Python 2 semantics; on Python 3 the repr would need encoding to bytes
    return md5.hexdigest()
Hash an XBlock resource.
train
false
982
def _get_module(module_name, backend=default_backend):
    backend_instance = testinfra.get_backend(backend)
    return backend_instance.get_module(_to_pascal_case(module_name))
Retrieve the correct module implementation, determined by the backend being used.
train
false
983
def _breakdown_point(n_samples, n_subsamples):
    return 1 - (0.5 ** (1 / n_subsamples) * (n_samples - n_subsamples + 1) +
                n_subsamples - 1) / n_samples
Approximation of the breakdown point.
train
false
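A quick numeric illustration, assuming true division as in the original module:

# n_samples=100, n_subsamples=2:
#   0.5 ** (1/2) ~= 0.7071, so the result is
#   1 - (0.7071 * 99 + 2 - 1) / 100 ~= 0.29
bp = _breakdown_point(100, 2)
assert 0.28 < bp < 0.30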
984
def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
    _ensure_cfg_read()
    if expand:
        return _expand_vars(scheme, vars)
    else:
        return dict(_SCHEMES.items(scheme))
Returns a mapping containing an install scheme.
train
true
985
def ws_message(message):
    message.reply_channel.send({'text': message['text']})
Echoes messages back to the client.
train
false
986
def brightness_state(value):
    if value.data > 0:
        return (value.data / 99) * 255, STATE_ON
    else:
        return 0, STATE_OFF
Return the brightness and state.
train
false
987
def add_prefix(key):
    return 'id_' + key
Adds the 'id_' prefix to the key.
train
false
988
def exit_from_conference(id, user):
    if checking_conference(id):
        if verification_user(id, user):
            conferences = get_memcached(get_key('conferences'))
            if is_owner_user(id, user):
                delete_conference(id, user)
            del conferences[id]['users'][user]
            set_memcached(get_key('conferences'), conferences)
    return get_new_message_for_user(user)
Remove the user from the conference once they have exited.
train
false
989
def _parse_bytes(range_header):
    try:
        parsed_ranges = []
        units, ranges = range_header.split('=', 1)
        for range_value in ranges.split(','):
            range_value = range_value.strip()
            if range_value:
                parsed_ranges.append(_parse_range_value(range_value))
        if not parsed_ranges:
            return None
        return units, parsed_ranges
    except ValueError:
        return None
Parses a full HTTP Range header.
train
false
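Behavior sketch (the _parse_range_value helper is defined elsewhere in the original module, so only its role is indicated here):

# With _parse_range_value in scope:
#   _parse_bytes('bytes=0-499,500-999')
#   -> ('bytes', [<parsed '0-499'>, <parsed '500-999'>])
# A header without '=' fails the split with ValueError and yields None:
assert _parse_bytes('malformed') is None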
991
def get_tool_source(config_file=None, xml_tree=None, enable_beta_formats=True):
    if xml_tree is not None:
        return XmlToolSource(xml_tree, source_path=config_file)
    elif config_file is None:
        raise ValueError('get_tool_source called with invalid config_file None.')
    if not enable_beta_formats:
        tree = load_tool_xml(config_file)
        return XmlToolSource(tree, source_path=config_file)
    if config_file.endswith('.yml'):
        log.info('Loading tool from YAML - this is experimental - tool will not function in future.')
        with open(config_file, 'r') as f:
            as_dict = ordered_load(f)
        return YamlToolSource(as_dict, source_path=config_file)
    elif config_file.endswith('.json') or config_file.endswith('.cwl'):
        log.info('Loading CWL tool - this is experimental - tool likely will not function in future at least in same way.')
        return CwlToolSource(config_file)
    else:
        tree = load_tool_xml(config_file)
        return XmlToolSource(tree, source_path=config_file)
Return a ToolSource object corresponding to the supplied source.
train
false
992
def modify_user(id, **data):
    models.User.smart_get(id).update_object(data)
Modify a user.
train
false
994
def execute_sample(package, activity):
    try:
        package_activity = '%s/%s' % (package, activity)
        args = ['/system/bin/sh', '/system/bin/am', 'start',
                '-n', package_activity]
        output = subprocess.check_output(args)
    except subprocess.CalledProcessError as e:
        log.error('Error executing package activity: %r', e)
        return
    log.info('Executed package activity: %r', output)
Execute the sample on the emulator via adb.
train
false
995
def _wanmen_get_json_api_content_by_courseID(courseID):
    return loads(get_content(
        'http://api.wanmen.org/course/getCourseNested/{courseID}'.format(
            courseID=courseID)))
int -> JSON; return a parsed JSON tree from Wanmen's API.
train
false
996
def svdvals(a, overwrite_a=False, check_finite=True):
    a = _asarray_validated(a, check_finite=check_finite)
    if a.size:
        return svd(a, compute_uv=0, overwrite_a=overwrite_a,
                   check_finite=False)
    elif len(a.shape) != 2:
        raise ValueError('expected matrix')
    else:
        return numpy.empty(0)
Compute the singular values of a matrix.
train
false
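The public SciPy function behaves as sketched below; a diagonal matrix makes the result easy to check:

import numpy as np
from scipy.linalg import svdvals

s = svdvals(np.array([[3., 0.], [0., 4.]]))
assert np.allclose(s, [4., 3.])  # singular values, in descending order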
997
def list_certs(keychain='/Library/Keychains/System.keychain'):
    cmd = 'security find-certificate -a {0} | grep -o "alis".*\\" | grep -o \'\\"[-A-Za-z0-9.:() ]*\\"\''.format(_quote(keychain))
    out = __salt__['cmd.run'](cmd, python_shell=True)
    return out.replace('"', '').split('\n')
List all of the installed certificates. keychain: the keychain to list certificates from.
train
true
998
def JoinableQueue(maxsize=0):
    from multiprocessing.queues import JoinableQueue
    return JoinableQueue(maxsize)
Returns a JoinableQueue object.
train
false
999
def cv2_read_file_rgb(filename):
    im = cv2.imread(filename)
    if len(im.shape) == 2:
        im = im[:, :, np.newaxis]
    if im.shape[2] == 1:
        im = np.tile(im, (1, 1, 3))
    if im.shape[2] > 3:
        im = im[:, :, :3]
    im = im[:, :, ::-1]
    return im
Reads an image from file into an RGB array.
train
false
1002
def iterate_file(file):
    while 1:
        chunk = file.read(CHUNK_SIZE)
        if not chunk:
            break
        yield chunk
Progressively yield chunks from a file.
train
false
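Usage sketch (CHUNK_SIZE is a module-level constant in the original; a value is assumed here, and the file path is hypothetical):

CHUNK_SIZE = 8192  # assumed value

total = 0
with open('large_file.bin', 'rb') as fh:
    for chunk in iterate_file(fh):
        total += len(chunk)  # consume the file without loading it whole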
1003
def Potential(name, var, model=None):
    model = modelcontext(model)
    var.name = model.name_for(name)
    model.potentials.append(var)
    return var
Add an arbitrary factor potential to the model likelihood. Parameters: name (str), var (Theano variable). Returns: var.
train
false
1004
def to_rgba(c, alpha=None):
    if _is_nth_color(c):
        from matplotlib import rcParams
        prop_cycler = rcParams[u'axes.prop_cycle']
        colors = prop_cycler.by_key().get(u'color', [u'k'])
        c = colors[int(c[1]) % len(colors)]
    try:
        rgba = _colors_full_map.cache[(c, alpha)]
    except (KeyError, TypeError):
        rgba = _to_rgba_no_colorcycle(c, alpha)
        try:
            _colors_full_map.cache[(c, alpha)] = rgba
        except TypeError:
            pass
    return rgba
Convert c to an RGBA color.
train
false
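This matches the public matplotlib.colors.to_rgba; two calls for illustration:

from matplotlib.colors import to_rgba

assert to_rgba('red') == (1.0, 0.0, 0.0, 1.0)
assert to_rgba('red', alpha=0.5) == (1.0, 0.0, 0.0, 0.5)
# 'C0', 'C1', ... index into the active property cycle, the case the
# _is_nth_color branch above handles.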
1006
def test_range_extra_dtypes():
    dtype_range_extra = dtype_range.copy()
    dtype_range_extra.update({np.int32: (-2147483648, 2147483647),
                              np.uint32: (0, 4294967295)})
    dtype_pairs = [(np.uint8, np.uint32),
                   (np.int8, np.uint32),
                   (np.int8, np.int32),
                   (np.int32, np.int8),
                   (np.float64, np.float32),
                   (np.int32, np.float32)]
    for dtype_in, dt in dtype_pairs:
        imin, imax = dtype_range_extra[dtype_in]
        x = np.linspace(imin, imax, 10).astype(dtype_in)
        with expected_warnings(['precision loss|sign loss|\\A\\Z']):
            y = convert(x, dt)
        omin, omax = dtype_range_extra[dt]
        yield (_verify_range,
               'From %s to %s' % (np.dtype(dtype_in), np.dtype(dt)),
               y, omin, omax, np.dtype(dt))
Test code paths that are not skipped by test_range.
train
false
1008
@open_file(0, mode='rt')
def read_sparse6(path):
    glist = []
    for line in path:
        line = line.strip()
        if not len(line):
            continue
        glist.append(parse_sparse6(line))
    if len(glist) == 1:
        return glist[0]
    else:
        return glist
Read an undirected graph in sparse6 format from path.
train
false
1009
def bic_sigma(sigma2, nobs, df_modelwc, islog=False):
    if not islog:
        sigma2 = np.log(sigma2)
    return sigma2 + bic(0, nobs, df_modelwc) / nobs
Bayesian information criterion (Schwarz criterion). Parameters: sigma2 (float), an estimate of the residual variance, or the determinant of Sigma_hat in the multivariate case.
train
false
1010
def decode_packet(packet_buff):
    if len(packet_buff) < 4:
        raise InvalidPacketException(u'Packet too small (<4): %s'
                                     % hexlify(packet_buff))
    opcode, session_id = struct.unpack_from('!HH', packet_buff, 0)
    if opcode not in PACKET_DECODE_DICT:
        raise InvalidPacketException(u'Invalid opcode: %s' % opcode)
    packet = {'opcode': opcode, 'session_id': session_id}
    return PACKET_DECODE_DICT[opcode](packet, packet_buff, 4)
Decodes a packet binary string into a packet dictionary.
train
false
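The 4-byte header is two big-endian unsigned shorts. A self-contained sketch of packing and unpacking such a header (the opcode and session id values are invented):

import struct

header = struct.pack('!HH', 3, 42)  # opcode=3, session_id=42
opcode, session_id = struct.unpack_from('!HH', header, 0)
assert (opcode, session_id) == (3, 42)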
1011
def add_new_field():
    field_to_update = 'new_field2'
    tablename = 'org_organisation'
    s3migrate.migrate_to_unique_field(tablename, field_to_update,
                                      mapping_function(),
                                      ['org_organisation_type', 'org_sector'])
    table = db[tablename]
    for row in db(table.id > 0).select(['id'], table[field_to_update]):
        print 'id = ', row['id'], field_to_update, ' = ', row[field_to_update]
Test for s3migrate().
train
false
1012
def send_mass_mail(datatuple, fail_silently=False, auth_user=None,
                   auth_password=None, connection=None):
    connection = connection or get_connection(username=auth_user,
                                              password=auth_password,
                                              fail_silently=fail_silently)
    messages = [EmailMessage(subject, message, sender, recipient,
                             connection=connection)
                for subject, message, sender, recipient in datatuple]
    return connection.send_messages(messages)
[ "def", "send_mass_mail", "(", "datatuple", ",", "fail_silently", "=", "False", ",", "auth_user", "=", "None", ",", "auth_password", "=", "None", ",", "connection", "=", "None", ")", ":", "connection", "=", "(", "connection", "or", "get_connection", "(", "username", "=", "auth_user", ",", "password", "=", "auth_password", ",", "fail_silently", "=", "fail_silently", ")", ")", "messages", "=", "[", "EmailMessage", "(", "subject", ",", "message", ",", "sender", ",", "recipient", ",", "connection", "=", "connection", ")", "for", "(", "subject", ",", "message", ",", "sender", ",", "recipient", ")", "in", "datatuple", "]", "return", "connection", ".", "send_messages", "(", "messages", ")" ]
given a datatuple of ( subject , message , sender , recipient ) tuples , send each message over a single shared connection .
train
true
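This snippet mirrors Django's stock django.core.mail.send_mass_mail; usage looks like:

    from django.core.mail import send_mass_mail

    messages = (
        ('Subject A', 'Body A', 'from@example.com', ['a@example.com']),
        ('Subject B', 'Body B', 'from@example.com', ['b@example.com']),
    )
    send_mass_mail(messages, fail_silently=False)  # both sent over one connection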
1,013
@contextmanager def maybe_profiled(profile_path): if (not profile_path): (yield) return import cProfile profiler = cProfile.Profile() try: profiler.enable() (yield) finally: profiler.disable() profiler.dump_stats(profile_path) view_cmd = green(u'gprof2dot -f pstats {path} | dot -Tpng -o {path}.png && open {path}.png'.format(path=profile_path)) logging.getLogger().info(u'Dumped profile data to: {}\nUse e.g. {} to render and view.'.format(profile_path, view_cmd))
[ "@", "contextmanager", "def", "maybe_profiled", "(", "profile_path", ")", ":", "if", "(", "not", "profile_path", ")", ":", "(", "yield", ")", "return", "import", "cProfile", "profiler", "=", "cProfile", ".", "Profile", "(", ")", "try", ":", "profiler", ".", "enable", "(", ")", "(", "yield", ")", "finally", ":", "profiler", ".", "disable", "(", ")", "profiler", ".", "dump_stats", "(", "profile_path", ")", "view_cmd", "=", "green", "(", "u'gprof2dot -f pstats {path} | dot -Tpng -o {path}.png && open {path}.png'", ".", "format", "(", "path", "=", "profile_path", ")", ")", "logging", ".", "getLogger", "(", ")", ".", "info", "(", "u'Dumped profile data to: {}\\nUse e.g. {} to render and view.'", ".", "format", "(", "profile_path", ",", "view_cmd", ")", ")" ]
a profiling context manager .
train
true
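A usage sketch for maybe_profiled; note the no-op path when no profile path is given:

    with maybe_profiled('/tmp/run.pstats'):
        sum(i * i for i in range(10 ** 6))  # profiled, stats dumped to /tmp/run.pstats

    with maybe_profiled(None):
        pass  # falsy path: the context manager yields immediately, no profiling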
1,014
def unroll(deep_autoencoder): net = Network(deep_autoencoder.layers) net.weights = deep_autoencoder.weights[:(len(deep_autoencoder.layers) - 1)] net.biases = deep_autoencoder.biases[:(len(deep_autoencoder.layers) - 1)] return net
[ "def", "unroll", "(", "deep_autoencoder", ")", ":", "net", "=", "Network", "(", "deep_autoencoder", ".", "layers", ")", "net", ".", "weights", "=", "deep_autoencoder", ".", "weights", "[", ":", "(", "len", "(", "deep_autoencoder", ".", "layers", ")", "-", "1", ")", "]", "net", ".", "biases", "=", "deep_autoencoder", ".", "biases", "[", ":", "(", "len", "(", "deep_autoencoder", ".", "layers", ")", "-", "1", ")", "]", "return", "net" ]
return a network that contains the compression stage of the deep_autoencoder .
train
false
1,015
def add_jinja2_ext(pelican): if (u'JINJA_ENVIRONMENT' in pelican.settings): pelican.settings[u'JINJA_ENVIRONMENT'][u'extensions'].append(AssetsExtension) else: pelican.settings[u'JINJA_EXTENSIONS'].append(AssetsExtension)
[ "def", "add_jinja2_ext", "(", "pelican", ")", ":", "if", "(", "u'JINJA_ENVIRONMENT'", "in", "pelican", ".", "settings", ")", ":", "pelican", ".", "settings", "[", "u'JINJA_ENVIRONMENT'", "]", "[", "u'extensions'", "]", ".", "append", "(", "AssetsExtension", ")", "else", ":", "pelican", ".", "settings", "[", "u'JINJA_EXTENSIONS'", "]", ".", "append", "(", "AssetsExtension", ")" ]
add webassets to jinja2 extensions in pelican settings .
train
false
1,017
def vb_wait_for_network_address(timeout, step=None, machine_name=None, machine=None): kwargs = {'machine_name': machine_name, 'machine': machine} return wait_for(vb_get_network_addresses, timeout=timeout, step=step, default=[], func_kwargs=kwargs)
[ "def", "vb_wait_for_network_address", "(", "timeout", ",", "step", "=", "None", ",", "machine_name", "=", "None", ",", "machine", "=", "None", ")", ":", "kwargs", "=", "{", "'machine_name'", ":", "machine_name", ",", "'machine'", ":", "machine", "}", "return", "wait_for", "(", "vb_get_network_addresses", ",", "timeout", "=", "timeout", ",", "step", "=", "step", ",", "default", "=", "[", "]", ",", "func_kwargs", "=", "kwargs", ")" ]
wait until a machine has a network address to return or quit after the timeout .
train
true
1,018
def is_hex(value): try: int(value, 16) return True except (TypeError, ValueError): return False
[ "def", "is_hex", "(", "value", ")", ":", "try", ":", "int", "(", "value", ",", "16", ")", "return", "True", "except", "(", "TypeError", ",", "ValueError", ")", ":", "return", "False" ]
returns true if value is a hexadecimal string .
train
false
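Illustrative calls, grounded directly in the int(value, 16) check:

    is_hex('deadbeef')  # True
    is_hex('0x1a')      # True -- int() accepts the 0x prefix when base is 16
    is_hex('salt')      # False -- ValueError is caught
    is_hex(None)        # False -- TypeError is caught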
1,020
def python_revision(): return _sys_version()[3]
[ "def", "python_revision", "(", ")", ":", "return", "_sys_version", "(", ")", "[", "3", "]" ]
returns a string identifying the python implementation revision .
train
false
1,021
def check_xfail_no_run(item): if (not item.config.option.runxfail): evalxfail = item._evalxfail if evalxfail.istrue(): if (not evalxfail.get('run', True)): pytest.xfail(('[NOTRUN] ' + evalxfail.getexplanation()))
[ "def", "check_xfail_no_run", "(", "item", ")", ":", "if", "(", "not", "item", ".", "config", ".", "option", ".", "runxfail", ")", ":", "evalxfail", "=", "item", ".", "_evalxfail", "if", "evalxfail", ".", "istrue", "(", ")", ":", "if", "(", "not", "evalxfail", ".", "get", "(", "'run'", ",", "True", ")", ")", ":", "pytest", ".", "xfail", "(", "(", "'[NOTRUN] '", "+", "evalxfail", ".", "getexplanation", "(", ")", ")", ")" ]
check xfail ; abort the test with a [NOTRUN] xfail when the marker sets run=False .
train
false
1,022
def _get_jid_snapshots(jid, config='root'): jid_snapshots = [x for x in list_snapshots(config) if (x['userdata'].get('salt_jid') == jid)] pre_snapshot = [x for x in jid_snapshots if (x['type'] == 'pre')] post_snapshot = [x for x in jid_snapshots if (x['type'] == 'post')] if ((not pre_snapshot) or (not post_snapshot)): raise CommandExecutionError("Jid '{0}' snapshots not found".format(jid)) return (pre_snapshot[0]['id'], post_snapshot[0]['id'])
[ "def", "_get_jid_snapshots", "(", "jid", ",", "config", "=", "'root'", ")", ":", "jid_snapshots", "=", "[", "x", "for", "x", "in", "list_snapshots", "(", "config", ")", "if", "(", "x", "[", "'userdata'", "]", ".", "get", "(", "'salt_jid'", ")", "==", "jid", ")", "]", "pre_snapshot", "=", "[", "x", "for", "x", "in", "jid_snapshots", "if", "(", "x", "[", "'type'", "]", "==", "'pre'", ")", "]", "post_snapshot", "=", "[", "x", "for", "x", "in", "jid_snapshots", "if", "(", "x", "[", "'type'", "]", "==", "'post'", ")", "]", "if", "(", "(", "not", "pre_snapshot", ")", "or", "(", "not", "post_snapshot", ")", ")", ":", "raise", "CommandExecutionError", "(", "\"Jid '{0}' snapshots not found\"", ".", "format", "(", "jid", ")", ")", "return", "(", "pre_snapshot", "[", "0", "]", "[", "'id'", "]", ",", "post_snapshot", "[", "0", "]", "[", "'id'", "]", ")" ]
returns pre/post snapshots made by a given salt jid , by looking for salt_jid entries in the snapshots' userdata .
train
true
1,023
def getFilteredPageContent(page, onlyText=True): retVal = page if isinstance(page, unicode): retVal = re.sub(('(?si)<script.+?</script>|<!--.+?-->|<style.+?</style>%s' % ('|<[^>]+>|\\t|\\n|\\r' if onlyText else '')), ' ', page) while (retVal.find(' ') != (-1)): retVal = retVal.replace(' ', ' ') retVal = htmlunescape(retVal.strip()) return retVal
[ "def", "getFilteredPageContent", "(", "page", ",", "onlyText", "=", "True", ")", ":", "retVal", "=", "page", "if", "isinstance", "(", "page", ",", "unicode", ")", ":", "retVal", "=", "re", ".", "sub", "(", "(", "'(?si)<script.+?</script>|<!--.+?-->|<style.+?</style>%s'", "%", "(", "'|<[^>]+>|\\\\t|\\\\n|\\\\r'", "if", "onlyText", "else", "''", ")", ")", ",", "' '", ",", "page", ")", "while", "(", "retVal", ".", "find", "(", "' '", ")", "!=", "(", "-", "1", ")", ")", ":", "retVal", "=", "retVal", ".", "replace", "(", "' '", ",", "' '", ")", "retVal", "=", "htmlunescape", "(", "retVal", ".", "strip", "(", ")", ")", "return", "retVal" ]
returns filtered page content , without script , style and comment blocks ( and without any html tags when onlytext is true ) .
train
false
1,025
def prepend_line(filepath, line): with open(filepath) as f: lines = f.readlines() lines.insert(0, line) with open(filepath, 'w') as f: f.writelines(lines)
[ "def", "prepend_line", "(", "filepath", ",", "line", ")", ":", "with", "open", "(", "filepath", ")", "as", "f", ":", "lines", "=", "f", ".", "readlines", "(", ")", "lines", ".", "insert", "(", "0", ",", "line", ")", "with", "open", "(", "filepath", ",", "'w'", ")", "as", "f", ":", "f", ".", "writelines", "(", "lines", ")" ]
rewrite a file adding a line to its beginning .
train
true
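A usage sketch; note the caller must include the newline, since the snippet inserts the line verbatim:

    with open('notes.txt', 'w') as f:
        f.write('first line\n')

    prepend_line('notes.txt', 'TITLE\n')
    # notes.txt now contains: 'TITLE\nfirst line\n'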
1,026
def get_model_classes(): result = [] for module_name in MODEL_MODULE_NAMES: module = importlib.import_module(module_name) model_classes = getattr(module, 'MODELS', []) result.extend(model_classes) return result
[ "def", "get_model_classes", "(", ")", ":", "result", "=", "[", "]", "for", "module_name", "in", "MODEL_MODULE_NAMES", ":", "module", "=", "importlib", ".", "import_module", "(", "module_name", ")", "model_classes", "=", "getattr", "(", "module", ",", "'MODELS'", ",", "[", "]", ")", "result", ".", "extend", "(", "model_classes", ")", "return", "result" ]
retrieve a list of all the defined model classes .
train
false
1,027
def GetPassword(user): s = secrets.GetSecret(_PasswordName(user)) try: return json.loads(s) except ValueError: return dict(version=_PASSWORD_VERSION_MD5, hashed=s)
[ "def", "GetPassword", "(", "user", ")", ":", "s", "=", "secrets", ".", "GetSecret", "(", "_PasswordName", "(", "user", ")", ")", "try", ":", "return", "json", ".", "loads", "(", "s", ")", "except", "ValueError", ":", "return", "dict", "(", "version", "=", "_PASSWORD_VERSION_MD5", ",", "hashed", "=", "s", ")" ]
returns the encrypted user password from the secrets database .
train
false
1,028
def conlleval(p, g, w, filename, script_path): out = '' for (sl, sp, sw) in zip(g, p, w): out += 'BOS O O\n' for (wl, wp, w) in zip(sl, sp, sw): out += (((((w + ' ') + wl) + ' ') + wp) + '\n') out += 'EOS O O\n\n' f = open(filename, 'w') f.writelines(out) f.close() return get_perf(filename, script_path)
[ "def", "conlleval", "(", "p", ",", "g", ",", "w", ",", "filename", ",", "script_path", ")", ":", "out", "=", "''", "for", "(", "sl", ",", "sp", ",", "sw", ")", "in", "zip", "(", "g", ",", "p", ",", "w", ")", ":", "out", "+=", "'BOS O O\\n'", "for", "(", "wl", ",", "wp", ",", "w", ")", "in", "zip", "(", "sl", ",", "sp", ",", "sw", ")", ":", "out", "+=", "(", "(", "(", "(", "(", "w", "+", "' '", ")", "+", "wl", ")", "+", "' '", ")", "+", "wp", ")", "+", "'\\n'", ")", "out", "+=", "'EOS O O\\n\\n'", "f", "=", "open", "(", "filename", ",", "'w'", ")", "f", ".", "writelines", "(", "out", ")", "f", ".", "close", "(", ")", "return", "get_perf", "(", "filename", ",", "script_path", ")" ]
input : p :: predictions , g :: groundtruth , w :: corresponding words . output : filename :: name of the file where the predictions are written .
train
false
1,029
def AND(domains): return combine(AND_OPERATOR, TRUE_DOMAIN, FALSE_DOMAIN, domains)
[ "def", "AND", "(", "domains", ")", ":", "return", "combine", "(", "AND_OPERATOR", ",", "TRUE_DOMAIN", ",", "FALSE_DOMAIN", ",", "domains", ")" ]
emulate sqlobject's AND .
train
false
1,030
def copy_stored_file(src_path, dst_path, src_storage=private_storage, dst_storage=private_storage): if ((src_path == dst_path) and (src_storage.__class__ == dst_storage.__class__)): return with src_storage.open(src_path, 'rb') as src: with dst_storage.open(dst_path, 'wb') as dest: shutil.copyfileobj(src, dest)
[ "def", "copy_stored_file", "(", "src_path", ",", "dst_path", ",", "src_storage", "=", "private_storage", ",", "dst_storage", "=", "private_storage", ")", ":", "if", "(", "(", "src_path", "==", "dst_path", ")", "and", "(", "src_storage", ".", "__class__", "==", "dst_storage", ".", "__class__", ")", ")", ":", "return", "with", "src_storage", ".", "open", "(", "src_path", ",", "'rb'", ")", "as", "src", ":", "with", "dst_storage", ".", "open", "(", "dst_path", ",", "'wb'", ")", "as", "dest", ":", "shutil", ".", "copyfileobj", "(", "src", ",", "dest", ")" ]
copy one storage path to another storage path .
train
false
1,031
def test_module_exceptions(): normal_types = ['sys', 'clr', 'exceptions', '__builtin__', '_winreg', 'mmap', 'nt', 'posix'] builtins = [x for x in sys.builtin_module_names if (x not in normal_types)] for module in builtins: mod = __import__(module) for attrName in dir(mod): val = getattr(mod, attrName) if (isinstance(val, type) and issubclass(val, Exception)): if ('BlockingIOError' not in repr(val)): Assert(repr(val).startswith('<class ')) val.x = 2 AreEqual(val.x, 2) else: Assert(repr(val).startswith('<type '))
[ "def", "test_module_exceptions", "(", ")", ":", "normal_types", "=", "[", "'sys'", ",", "'clr'", ",", "'exceptions'", ",", "'__builtin__'", ",", "'_winreg'", ",", "'mmap'", ",", "'nt'", ",", "'posix'", "]", "builtins", "=", "[", "x", "for", "x", "in", "sys", ".", "builtin_module_names", "if", "(", "x", "not", "in", "normal_types", ")", "]", "for", "module", "in", "builtins", ":", "mod", "=", "__import__", "(", "module", ")", "for", "attrName", "in", "dir", "(", "mod", ")", ":", "val", "=", "getattr", "(", "mod", ",", "attrName", ")", "if", "(", "isinstance", "(", "val", ",", "type", ")", "and", "issubclass", "(", "val", ",", "Exception", ")", ")", ":", "if", "(", "'BlockingIOError'", "not", "in", "repr", "(", "val", ")", ")", ":", "Assert", "(", "repr", "(", "val", ")", ".", "startswith", "(", "'<class '", ")", ")", "val", ".", "x", "=", "2", "AreEqual", "(", "val", ".", "x", ",", "2", ")", "else", ":", "Assert", "(", "repr", "(", "val", ")", ".", "startswith", "(", "'<type '", ")", ")" ]
verify exceptions in modules are like user defined exception objects .
train
false
1,032
@must_be_valid_project @must_have_permission(ADMIN) def node_registration_retraction_get(auth, node, **kwargs): if (not node.is_registration): raise HTTPError(http.BAD_REQUEST, data={'message_short': 'Invalid Request', 'message_long': 'Withdrawal of non-registrations is not permitted.'}) if node.is_pending_retraction: raise HTTPError(http.BAD_REQUEST, data={'message_short': 'Invalid Request', 'message_long': 'This registration is already pending withdrawal.'}) return serialize_node(node, auth, primary=True)
[ "@", "must_be_valid_project", "@", "must_have_permission", "(", "ADMIN", ")", "def", "node_registration_retraction_get", "(", "auth", ",", "node", ",", "**", "kwargs", ")", ":", "if", "(", "not", "node", ".", "is_registration", ")", ":", "raise", "HTTPError", "(", "http", ".", "BAD_REQUEST", ",", "data", "=", "{", "'message_short'", ":", "'Invalid Request'", ",", "'message_long'", ":", "'Withdrawal of non-registrations is not permitted.'", "}", ")", "if", "node", ".", "is_pending_retraction", ":", "raise", "HTTPError", "(", "http", ".", "BAD_REQUEST", ",", "data", "=", "{", "'message_short'", ":", "'Invalid Request'", ",", "'message_long'", ":", "'This registration is already pending withdrawal.'", "}", ")", "return", "serialize_node", "(", "node", ",", "auth", ",", "primary", "=", "True", ")" ]
prepares node object for registration retraction page .
train
false
1,033
def monkey_patch_username_validator(): username = User._meta.get_field('username') regex = re.compile(('^%s$' % get_username_re_rule())) for validator in username.validators: if isinstance(validator, RegexValidator): validator.regex = regex
[ "def", "monkey_patch_username_validator", "(", ")", ":", "username", "=", "User", ".", "_meta", ".", "get_field", "(", "'username'", ")", "regex", "=", "re", ".", "compile", "(", "(", "'^%s$'", "%", "get_username_re_rule", "(", ")", ")", ")", "for", "validator", "in", "username", ".", "validators", ":", "if", "isinstance", "(", "validator", ",", "RegexValidator", ")", ":", "validator", ".", "regex", "=", "regex" ]
monkey-patch the regex used by the username field's regexvalidator .
train
false
1,035
def define_vol_xml_str(xml): poolname = __salt__['config.get']('libvirt:storagepool', 'default') conn = __get_conn() pool = conn.storagePoolLookupByName(str(poolname)) return (pool.createXML(xml, 0) is not None)
[ "def", "define_vol_xml_str", "(", "xml", ")", ":", "poolname", "=", "__salt__", "[", "'config.get'", "]", "(", "'libvirt:storagepool'", ",", "'default'", ")", "conn", "=", "__get_conn", "(", ")", "pool", "=", "conn", ".", "storagePoolLookupByName", "(", "str", "(", "poolname", ")", ")", "return", "(", "pool", ".", "createXML", "(", "xml", ",", "0", ")", "is", "not", "None", ")" ]
define a volume based on the xml passed to the function ( cli example below ) .
train
false
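For salt's virt module, the truncated cli example would plausibly read:

    salt '*' virt.define_vol_xml_str <XML in string format>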
1,036
def _validate_named_port_params(params): if (not params['named_ports']): return (True, '') if (not isinstance(params['named_ports'], list)): return (False, 'named_ports: expected list of name:port dictionaries.') req_fields = [{'name': 'name', 'required': True, 'type': str}, {'name': 'port', 'required': True, 'type': int}] for np in params['named_ports']: (valid_named_ports, np_msg) = _check_params(np, req_fields) if (not valid_named_ports): return (False, np_msg) return (True, '')
[ "def", "_validate_named_port_params", "(", "params", ")", ":", "if", "(", "not", "params", "[", "'named_ports'", "]", ")", ":", "return", "(", "True", ",", "''", ")", "if", "(", "not", "isinstance", "(", "params", "[", "'named_ports'", "]", ",", "list", ")", ")", ":", "return", "(", "False", ",", "'named_ports: expected list of name:port dictionaries.'", ")", "req_fields", "=", "[", "{", "'name'", ":", "'name'", ",", "'required'", ":", "True", ",", "'type'", ":", "str", "}", ",", "{", "'name'", ":", "'port'", ",", "'required'", ":", "True", ",", "'type'", ":", "int", "}", "]", "for", "np", "in", "params", "[", "'named_ports'", "]", ":", "(", "valid_named_ports", ",", "np_msg", ")", "=", "_check_params", "(", "np", ",", "req_fields", ")", "if", "(", "not", "valid_named_ports", ")", ":", "return", "(", "False", ",", "np_msg", ")", "return", "(", "True", ",", "''", ")" ]
validate the named ports parameters .
train
false
1,040
def test_vectorizer(): data = np.random.rand(150, 18, 6) vect = Vectorizer() result = vect.fit_transform(data) assert_equal(result.ndim, 2) orig_data = vect.inverse_transform(result) assert_equal(orig_data.ndim, 3) assert_array_equal(orig_data, data) assert_array_equal(vect.inverse_transform(result[1:]), data[1:]) assert_equal(vect.fit_transform(np.random.rand(150, 18, 6, 3)).shape, (150, 324)) assert_equal(vect.fit_transform(data[1:]).shape, (149, 108)) vect.fit(np.random.rand(105, 12, 3)) assert_raises(ValueError, vect.transform, np.random.rand(105, 12, 3, 1)) assert_raises(ValueError, vect.inverse_transform, np.random.rand(102, 12, 12))
[ "def", "test_vectorizer", "(", ")", ":", "data", "=", "np", ".", "random", ".", "rand", "(", "150", ",", "18", ",", "6", ")", "vect", "=", "Vectorizer", "(", ")", "result", "=", "vect", ".", "fit_transform", "(", "data", ")", "assert_equal", "(", "result", ".", "ndim", ",", "2", ")", "orig_data", "=", "vect", ".", "inverse_transform", "(", "result", ")", "assert_equal", "(", "orig_data", ".", "ndim", ",", "3", ")", "assert_array_equal", "(", "orig_data", ",", "data", ")", "assert_array_equal", "(", "vect", ".", "inverse_transform", "(", "result", "[", "1", ":", "]", ")", ",", "data", "[", "1", ":", "]", ")", "assert_equal", "(", "vect", ".", "fit_transform", "(", "np", ".", "random", ".", "rand", "(", "150", ",", "18", ",", "6", ",", "3", ")", ")", ".", "shape", ",", "(", "150", ",", "324", ")", ")", "assert_equal", "(", "vect", ".", "fit_transform", "(", "data", "[", "1", ":", "]", ")", ".", "shape", ",", "(", "149", ",", "108", ")", ")", "vect", ".", "fit", "(", "np", ".", "random", ".", "rand", "(", "105", ",", "12", ",", "3", ")", ")", "assert_raises", "(", "ValueError", ",", "vect", ".", "transform", ",", "np", ".", "random", ".", "rand", "(", "105", ",", "12", ",", "3", ",", "1", ")", ")", "assert_raises", "(", "ValueError", ",", "vect", ".", "inverse_transform", ",", "np", ".", "random", ".", "rand", "(", "102", ",", "12", ",", "12", ")", ")" ]
test vectorizer .
train
false
1,041
def set_login_view(login_view, blueprint=None): num_login_views = len(current_app.login_manager.blueprint_login_views) if ((blueprint is not None) or (num_login_views != 0)): current_app.login_manager.blueprint_login_views[blueprint.name] = login_view if ((current_app.login_manager.login_view is not None) and (None not in current_app.login_manager.blueprint_login_views)): current_app.login_manager.blueprint_login_views[None] = current_app.login_manager.login_view current_app.login_manager.login_view = None else: current_app.login_manager.login_view = login_view
[ "def", "set_login_view", "(", "login_view", ",", "blueprint", "=", "None", ")", ":", "num_login_views", "=", "len", "(", "current_app", ".", "login_manager", ".", "blueprint_login_views", ")", "if", "(", "(", "blueprint", "is", "not", "None", ")", "or", "(", "num_login_views", "!=", "0", ")", ")", ":", "current_app", ".", "login_manager", ".", "blueprint_login_views", "[", "blueprint", ".", "name", "]", "=", "login_view", "if", "(", "(", "current_app", ".", "login_manager", ".", "login_view", "is", "not", "None", ")", "and", "(", "None", "not", "in", "current_app", ".", "login_manager", ".", "blueprint_login_views", ")", ")", ":", "current_app", ".", "login_manager", ".", "blueprint_login_views", "[", "None", "]", "=", "current_app", ".", "login_manager", ".", "login_view", "current_app", ".", "login_manager", ".", "login_view", "=", "None", "else", ":", "current_app", ".", "login_manager", ".", "login_view", "=", "login_view" ]
sets the login view for the app or blueprint .
train
true
1,042
def install_le_auto(contents, venv_dir): venv_le_auto_path = join(venv_dir, 'letsencrypt-auto') with open(venv_le_auto_path, 'w') as le_auto: le_auto.write(contents) chmod(venv_le_auto_path, (S_IRUSR | S_IXUSR))
[ "def", "install_le_auto", "(", "contents", ",", "venv_dir", ")", ":", "venv_le_auto_path", "=", "join", "(", "venv_dir", ",", "'letsencrypt-auto'", ")", "with", "open", "(", "venv_le_auto_path", ",", "'w'", ")", "as", "le_auto", ":", "le_auto", ".", "write", "(", "contents", ")", "chmod", "(", "venv_le_auto_path", ",", "(", "S_IRUSR", "|", "S_IXUSR", ")", ")" ]
install some given source code as the letsencrypt-auto script at the root level of a virtualenv .
train
false
1,044
@contextmanager def temporary_file_path(root_dir=None, cleanup=True, suffix=u'', permissions=None): with temporary_file(root_dir, cleanup=cleanup, suffix=suffix, permissions=permissions) as fd: fd.close() (yield fd.name)
[ "@", "contextmanager", "def", "temporary_file_path", "(", "root_dir", "=", "None", ",", "cleanup", "=", "True", ",", "suffix", "=", "u''", ",", "permissions", "=", "None", ")", ":", "with", "temporary_file", "(", "root_dir", ",", "cleanup", "=", "cleanup", ",", "suffix", "=", "suffix", ",", "permissions", "=", "permissions", ")", "as", "fd", ":", "fd", ".", "close", "(", ")", "(", "yield", "fd", ".", "name", ")" ]
a with-context that creates a temporary file and returns its path .
train
true
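A usage sketch; the descriptor is closed inside the context, so the path can be reopened freely:

    import json

    with temporary_file_path(suffix='.json') as path:
        with open(path, 'w') as f:
            json.dump({'ok': True}, f)
    # with the default cleanup=True, the file is removed on exit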
1,045
def AddUser(username, password=None, labels=None, token=None): token = data_store.GetDefaultToken(token) user_urn = ('aff4:/users/%s' % username) try: if aff4.FACTORY.Open(user_urn, users.GRRUser, token=token): raise UserError(('Cannot add user %s: User already exists.' % username)) except aff4.InstantiationError: pass fd = aff4.FACTORY.Create(user_urn, users.GRRUser, mode='rw', token=token) if (password is None): password = getpass.getpass(prompt=("Please enter password for user '%s': " % username)) fd.SetPassword(password) if labels: fd.AddLabels(owner='GRR', *set(labels)) fd.Close() EPrint(('Added user %s.' % username)) events.Events.PublishEvent('Audit', events.AuditEvent(user=token.username, action='USER_ADD', urn=user_urn), token=token)
[ "def", "AddUser", "(", "username", ",", "password", "=", "None", ",", "labels", "=", "None", ",", "token", "=", "None", ")", ":", "token", "=", "data_store", ".", "GetDefaultToken", "(", "token", ")", "user_urn", "=", "(", "'aff4:/users/%s'", "%", "username", ")", "try", ":", "if", "aff4", ".", "FACTORY", ".", "Open", "(", "user_urn", ",", "users", ".", "GRRUser", ",", "token", "=", "token", ")", ":", "raise", "UserError", "(", "(", "'Cannot add user %s: User already exists.'", "%", "username", ")", ")", "except", "aff4", ".", "InstantiationError", ":", "pass", "fd", "=", "aff4", ".", "FACTORY", ".", "Create", "(", "user_urn", ",", "users", ".", "GRRUser", ",", "mode", "=", "'rw'", ",", "token", "=", "token", ")", "if", "(", "password", "is", "None", ")", ":", "password", "=", "getpass", ".", "getpass", "(", "prompt", "=", "(", "\"Please enter password for user '%s': \"", "%", "username", ")", ")", "fd", ".", "SetPassword", "(", "password", ")", "if", "labels", ":", "fd", ".", "AddLabels", "(", "owner", "=", "'GRR'", ",", "*", "set", "(", "labels", ")", ")", "fd", ".", "Close", "(", ")", "EPrint", "(", "(", "'Added user %s.'", "%", "username", ")", ")", "events", ".", "Events", ".", "PublishEvent", "(", "'Audit'", ",", "events", ".", "AuditEvent", "(", "user", "=", "token", ".", "username", ",", "action", "=", "'USER_ADD'", ",", "urn", "=", "user_urn", ")", ",", "token", "=", "token", ")" ]
implementation of the add_user command .
train
false
1,046
@conf.commands.register def bind_layers(lower, upper, __fval=None, **fval): if (__fval is not None): fval.update(__fval) bind_top_down(lower, upper, **fval) bind_bottom_up(lower, upper, **fval)
[ "@", "conf", ".", "commands", ".", "register", "def", "bind_layers", "(", "lower", ",", "upper", ",", "__fval", "=", "None", ",", "**", "fval", ")", ":", "if", "(", "__fval", "is", "not", "None", ")", ":", "fval", ".", "update", "(", "__fval", ")", "bind_top_down", "(", "lower", ",", "upper", ",", "**", "fval", ")", "bind_bottom_up", "(", "lower", ",", "upper", ",", "**", "fval", ")" ]
bind two layers on some specific field values .
train
false
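This is scapy's bind_layers; a small sketch with a hypothetical custom layer:

    from scapy.all import Packet, ShortField, UDP, bind_layers

    class MyProto(Packet):        # hypothetical protocol, for illustration only
        name = 'MyProto'
        fields_desc = [ShortField('value', 0)]

    bind_layers(UDP, MyProto, dport=9999)
    # UDP packets with dport 9999 are now built and dissected as MyProto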
1,047
def guess_repo(path=None): global repodir if path: repodir = path if (not repodir): repodir = os.environ.get('BUP_DIR') if (not repodir): repodir = os.path.expanduser('~/.bup')
[ "def", "guess_repo", "(", "path", "=", "None", ")", ":", "global", "repodir", "if", "path", ":", "repodir", "=", "path", "if", "(", "not", "repodir", ")", ":", "repodir", "=", "os", ".", "environ", ".", "get", "(", "'BUP_DIR'", ")", "if", "(", "not", "repodir", ")", ":", "repodir", "=", "os", ".", "path", ".", "expanduser", "(", "'~/.bup'", ")" ]
set the path value in the global variable "repodir" .
train
false
1,049
def CreateBinaryConfigPaths(token=None): required_urns = set() try: for platform in SUPPORTED_PLATFORMS: required_urns.add(('aff4:/config/executables/%s/agentupdates' % platform)) required_urns.add(('aff4:/config/executables/%s/installers' % platform)) existing_urns = [x['urn'] for x in aff4.FACTORY.Stat(list(required_urns), token=token)] missing_urns = (required_urns - set(existing_urns)) for urn in missing_urns: aff4.FACTORY.Create(urn, aff4.AFF4Volume, token=token).Flush() except access_control.UnauthorizedAccess: logging.info('User is not admin, cannot check configuration tree.') return
[ "def", "CreateBinaryConfigPaths", "(", "token", "=", "None", ")", ":", "required_urns", "=", "set", "(", ")", "try", ":", "for", "platform", "in", "SUPPORTED_PLATFORMS", ":", "required_urns", ".", "add", "(", "(", "'aff4:/config/executables/%s/agentupdates'", "%", "platform", ")", ")", "required_urns", ".", "add", "(", "(", "'aff4:/config/executables/%s/installers'", "%", "platform", ")", ")", "existing_urns", "=", "[", "x", "[", "'urn'", "]", "for", "x", "in", "aff4", ".", "FACTORY", ".", "Stat", "(", "list", "(", "required_urns", ")", ",", "token", "=", "token", ")", "]", "missing_urns", "=", "(", "required_urns", "-", "set", "(", "existing_urns", ")", ")", "for", "urn", "in", "missing_urns", ":", "aff4", ".", "FACTORY", ".", "Create", "(", "urn", ",", "aff4", ".", "AFF4Volume", ",", "token", "=", "token", ")", ".", "Flush", "(", ")", "except", "access_control", ".", "UnauthorizedAccess", ":", "logging", ".", "info", "(", "'User is not admin, cannot check configuration tree.'", ")", "return" ]
create the paths required for binary configs .
train
false
1,050
def make_browser_model(browser, filter_type=None): factories = {FilterType.instrument_hotswap: make_instruments_browser_model, FilterType.drum_pad_hotswap: make_drum_pad_browser_model, FilterType.audio_effect_hotswap: make_audio_effect_browser_model, FilterType.midi_effect_hotswap: make_midi_effect_browser_model} if (filter_type == None): filter_type = filter_type_for_browser(browser) return factories.get(filter_type, make_fallback_browser_model)(browser)
[ "def", "make_browser_model", "(", "browser", ",", "filter_type", "=", "None", ")", ":", "factories", "=", "{", "FilterType", ".", "instrument_hotswap", ":", "make_instruments_browser_model", ",", "FilterType", ".", "drum_pad_hotswap", ":", "make_drum_pad_browser_model", ",", "FilterType", ".", "audio_effect_hotswap", ":", "make_audio_effect_browser_model", ",", "FilterType", ".", "midi_effect_hotswap", ":", "make_midi_effect_browser_model", "}", "if", "(", "filter_type", "==", "None", ")", ":", "filter_type", "=", "filter_type_for_browser", "(", "browser", ")", "return", "factories", ".", "get", "(", "filter_type", ",", "make_fallback_browser_model", ")", "(", "browser", ")" ]
factory that returns an appropriate browser model depending on the browser filter type and hotswap target .
train
false
1,052
def test_prewitt_h_mask(): np.random.seed(0) result = filters.prewitt_h(np.random.uniform(size=(10, 10)), np.zeros((10, 10), bool)) assert_allclose(result, 0)
[ "def", "test_prewitt_h_mask", "(", ")", ":", "np", ".", "random", ".", "seed", "(", "0", ")", "result", "=", "filters", ".", "prewitt_h", "(", "np", ".", "random", ".", "uniform", "(", "size", "=", "(", "10", ",", "10", ")", ")", ",", "np", ".", "zeros", "(", "(", "10", ",", "10", ")", ",", "bool", ")", ")", "assert_allclose", "(", "result", ",", "0", ")" ]
horizontal prewitt on a masked array should be zero .
train
false
1,053
@utils.arg('server', metavar='<server>', help=_('Name or ID of server.')) @utils.arg('secgroup', metavar='<secgroup>', help=_('Name of Security Group.')) def do_remove_secgroup(cs, args): server = _find_server(cs, args.server) server.remove_security_group(args.secgroup)
[ "@", "utils", ".", "arg", "(", "'server'", ",", "metavar", "=", "'<server>'", ",", "help", "=", "_", "(", "'Name or ID of server.'", ")", ")", "@", "utils", ".", "arg", "(", "'secgroup'", ",", "metavar", "=", "'<secgroup>'", ",", "help", "=", "_", "(", "'Name of Security Group.'", ")", ")", "def", "do_remove_secgroup", "(", "cs", ",", "args", ")", ":", "server", "=", "_find_server", "(", "cs", ",", "args", ".", "server", ")", "server", ".", "remove_security_group", "(", "args", ".", "secgroup", ")" ]
remove a security group from a server .
train
false
1,054
def _logdet(A): vals = linalg.eigh(A)[0] tol = ((vals.max() * vals.size) * np.finfo(np.float64).eps) vals = np.where((vals > tol), vals, tol) return np.sum(np.log(vals))
[ "def", "_logdet", "(", "A", ")", ":", "vals", "=", "linalg", ".", "eigh", "(", "A", ")", "[", "0", "]", "tol", "=", "(", "(", "vals", ".", "max", "(", ")", "*", "vals", ".", "size", ")", "*", "np", ".", "finfo", "(", "np", ".", "float64", ")", ".", "eps", ")", "vals", "=", "np", ".", "where", "(", "(", "vals", ">", "tol", ")", ",", "vals", ",", "tol", ")", "return", "np", ".", "sum", "(", "np", ".", "log", "(", "vals", ")", ")" ]
compute the log det of a symmetric matrix .
train
false
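A quick numeric check for _logdet, assuming numpy and scipy.linalg are imported as the snippet requires:

    import numpy as np

    A = np.array([[2.0, 0.5],
                  [0.5, 1.0]])
    # for a well-conditioned symmetric matrix this matches the plain log-determinant
    assert np.isclose(_logdet(A), np.log(np.linalg.det(A)))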
1,055
def retrieve_next_page(key, seed, args, callback, initial_bookmark=None): filter = dict(args) if ('pager_info' in filter): initial_bookmark = decrypt(filter.pop('pager_info'), key, seed) page_size = filter.pop('page_size', DEFAULT_PAGE_SIZE) (result_list, new_bookmark, approx_result_size) = callback(filter, initial_bookmark, page_size) filter['pager_info'] = encrypt(new_bookmark, key, seed) filter['page_size'] = page_size return (result_list, filter, approx_result_size)
[ "def", "retrieve_next_page", "(", "key", ",", "seed", ",", "args", ",", "callback", ",", "initial_bookmark", "=", "None", ")", ":", "filter", "=", "dict", "(", "args", ")", "if", "(", "'pager_info'", "in", "filter", ")", ":", "initial_bookmark", "=", "decrypt", "(", "filter", ".", "pop", "(", "'pager_info'", ")", ",", "key", ",", "seed", ")", "page_size", "=", "filter", ".", "pop", "(", "'page_size'", ",", "DEFAULT_PAGE_SIZE", ")", "(", "result_list", ",", "new_bookmark", ",", "approx_result_size", ")", "=", "callback", "(", "filter", ",", "initial_bookmark", ",", "page_size", ")", "filter", "[", "'pager_info'", "]", "=", "encrypt", "(", "new_bookmark", ",", "key", ",", "seed", ")", "filter", "[", "'page_size'", "]", "=", "page_size", "return", "(", "result_list", ",", "filter", ",", "approx_result_size", ")" ]
a helper for the bookmark pager .
train
false
1,059
def rdiv(self, rhs): if isinstance(rhs, variable.Variable): return Div()(rhs, self) _check_constant_type(rhs) return DivFromConstant(rhs)(self)
[ "def", "rdiv", "(", "self", ",", "rhs", ")", ":", "if", "isinstance", "(", "rhs", ",", "variable", ".", "Variable", ")", ":", "return", "Div", "(", ")", "(", "rhs", ",", "self", ")", "_check_constant_type", "(", "rhs", ")", "return", "DivFromConstant", "(", "rhs", ")", "(", "self", ")" ]
element-wise division ( reflected , i.e. right-hand operand ) .
train
false
1,060
def rescan_all(host): if os.path.isdir('/sys/class/scsi_host/host{0}'.format(host)): cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host) else: return 'Host {0} does not exist'.format(host) return __salt__['cmd.run'](cmd).splitlines()
[ "def", "rescan_all", "(", "host", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "'/sys/class/scsi_host/host{0}'", ".", "format", "(", "host", ")", ")", ":", "cmd", "=", "'echo \"- - -\" > /sys/class/scsi_host/host{0}/scan'", ".", "format", "(", "host", ")", "else", ":", "return", "'Host {0} does not exist'", ".", "format", "(", "host", ")", "return", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ")", ".", "splitlines", "(", ")" ]
rescan all scsi devices on a host ( cli example below ) .
train
true
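For salt's scsi module, the truncated cli example would plausibly read:

    salt '*' scsi.rescan_all 0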
1,061
def bezier_curve(r0, c0, r1, c1, r2, c2, weight, shape=None): return _bezier_curve(r0, c0, r1, c1, r2, c2, weight, shape)
[ "def", "bezier_curve", "(", "r0", ",", "c0", ",", "r1", ",", "c1", ",", "r2", ",", "c2", ",", "weight", ",", "shape", "=", "None", ")", ":", "return", "_bezier_curve", "(", "r0", ",", "c0", ",", "r1", ",", "c1", ",", "r2", ",", "c2", ",", "weight", ",", "shape", ")" ]
generate bezier curve coordinates .
train
false
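The snippet matches skimage.draw's public bezier_curve wrapper; a rasterization sketch:

    import numpy as np
    from skimage.draw import bezier_curve

    img = np.zeros((10, 10), dtype=np.uint8)
    rr, cc = bezier_curve(0, 0, 5, 9, 9, 0, weight=1, shape=img.shape)
    img[rr, cc] = 1  # quadratic Bezier from (0, 0) to (9, 0) via control point (5, 9)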
1,062
def join_header_words(lists): headers = [] for pairs in lists: attr = [] for (k, v) in pairs: if (v is not None): if (not re.search('^\\w+$', v)): v = join_escape_re.sub('\\\\\\1', v) v = ('"%s"' % v) if (k is None): k = v else: k = ('%s=%s' % (k, v)) attr.append(k) if attr: headers.append('; '.join(attr)) return ', '.join(headers)
[ "def", "join_header_words", "(", "lists", ")", ":", "headers", "=", "[", "]", "for", "pairs", "in", "lists", ":", "attr", "=", "[", "]", "for", "(", "k", ",", "v", ")", "in", "pairs", ":", "if", "(", "v", "is", "not", "None", ")", ":", "if", "(", "not", "re", ".", "search", "(", "'^\\\\w+$'", ",", "v", ")", ")", ":", "v", "=", "join_escape_re", ".", "sub", "(", "'\\\\\\\\\\\\1'", ",", "v", ")", "v", "=", "(", "'\"%s\"'", "%", "v", ")", "if", "(", "k", "is", "None", ")", ":", "k", "=", "v", "else", ":", "k", "=", "(", "'%s=%s'", "%", "(", "k", ",", "v", ")", ")", "attr", ".", "append", "(", "k", ")", "if", "attr", ":", "headers", ".", "append", "(", "'; '", ".", "join", "(", "attr", ")", ")", "return", "', '", ".", "join", "(", "headers", ")" ]
do the inverse of the conversion done by split_header_words .
train
false
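A worked call, grounded in the snippet's quoting rule (values that are not purely word characters get quoted; None values yield bare tokens):

    pairs = [[('text/plain', None), ('charset', 'utf-8')]]
    join_header_words(pairs)  # -> 'text/plain; charset="utf-8"'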
1,063
def get_student_data(students, exclude=None): FakeUser = namedtuple('Fake', 'id username is_anonymous') exclude = (exclude if exclude else set()) def make(svalue): "\n Given a User value entry `svalue`, extracts the student's email and fullname,\n and provides a unique id for the user.\n\n Returns a dictionary with keys 'EMAIL', 'FULLNAME', and 'EDX_ID'.\n " fake_user = FakeUser(svalue['user_id'], svalue['user__username'], (lambda : True)) entry = {'EMAIL': svalue['user__email'], 'FULLNAME': svalue['name'].title(), 'EDX_ID': unique_id_for_user(fake_user)} return entry fields = ('user__email', 'name', 'user_id', 'user__username') values = students.values(*fields) exclude_func = (lambda s: (s['user__email'] in exclude)) return [make(s) for s in values if (not exclude_func(s))]
[ "def", "get_student_data", "(", "students", ",", "exclude", "=", "None", ")", ":", "FakeUser", "=", "namedtuple", "(", "'Fake'", ",", "'id username is_anonymous'", ")", "exclude", "=", "(", "exclude", "if", "exclude", "else", "set", "(", ")", ")", "def", "make", "(", "svalue", ")", ":", "fake_user", "=", "FakeUser", "(", "svalue", "[", "'user_id'", "]", ",", "svalue", "[", "'user__username'", "]", ",", "(", "lambda", ":", "True", ")", ")", "entry", "=", "{", "'EMAIL'", ":", "svalue", "[", "'user__email'", "]", ",", "'FULLNAME'", ":", "svalue", "[", "'name'", "]", ".", "title", "(", ")", ",", "'EDX_ID'", ":", "unique_id_for_user", "(", "fake_user", ")", "}", "return", "entry", "fields", "=", "(", "'user__email'", ",", "'name'", ",", "'user_id'", ",", "'user__username'", ")", "values", "=", "students", ".", "values", "(", "*", "fields", ")", "exclude_func", "=", "(", "lambda", "s", ":", "(", "s", "[", "'user__email'", "]", "in", "exclude", ")", ")", "return", "[", "make", "(", "s", ")", "for", "s", "in", "values", "if", "(", "not", "exclude_func", "(", "s", ")", ")", "]" ]
given a queryset of django users , return a list of dicts with keys EMAIL , FULLNAME and EDX_ID for each student whose email is not in the exclude set .
train
false
1,068
@task @timed def i18n_validate_transifex_config(): home = path('~').expanduser() config = (home / '.transifexrc') if ((not config.isfile) or (config.getsize == 0)): msg = colorize('red', 'Cannot connect to Transifex, config file is missing or empty: {config} \nSee http://help.transifex.com/features/client/#transifexrc \n'.format(config=config)) sys.stderr.write(msg) sys.exit(1)
[ "@", "task", "@", "timed", "def", "i18n_validate_transifex_config", "(", ")", ":", "home", "=", "path", "(", "'~'", ")", ".", "expanduser", "(", ")", "config", "=", "(", "home", "/", "'.transifexrc'", ")", "if", "(", "(", "not", "config", ".", "isfile", ")", "or", "(", "config", ".", "getsize", "==", "0", ")", ")", ":", "msg", "=", "colorize", "(", "'red'", ",", "'Cannot connect to Transifex, config file is missing or empty: {config} \\nSee http://help.transifex.com/features/client/#transifexrc \\n'", ".", "format", "(", "config", "=", "config", ")", ")", "sys", ".", "stderr", ".", "write", "(", "msg", ")", "sys", ".", "exit", "(", "1", ")" ]
make sure config file with username/password exists .
train
false
1,069
@task() def update_search_index(course_id, triggered_time_isoformat): try: course_key = CourseKey.from_string(course_id) CoursewareSearchIndexer.index(modulestore(), course_key, triggered_at=_parse_time(triggered_time_isoformat)) except SearchIndexingError as exc: LOGGER.error('Search indexing error for complete course %s - %s', course_id, unicode(exc)) else: LOGGER.debug('Search indexing successful for complete course %s', course_id)
[ "@", "task", "(", ")", "def", "update_search_index", "(", "course_id", ",", "triggered_time_isoformat", ")", ":", "try", ":", "course_key", "=", "CourseKey", ".", "from_string", "(", "course_id", ")", "CoursewareSearchIndexer", ".", "index", "(", "modulestore", "(", ")", ",", "course_key", ",", "triggered_at", "=", "_parse_time", "(", "triggered_time_isoformat", ")", ")", "except", "SearchIndexingError", "as", "exc", ":", "LOGGER", ".", "error", "(", "'Search indexing error for complete course %s - %s'", ",", "course_id", ",", "unicode", "(", "exc", ")", ")", "else", ":", "LOGGER", ".", "debug", "(", "'Search indexing successful for complete course %s'", ",", "course_id", ")" ]
updates course search index .
train
false
1,070
def asset_get_current_log(asset_id): table = current.s3db.asset_log query = (((table.asset_id == asset_id) & (table.cancel == False)) & (table.deleted == False)) asset_log = current.db(query).select(table.id, table.status, table.datetime, table.cond, table.person_id, table.organisation_id, table.site_id, orderby=(~ table.datetime), limitby=(0, 1)).first() if asset_log: return Storage(datetime=asset_log.datetime, person_id=asset_log.person_id, cond=int((asset_log.cond or 0)), status=int((asset_log.status or 0)), organisation_id=asset_log.organisation_id, site_id=asset_log.site_id) else: return Storage()
[ "def", "asset_get_current_log", "(", "asset_id", ")", ":", "table", "=", "current", ".", "s3db", ".", "asset_log", "query", "=", "(", "(", "(", "table", ".", "asset_id", "==", "asset_id", ")", "&", "(", "table", ".", "cancel", "==", "False", ")", ")", "&", "(", "table", ".", "deleted", "==", "False", ")", ")", "asset_log", "=", "current", ".", "db", "(", "query", ")", ".", "select", "(", "table", ".", "id", ",", "table", ".", "status", ",", "table", ".", "datetime", ",", "table", ".", "cond", ",", "table", ".", "person_id", ",", "table", ".", "organisation_id", ",", "table", ".", "site_id", ",", "orderby", "=", "(", "~", "table", ".", "datetime", ")", ",", "limitby", "=", "(", "0", ",", "1", ")", ")", ".", "first", "(", ")", "if", "asset_log", ":", "return", "Storage", "(", "datetime", "=", "asset_log", ".", "datetime", ",", "person_id", "=", "asset_log", ".", "person_id", ",", "cond", "=", "int", "(", "(", "asset_log", ".", "cond", "or", "0", ")", ")", ",", "status", "=", "int", "(", "(", "asset_log", ".", "status", "or", "0", ")", ")", ",", "organisation_id", "=", "asset_log", ".", "organisation_id", ",", "site_id", "=", "asset_log", ".", "site_id", ")", "else", ":", "return", "Storage", "(", ")" ]
get the current log entry for this asset .
train
false
1,071
def find_globals(node, globs): for n in node: if isinstance(n, AST): globs = find_globals(n, globs) elif (n.type in ('STORE_GLOBAL', 'DELETE_GLOBAL')): globs.add(n.pattr) return globs
[ "def", "find_globals", "(", "node", ",", "globs", ")", ":", "for", "n", "in", "node", ":", "if", "isinstance", "(", "n", ",", "AST", ")", ":", "globs", "=", "find_globals", "(", "n", ",", "globs", ")", "elif", "(", "n", ".", "type", "in", "(", "'STORE_GLOBAL'", ",", "'DELETE_GLOBAL'", ")", ")", ":", "globs", ".", "add", "(", "n", ".", "pattr", ")", "return", "globs" ]
uses ast to find globals in an ast tree .
train
false
1,073
def _queue_manangement_worker(executor_reference, processes, pending_work_items, work_ids_queue, call_queue, result_queue, shutdown_process_event): while True: _add_call_item_to_queue(pending_work_items, work_ids_queue, call_queue) try: result_item = result_queue.get(block=True, timeout=0.1) except queue.Empty: executor = executor_reference() if (_shutdown or (executor is None) or executor._shutdown_thread): if (not pending_work_items): shutdown_process_event.set() for p in processes: p.join() return del executor else: work_item = pending_work_items[result_item.work_id] del pending_work_items[result_item.work_id] if result_item.exception: work_item.future.set_exception(result_item.exception) else: work_item.future.set_result(result_item.result)
[ "def", "_queue_manangement_worker", "(", "executor_reference", ",", "processes", ",", "pending_work_items", ",", "work_ids_queue", ",", "call_queue", ",", "result_queue", ",", "shutdown_process_event", ")", ":", "while", "True", ":", "_add_call_item_to_queue", "(", "pending_work_items", ",", "work_ids_queue", ",", "call_queue", ")", "try", ":", "result_item", "=", "result_queue", ".", "get", "(", "block", "=", "True", ",", "timeout", "=", "0.1", ")", "except", "queue", ".", "Empty", ":", "executor", "=", "executor_reference", "(", ")", "if", "(", "_shutdown", "or", "(", "executor", "is", "None", ")", "or", "executor", ".", "_shutdown_thread", ")", ":", "if", "(", "not", "pending_work_items", ")", ":", "shutdown_process_event", ".", "set", "(", ")", "for", "p", "in", "processes", ":", "p", ".", "join", "(", ")", "return", "del", "executor", "else", ":", "work_item", "=", "pending_work_items", "[", "result_item", ".", "work_id", "]", "del", "pending_work_items", "[", "result_item", ".", "work_id", "]", "if", "result_item", ".", "exception", ":", "work_item", ".", "future", ".", "set_exception", "(", "result_item", ".", "exception", ")", "else", ":", "work_item", ".", "future", ".", "set_result", "(", "result_item", ".", "result", ")" ]
manages the communication between this process and the worker processes .
train
false