Column                           Type            Values
identifier                       stringlengths   1 to 155 characters
parameters                       stringlengths   2 to 6.09k characters
docstring                        stringlengths   11 to 63.4k characters
docstring_summary                stringlengths   0 to 63.4k characters
function                         stringlengths   29 to 99.8k characters
function_tokens                  sequence
start_point                      sequence
end_point                        sequence
language                         stringclasses   1 value
docstring_language               stringlengths   2 to 7 characters
docstring_language_predictions   stringlengths   18 to 23 characters
is_langid_reliable               stringclasses   2 values
technical_500_response
(request, exc_type, exc_value, tb, status_code=500)
Create a technical server error response. The last three arguments are the values returned from sys.exc_info() and friends.
Create a technical server error response. The last three arguments are the values returned from sys.exc_info() and friends.
def technical_500_response(request, exc_type, exc_value, tb, status_code=500):
    """
    Create a technical server error response. The last three arguments are
    the values returned from sys.exc_info() and friends.
    """
    reporter = get_exception_reporter_class(request)(request, exc_type, exc_value, tb)
    if request.accepts('text/html'):
        html = reporter.get_traceback_html()
        return HttpResponse(html, status=status_code, content_type='text/html')
    else:
        text = reporter.get_traceback_text()
        return HttpResponse(text, status=status_code, content_type='text/plain; charset=utf-8')
[ "def", "technical_500_response", "(", "request", ",", "exc_type", ",", "exc_value", ",", "tb", ",", "status_code", "=", "500", ")", ":", "reporter", "=", "get_exception_reporter_class", "(", "request", ")", "(", "request", ",", "exc_type", ",", "exc_value", ",", "tb", ")", "if", "request", ".", "accepts", "(", "'text/html'", ")", ":", "html", "=", "reporter", ".", "get_traceback_html", "(", ")", "return", "HttpResponse", "(", "html", ",", "status", "=", "status_code", ",", "content_type", "=", "'text/html'", ")", "else", ":", "text", "=", "reporter", ".", "get_traceback_text", "(", ")", "return", "HttpResponse", "(", "text", ",", "status", "=", "status_code", ",", "content_type", "=", "'text/plain; charset=utf-8'", ")" ]
[ 49, 0 ]
[ 60, 95 ]
python
en
['en', 'error', 'th']
False
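As the docstring notes, the last three arguments come from sys.exc_info(). A minimal, hedged sketch of calling it by hand from a Django view (assuming django.views.debug as the containing module; normally the framework invokes it for you when DEBUG is True):

import sys

from django.views import debug


def failing_view(request):
    try:
        1 / 0
    except Exception:
        exc_type, exc_value, tb = sys.exc_info()
        # Returns the full technical debug page; HTML or plain text is
        # chosen from the request's Accept header, as in the function above.
        return debug.technical_500_response(request, exc_type, exc_value, tb)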
technical_404_response
(request, exception)
Create a technical 404 error response. `exception` is the Http404.
Create a technical 404 error response. `exception` is the Http404.
def technical_404_response(request, exception):
    """Create a technical 404 error response. `exception` is the Http404."""
    try:
        error_url = exception.args[0]['path']
    except (IndexError, TypeError, KeyError):
        error_url = request.path_info[1:]  # Trim leading slash

    try:
        tried = exception.args[0]['tried']
    except (IndexError, TypeError, KeyError):
        resolved = True
        tried = request.resolver_match.tried if request.resolver_match else None
    else:
        resolved = False
        if (not tried or (                  # empty URLconf
            request.path == '/' and
            len(tried) == 1 and             # default URLconf
            len(tried[0]) == 1 and
            getattr(tried[0][0], 'app_name', '') == getattr(tried[0][0], 'namespace', '') == 'admin'
        )):
            return default_urlconf(request)

    urlconf = getattr(request, 'urlconf', settings.ROOT_URLCONF)
    if isinstance(urlconf, types.ModuleType):
        urlconf = urlconf.__name__

    caller = ''
    try:
        resolver_match = resolve(request.path)
    except Http404:
        pass
    else:
        obj = resolver_match.func

        if hasattr(obj, '__name__'):
            caller = obj.__name__
        elif hasattr(obj, '__class__') and hasattr(obj.__class__, '__name__'):
            caller = obj.__class__.__name__

        if hasattr(obj, '__module__'):
            module = obj.__module__
            caller = '%s.%s' % (module, caller)

    with Path(CURRENT_DIR, 'templates', 'technical_404.html').open(encoding='utf-8') as fh:
        t = DEBUG_ENGINE.from_string(fh.read())
    reporter_filter = get_default_exception_reporter_filter()
    c = Context({
        'urlconf': urlconf,
        'root_urlconf': settings.ROOT_URLCONF,
        'request_path': error_url,
        'urlpatterns': tried,
        'resolved': resolved,
        'reason': str(exception),
        'request': request,
        'settings': reporter_filter.get_safe_settings(),
        'raising_view_name': caller,
    })
    return HttpResponseNotFound(t.render(c), content_type='text/html')
[ "def", "technical_404_response", "(", "request", ",", "exception", ")", ":", "try", ":", "error_url", "=", "exception", ".", "args", "[", "0", "]", "[", "'path'", "]", "except", "(", "IndexError", ",", "TypeError", ",", "KeyError", ")", ":", "error_url", "=", "request", ".", "path_info", "[", "1", ":", "]", "# Trim leading slash", "try", ":", "tried", "=", "exception", ".", "args", "[", "0", "]", "[", "'tried'", "]", "except", "(", "IndexError", ",", "TypeError", ",", "KeyError", ")", ":", "resolved", "=", "True", "tried", "=", "request", ".", "resolver_match", ".", "tried", "if", "request", ".", "resolver_match", "else", "None", "else", ":", "resolved", "=", "False", "if", "(", "not", "tried", "or", "(", "# empty URLconf", "request", ".", "path", "==", "'/'", "and", "len", "(", "tried", ")", "==", "1", "and", "# default URLconf", "len", "(", "tried", "[", "0", "]", ")", "==", "1", "and", "getattr", "(", "tried", "[", "0", "]", "[", "0", "]", ",", "'app_name'", ",", "''", ")", "==", "getattr", "(", "tried", "[", "0", "]", "[", "0", "]", ",", "'namespace'", ",", "''", ")", "==", "'admin'", ")", ")", ":", "return", "default_urlconf", "(", "request", ")", "urlconf", "=", "getattr", "(", "request", ",", "'urlconf'", ",", "settings", ".", "ROOT_URLCONF", ")", "if", "isinstance", "(", "urlconf", ",", "types", ".", "ModuleType", ")", ":", "urlconf", "=", "urlconf", ".", "__name__", "caller", "=", "''", "try", ":", "resolver_match", "=", "resolve", "(", "request", ".", "path", ")", "except", "Http404", ":", "pass", "else", ":", "obj", "=", "resolver_match", ".", "func", "if", "hasattr", "(", "obj", ",", "'__name__'", ")", ":", "caller", "=", "obj", ".", "__name__", "elif", "hasattr", "(", "obj", ",", "'__class__'", ")", "and", "hasattr", "(", "obj", ".", "__class__", ",", "'__name__'", ")", ":", "caller", "=", "obj", ".", "__class__", ".", "__name__", "if", "hasattr", "(", "obj", ",", "'__module__'", ")", ":", "module", "=", "obj", ".", "__module__", "caller", "=", "'%s.%s'", "%", "(", "module", ",", "caller", ")", "with", "Path", "(", "CURRENT_DIR", ",", "'templates'", ",", "'technical_404.html'", ")", ".", "open", "(", "encoding", "=", "'utf-8'", ")", "as", "fh", ":", "t", "=", "DEBUG_ENGINE", ".", "from_string", "(", "fh", ".", "read", "(", ")", ")", "reporter_filter", "=", "get_default_exception_reporter_filter", "(", ")", "c", "=", "Context", "(", "{", "'urlconf'", ":", "urlconf", ",", "'root_urlconf'", ":", "settings", ".", "ROOT_URLCONF", ",", "'request_path'", ":", "error_url", ",", "'urlpatterns'", ":", "tried", ",", "'resolved'", ":", "resolved", ",", "'reason'", ":", "str", "(", "exception", ")", ",", "'request'", ":", "request", ",", "'settings'", ":", "reporter_filter", ".", "get_safe_settings", "(", ")", ",", "'raising_view_name'", ":", "caller", ",", "}", ")", "return", "HttpResponseNotFound", "(", "t", ".", "render", "(", "c", ")", ",", "content_type", "=", "'text/html'", ")" ]
[ 491, 0 ]
[ 548, 70 ]
python
en
['en', 'en', 'en']
True
default_urlconf
(request)
Create an empty URLconf 404 error response.
Create an empty URLconf 404 error response.
def default_urlconf(request):
    """Create an empty URLconf 404 error response."""
    with Path(CURRENT_DIR, 'templates', 'default_urlconf.html').open(encoding='utf-8') as fh:
        t = DEBUG_ENGINE.from_string(fh.read())
        c = Context({
            'version': get_docs_version(),
        })

    return HttpResponse(t.render(c), content_type='text/html')
[ "def", "default_urlconf", "(", "request", ")", ":", "with", "Path", "(", "CURRENT_DIR", ",", "'templates'", ",", "'default_urlconf.html'", ")", ".", "open", "(", "encoding", "=", "'utf-8'", ")", "as", "fh", ":", "t", "=", "DEBUG_ENGINE", ".", "from_string", "(", "fh", ".", "read", "(", ")", ")", "c", "=", "Context", "(", "{", "'version'", ":", "get_docs_version", "(", ")", ",", "}", ")", "return", "HttpResponse", "(", "t", ".", "render", "(", "c", ")", ",", "content_type", "=", "'text/html'", ")" ]
[ 551, 0 ]
[ 559, 62 ]
python
en
['en', 'de', 'en']
True
SafeExceptionReporterFilter.cleanse_setting
(self, key, value)
Cleanse an individual setting key/value of sensitive content. If the value is a dictionary, recursively cleanse the keys in that dictionary.
Cleanse an individual setting key/value of sensitive content. If the value is a dictionary, recursively cleanse the keys in that dictionary.
def cleanse_setting(self, key, value):
    """
    Cleanse an individual setting key/value of sensitive content. If the
    value is a dictionary, recursively cleanse the keys in that dictionary.
    """
    try:
        is_sensitive = self.hidden_settings.search(key)
    except TypeError:
        is_sensitive = False

    if is_sensitive:
        cleansed = self.cleansed_substitute
    elif isinstance(value, dict):
        cleansed = {k: self.cleanse_setting(k, v) for k, v in value.items()}
    elif isinstance(value, list):
        cleansed = [self.cleanse_setting('', v) for v in value]
    elif isinstance(value, tuple):
        cleansed = tuple([self.cleanse_setting('', v) for v in value])
    else:
        cleansed = value

    if callable(cleansed):
        cleansed = CallableSettingWrapper(cleansed)

    return cleansed
[ "def", "cleanse_setting", "(", "self", ",", "key", ",", "value", ")", ":", "try", ":", "is_sensitive", "=", "self", ".", "hidden_settings", ".", "search", "(", "key", ")", "except", "TypeError", ":", "is_sensitive", "=", "False", "if", "is_sensitive", ":", "cleansed", "=", "self", ".", "cleansed_substitute", "elif", "isinstance", "(", "value", ",", "dict", ")", ":", "cleansed", "=", "{", "k", ":", "self", ".", "cleanse_setting", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "value", ".", "items", "(", ")", "}", "elif", "isinstance", "(", "value", ",", "list", ")", ":", "cleansed", "=", "[", "self", ".", "cleanse_setting", "(", "''", ",", "v", ")", "for", "v", "in", "value", "]", "elif", "isinstance", "(", "value", ",", "tuple", ")", ":", "cleansed", "=", "tuple", "(", "[", "self", ".", "cleanse_setting", "(", "''", ",", "v", ")", "for", "v", "in", "value", "]", ")", "else", ":", "cleansed", "=", "value", "if", "callable", "(", "cleansed", ")", ":", "cleansed", "=", "CallableSettingWrapper", "(", "cleansed", ")", "return", "cleansed" ]
[ 87, 4 ]
[ 111, 23 ]
python
en
['en', 'error', 'th']
False
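A small usage sketch of the recursive cleansing described above, assuming Django's SafeExceptionReporterFilter in a configured project (the exact hidden-settings pattern and substitute string can vary by Django version):

from django.views.debug import SafeExceptionReporterFilter

reporter_filter = SafeExceptionReporterFilter()
# 'PASSWORD' matches the sensitive-key pattern, so its value is replaced;
# the surrounding dict is cleansed recursively and other keys pass through.
cleansed = reporter_filter.cleanse_setting(
    'DATABASES', {'default': {'NAME': 'app', 'PASSWORD': 'hunter2'}})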
SafeExceptionReporterFilter.get_safe_settings
(self)
Return a dictionary of the settings module with values of sensitive settings replaced with stars (*********).
Return a dictionary of the settings module with values of sensitive settings replaced with stars (*********).
def get_safe_settings(self):
    """
    Return a dictionary of the settings module with values of sensitive
    settings replaced with stars (*********).
    """
    settings_dict = {}
    for k in dir(settings):
        if k.isupper():
            settings_dict[k] = self.cleanse_setting(k, getattr(settings, k))
    return settings_dict
[ "def", "get_safe_settings", "(", "self", ")", ":", "settings_dict", "=", "{", "}", "for", "k", "in", "dir", "(", "settings", ")", ":", "if", "k", ".", "isupper", "(", ")", ":", "settings_dict", "[", "k", "]", "=", "self", ".", "cleanse_setting", "(", "k", ",", "getattr", "(", "settings", ",", "k", ")", ")", "return", "settings_dict" ]
[ 113, 4 ]
[ 122, 28 ]
python
en
['en', 'error', 'th']
False
SafeExceptionReporterFilter.get_safe_request_meta
(self, request)
Return a dictionary of request.META with sensitive values redacted.
Return a dictionary of request.META with sensitive values redacted.
def get_safe_request_meta(self, request):
    """
    Return a dictionary of request.META with sensitive values redacted.
    """
    if not hasattr(request, 'META'):
        return {}
    return {k: self.cleanse_setting(k, v) for k, v in request.META.items()}
[ "def", "get_safe_request_meta", "(", "self", ",", "request", ")", ":", "if", "not", "hasattr", "(", "request", ",", "'META'", ")", ":", "return", "{", "}", "return", "{", "k", ":", "self", ".", "cleanse_setting", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "request", ".", "META", ".", "items", "(", ")", "}" ]
[ 124, 4 ]
[ 130, 79 ]
python
en
['en', 'error', 'th']
False
SafeExceptionReporterFilter.is_active
(self, request)
This filter is to add safety in production environments (i.e. DEBUG is False). If DEBUG is True then your site is not safe anyway. This hook is provided as a convenience to easily activate or deactivate the filter on a per request basis.
This filter is to add safety in production environments (i.e. DEBUG is False). If DEBUG is True then your site is not safe anyway. This hook is provided as a convenience to easily activate or deactivate the filter on a per request basis.
def is_active(self, request):
    """
    This filter is to add safety in production environments (i.e. DEBUG
    is False). If DEBUG is True then your site is not safe anyway.
    This hook is provided as a convenience to easily activate or
    deactivate the filter on a per request basis.
    """
    return settings.DEBUG is False
[ "def", "is_active", "(", "self", ",", "request", ")", ":", "return", "settings", ".", "DEBUG", "is", "False" ]
[ 132, 4 ]
[ 139, 38 ]
python
en
['en', 'error', 'th']
False
SafeExceptionReporterFilter.get_cleansed_multivaluedict
(self, request, multivaluedict)
Replace the keys in a MultiValueDict marked as sensitive with stars. This mitigates leaking sensitive POST parameters if something like request.POST['nonexistent_key'] throws an exception (#21098).
Replace the keys in a MultiValueDict marked as sensitive with stars. This mitigates leaking sensitive POST parameters if something like request.POST['nonexistent_key'] throws an exception (#21098).
def get_cleansed_multivaluedict(self, request, multivaluedict):
    """
    Replace the keys in a MultiValueDict marked as sensitive with stars.
    This mitigates leaking sensitive POST parameters if something like
    request.POST['nonexistent_key'] throws an exception (#21098).
    """
    sensitive_post_parameters = getattr(request, 'sensitive_post_parameters', [])
    if self.is_active(request) and sensitive_post_parameters:
        multivaluedict = multivaluedict.copy()
        for param in sensitive_post_parameters:
            if param in multivaluedict:
                multivaluedict[param] = self.cleansed_substitute
    return multivaluedict
[ "def", "get_cleansed_multivaluedict", "(", "self", ",", "request", ",", "multivaluedict", ")", ":", "sensitive_post_parameters", "=", "getattr", "(", "request", ",", "'sensitive_post_parameters'", ",", "[", "]", ")", "if", "self", ".", "is_active", "(", "request", ")", "and", "sensitive_post_parameters", ":", "multivaluedict", "=", "multivaluedict", ".", "copy", "(", ")", "for", "param", "in", "sensitive_post_parameters", ":", "if", "param", "in", "multivaluedict", ":", "multivaluedict", "[", "param", "]", "=", "self", ".", "cleansed_substitute", "return", "multivaluedict" ]
[ 141, 4 ]
[ 153, 29 ]
python
en
['en', 'error', 'th']
False
SafeExceptionReporterFilter.get_post_parameters
(self, request)
Replace the values of POST parameters marked as sensitive with stars (*********).
Replace the values of POST parameters marked as sensitive with stars (*********).
def get_post_parameters(self, request):
    """
    Replace the values of POST parameters marked as sensitive with
    stars (*********).
    """
    if request is None:
        return {}
    else:
        sensitive_post_parameters = getattr(request, 'sensitive_post_parameters', [])
        if self.is_active(request) and sensitive_post_parameters:
            cleansed = request.POST.copy()
            if sensitive_post_parameters == '__ALL__':
                # Cleanse all parameters.
                for k in cleansed:
                    cleansed[k] = self.cleansed_substitute
                return cleansed
            else:
                # Cleanse only the specified parameters.
                for param in sensitive_post_parameters:
                    if param in cleansed:
                        cleansed[param] = self.cleansed_substitute
                return cleansed
        else:
            return request.POST
[ "def", "get_post_parameters", "(", "self", ",", "request", ")", ":", "if", "request", "is", "None", ":", "return", "{", "}", "else", ":", "sensitive_post_parameters", "=", "getattr", "(", "request", ",", "'sensitive_post_parameters'", ",", "[", "]", ")", "if", "self", ".", "is_active", "(", "request", ")", "and", "sensitive_post_parameters", ":", "cleansed", "=", "request", ".", "POST", ".", "copy", "(", ")", "if", "sensitive_post_parameters", "==", "'__ALL__'", ":", "# Cleanse all parameters.", "for", "k", "in", "cleansed", ":", "cleansed", "[", "k", "]", "=", "self", ".", "cleansed_substitute", "return", "cleansed", "else", ":", "# Cleanse only the specified parameters.", "for", "param", "in", "sensitive_post_parameters", ":", "if", "param", "in", "cleansed", ":", "cleansed", "[", "param", "]", "=", "self", ".", "cleansed_substitute", "return", "cleansed", "else", ":", "return", "request", ".", "POST" ]
[ 155, 4 ]
[ 178, 35 ]
python
en
['en', 'error', 'th']
False
SafeExceptionReporterFilter.get_traceback_frame_variables
(self, request, tb_frame)
Replace the values of variables marked as sensitive with stars (*********).
Replace the values of variables marked as sensitive with stars (*********).
def get_traceback_frame_variables(self, request, tb_frame):
    """
    Replace the values of variables marked as sensitive with
    stars (*********).
    """
    # Loop through the frame's callers to see if the sensitive_variables
    # decorator was used.
    current_frame = tb_frame.f_back
    sensitive_variables = None
    while current_frame is not None:
        if (current_frame.f_code.co_name == 'sensitive_variables_wrapper' and
                'sensitive_variables_wrapper' in current_frame.f_locals):
            # The sensitive_variables decorator was used, so we take note
            # of the sensitive variables' names.
            wrapper = current_frame.f_locals['sensitive_variables_wrapper']
            sensitive_variables = getattr(wrapper, 'sensitive_variables', None)
            break
        current_frame = current_frame.f_back

    cleansed = {}
    if self.is_active(request) and sensitive_variables:
        if sensitive_variables == '__ALL__':
            # Cleanse all variables
            for name in tb_frame.f_locals:
                cleansed[name] = self.cleansed_substitute
        else:
            # Cleanse specified variables
            for name, value in tb_frame.f_locals.items():
                if name in sensitive_variables:
                    value = self.cleansed_substitute
                else:
                    value = self.cleanse_special_types(request, value)
                cleansed[name] = value
    else:
        # Potentially cleanse the request and any MultiValueDicts if they
        # are one of the frame variables.
        for name, value in tb_frame.f_locals.items():
            cleansed[name] = self.cleanse_special_types(request, value)

    if (tb_frame.f_code.co_name == 'sensitive_variables_wrapper' and
            'sensitive_variables_wrapper' in tb_frame.f_locals):
        # For good measure, obfuscate the decorated function's arguments in
        # the sensitive_variables decorator's frame, in case the variables
        # associated with those arguments were meant to be obfuscated from
        # the decorated function's frame.
        cleansed['func_args'] = self.cleansed_substitute
        cleansed['func_kwargs'] = self.cleansed_substitute

    return cleansed.items()
[ "def", "get_traceback_frame_variables", "(", "self", ",", "request", ",", "tb_frame", ")", ":", "# Loop through the frame's callers to see if the sensitive_variables", "# decorator was used.", "current_frame", "=", "tb_frame", ".", "f_back", "sensitive_variables", "=", "None", "while", "current_frame", "is", "not", "None", ":", "if", "(", "current_frame", ".", "f_code", ".", "co_name", "==", "'sensitive_variables_wrapper'", "and", "'sensitive_variables_wrapper'", "in", "current_frame", ".", "f_locals", ")", ":", "# The sensitive_variables decorator was used, so we take note", "# of the sensitive variables' names.", "wrapper", "=", "current_frame", ".", "f_locals", "[", "'sensitive_variables_wrapper'", "]", "sensitive_variables", "=", "getattr", "(", "wrapper", ",", "'sensitive_variables'", ",", "None", ")", "break", "current_frame", "=", "current_frame", ".", "f_back", "cleansed", "=", "{", "}", "if", "self", ".", "is_active", "(", "request", ")", "and", "sensitive_variables", ":", "if", "sensitive_variables", "==", "'__ALL__'", ":", "# Cleanse all variables", "for", "name", "in", "tb_frame", ".", "f_locals", ":", "cleansed", "[", "name", "]", "=", "self", ".", "cleansed_substitute", "else", ":", "# Cleanse specified variables", "for", "name", ",", "value", "in", "tb_frame", ".", "f_locals", ".", "items", "(", ")", ":", "if", "name", "in", "sensitive_variables", ":", "value", "=", "self", ".", "cleansed_substitute", "else", ":", "value", "=", "self", ".", "cleanse_special_types", "(", "request", ",", "value", ")", "cleansed", "[", "name", "]", "=", "value", "else", ":", "# Potentially cleanse the request and any MultiValueDicts if they", "# are one of the frame variables.", "for", "name", ",", "value", "in", "tb_frame", ".", "f_locals", ".", "items", "(", ")", ":", "cleansed", "[", "name", "]", "=", "self", ".", "cleanse_special_types", "(", "request", ",", "value", ")", "if", "(", "tb_frame", ".", "f_code", ".", "co_name", "==", "'sensitive_variables_wrapper'", "and", "'sensitive_variables_wrapper'", "in", "tb_frame", ".", "f_locals", ")", ":", "# For good measure, obfuscate the decorated function's arguments in", "# the sensitive_variables decorator's frame, in case the variables", "# associated with those arguments were meant to be obfuscated from", "# the decorated function's frame.", "cleansed", "[", "'func_args'", "]", "=", "self", ".", "cleansed_substitute", "cleansed", "[", "'func_kwargs'", "]", "=", "self", ".", "cleansed_substitute", "return", "cleansed", ".", "items", "(", ")" ]
[ 194, 4 ]
[ 242, 31 ]
python
en
['en', 'error', 'th']
False
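The sensitive_variables machinery this method looks for is applied with a decorator from django.views.decorators.debug; a brief sketch of how a view would opt in:

from django.views.decorators.debug import sensitive_variables


@sensitive_variables('password', 'api_token')
def process_signup(request, password, api_token):
    # If this raises while the filter is active (DEBUG is False reporting),
    # the two locals above are shown as the cleansed substitute instead of
    # their real values.
    ...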
ExceptionReporter.get_traceback_data
(self)
Return a dictionary containing traceback information.
Return a dictionary containing traceback information.
def get_traceback_data(self):
    """Return a dictionary containing traceback information."""
    if self.exc_type and issubclass(self.exc_type, TemplateDoesNotExist):
        self.template_does_not_exist = True
        self.postmortem = self.exc_value.chain or [self.exc_value]

    frames = self.get_traceback_frames()
    for i, frame in enumerate(frames):
        if 'vars' in frame:
            frame_vars = []
            for k, v in frame['vars']:
                v = pprint(v)
                # Trim large blobs of data
                if len(v) > 4096:
                    v = '%s… <trimmed %d bytes string>' % (v[0:4096], len(v))
                frame_vars.append((k, v))
            frame['vars'] = frame_vars
        frames[i] = frame

    unicode_hint = ''
    if self.exc_type and issubclass(self.exc_type, UnicodeError):
        start = getattr(self.exc_value, 'start', None)
        end = getattr(self.exc_value, 'end', None)
        if start is not None and end is not None:
            unicode_str = self.exc_value.args[1]
            unicode_hint = force_str(
                unicode_str[max(start - 5, 0):min(end + 5, len(unicode_str))],
                'ascii', errors='replace'
            )
    from django import get_version

    if self.request is None:
        user_str = None
    else:
        try:
            user_str = str(self.request.user)
        except Exception:
            # request.user may raise OperationalError if the database is
            # unavailable, for example.
            user_str = '[unable to retrieve the current user]'

    c = {
        'is_email': self.is_email,
        'unicode_hint': unicode_hint,
        'frames': frames,
        'request': self.request,
        'request_meta': self.filter.get_safe_request_meta(self.request),
        'user_str': user_str,
        'filtered_POST_items': list(self.filter.get_post_parameters(self.request).items()),
        'settings': self.filter.get_safe_settings(),
        'sys_executable': sys.executable,
        'sys_version_info': '%d.%d.%d' % sys.version_info[0:3],
        'server_time': timezone.now(),
        'django_version_info': get_version(),
        'sys_path': sys.path,
        'template_info': self.template_info,
        'template_does_not_exist': self.template_does_not_exist,
        'postmortem': self.postmortem,
    }
    if self.request is not None:
        c['request_GET_items'] = self.request.GET.items()
        c['request_FILES_items'] = self.request.FILES.items()
        c['request_COOKIES_items'] = self.request.COOKIES.items()
    # Check whether exception info is available
    if self.exc_type:
        c['exception_type'] = self.exc_type.__name__
    if self.exc_value:
        c['exception_value'] = str(self.exc_value)
    if frames:
        c['lastframe'] = frames[-1]
    return c
[ "def", "get_traceback_data", "(", "self", ")", ":", "if", "self", ".", "exc_type", "and", "issubclass", "(", "self", ".", "exc_type", ",", "TemplateDoesNotExist", ")", ":", "self", ".", "template_does_not_exist", "=", "True", "self", ".", "postmortem", "=", "self", ".", "exc_value", ".", "chain", "or", "[", "self", ".", "exc_value", "]", "frames", "=", "self", ".", "get_traceback_frames", "(", ")", "for", "i", ",", "frame", "in", "enumerate", "(", "frames", ")", ":", "if", "'vars'", "in", "frame", ":", "frame_vars", "=", "[", "]", "for", "k", ",", "v", "in", "frame", "[", "'vars'", "]", ":", "v", "=", "pprint", "(", "v", ")", "# Trim large blobs of data", "if", "len", "(", "v", ")", ">", "4096", ":", "v", "=", "'%s… <trimmed %d bytes string>' %", "(", "[", "0", ":", "4", "0", "96],", " ", "l", "n(v", ")", ")", "", "", "frame_vars", ".", "append", "(", "(", "k", ",", "v", ")", ")", "frame", "[", "'vars'", "]", "=", "frame_vars", "frames", "[", "i", "]", "=", "frame", "unicode_hint", "=", "''", "if", "self", ".", "exc_type", "and", "issubclass", "(", "self", ".", "exc_type", ",", "UnicodeError", ")", ":", "start", "=", "getattr", "(", "self", ".", "exc_value", ",", "'start'", ",", "None", ")", "end", "=", "getattr", "(", "self", ".", "exc_value", ",", "'end'", ",", "None", ")", "if", "start", "is", "not", "None", "and", "end", "is", "not", "None", ":", "unicode_str", "=", "self", ".", "exc_value", ".", "args", "[", "1", "]", "unicode_hint", "=", "force_str", "(", "unicode_str", "[", "max", "(", "start", "-", "5", ",", "0", ")", ":", "min", "(", "end", "+", "5", ",", "len", "(", "unicode_str", ")", ")", "]", ",", "'ascii'", ",", "errors", "=", "'replace'", ")", "from", "django", "import", "get_version", "if", "self", ".", "request", "is", "None", ":", "user_str", "=", "None", "else", ":", "try", ":", "user_str", "=", "str", "(", "self", ".", "request", ".", "user", ")", "except", "Exception", ":", "# request.user may raise OperationalError if the database is", "# unavailable, for example.", "user_str", "=", "'[unable to retrieve the current user]'", "c", "=", "{", "'is_email'", ":", "self", ".", "is_email", ",", "'unicode_hint'", ":", "unicode_hint", ",", "'frames'", ":", "frames", ",", "'request'", ":", "self", ".", "request", ",", "'request_meta'", ":", "self", ".", "filter", ".", "get_safe_request_meta", "(", "self", ".", "request", ")", ",", "'user_str'", ":", "user_str", ",", "'filtered_POST_items'", ":", "list", "(", "self", ".", "filter", ".", "get_post_parameters", "(", "self", ".", "request", ")", ".", "items", "(", ")", ")", ",", "'settings'", ":", "self", ".", "filter", ".", "get_safe_settings", "(", ")", ",", "'sys_executable'", ":", "sys", ".", "executable", ",", "'sys_version_info'", ":", "'%d.%d.%d'", "%", "sys", ".", "version_info", "[", "0", ":", "3", "]", ",", "'server_time'", ":", "timezone", ".", "now", "(", ")", ",", "'django_version_info'", ":", "get_version", "(", ")", ",", "'sys_path'", ":", "sys", ".", "path", ",", "'template_info'", ":", "self", ".", "template_info", ",", "'template_does_not_exist'", ":", "self", ".", "template_does_not_exist", ",", "'postmortem'", ":", "self", ".", "postmortem", ",", "}", "if", "self", ".", "request", "is", "not", "None", ":", "c", "[", "'request_GET_items'", "]", "=", "self", ".", "request", ".", "GET", ".", "items", "(", ")", "c", "[", "'request_FILES_items'", "]", "=", "self", ".", "request", ".", "FILES", ".", "items", "(", ")", "c", "[", "'request_COOKIES_items'", "]", "=", "self", ".", "request", ".", "COOKIES", ".", 
"items", "(", ")", "# Check whether exception info is available", "if", "self", ".", "exc_type", ":", "c", "[", "'exception_type'", "]", "=", "self", ".", "exc_type", ".", "__name__", "if", "self", ".", "exc_value", ":", "c", "[", "'exception_value'", "]", "=", "str", "(", "self", ".", "exc_value", ")", "if", "frames", ":", "c", "[", "'lastframe'", "]", "=", "frames", "[", "-", "1", "]", "return", "c" ]
[ 268, 4 ]
[ 338, 16 ]
python
en
['en', 'en', 'en']
True
ExceptionReporter.get_traceback_html
(self)
Return HTML version of debug 500 HTTP error page.
Return HTML version of debug 500 HTTP error page.
def get_traceback_html(self):
    """Return HTML version of debug 500 HTTP error page."""
    with self.html_template_path.open(encoding='utf-8') as fh:
        t = DEBUG_ENGINE.from_string(fh.read())
    c = Context(self.get_traceback_data(), use_l10n=False)
    return t.render(c)
[ "def", "get_traceback_html", "(", "self", ")", ":", "with", "self", ".", "html_template_path", ".", "open", "(", "encoding", "=", "'utf-8'", ")", "as", "fh", ":", "t", "=", "DEBUG_ENGINE", ".", "from_string", "(", "fh", ".", "read", "(", ")", ")", "c", "=", "Context", "(", "self", ".", "get_traceback_data", "(", ")", ",", "use_l10n", "=", "False", ")", "return", "t", ".", "render", "(", "c", ")" ]
[ 340, 4 ]
[ 345, 26 ]
python
en
['en', 'da', 'en']
True
ExceptionReporter.get_traceback_text
(self)
Return plain text version of debug 500 HTTP error page.
Return plain text version of debug 500 HTTP error page.
def get_traceback_text(self):
    """Return plain text version of debug 500 HTTP error page."""
    with self.text_template_path.open(encoding='utf-8') as fh:
        t = DEBUG_ENGINE.from_string(fh.read())
    c = Context(self.get_traceback_data(), autoescape=False, use_l10n=False)
    return t.render(c)
[ "def", "get_traceback_text", "(", "self", ")", ":", "with", "self", ".", "text_template_path", ".", "open", "(", "encoding", "=", "'utf-8'", ")", "as", "fh", ":", "t", "=", "DEBUG_ENGINE", ".", "from_string", "(", "fh", ".", "read", "(", ")", ")", "c", "=", "Context", "(", "self", ".", "get_traceback_data", "(", ")", ",", "autoescape", "=", "False", ",", "use_l10n", "=", "False", ")", "return", "t", ".", "render", "(", "c", ")" ]
[ 347, 4 ]
[ 352, 26 ]
python
en
['en', 'en', 'en']
True
ExceptionReporter._get_lines_from_file
(self, filename, lineno, context_lines, loader=None, module_name=None)
Return context_lines before and after lineno from file. Return (pre_context_lineno, pre_context, context_line, post_context).
Return context_lines before and after lineno from file. Return (pre_context_lineno, pre_context, context_line, post_context).
def _get_lines_from_file(self, filename, lineno, context_lines, loader=None, module_name=None):
    """
    Return context_lines before and after lineno from file.

    Return (pre_context_lineno, pre_context, context_line, post_context).
    """
    source = self._get_source(filename, loader, module_name)
    if source is None:
        return None, [], None, []

    # If we just read the source from a file, or if the loader did not
    # apply tokenize.detect_encoding to decode the source into a
    # string, then we should do that ourselves.
    if isinstance(source[0], bytes):
        encoding = 'ascii'
        for line in source[:2]:
            # File coding may be specified. Match pattern from PEP-263
            # (https://www.python.org/dev/peps/pep-0263/)
            match = re.search(br'coding[:=]\s*([-\w.]+)', line)
            if match:
                encoding = match[1].decode('ascii')
                break
        source = [str(sline, encoding, 'replace') for sline in source]

    lower_bound = max(0, lineno - context_lines)
    upper_bound = lineno + context_lines

    try:
        pre_context = source[lower_bound:lineno]
        context_line = source[lineno]
        post_context = source[lineno + 1:upper_bound]
    except IndexError:
        return None, [], None, []
    return lower_bound, pre_context, context_line, post_context
[ "def", "_get_lines_from_file", "(", "self", ",", "filename", ",", "lineno", ",", "context_lines", ",", "loader", "=", "None", ",", "module_name", "=", "None", ")", ":", "source", "=", "self", ".", "_get_source", "(", "filename", ",", "loader", ",", "module_name", ")", "if", "source", "is", "None", ":", "return", "None", ",", "[", "]", ",", "None", ",", "[", "]", "# If we just read the source from a file, or if the loader did not", "# apply tokenize.detect_encoding to decode the source into a", "# string, then we should do that ourselves.", "if", "isinstance", "(", "source", "[", "0", "]", ",", "bytes", ")", ":", "encoding", "=", "'ascii'", "for", "line", "in", "source", "[", ":", "2", "]", ":", "# File coding may be specified. Match pattern from PEP-263", "# (https://www.python.org/dev/peps/pep-0263/)", "match", "=", "re", ".", "search", "(", "br'coding[:=]\\s*([-\\w.]+)'", ",", "line", ")", "if", "match", ":", "encoding", "=", "match", "[", "1", "]", ".", "decode", "(", "'ascii'", ")", "break", "source", "=", "[", "str", "(", "sline", ",", "encoding", ",", "'replace'", ")", "for", "sline", "in", "source", "]", "lower_bound", "=", "max", "(", "0", ",", "lineno", "-", "context_lines", ")", "upper_bound", "=", "lineno", "+", "context_lines", "try", ":", "pre_context", "=", "source", "[", "lower_bound", ":", "lineno", "]", "context_line", "=", "source", "[", "lineno", "]", "post_context", "=", "source", "[", "lineno", "+", "1", ":", "upper_bound", "]", "except", "IndexError", ":", "return", "None", ",", "[", "]", ",", "None", ",", "[", "]", "return", "lower_bound", ",", "pre_context", ",", "context_line", ",", "post_context" ]
[ 371, 4 ]
[ 403, 67 ]
python
en
['en', 'error', 'th']
False
hp_directory
(model_dir)
If running a hyperparam job, create subfolder name with trial ID. If not running a hyperparam job, just keep original model_dir.
If running a hyperparam job, create subfolder name with trial ID.
def hp_directory(model_dir):
    """If running a hyperparam job, create subfolder name with trial ID.

    If not running a hyperparam job, just keep original model_dir.
    """
    trial_id = json.loads(os.environ.get('TF_CONFIG', '{}')).get('task', {}).get(
        'trial', '')
    return os.path.join(model_dir, trial_id)
[ "def", "hp_directory", "(", "model_dir", ")", ":", "trial_id", "=", "json", ".", "loads", "(", "os", ".", "environ", ".", "get", "(", "'TF_CONFIG'", ",", "'{}'", ")", ")", ".", "get", "(", "'task'", ",", "{", "}", ")", ".", "get", "(", "'trial'", ",", "''", ")", "return", "os", ".", "path", ".", "join", "(", "model_dir", ",", "trial_id", ")" ]
[ 65, 0 ]
[ 72, 42 ]
python
en
['en', 'en', 'en']
True
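A self-contained illustration of the trial lookup (the helper is repeated here so the snippet runs on its own; TF_CONFIG with a task.trial entry is the shape the hyperparameter-tuning service is assumed to provide):

import json
import os


def hp_directory(model_dir):
    trial_id = json.loads(os.environ.get('TF_CONFIG', '{}')).get('task', {}).get('trial', '')
    return os.path.join(model_dir, trial_id)


# Simulate the environment of a hyperparameter-tuning trial.
os.environ['TF_CONFIG'] = json.dumps({'task': {'trial': '3'}})
print(hp_directory('gs://my-bucket/model'))   # -> gs://my-bucket/model/3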
UserBehavior.on_start
(self)
on_start is called when a Locust start before any task is scheduled
on_start is called when a Locust start before any task is scheduled
def on_start(self):
    """ on_start is called when a Locust start before any task is scheduled """
    pass
[ "def", "on_start", "(", "self", ")", ":", "pass" ]
[ 8, 4 ]
[ 10, 12 ]
python
en
['en', 'en', 'en']
True
UserBehavior.on_stop
(self)
on_stop is called when the TaskSet is stopping
on_stop is called when the TaskSet is stopping
def on_stop(self):
    """ on_stop is called when the TaskSet is stopping """
    pass
[ "def", "on_stop", "(", "self", ")", ":", "pass" ]
[ 12, 4 ]
[ 14, 12 ]
python
en
['en', 'en', 'en']
True
main
()
Create an array of dicts defining questions
Create an array of dicts defining questions
def main():
    """ Create an array of dicts defining questions """
    questions = [
        {
            'quiz': u'gcp',
            'author': u'Nigel',
            'title': u'Which company runs GCP?',
            'answer1': u'Amazon',
            'answer2': u'Google',
            'answer3': u'IBM',
            'answer4': u'Microsoft',
            'correctAnswer': 2,
            'imageUrl': u''
        },
        {
            'quiz': u'gcp',
            'author': u'Nigel',
            'title': u'Which GCP product is NoSQL?',
            'answer1': u'Compute Engine',
            'answer2': u'Datastore',
            'answer3': u'Spanner',
            'answer4': u'BigQuery',
            'correctAnswer': 2,
            'imageUrl': u''
        },
        {
            'quiz': u'gcp',
            'author': u'Nigel',
            'title': u'Which GCP product is an Object Store?',
            'answer1': u'Cloud Storage',
            'answer2': u'Datastore',
            'answer3': u'Big Table',
            'answer4': u'All of the above',
            'correctAnswer': 1,
            'imageUrl': u''
        },
        {
            'quiz': u'places',
            'author': u'Nigel',
            'title': u'What is the capital of France?',
            'answer1': u'Berlin',
            'answer2': u'London',
            'answer3': u'Paris',
            'answer4': u'Stockholm',
            'correctAnswer': 3,
            'imageUrl': u''
        },
    ]

    client = datastore.Client(project_id)

    """ Create and persist an entity for each question """
    for q_info in questions:
        key = client.key('Question')
        q_entity = datastore.Entity(key=key)
        for q_prop, q_val in q_info.items():
            q_entity[q_prop] = q_val
        client.put(q_entity)
[ "def", "main", "(", ")", ":", "questions", "=", "[", "{", "'quiz'", ":", "u'gcp'", ",", "'author'", ":", "u'Nigel'", ",", "'title'", ":", "u'Which company runs GCP?'", ",", "'answer1'", ":", "u'Amazon'", ",", "'answer2'", ":", "u'Google'", ",", "'answer3'", ":", "u'IBM'", ",", "'answer4'", ":", "u'Microsoft'", ",", "'correctAnswer'", ":", "2", ",", "'imageUrl'", ":", "u''", "}", ",", "{", "'quiz'", ":", "u'gcp'", ",", "'author'", ":", "u'Nigel'", ",", "'title'", ":", "u'Which GCP product is NoSQL?'", ",", "'answer1'", ":", "u'Compute Engine'", ",", "'answer2'", ":", "u'Datastore'", ",", "'answer3'", ":", "u'Spanner'", ",", "'answer4'", ":", "u'BigQuery'", ",", "'correctAnswer'", ":", "2", ",", "'imageUrl'", ":", "u''", "}", ",", "{", "'quiz'", ":", "u'gcp'", ",", "'author'", ":", "u'Nigel'", ",", "'title'", ":", "u'Which GCP product is an Object Store?'", ",", "'answer1'", ":", "u'Cloud Storage'", ",", "'answer2'", ":", "u'Datastore'", ",", "'answer3'", ":", "u'Big Table'", ",", "'answer4'", ":", "u'All of the above'", ",", "'correctAnswer'", ":", "1", ",", "'imageUrl'", ":", "u''", "}", ",", "{", "'quiz'", ":", "u'places'", ",", "'author'", ":", "u'Nigel'", ",", "'title'", ":", "u'What is the capital of France?'", ",", "'answer1'", ":", "u'Berlin'", ",", "'answer2'", ":", "u'London'", ",", "'answer3'", ":", "u'Paris'", ",", "'answer4'", ":", "u'Stockholm'", ",", "'correctAnswer'", ":", "3", ",", "'imageUrl'", ":", "u''", "}", ",", "]", "client", "=", "datastore", ".", "Client", "(", "project_id", ")", "\"\"\"\n Create and persist and entity for each question\n \"\"\"", "for", "q_info", "in", "questions", ":", "key", "=", "client", ".", "key", "(", "'Question'", ")", "q_entity", "=", "datastore", ".", "Entity", "(", "key", "=", "key", ")", "for", "q_prop", ",", "q_val", "in", "q_info", ".", "items", "(", ")", ":", "q_entity", "[", "q_prop", "]", "=", "q_val", "client", ".", "put", "(", "q_entity", ")" ]
[ 23, 0 ]
[ 85, 28 ]
python
en
['en', 'error', 'th']
False
UserAgentMixin.user_agent
(self)
The current user agent.
The current user agent.
def user_agent(self):
    """The current user agent."""
    return UserAgent(self.environ)
[ "def", "user_agent", "(", "self", ")", ":", "return", "UserAgent", "(", "self", ".", "environ", ")" ]
[ 11, 4 ]
[ 13, 38 ]
python
en
['en', 'en', 'en']
True
notification_from_headers
(channel, headers)
Parse a notification from the webhook request headers, validate the notification, and return a Notification object. Args: channel: Channel, The channel that the notification is associated with. headers: dict, A dictionary like object that contains the request headers from the webhook HTTP request. Returns: A Notification object. Raises: errors.InvalidNotificationError if the notification is invalid. ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int.
Parse a notification from the webhook request headers, validate the notification, and return a Notification object.
def notification_from_headers(channel, headers):
    """Parse a notification from the webhook request headers, validate
    the notification, and return a Notification object.

    Args:
        channel: Channel, The channel that the notification is associated with.
        headers: dict, A dictionary like object that contains the request headers
            from the webhook HTTP request.

    Returns:
        A Notification object.

    Raises:
        errors.InvalidNotificationError if the notification is invalid.
        ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int.
    """
    headers = _upper_header_keys(headers)
    channel_id = headers[X_GOOG_CHANNEL_ID]
    if channel.id != channel_id:
        raise errors.InvalidNotificationError(
            'Channel id mismatch: %s != %s' % (channel.id, channel_id))
    else:
        message_number = int(headers[X_GOOG_MESSAGE_NUMBER])
        state = headers[X_GOOG_RESOURCE_STATE]
        resource_uri = headers[X_GOOG_RESOURCE_URI]
        resource_id = headers[X_GOOG_RESOURCE_ID]
        return Notification(message_number, state, resource_uri, resource_id)
[ "def", "notification_from_headers", "(", "channel", ",", "headers", ")", ":", "headers", "=", "_upper_header_keys", "(", "headers", ")", "channel_id", "=", "headers", "[", "X_GOOG_CHANNEL_ID", "]", "if", "channel", ".", "id", "!=", "channel_id", ":", "raise", "errors", ".", "InvalidNotificationError", "(", "'Channel id mismatch: %s != %s'", "%", "(", "channel", ".", "id", ",", "channel_id", ")", ")", "else", ":", "message_number", "=", "int", "(", "headers", "[", "X_GOOG_MESSAGE_NUMBER", "]", ")", "state", "=", "headers", "[", "X_GOOG_RESOURCE_STATE", "]", "resource_uri", "=", "headers", "[", "X_GOOG_RESOURCE_URI", "]", "resource_id", "=", "headers", "[", "X_GOOG_RESOURCE_ID", "]", "return", "Notification", "(", "message_number", ",", "state", ",", "resource_uri", ",", "resource_id", ")" ]
[ 234, 0 ]
[ 260, 73 ]
python
en
['en', 'en', 'en']
True
new_webhook_channel
(url, token=None, expiration=None, params=None)
Create a new webhook Channel. Args: url: str, URL to post notifications to. token: str, An arbitrary string associated with the channel that is delivered to the target address with each notification delivered over this channel. expiration: datetime.datetime, A time in the future when the channel should expire. Can also be None if the subscription should use the default expiration. Note that different services may have different limits on how long a subscription lasts. Check the response from the watch() method to see the value the service has set for an expiration time. params: dict, Extra parameters to pass on channel creation. Currently not used for webhook channels.
Create a new webhook Channel.
def new_webhook_channel(url, token=None, expiration=None, params=None):
    """Create a new webhook Channel.

    Args:
        url: str, URL to post notifications to.
        token: str, An arbitrary string associated with the channel that
            is delivered to the target address with each notification delivered
            over this channel.
        expiration: datetime.datetime, A time in the future when the channel
            should expire. Can also be None if the subscription should use the
            default expiration. Note that different services may have different
            limits on how long a subscription lasts. Check the response from the
            watch() method to see the value the service has set for an expiration
            time.
        params: dict, Extra parameters to pass on channel creation. Currently
            not used for webhook channels.
    """
    expiration_ms = 0
    if expiration:
        delta = expiration - EPOCH
        expiration_ms = delta.microseconds/1000 + (
            delta.seconds + delta.days*24*3600)*1000
        if expiration_ms < 0:
            expiration_ms = 0

    return Channel('web_hook', str(uuid.uuid4()),
                   token, url, expiration=expiration_ms,
                   params=params)
[ "def", "new_webhook_channel", "(", "url", ",", "token", "=", "None", ",", "expiration", "=", "None", ",", "params", "=", "None", ")", ":", "expiration_ms", "=", "0", "if", "expiration", ":", "delta", "=", "expiration", "-", "EPOCH", "expiration_ms", "=", "delta", ".", "microseconds", "/", "1000", "+", "(", "delta", ".", "seconds", "+", "delta", ".", "days", "*", "24", "*", "3600", ")", "*", "1000", "if", "expiration_ms", "<", "0", ":", "expiration_ms", "=", "0", "return", "Channel", "(", "'web_hook'", ",", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", ",", "token", ",", "url", ",", "expiration", "=", "expiration_ms", ",", "params", "=", "params", ")" ]
[ 264, 0 ]
[ 291, 33 ]
python
en
['en', 'en', 'en']
True
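A hedged usage sketch, assuming the googleapiclient.channel module from google-api-python-client (the endpoint URL below is illustrative only):

import datetime

from googleapiclient import channel

# The datetime expiration is converted to milliseconds since the epoch
# inside new_webhook_channel, as shown above.
expiry = datetime.datetime.utcnow() + datetime.timedelta(hours=1)
ch = channel.new_webhook_channel(
    'https://example.com/notifications',   # hypothetical receiving endpoint
    token='opaque-state-token',
    expiration=expiry,
)
body = ch.body()   # dict suitable for the body= argument of a watch() call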
Notification.__init__
(self, message_number, state, resource_uri, resource_id)
Notification constructor. Args: message_number: int, The unique id number of this notification. state: str, The state of the resource being monitored. Can be one of "exists", "not_exists", or "sync". resource_uri: str, The address of the resource being monitored. resource_id: str, The identifier of the watched resource.
Notification constructor.
def __init__(self, message_number, state, resource_uri, resource_id):
    """Notification constructor.

    Args:
        message_number: int, The unique id number of this notification.
        state: str, The state of the resource being monitored. Can be one
            of "exists", "not_exists", or "sync".
        resource_uri: str, The address of the resource being monitored.
        resource_id: str, The identifier of the watched resource.
    """
    self.message_number = message_number
    self.state = state
    self.resource_uri = resource_uri
    self.resource_id = resource_id
[ "def", "__init__", "(", "self", ",", "message_number", ",", "state", ",", "resource_uri", ",", "resource_id", ")", ":", "self", ".", "message_number", "=", "message_number", "self", ".", "state", "=", "state", "self", ".", "resource_uri", "=", "resource_uri", "self", ".", "resource_id", "=", "resource_id" ]
[ 117, 2 ]
[ 130, 34 ]
python
en
['en', 'en', 'en']
False
Channel.__init__
(self, type, id, token, address, expiration=None, params=None, resource_id="", resource_uri="")
Create a new Channel. In user code, this Channel constructor will not typically be called manually since there are functions for creating channels for each specific type with a more customized set of arguments to pass. Args: type: str, The type of delivery mechanism used by this channel. For example, 'web_hook'. id: str, A UUID for the channel. token: str, An arbitrary string associated with the channel that is delivered to the target address with each event delivered over this channel. address: str, The address of the receiving entity where events are delivered. Specific to the channel type. expiration: int, The time, in milliseconds from the epoch, when this channel will expire. params: dict, A dictionary of string to string, with additional parameters controlling delivery channel behavior. resource_id: str, An opaque id that identifies the resource that is being watched. Stable across different API versions. resource_uri: str, The canonicalized ID of the watched resource.
Create a new Channel.
def __init__(self, type, id, token, address, expiration=None,
             params=None, resource_id="", resource_uri=""):
    """Create a new Channel.

    In user code, this Channel constructor will not typically be called
    manually since there are functions for creating channels for each specific
    type with a more customized set of arguments to pass.

    Args:
        type: str, The type of delivery mechanism used by this channel. For
            example, 'web_hook'.
        id: str, A UUID for the channel.
        token: str, An arbitrary string associated with the channel that
            is delivered to the target address with each event delivered
            over this channel.
        address: str, The address of the receiving entity where events are
            delivered. Specific to the channel type.
        expiration: int, The time, in milliseconds from the epoch, when this
            channel will expire.
        params: dict, A dictionary of string to string, with additional parameters
            controlling delivery channel behavior.
        resource_id: str, An opaque id that identifies the resource that is
            being watched. Stable across different API versions.
        resource_uri: str, The canonicalized ID of the watched resource.
    """
    self.type = type
    self.id = id
    self.token = token
    self.address = address
    self.expiration = expiration
    self.params = params
    self.resource_id = resource_id
    self.resource_uri = resource_uri
[ "def", "__init__", "(", "self", ",", "type", ",", "id", ",", "token", ",", "address", ",", "expiration", "=", "None", ",", "params", "=", "None", ",", "resource_id", "=", "\"\"", ",", "resource_uri", "=", "\"\"", ")", ":", "self", ".", "type", "=", "type", "self", ".", "id", "=", "id", "self", ".", "token", "=", "token", "self", ".", "address", "=", "address", "self", ".", "expiration", "=", "expiration", "self", ".", "params", "=", "params", "self", ".", "resource_id", "=", "resource_id", "self", ".", "resource_uri", "=", "resource_uri" ]
[ 158, 2 ]
[ 190, 36 ]
python
en
['en', 'en', 'en']
True
Channel.body
(self)
Build a body from the Channel. Constructs a dictionary that's appropriate for passing into watch() methods as the value of body argument. Returns: A dictionary representation of the channel.
Build a body from the Channel.
def body(self):
    """Build a body from the Channel.

    Constructs a dictionary that's appropriate for passing into watch()
    methods as the value of body argument.

    Returns:
        A dictionary representation of the channel.
    """
    result = {
        'id': self.id,
        'token': self.token,
        'type': self.type,
        'address': self.address
    }
    if self.params:
        result['params'] = self.params
    if self.resource_id:
        result['resourceId'] = self.resource_id
    if self.resource_uri:
        result['resourceUri'] = self.resource_uri
    if self.expiration:
        result['expiration'] = self.expiration

    return result
[ "def", "body", "(", "self", ")", ":", "result", "=", "{", "'id'", ":", "self", ".", "id", ",", "'token'", ":", "self", ".", "token", ",", "'type'", ":", "self", ".", "type", ",", "'address'", ":", "self", ".", "address", "}", "if", "self", ".", "params", ":", "result", "[", "'params'", "]", "=", "self", ".", "params", "if", "self", ".", "resource_id", ":", "result", "[", "'resourceId'", "]", "=", "self", ".", "resource_id", "if", "self", ".", "resource_uri", ":", "result", "[", "'resourceUri'", "]", "=", "self", ".", "resource_uri", "if", "self", ".", "expiration", ":", "result", "[", "'expiration'", "]", "=", "self", ".", "expiration", "return", "result" ]
[ 192, 2 ]
[ 216, 17 ]
python
en
['en', 'en', 'en']
True
Channel.update
(self, resp)
Update a channel with information from the response of watch(). When a request is sent to watch() a resource, the response returned from the watch() request is a dictionary with updated channel information, such as the resource_id, which is needed when stopping a subscription. Args: resp: dict, The response from a watch() method.
Update a channel with information from the response of watch().
def update(self, resp):
    """Update a channel with information from the response of watch().

    When a request is sent to watch() a resource, the response returned
    from the watch() request is a dictionary with updated channel information,
    such as the resource_id, which is needed when stopping a subscription.

    Args:
        resp: dict, The response from a watch() method.
    """
    for json_name, param_name in six.iteritems(CHANNEL_PARAMS):
        value = resp.get(json_name)
        if value is not None:
            setattr(self, param_name, value)
[ "def", "update", "(", "self", ",", "resp", ")", ":", "for", "json_name", ",", "param_name", "in", "six", ".", "iteritems", "(", "CHANNEL_PARAMS", ")", ":", "value", "=", "resp", ".", "get", "(", "json_name", ")", "if", "value", "is", "not", "None", ":", "setattr", "(", "self", ",", "param_name", ",", "value", ")" ]
[ 218, 2 ]
[ 231, 40 ]
python
en
['en', 'en', 'en']
True
register_handler
(handler)
Install application-specific FITS image handler. :param handler: Handler object.
Install application-specific FITS image handler.
def register_handler(handler):
    """
    Install application-specific FITS image handler.

    :param handler: Handler object.
    """
    global _handler
    _handler = handler
[ "def", "register_handler", "(", "handler", ")", ":", "global", "_handler", "_handler", "=", "handler" ]
[ 16, 0 ]
[ 23, 22 ]
python
en
['en', 'error', 'th']
False
wrap_text
(text, width=78, initial_indent='', subsequent_indent='', preserve_paragraphs=False)
A helper function that intelligently wraps text. By default, it assumes that it operates on a single paragraph of text but if the `preserve_paragraphs` parameter is provided it will intelligently handle paragraphs (defined by two empty lines). If paragraphs are handled, a paragraph can be prefixed with an empty line containing the ``\\b`` character (``\\x08``) to indicate that no rewrapping should happen in that block. :param text: the text that should be rewrapped. :param width: the maximum width for the text. :param initial_indent: the initial indent that should be placed on the first line as a string. :param subsequent_indent: the indent string that should be placed on each consecutive line. :param preserve_paragraphs: if this flag is set then the wrapping will intelligently handle paragraphs.
A helper function that intelligently wraps text. By default, it assumes that it operates on a single paragraph of text but if the `preserve_paragraphs` parameter is provided it will intelligently handle paragraphs (defined by two empty lines).
def wrap_text(text, width=78, initial_indent='', subsequent_indent='',
              preserve_paragraphs=False):
    """A helper function that intelligently wraps text.  By default, it
    assumes that it operates on a single paragraph of text but if the
    `preserve_paragraphs` parameter is provided it will intelligently
    handle paragraphs (defined by two empty lines).

    If paragraphs are handled, a paragraph can be prefixed with an empty
    line containing the ``\\b`` character (``\\x08``) to indicate that
    no rewrapping should happen in that block.

    :param text: the text that should be rewrapped.
    :param width: the maximum width for the text.
    :param initial_indent: the initial indent that should be placed on the
                           first line as a string.
    :param subsequent_indent: the indent string that should be placed on
                              each consecutive line.
    :param preserve_paragraphs: if this flag is set then the wrapping will
                                intelligently handle paragraphs.
    """
    from ._textwrap import TextWrapper
    text = text.expandtabs()
    wrapper = TextWrapper(width, initial_indent=initial_indent,
                          subsequent_indent=subsequent_indent,
                          replace_whitespace=False)
    if not preserve_paragraphs:
        return wrapper.fill(text)

    p = []
    buf = []
    indent = None

    def _flush_par():
        if not buf:
            return
        if buf[0].strip() == '\b':
            p.append((indent or 0, True, '\n'.join(buf[1:])))
        else:
            p.append((indent or 0, False, ' '.join(buf)))
        del buf[:]

    for line in text.splitlines():
        if not line:
            _flush_par()
            indent = None
        else:
            if indent is None:
                orig_len = term_len(line)
                line = line.lstrip()
                indent = orig_len - term_len(line)
            buf.append(line)
    _flush_par()

    rv = []
    for indent, raw, text in p:
        with wrapper.extra_indent(' ' * indent):
            if raw:
                rv.append(wrapper.indent_only(text))
            else:
                rv.append(wrapper.fill(text))

    return '\n\n'.join(rv)
[ "def", "wrap_text", "(", "text", ",", "width", "=", "78", ",", "initial_indent", "=", "''", ",", "subsequent_indent", "=", "''", ",", "preserve_paragraphs", "=", "False", ")", ":", "from", ".", "_textwrap", "import", "TextWrapper", "text", "=", "text", ".", "expandtabs", "(", ")", "wrapper", "=", "TextWrapper", "(", "width", ",", "initial_indent", "=", "initial_indent", ",", "subsequent_indent", "=", "subsequent_indent", ",", "replace_whitespace", "=", "False", ")", "if", "not", "preserve_paragraphs", ":", "return", "wrapper", ".", "fill", "(", "text", ")", "p", "=", "[", "]", "buf", "=", "[", "]", "indent", "=", "None", "def", "_flush_par", "(", ")", ":", "if", "not", "buf", ":", "return", "if", "buf", "[", "0", "]", ".", "strip", "(", ")", "==", "'\\b'", ":", "p", ".", "append", "(", "(", "indent", "or", "0", ",", "True", ",", "'\\n'", ".", "join", "(", "buf", "[", "1", ":", "]", ")", ")", ")", "else", ":", "p", ".", "append", "(", "(", "indent", "or", "0", ",", "False", ",", "' '", ".", "join", "(", "buf", ")", ")", ")", "del", "buf", "[", ":", "]", "for", "line", "in", "text", ".", "splitlines", "(", ")", ":", "if", "not", "line", ":", "_flush_par", "(", ")", "indent", "=", "None", "else", ":", "if", "indent", "is", "None", ":", "orig_len", "=", "term_len", "(", "line", ")", "line", "=", "line", ".", "lstrip", "(", ")", "indent", "=", "orig_len", "-", "term_len", "(", "line", ")", "buf", ".", "append", "(", "line", ")", "_flush_par", "(", ")", "rv", "=", "[", "]", "for", "indent", ",", "raw", ",", "text", "in", "p", ":", "with", "wrapper", ".", "extra_indent", "(", "' '", "*", "indent", ")", ":", "if", "raw", ":", "rv", ".", "append", "(", "wrapper", ".", "indent_only", "(", "text", ")", ")", "else", ":", "rv", ".", "append", "(", "wrapper", ".", "fill", "(", "text", ")", ")", "return", "'\\n\\n'", ".", "join", "(", "rv", ")" ]
[ 24, 0 ]
[ 85, 26 ]
python
en
['en', 'en', 'en']
True
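A quick sketch of the paragraph handling described in the docstring, assuming Click's click.formatting module (details may differ slightly between Click versions):

from click.formatting import wrap_text

text = (
    "This first paragraph gets rewrapped to the requested width like "
    "ordinary prose.\n"
    "\n"
    "\b\n"
    "this block\n"
    "    keeps its own line breaks\n"
)
# The first paragraph is refilled at width 40; the block after the \b
# marker is passed through with its original line breaks preserved.
print(wrap_text(text, width=40, preserve_paragraphs=True))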
join_options
(options)
Given a list of option strings this joins them in the most appropriate way and returns them in the form ``(formatted_string, any_prefix_is_slash)`` where the second item in the tuple is a flag that indicates if any of the option prefixes was a slash.
Given a list of option strings this joins them in the most appropriate way and returns them in the form ``(formatted_string, any_prefix_is_slash)`` where the second item in the tuple is a flag that indicates if any of the option prefixes was a slash.
def join_options(options):
    """Given a list of option strings this joins them in the most appropriate
    way and returns them in the form ``(formatted_string,
    any_prefix_is_slash)`` where the second item in the tuple is a flag that
    indicates if any of the option prefixes was a slash.
    """
    rv = []
    any_prefix_is_slash = False
    for opt in options:
        prefix = split_opt(opt)[0]
        if prefix == '/':
            any_prefix_is_slash = True
        rv.append((len(prefix), opt))

    rv.sort(key=lambda x: x[0])

    rv = ', '.join(x[1] for x in rv)
    return rv, any_prefix_is_slash
[ "def", "join_options", "(", "options", ")", ":", "rv", "=", "[", "]", "any_prefix_is_slash", "=", "False", "for", "opt", "in", "options", ":", "prefix", "=", "split_opt", "(", "opt", ")", "[", "0", "]", "if", "prefix", "==", "'/'", ":", "any_prefix_is_slash", "=", "True", "rv", ".", "append", "(", "(", "len", "(", "prefix", ")", ",", "opt", ")", ")", "rv", ".", "sort", "(", "key", "=", "lambda", "x", ":", "x", "[", "0", "]", ")", "rv", "=", "', '", ".", "join", "(", "x", "[", "1", "]", "for", "x", "in", "rv", ")", "return", "rv", ",", "any_prefix_is_slash" ]
[ 238, 0 ]
[ 255, 34 ]
python
en
['en', 'en', 'en']
True
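A short illustrative call, assuming join_options (and the split_opt helper it relies on) behave as in the Click version this record was extracted from: options sort by prefix length, and a '/'-prefixed option flips the returned flag.

# Sketch only: assumes this join_options comes from click.formatting.
from click.formatting import join_options

formatted, any_slash = join_options(['--help', '-h'])
print(formatted)    # '-h, --help'  (shorter prefix sorts first)
print(any_slash)    # False

formatted, any_slash = join_options(['/debug', '-d'])
print(any_slash)    # True -- at least one option used a '/' prefix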
HelpFormatter.write
(self, string)
Writes a unicode string into the internal buffer.
Writes a unicode string into the internal buffer.
def write(self, string): """Writes a unicode string into the internal buffer.""" self.buffer.append(string)
[ "def", "write", "(", "self", ",", "string", ")", ":", "self", ".", "buffer", ".", "append", "(", "string", ")" ]
[ 112, 4 ]
[ 114, 34 ]
python
en
['en', 'en', 'en']
True
HelpFormatter.indent
(self)
Increases the indentation.
Increases the indentation.
def indent(self): """Increases the indentation.""" self.current_indent += self.indent_increment
[ "def", "indent", "(", "self", ")", ":", "self", ".", "current_indent", "+=", "self", ".", "indent_increment" ]
[ 116, 4 ]
[ 118, 52 ]
python
en
['en', 'en', 'en']
True
HelpFormatter.dedent
(self)
Decreases the indentation.
Decreases the indentation.
def dedent(self): """Decreases the indentation.""" self.current_indent -= self.indent_increment
[ "def", "dedent", "(", "self", ")", ":", "self", ".", "current_indent", "-=", "self", ".", "indent_increment" ]
[ 120, 4 ]
[ 122, 52 ]
python
en
['en', 'en', 'en']
True
HelpFormatter.write_usage
(self, prog, args='', prefix='Usage: ')
Writes a usage line into the buffer. :param prog: the program name. :param args: whitespace separated list of arguments. :param prefix: the prefix for the first line.
Writes a usage line into the buffer.
def write_usage(self, prog, args='', prefix='Usage: '): """Writes a usage line into the buffer. :param prog: the program name. :param args: whitespace separated list of arguments. :param prefix: the prefix for the first line. """ usage_prefix = '%*s%s ' % (self.current_indent, prefix, prog) text_width = self.width - self.current_indent if text_width >= (term_len(usage_prefix) + 20): # The arguments will fit to the right of the prefix. indent = ' ' * term_len(usage_prefix) self.write(wrap_text(args, text_width, initial_indent=usage_prefix, subsequent_indent=indent)) else: # The prefix is too long, put the arguments on the next line. self.write(usage_prefix) self.write('\n') indent = ' ' * (max(self.current_indent, term_len(prefix)) + 4) self.write(wrap_text(args, text_width, initial_indent=indent, subsequent_indent=indent)) self.write('\n')
[ "def", "write_usage", "(", "self", ",", "prog", ",", "args", "=", "''", ",", "prefix", "=", "'Usage: '", ")", ":", "usage_prefix", "=", "'%*s%s '", "%", "(", "self", ".", "current_indent", ",", "prefix", ",", "prog", ")", "text_width", "=", "self", ".", "width", "-", "self", ".", "current_indent", "if", "text_width", ">=", "(", "term_len", "(", "usage_prefix", ")", "+", "20", ")", ":", "# The arguments will fit to the right of the prefix.", "indent", "=", "' '", "*", "term_len", "(", "usage_prefix", ")", "self", ".", "write", "(", "wrap_text", "(", "args", ",", "text_width", ",", "initial_indent", "=", "usage_prefix", ",", "subsequent_indent", "=", "indent", ")", ")", "else", ":", "# The prefix is too long, put the arguments on the next line.", "self", ".", "write", "(", "usage_prefix", ")", "self", ".", "write", "(", "'\\n'", ")", "indent", "=", "' '", "*", "(", "max", "(", "self", ".", "current_indent", ",", "term_len", "(", "prefix", ")", ")", "+", "4", ")", "self", ".", "write", "(", "wrap_text", "(", "args", ",", "text_width", ",", "initial_indent", "=", "indent", ",", "subsequent_indent", "=", "indent", ")", ")", "self", ".", "write", "(", "'\\n'", ")" ]
[ 124, 4 ]
[ 149, 24 ]
python
en
['en', 'haw', 'en']
True
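A hedged example of write_usage in action, assuming the class in these records is click.formatting.HelpFormatter; when the prefix plus program name would leave fewer than 20 columns for the arguments, they wrap onto the next line instead.

# Sketch only: assumes HelpFormatter is click.formatting.HelpFormatter.
from click.formatting import HelpFormatter

formatter = HelpFormatter(width=60)
formatter.write_usage('mytool', '[OPTIONS] COMMAND [ARGS]...')
print(formatter.getvalue())
# Typically renders:
# Usage: mytool [OPTIONS] COMMAND [ARGS]...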
HelpFormatter.write_heading
(self, heading)
Writes a heading into the buffer.
Writes a heading into the buffer.
def write_heading(self, heading): """Writes a heading into the buffer.""" self.write('%*s%s:\n' % (self.current_indent, '', heading))
[ "def", "write_heading", "(", "self", ",", "heading", ")", ":", "self", ".", "write", "(", "'%*s%s:\\n'", "%", "(", "self", ".", "current_indent", ",", "''", ",", "heading", ")", ")" ]
[ 151, 4 ]
[ 153, 67 ]
python
en
['en', 'en', 'en']
True
HelpFormatter.write_paragraph
(self)
Writes a paragraph into the buffer.
Writes a paragraph into the buffer.
def write_paragraph(self): """Writes a paragraph into the buffer.""" if self.buffer: self.write('\n')
[ "def", "write_paragraph", "(", "self", ")", ":", "if", "self", ".", "buffer", ":", "self", ".", "write", "(", "'\\n'", ")" ]
[ 155, 4 ]
[ 158, 28 ]
python
en
['en', 'en', 'en']
True
HelpFormatter.write_text
(self, text)
Writes re-indented text into the buffer. This rewraps and preserves paragraphs.
Writes re-indented text into the buffer. This rewraps and preserves paragraphs.
def write_text(self, text): """Writes re-indented text into the buffer. This rewraps and preserves paragraphs. """ text_width = max(self.width - self.current_indent, 11) indent = ' ' * self.current_indent self.write(wrap_text(text, text_width, initial_indent=indent, subsequent_indent=indent, preserve_paragraphs=True)) self.write('\n')
[ "def", "write_text", "(", "self", ",", "text", ")", ":", "text_width", "=", "max", "(", "self", ".", "width", "-", "self", ".", "current_indent", ",", "11", ")", "indent", "=", "' '", "*", "self", ".", "current_indent", "self", ".", "write", "(", "wrap_text", "(", "text", ",", "text_width", ",", "initial_indent", "=", "indent", ",", "subsequent_indent", "=", "indent", ",", "preserve_paragraphs", "=", "True", ")", ")", "self", ".", "write", "(", "'\\n'", ")" ]
[ 160, 4 ]
[ 170, 24 ]
python
en
['en', 'en', 'en']
True
HelpFormatter.write_dl
(self, rows, col_max=30, col_spacing=2)
Writes a definition list into the buffer. This is how options and commands are usually formatted. :param rows: a list of two item tuples for the terms and values. :param col_max: the maximum width of the first column. :param col_spacing: the number of spaces between the first and second column.
Writes a definition list into the buffer. This is how options and commands are usually formatted.
def write_dl(self, rows, col_max=30, col_spacing=2): """Writes a definition list into the buffer. This is how options and commands are usually formatted. :param rows: a list of two item tuples for the terms and values. :param col_max: the maximum width of the first column. :param col_spacing: the number of spaces between the first and second column. """ rows = list(rows) widths = measure_table(rows) if len(widths) != 2: raise TypeError('Expected two columns for definition list') first_col = min(widths[0], col_max) + col_spacing for first, second in iter_rows(rows, len(widths)): self.write('%*s%s' % (self.current_indent, '', first)) if not second: self.write('\n') continue if term_len(first) <= first_col - col_spacing: self.write(' ' * (first_col - term_len(first))) else: self.write('\n') self.write(' ' * (first_col + self.current_indent)) text_width = max(self.width - first_col - 2, 10) lines = iter(wrap_text(second, text_width).splitlines()) if lines: self.write(next(lines) + '\n') for line in lines: self.write('%*s%s\n' % ( first_col + self.current_indent, '', line)) else: self.write('\n')
[ "def", "write_dl", "(", "self", ",", "rows", ",", "col_max", "=", "30", ",", "col_spacing", "=", "2", ")", ":", "rows", "=", "list", "(", "rows", ")", "widths", "=", "measure_table", "(", "rows", ")", "if", "len", "(", "widths", ")", "!=", "2", ":", "raise", "TypeError", "(", "'Expected two columns for definition list'", ")", "first_col", "=", "min", "(", "widths", "[", "0", "]", ",", "col_max", ")", "+", "col_spacing", "for", "first", ",", "second", "in", "iter_rows", "(", "rows", ",", "len", "(", "widths", ")", ")", ":", "self", ".", "write", "(", "'%*s%s'", "%", "(", "self", ".", "current_indent", ",", "''", ",", "first", ")", ")", "if", "not", "second", ":", "self", ".", "write", "(", "'\\n'", ")", "continue", "if", "term_len", "(", "first", ")", "<=", "first_col", "-", "col_spacing", ":", "self", ".", "write", "(", "' '", "*", "(", "first_col", "-", "term_len", "(", "first", ")", ")", ")", "else", ":", "self", ".", "write", "(", "'\\n'", ")", "self", ".", "write", "(", "' '", "*", "(", "first_col", "+", "self", ".", "current_indent", ")", ")", "text_width", "=", "max", "(", "self", ".", "width", "-", "first_col", "-", "2", ",", "10", ")", "lines", "=", "iter", "(", "wrap_text", "(", "second", ",", "text_width", ")", ".", "splitlines", "(", ")", ")", "if", "lines", ":", "self", ".", "write", "(", "next", "(", "lines", ")", "+", "'\\n'", ")", "for", "line", "in", "lines", ":", "self", ".", "write", "(", "'%*s%s\\n'", "%", "(", "first_col", "+", "self", ".", "current_indent", ",", "''", ",", "line", ")", ")", "else", ":", "self", ".", "write", "(", "'\\n'", ")" ]
[ 172, 4 ]
[ 207, 32 ]
python
en
['en', 'en', 'en']
True
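A small sketch of write_dl, again assuming click.formatting.HelpFormatter; this is the routine Click's option and command listings go through, and each row is a (term, definition) pair.

# Sketch only: assumes HelpFormatter is click.formatting.HelpFormatter.
from click.formatting import HelpFormatter

formatter = HelpFormatter(width=60)
formatter.write_dl([
    ('-v, --verbose', 'Enable verbose output.'),
    ('--config PATH', 'Path to the configuration file.'),
])
print(formatter.getvalue())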
HelpFormatter.section
(self, name)
Helpful context manager that writes a paragraph, a heading, and the indents. :param name: the section name that is written as heading.
Helpful context manager that writes a paragraph, a heading, and the indents.
def section(self, name): """Helpful context manager that writes a paragraph, a heading, and the indents. :param name: the section name that is written as heading. """ self.write_paragraph() self.write_heading(name) self.indent() try: yield finally: self.dedent()
[ "def", "section", "(", "self", ",", "name", ")", ":", "self", ".", "write_paragraph", "(", ")", "self", ".", "write_heading", "(", "name", ")", "self", ".", "indent", "(", ")", "try", ":", "yield", "finally", ":", "self", ".", "dedent", "(", ")" ]
[ 210, 4 ]
[ 222, 25 ]
python
en
['en', 'en', 'en']
True
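The section and indentation helpers are context managers (in Click they are decorated with contextlib.contextmanager); a hedged sketch of how the formatter pieces above usually compose.

# Sketch only: assumes HelpFormatter is click.formatting.HelpFormatter,
# whose section()/indentation() methods are @contextmanager-decorated.
from click.formatting import HelpFormatter

formatter = HelpFormatter(width=60)
formatter.write_usage('mytool', '[OPTIONS]')
with formatter.section('Description'):
    formatter.write_text('Does one small thing, reasonably well.')
print(formatter.getvalue())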
HelpFormatter.indentation
(self)
A context manager that increases the indentation.
A context manager that increases the indentation.
def indentation(self): """A context manager that increases the indentation.""" self.indent() try: yield finally: self.dedent()
[ "def", "indentation", "(", "self", ")", ":", "self", ".", "indent", "(", ")", "try", ":", "yield", "finally", ":", "self", ".", "dedent", "(", ")" ]
[ 225, 4 ]
[ 231, 25 ]
python
en
['en', 'en', 'en']
True
HelpFormatter.getvalue
(self)
Returns the buffer contents.
Returns the buffer contents.
def getvalue(self): """Returns the buffer contents.""" return ''.join(self.buffer)
[ "def", "getvalue", "(", "self", ")", ":", "return", "''", ".", "join", "(", "self", ".", "buffer", ")" ]
[ 233, 4 ]
[ 235, 35 ]
python
en
['en', 'en', 'en']
True
getprime
(nbits, poolsize)
Returns a prime number that can be stored in 'nbits' bits. Works in multiple threads at the same time. >>> p = getprime(128, 3) >>> rsa.prime.is_prime(p-1) False >>> rsa.prime.is_prime(p) True >>> rsa.prime.is_prime(p+1) False >>> from rsa import common >>> common.bit_size(p) == 128 True
Returns a prime number that can be stored in 'nbits' bits.
def getprime(nbits, poolsize): """Returns a prime number that can be stored in 'nbits' bits. Works in multiple threads at the same time. >>> p = getprime(128, 3) >>> rsa.prime.is_prime(p-1) False >>> rsa.prime.is_prime(p) True >>> rsa.prime.is_prime(p+1) False >>> from rsa import common >>> common.bit_size(p) == 128 True """ (pipe_recv, pipe_send) = mp.Pipe(duplex=False) # Create processes try: procs = [mp.Process(target=_find_prime, args=(nbits, pipe_send)) for _ in range(poolsize)] # Start processes for p in procs: p.start() result = pipe_recv.recv() finally: pipe_recv.close() pipe_send.close() # Terminate processes for p in procs: p.terminate() return result
[ "def", "getprime", "(", "nbits", ",", "poolsize", ")", ":", "(", "pipe_recv", ",", "pipe_send", ")", "=", "mp", ".", "Pipe", "(", "duplex", "=", "False", ")", "# Create processes", "try", ":", "procs", "=", "[", "mp", ".", "Process", "(", "target", "=", "_find_prime", ",", "args", "=", "(", "nbits", ",", "pipe_send", ")", ")", "for", "_", "in", "range", "(", "poolsize", ")", "]", "# Start processes", "for", "p", "in", "procs", ":", "p", ".", "start", "(", ")", "result", "=", "pipe_recv", ".", "recv", "(", ")", "finally", ":", "pipe_recv", ".", "close", "(", ")", "pipe_send", ".", "close", "(", ")", "# Terminate processes", "for", "p", "in", "procs", ":", "p", ".", "terminate", "(", ")", "return", "result" ]
[ 45, 0 ]
[ 83, 17 ]
python
en
['en', 'en', 'en']
True
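Although the docstring above says "threads", the function actually spawns poolsize worker processes via multiprocessing and takes whichever prime arrives first on the pipe, terminating the rest. The _find_prime worker is not included in this record; a hypothetical equivalent (the real rsa.parallel worker may differ) would look roughly like this.

# Hypothetical worker for the getprime() record above; the real
# rsa.parallel._find_prime may differ, and a production implementation
# would draw candidates from a cryptographically secure source.
import random

import rsa.prime


def _find_prime(nbits, pipe):
    # Draw odd nbits-sized candidates until one passes the primality test,
    # then send it back over the pipe and exit.
    while True:
        candidate = random.getrandbits(nbits) | 1 | (1 << (nbits - 1))
        if rsa.prime.is_prime(candidate):
            pipe.send(candidate)
            return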
get_capability_token
()
Respond to incoming requests.
Respond to incoming requests.
def get_capability_token(): """Respond to incoming requests.""" # Find these values at twilio.com/console # To set up environmental variables, see http://twil.io/secure account_sid = os.environ['TWILIO_ACCOUNT_SID'] auth_token = os.environ['TWILIO_AUTH_TOKEN'] capability = ClientCapabilityToken(account_sid, auth_token) capability.allow_client_incoming("joey") token = capability.to_jwt() return Response(token, mimetype='application/jwt')
[ "def", "get_capability_token", "(", ")", ":", "# Find these values at twilio.com/console", "# To set up environmental variables, see http://twil.io/secure", "account_sid", "=", "os", ".", "environ", "[", "'TWILIO_ACCOUNT_SID'", "]", "auth_token", "=", "os", ".", "environ", "[", "'TWILIO_AUTH_TOKEN'", "]", "capability", "=", "ClientCapabilityToken", "(", "account_sid", ",", "auth_token", ")", "capability", ".", "allow_client_incoming", "(", "\"joey\"", ")", "token", "=", "capability", ".", "to_jwt", "(", ")", "return", "Response", "(", "token", ",", "mimetype", "=", "'application/jwt'", ")" ]
[ 8, 0 ]
[ 21, 54 ]
python
en
['en', 'en', 'en']
True
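This record is a Flask view from a Twilio sample app; a hedged sketch of the surrounding wiring it implies. The route path and client name are illustrative assumptions, not taken from the record.

# Sketch only: illustrative Flask wiring around the view above.
import os

from flask import Flask, Response
from twilio.jwt.client import ClientCapabilityToken

app = Flask(__name__)


@app.route('/capability-token', methods=['GET'])
def get_capability_token():
    """Respond to incoming requests."""
    # Credentials come from the environment, as in the record above.
    account_sid = os.environ['TWILIO_ACCOUNT_SID']
    auth_token = os.environ['TWILIO_AUTH_TOKEN']

    capability = ClientCapabilityToken(account_sid, auth_token)
    capability.allow_client_incoming("joey")
    token = capability.to_jwt()

    return Response(token, mimetype='application/jwt')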
upload._load_password_from_keyring
(self)
Attempt to load password from keyring. Suppress Exceptions.
Attempt to load password from keyring. Suppress Exceptions.
def _load_password_from_keyring(self): """ Attempt to load password from keyring. Suppress Exceptions. """ try: keyring = __import__('keyring') return keyring.get_password(self.repository, self.username) except Exception: pass
[ "def", "_load_password_from_keyring", "(", "self", ")", ":", "try", ":", "keyring", "=", "__import__", "(", "'keyring'", ")", "return", "keyring", ".", "get_password", "(", "self", ".", "repository", ",", "self", ".", "username", ")", "except", "Exception", ":", "pass" ]
[ 24, 4 ]
[ 32, 16 ]
python
en
['en', 'error', 'th']
False
upload._prompt_for_password
(self)
Prompt for a password on the tty. Suppress Exceptions.
Prompt for a password on the tty. Suppress Exceptions.
def _prompt_for_password(self): """ Prompt for a password on the tty. Suppress Exceptions. """ try: return getpass.getpass() except (Exception, KeyboardInterrupt): pass
[ "def", "_prompt_for_password", "(", "self", ")", ":", "try", ":", "return", "getpass", ".", "getpass", "(", ")", "except", "(", "Exception", ",", "KeyboardInterrupt", ")", ":", "pass" ]
[ 34, 4 ]
[ 41, 16 ]
python
en
['en', 'error', 'th']
False
get_voice_twiml
()
Respond to incoming calls with a simple text message.
Respond to incoming calls with a simple text message.
def get_voice_twiml(): """Respond to incoming calls with a simple text message.""" resp = VoiceResponse() resp.say("Thanks for calling!") return Response(str(resp), mimetype='text/xml')
[ "def", "get_voice_twiml", "(", ")", ":", "resp", "=", "VoiceResponse", "(", ")", "resp", ".", "say", "(", "\"Thanks for calling!\"", ")", "return", "Response", "(", "str", "(", "resp", ")", ",", "mimetype", "=", "'text/xml'", ")" ]
[ 7, 0 ]
[ 13, 51 ]
python
en
['en', 'en', 'en']
True
get_supported_platform
()
Return this platform's maximum compatible version. distutils.util.get_platform() normally reports the minimum version of Mac OS X that would be required to *use* extensions produced by distutils. But what we want when checking compatibility is to know the version of Mac OS X that we are *running*. To allow usage of packages that explicitly require a newer version of Mac OS X, we must also know the current version of the OS. If this condition occurs for any other platform with a version in its platform strings, this function should be extended accordingly.
Return this platform's maximum compatible version.
def get_supported_platform(): """Return this platform's maximum compatible version. distutils.util.get_platform() normally reports the minimum version of Mac OS X that would be required to *use* extensions produced by distutils. But what we want when checking compatibility is to know the version of Mac OS X that we are *running*. To allow usage of packages that explicitly require a newer version of Mac OS X, we must also know the current version of the OS. If this condition occurs for any other platform with a version in its platform strings, this function should be extended accordingly. """ plat = get_build_platform() m = macosVersionString.match(plat) if m is not None and sys.platform == "darwin": try: plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) except ValueError: # not Mac OS X pass return plat
[ "def", "get_supported_platform", "(", ")", ":", "plat", "=", "get_build_platform", "(", ")", "m", "=", "macosVersionString", ".", "match", "(", "plat", ")", "if", "m", "is", "not", "None", "and", "sys", ".", "platform", "==", "\"darwin\"", ":", "try", ":", "plat", "=", "'macosx-%s-%s'", "%", "(", "'.'", ".", "join", "(", "_macosx_vers", "(", ")", "[", ":", "2", "]", ")", ",", "m", ".", "group", "(", "3", ")", ")", "except", "ValueError", ":", "# not Mac OS X", "pass", "return", "plat" ]
[ 176, 0 ]
[ 197, 15 ]
python
en
['en', 'la', 'en']
True
register_loader_type
(loader_type, provider_factory)
Register `provider_factory` to make providers for `loader_type` `loader_type` is the type or class of a PEP 302 ``module.__loader__``, and `provider_factory` is a function that, passed a *module* object, returns an ``IResourceProvider`` for that module.
Register `provider_factory` to make providers for `loader_type`
def register_loader_type(loader_type, provider_factory): """Register `provider_factory` to make providers for `loader_type` `loader_type` is the type or class of a PEP 302 ``module.__loader__``, and `provider_factory` is a function that, passed a *module* object, returns an ``IResourceProvider`` for that module. """ _provider_factories[loader_type] = provider_factory
[ "def", "register_loader_type", "(", "loader_type", ",", "provider_factory", ")", ":", "_provider_factories", "[", "loader_type", "]", "=", "provider_factory" ]
[ 343, 0 ]
[ 350, 55 ]
python
en
['en', 'no', 'en']
True
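A hedged sketch of how register_loader_type is meant to be used: registering a provider factory for a custom loader type so pkg_resources can serve resources for modules imported through it. MyLoader and MyResourceProvider are hypothetical names used only to show the pattern.

# Sketch only: MyLoader / MyResourceProvider are hypothetical illustrations,
# not part of pkg_resources.
from pkg_resources import DefaultProvider, register_loader_type


class MyLoader:
    """A stand-in for a PEP 302 loader whose modules we want to support."""


class MyResourceProvider(DefaultProvider):
    """Resource provider constructed from a module imported via MyLoader."""


# After this call, pkg_resources.get_provider() will wrap any module whose
# __loader__ is a MyLoader instance in a MyResourceProvider.
register_loader_type(MyLoader, MyResourceProvider)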
get_provider
(moduleOrReq)
Return an IResourceProvider for the named module or requirement
Return an IResourceProvider for the named module or requirement
def get_provider(moduleOrReq): """Return an IResourceProvider for the named module or requirement""" if isinstance(moduleOrReq, Requirement): return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] try: module = sys.modules[moduleOrReq] except KeyError: __import__(moduleOrReq) module = sys.modules[moduleOrReq] loader = getattr(module, '__loader__', None) return _find_adapter(_provider_factories, loader)(module)
[ "def", "get_provider", "(", "moduleOrReq", ")", ":", "if", "isinstance", "(", "moduleOrReq", ",", "Requirement", ")", ":", "return", "working_set", ".", "find", "(", "moduleOrReq", ")", "or", "require", "(", "str", "(", "moduleOrReq", ")", ")", "[", "0", "]", "try", ":", "module", "=", "sys", ".", "modules", "[", "moduleOrReq", "]", "except", "KeyError", ":", "__import__", "(", "moduleOrReq", ")", "module", "=", "sys", ".", "modules", "[", "moduleOrReq", "]", "loader", "=", "getattr", "(", "module", ",", "'__loader__'", ",", "None", ")", "return", "_find_adapter", "(", "_provider_factories", ",", "loader", ")", "(", "module", ")" ]
[ 353, 0 ]
[ 363, 61 ]
python
en
['en', 'en', 'en']
True
get_build_platform
()
Return this platform's string for platform-specific distributions XXX Currently this is the same as ``distutils.util.get_platform()``, but it needs some hacks for Linux and Mac OS X.
Return this platform's string for platform-specific distributions
def get_build_platform(): """Return this platform's string for platform-specific distributions XXX Currently this is the same as ``distutils.util.get_platform()``, but it needs some hacks for Linux and Mac OS X. """ from sysconfig import get_platform plat = get_platform() if sys.platform == "darwin" and not plat.startswith('macosx-'): try: version = _macosx_vers() machine = os.uname()[4].replace(" ", "_") return "macosx-%d.%d-%s" % ( int(version[0]), int(version[1]), _macosx_arch(machine), ) except ValueError: # if someone is running a non-Mac darwin system, this will fall # through to the default implementation pass return plat
[ "def", "get_build_platform", "(", ")", ":", "from", "sysconfig", "import", "get_platform", "plat", "=", "get_platform", "(", ")", "if", "sys", ".", "platform", "==", "\"darwin\"", "and", "not", "plat", ".", "startswith", "(", "'macosx-'", ")", ":", "try", ":", "version", "=", "_macosx_vers", "(", ")", "machine", "=", "os", ".", "uname", "(", ")", "[", "4", "]", ".", "replace", "(", "\" \"", ",", "\"_\"", ")", "return", "\"macosx-%d.%d-%s\"", "%", "(", "int", "(", "version", "[", "0", "]", ")", ",", "int", "(", "version", "[", "1", "]", ")", ",", "_macosx_arch", "(", "machine", ")", ",", ")", "except", "ValueError", ":", "# if someone is running a non-Mac darwin system, this will fall", "# through to the default implementation", "pass", "return", "plat" ]
[ 386, 0 ]
[ 407, 15 ]
python
en
['en', 'da', 'en']
True
compatible_platforms
(provided, required)
Can code for the `provided` platform run on the `required` platform? Returns true if either platform is ``None``, or the platforms are equal. XXX Needs compatibility checks for Linux and other unixy OSes.
Can code for the `provided` platform run on the `required` platform?
def compatible_platforms(provided, required): """Can code for the `provided` platform run on the `required` platform? Returns true if either platform is ``None``, or the platforms are equal. XXX Needs compatibility checks for Linux and other unixy OSes. """ if provided is None or required is None or provided == required: # easy case return True # Mac OS X special cases reqMac = macosVersionString.match(required) if reqMac: provMac = macosVersionString.match(provided) # is this a Mac package? if not provMac: # this is backwards compatibility for packages built before # setuptools 0.6. All packages built after this point will # use the new macosx designation. provDarwin = darwinVersionString.match(provided) if provDarwin: dversion = int(provDarwin.group(1)) macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) if dversion == 7 and macosversion >= "10.3" or \ dversion == 8 and macosversion >= "10.4": return True # egg isn't macosx or legacy darwin return False # are they the same major version and machine type? if provMac.group(1) != reqMac.group(1) or \ provMac.group(3) != reqMac.group(3): return False # is the required OS major update >= the provided one? if int(provMac.group(2)) > int(reqMac.group(2)): return False return True # XXX Linux and other platforms' special cases should go here return False
[ "def", "compatible_platforms", "(", "provided", ",", "required", ")", ":", "if", "provided", "is", "None", "or", "required", "is", "None", "or", "provided", "==", "required", ":", "# easy case", "return", "True", "# Mac OS X special cases", "reqMac", "=", "macosVersionString", ".", "match", "(", "required", ")", "if", "reqMac", ":", "provMac", "=", "macosVersionString", ".", "match", "(", "provided", ")", "# is this a Mac package?", "if", "not", "provMac", ":", "# this is backwards compatibility for packages built before", "# setuptools 0.6. All packages built after this point will", "# use the new macosx designation.", "provDarwin", "=", "darwinVersionString", ".", "match", "(", "provided", ")", "if", "provDarwin", ":", "dversion", "=", "int", "(", "provDarwin", ".", "group", "(", "1", ")", ")", "macosversion", "=", "\"%s.%s\"", "%", "(", "reqMac", ".", "group", "(", "1", ")", ",", "reqMac", ".", "group", "(", "2", ")", ")", "if", "dversion", "==", "7", "and", "macosversion", ">=", "\"10.3\"", "or", "dversion", "==", "8", "and", "macosversion", ">=", "\"10.4\"", ":", "return", "True", "# egg isn't macosx or legacy darwin", "return", "False", "# are they the same major version and machine type?", "if", "provMac", ".", "group", "(", "1", ")", "!=", "reqMac", ".", "group", "(", "1", ")", "or", "provMac", ".", "group", "(", "3", ")", "!=", "reqMac", ".", "group", "(", "3", ")", ":", "return", "False", "# is the required OS major update >= the provided one?", "if", "int", "(", "provMac", ".", "group", "(", "2", ")", ")", ">", "int", "(", "reqMac", ".", "group", "(", "2", ")", ")", ":", "return", "False", "return", "True", "# XXX Linux and other platforms' special cases should go here", "return", "False" ]
[ 416, 0 ]
[ 459, 16 ]
python
en
['en', 'en', 'en']
True
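Illustrative calls for compatible_platforms, assuming it is the pkg_resources function of the same name; the macosx strings follow the 'macosx-<major>.<minor>-<machine>' pattern matched by macosVersionString.

# Sketch only: assumes this is pkg_resources.compatible_platforms.
from pkg_resources import compatible_platforms

# Equal or unspecified platforms are always compatible.
print(compatible_platforms(None, 'macosx-10.15-x86_64'))                   # True
print(compatible_platforms('linux-x86_64', 'linux-x86_64'))                # True

# An egg built for an older macOS minor release can run on a newer one...
print(compatible_platforms('macosx-10.9-x86_64', 'macosx-10.15-x86_64'))   # True
# ...but not the other way around, and not across machine types.
print(compatible_platforms('macosx-10.15-x86_64', 'macosx-10.9-x86_64'))   # False
print(compatible_platforms('macosx-10.9-arm64', 'macosx-10.15-x86_64'))    # False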
run_script
(dist_spec, script_name)
Locate distribution `dist_spec` and run its `script_name` script
Locate distribution `dist_spec` and run its `script_name` script
def run_script(dist_spec, script_name): """Locate distribution `dist_spec` and run its `script_name` script""" ns = sys._getframe(1).f_globals name = ns['__name__'] ns.clear() ns['__name__'] = name require(dist_spec)[0].run_script(script_name, ns)
[ "def", "run_script", "(", "dist_spec", ",", "script_name", ")", ":", "ns", "=", "sys", ".", "_getframe", "(", "1", ")", ".", "f_globals", "name", "=", "ns", "[", "'__name__'", "]", "ns", ".", "clear", "(", ")", "ns", "[", "'__name__'", "]", "=", "name", "require", "(", "dist_spec", ")", "[", "0", "]", ".", "run_script", "(", "script_name", ",", "ns", ")" ]
[ 462, 0 ]
[ 468, 53 ]
python
en
['en', 'co', 'en']
True
get_distribution
(dist)
Return a current distribution object for a Requirement or string
Return a current distribution object for a Requirement or string
def get_distribution(dist): """Return a current distribution object for a Requirement or string""" if isinstance(dist, six.string_types): dist = Requirement.parse(dist) if isinstance(dist, Requirement): dist = get_provider(dist) if not isinstance(dist, Distribution): raise TypeError("Expected string, Requirement, or Distribution", dist) return dist
[ "def", "get_distribution", "(", "dist", ")", ":", "if", "isinstance", "(", "dist", ",", "six", ".", "string_types", ")", ":", "dist", "=", "Requirement", ".", "parse", "(", "dist", ")", "if", "isinstance", "(", "dist", ",", "Requirement", ")", ":", "dist", "=", "get_provider", "(", "dist", ")", "if", "not", "isinstance", "(", "dist", ",", "Distribution", ")", ":", "raise", "TypeError", "(", "\"Expected string, Requirement, or Distribution\"", ",", "dist", ")", "return", "dist" ]
[ 475, 0 ]
[ 483, 15 ]
python
en
['en', 'en', 'en']
True
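A hedged usage sketch for get_distribution, assuming the pkg_resources API: it accepts a project name, a Requirement, or an existing Distribution and always hands back a Distribution (here setuptools is assumed to be installed in the running environment).

# Sketch only: assumes setuptools is installed in the current environment.
from pkg_resources import Requirement, get_distribution

dist = get_distribution('setuptools')          # by project name
print(dist.project_name, dist.version)

same = get_distribution(Requirement.parse('setuptools>=40'))
print(same.project_name)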
load_entry_point
(dist, group, name)
Return `name` entry point of `group` for `dist` or raise ImportError
Return `name` entry point of `group` for `dist` or raise ImportError
def load_entry_point(dist, group, name): """Return `name` entry point of `group` for `dist` or raise ImportError""" return get_distribution(dist).load_entry_point(group, name)
[ "def", "load_entry_point", "(", "dist", ",", "group", ",", "name", ")", ":", "return", "get_distribution", "(", "dist", ")", ".", "load_entry_point", "(", "group", ",", "name", ")" ]
[ 486, 0 ]
[ 488, 63 ]
python
en
['en', 'en', 'en']
True
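A sketch of load_entry_point together with the related get_entry_map and get_entry_info helpers defined just below, assuming a distribution that actually declares console scripts; pip and its 'pip' console_scripts entry point are assumptions about the local environment, not facts from the records.

# Sketch only: assumes a 'pip' distribution with a 'pip' console_scripts
# entry point is installed; adjust the names for your environment.
from pkg_resources import get_entry_info, get_entry_map, load_entry_point

print(get_entry_map('pip', 'console_scripts'))          # {'pip': EntryPoint(...), ...}
print(get_entry_info('pip', 'console_scripts', 'pip'))  # the EntryPoint, or None

main = load_entry_point('pip', 'console_scripts', 'pip')  # ImportError if missing
print(callable(main))                                     # True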
get_entry_map
(dist, group=None)
Return the entry point map for `group`, or the full entry map
Return the entry point map for `group`, or the full entry map
def get_entry_map(dist, group=None): """Return the entry point map for `group`, or the full entry map""" return get_distribution(dist).get_entry_map(group)
[ "def", "get_entry_map", "(", "dist", ",", "group", "=", "None", ")", ":", "return", "get_distribution", "(", "dist", ")", ".", "get_entry_map", "(", "group", ")" ]
[ 491, 0 ]
[ 493, 54 ]
python
en
['en', 'en', 'en']
True
get_entry_info
(dist, group, name)
Return the EntryPoint object for `group`+`name`, or ``None``
Return the EntryPoint object for `group`+`name`, or ``None``
def get_entry_info(dist, group, name): """Return the EntryPoint object for `group`+`name`, or ``None``""" return get_distribution(dist).get_entry_info(group, name)
[ "def", "get_entry_info", "(", "dist", ",", "group", ",", "name", ")", ":", "return", "get_distribution", "(", "dist", ")", ".", "get_entry_info", "(", "group", ",", "name", ")" ]
[ 496, 0 ]
[ 498, 61 ]
python
en
['en', 'en', 'en']
True
get_default_cache
()
Return the ``PYTHON_EGG_CACHE`` environment variable or a platform-relevant user cache dir for an app named "Python-Eggs".
Return the ``PYTHON_EGG_CACHE`` environment variable or a platform-relevant user cache dir for an app named "Python-Eggs".
def get_default_cache(): """ Return the ``PYTHON_EGG_CACHE`` environment variable or a platform-relevant user cache dir for an app named "Python-Eggs". """ return ( os.environ.get('PYTHON_EGG_CACHE') or appdirs.user_cache_dir(appname='Python-Eggs') )
[ "def", "get_default_cache", "(", ")", ":", "return", "(", "os", ".", "environ", ".", "get", "(", "'PYTHON_EGG_CACHE'", ")", "or", "appdirs", ".", "user_cache_dir", "(", "appname", "=", "'Python-Eggs'", ")", ")" ]
[ 1304, 0 ]
[ 1313, 5 ]
python
en
['en', 'error', 'th']
False
safe_name
(name)
Convert an arbitrary string to a standard distribution name Any runs of non-alphanumeric/. characters are replaced with a single '-'.
Convert an arbitrary string to a standard distribution name
def safe_name(name): """Convert an arbitrary string to a standard distribution name Any runs of non-alphanumeric/. characters are replaced with a single '-'. """ return re.sub('[^A-Za-z0-9.]+', '-', name)
[ "def", "safe_name", "(", "name", ")", ":", "return", "re", ".", "sub", "(", "'[^A-Za-z0-9.]+'", ",", "'-'", ",", "name", ")" ]
[ 1316, 0 ]
[ 1321, 46 ]
python
en
['en', 'en', 'en']
True
safe_version
(version)
Convert an arbitrary string to a standard version string
Convert an arbitrary string to a standard version string
def safe_version(version): """ Convert an arbitrary string to a standard version string """ try: # normalize the version return str(packaging.version.Version(version)) except packaging.version.InvalidVersion: version = version.replace(' ', '.') return re.sub('[^A-Za-z0-9.]+', '-', version)
[ "def", "safe_version", "(", "version", ")", ":", "try", ":", "# normalize the version", "return", "str", "(", "packaging", ".", "version", ".", "Version", "(", "version", ")", ")", "except", "packaging", ".", "version", ".", "InvalidVersion", ":", "version", "=", "version", ".", "replace", "(", "' '", ",", "'.'", ")", "return", "re", ".", "sub", "(", "'[^A-Za-z0-9.]+'", ",", "'-'", ",", "version", ")" ]
[ 1324, 0 ]
[ 1333, 53 ]
python
en
['en', 'error', 'th']
False
safe_extra
(extra)
Convert an arbitrary string to a standard 'extra' name Any runs of non-alphanumeric characters are replaced with a single '_', and the result is always lowercased.
Convert an arbitrary string to a standard 'extra' name
def safe_extra(extra): """Convert an arbitrary string to a standard 'extra' name Any runs of non-alphanumeric characters are replaced with a single '_', and the result is always lowercased. """ return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()
[ "def", "safe_extra", "(", "extra", ")", ":", "return", "re", ".", "sub", "(", "'[^A-Za-z0-9.-]+'", ",", "'_'", ",", "extra", ")", ".", "lower", "(", ")" ]
[ 1336, 0 ]
[ 1342, 56 ]
python
en
['en', 'en', 'en']
True
to_filename
(name)
Convert a project or version name to its filename-escaped form Any '-' characters are currently replaced with '_'.
Convert a project or version name to its filename-escaped form
def to_filename(name): """Convert a project or version name to its filename-escaped form Any '-' characters are currently replaced with '_'. """ return name.replace('-', '_')
[ "def", "to_filename", "(", "name", ")", ":", "return", "name", ".", "replace", "(", "'-'", ",", "'_'", ")" ]
[ 1345, 0 ]
[ 1350, 33 ]
python
en
['en', 'en', 'en']
True
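Taken together, safe_name, safe_version, safe_extra and to_filename normalize arbitrary metadata strings; a few illustrative calls, assuming the pkg_resources implementations shown in the records above.

# Sketch only: assumes these are the pkg_resources helpers shown above.
from pkg_resources import safe_extra, safe_name, safe_version, to_filename

print(safe_name('My Project!!'))        # 'My-Project-'
print(safe_version('1.0-beta-2'))       # '1.0b2'  (PEP 440 normalization)
print(safe_version('2011 beta'))        # '2011.beta'  (fallback for non-PEP 440 input)
print(safe_extra('Tests & Docs'))       # 'tests_docs'
print(to_filename('My-Project'))        # 'My_Project'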
invalid_marker
(text)
Validate text as a PEP 508 environment marker; return an exception if invalid or False otherwise.
Validate text as a PEP 508 environment marker; return an exception if invalid or False otherwise.
def invalid_marker(text): """ Validate text as a PEP 508 environment marker; return an exception if invalid or False otherwise. """ try: evaluate_marker(text) except SyntaxError as e: e.filename = None e.lineno = None return e return False
[ "def", "invalid_marker", "(", "text", ")", ":", "try", ":", "evaluate_marker", "(", "text", ")", "except", "SyntaxError", "as", "e", ":", "e", ".", "filename", "=", "None", "e", ".", "lineno", "=", "None", "return", "e", "return", "False" ]
[ 1353, 0 ]
[ 1364, 16 ]
python
en
['en', 'error', 'th']
False
evaluate_marker
(text, extra=None)
Evaluate a PEP 508 environment marker. Return a boolean indicating the marker result in this environment. Raise SyntaxError if marker is invalid. This implementation uses the 'pyparsing' module.
Evaluate a PEP 508 environment marker. Return a boolean indicating the marker result in this environment. Raise SyntaxError if marker is invalid.
def evaluate_marker(text, extra=None): """ Evaluate a PEP 508 environment marker. Return a boolean indicating the marker result in this environment. Raise SyntaxError if marker is invalid. This implementation uses the 'pyparsing' module. """ try: marker = packaging.markers.Marker(text) return marker.evaluate() except packaging.markers.InvalidMarker as e: raise SyntaxError(e)
[ "def", "evaluate_marker", "(", "text", ",", "extra", "=", "None", ")", ":", "try", ":", "marker", "=", "packaging", ".", "markers", ".", "Marker", "(", "text", ")", "return", "marker", ".", "evaluate", "(", ")", "except", "packaging", ".", "markers", ".", "InvalidMarker", "as", "e", ":", "raise", "SyntaxError", "(", "e", ")" ]
[ 1367, 0 ]
[ 1379, 28 ]
python
en
['en', 'error', 'th']
False
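A brief sketch of the two marker helpers above, assuming the pkg_resources API: invalid_marker returns the SyntaxError instead of raising it, while evaluate_marker raises on bad input and otherwise evaluates the marker against the running interpreter.

# Sketch only: assumes pkg_resources.invalid_marker / evaluate_marker.
from pkg_resources import evaluate_marker, invalid_marker

print(invalid_marker('python_version >= "3.6"'))   # False -- marker is valid
print(invalid_marker('python_version >>> "3.6"'))  # a SyntaxError instance

# True or False depending on the interpreter running this snippet.
print(evaluate_marker('python_version >= "3.6"'))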
register_finder
(importer_type, distribution_finder)
Register `distribution_finder` to find distributions in sys.path items `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item handler), and `distribution_finder` is a callable that, passed a path item and the importer instance, yields ``Distribution`` instances found on that path item. See ``pkg_resources.find_on_path`` for an example.
Register `distribution_finder` to find distributions in sys.path items
def register_finder(importer_type, distribution_finder): """Register `distribution_finder` to find distributions in sys.path items `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item handler), and `distribution_finder` is a callable that, passed a path item and the importer instance, yields ``Distribution`` instances found on that path item. See ``pkg_resources.find_on_path`` for an example.""" _distribution_finders[importer_type] = distribution_finder
[ "def", "register_finder", "(", "importer_type", ",", "distribution_finder", ")", ":", "_distribution_finders", "[", "importer_type", "]", "=", "distribution_finder" ]
[ 1956, 0 ]
[ 1963, 62 ]
python
en
['en', 'en', 'en']
True
find_distributions
(path_item, only=False)
Yield distributions accessible via `path_item`
Yield distributions accessible via `path_item`
def find_distributions(path_item, only=False): """Yield distributions accessible via `path_item`""" importer = get_importer(path_item) finder = _find_adapter(_distribution_finders, importer) return finder(importer, path_item, only)
[ "def", "find_distributions", "(", "path_item", ",", "only", "=", "False", ")", ":", "importer", "=", "get_importer", "(", "path_item", ")", "finder", "=", "_find_adapter", "(", "_distribution_finders", ",", "importer", ")", "return", "finder", "(", "importer", ",", "path_item", ",", "only", ")" ]
[ 1966, 0 ]
[ 1970, 44 ]
python
en
['en', 'en', 'en']
True
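A usage sketch for find_distributions, assuming the pkg_resources API: it yields the distributions visible on a single sys.path entry, so the output depends entirely on the local environment.

# Sketch only: scans the first sys.path entry that is an existing directory.
import os
import sys

from pkg_resources import find_distributions

path_item = next(p for p in sys.path if p and os.path.isdir(p))
for dist in find_distributions(path_item):
    print(dist.project_name, dist.location)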
find_eggs_in_zip
(importer, path_item, only=False)
Find eggs in zip files; possibly multiple nested eggs.
Find eggs in zip files; possibly multiple nested eggs.
def find_eggs_in_zip(importer, path_item, only=False): """ Find eggs in zip files; possibly multiple nested eggs. """ if importer.archive.endswith('.whl'): # wheels are not supported with this finder # they don't have PKG-INFO metadata, and won't ever contain eggs return metadata = EggMetadata(importer) if metadata.has_metadata('PKG-INFO'): yield Distribution.from_filename(path_item, metadata=metadata) if only: # don't yield nested distros return for subitem in metadata.resource_listdir(''): if _is_egg_path(subitem): subpath = os.path.join(path_item, subitem) dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath) for dist in dists: yield dist elif subitem.lower().endswith('.dist-info'): subpath = os.path.join(path_item, subitem) submeta = EggMetadata(zipimport.zipimporter(subpath)) submeta.egg_info = subpath yield Distribution.from_location(path_item, subitem, submeta)
[ "def", "find_eggs_in_zip", "(", "importer", ",", "path_item", ",", "only", "=", "False", ")", ":", "if", "importer", ".", "archive", ".", "endswith", "(", "'.whl'", ")", ":", "# wheels are not supported with this finder", "# they don't have PKG-INFO metadata, and won't ever contain eggs", "return", "metadata", "=", "EggMetadata", "(", "importer", ")", "if", "metadata", ".", "has_metadata", "(", "'PKG-INFO'", ")", ":", "yield", "Distribution", ".", "from_filename", "(", "path_item", ",", "metadata", "=", "metadata", ")", "if", "only", ":", "# don't yield nested distros", "return", "for", "subitem", "in", "metadata", ".", "resource_listdir", "(", "''", ")", ":", "if", "_is_egg_path", "(", "subitem", ")", ":", "subpath", "=", "os", ".", "path", ".", "join", "(", "path_item", ",", "subitem", ")", "dists", "=", "find_eggs_in_zip", "(", "zipimport", ".", "zipimporter", "(", "subpath", ")", ",", "subpath", ")", "for", "dist", "in", "dists", ":", "yield", "dist", "elif", "subitem", ".", "lower", "(", ")", ".", "endswith", "(", "'.dist-info'", ")", ":", "subpath", "=", "os", ".", "path", ".", "join", "(", "path_item", ",", "subitem", ")", "submeta", "=", "EggMetadata", "(", "zipimport", ".", "zipimporter", "(", "subpath", ")", ")", "submeta", ".", "egg_info", "=", "subpath", "yield", "Distribution", ".", "from_location", "(", "path_item", ",", "subitem", ",", "submeta", ")" ]
[ 1973, 0 ]
[ 1997, 73 ]
python
en
['en', 'error', 'th']
False
_by_version_descending
(names)
Given a list of filenames, return them in descending order by version number. >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' >>> _by_version_descending(names) ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar'] >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' >>> _by_version_descending(names) ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' >>> _by_version_descending(names) ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
Given a list of filenames, return them in descending order by version number.
def _by_version_descending(names): """ Given a list of filenames, return them in descending order by version number. >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' >>> _by_version_descending(names) ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar'] >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' >>> _by_version_descending(names) ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' >>> _by_version_descending(names) ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] """ def _by_version(name): """ Parse each component of the filename """ name, ext = os.path.splitext(name) parts = itertools.chain(name.split('-'), [ext]) return [packaging.version.parse(part) for part in parts] return sorted(names, key=_by_version, reverse=True)
[ "def", "_by_version_descending", "(", "names", ")", ":", "def", "_by_version", "(", "name", ")", ":", "\"\"\"\n Parse each component of the filename\n \"\"\"", "name", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "name", ")", "parts", "=", "itertools", ".", "chain", "(", "name", ".", "split", "(", "'-'", ")", ",", "[", "ext", "]", ")", "return", "[", "packaging", ".", "version", ".", "parse", "(", "part", ")", "for", "part", "in", "parts", "]", "return", "sorted", "(", "names", ",", "key", "=", "_by_version", ",", "reverse", "=", "True", ")" ]
[ 2010, 0 ]
[ 2033, 55 ]
python
en
['en', 'error', 'th']
False
find_on_path
(importer, path_item, only=False)
Yield distributions accessible on a sys.path directory
Yield distributions accessible on a sys.path directory
def find_on_path(importer, path_item, only=False): """Yield distributions accessible on a sys.path directory""" path_item = _normalize_cached(path_item) if _is_unpacked_egg(path_item): yield Distribution.from_filename( path_item, metadata=PathMetadata( path_item, os.path.join(path_item, 'EGG-INFO') ) ) return entries = safe_listdir(path_item) # for performance, before sorting by version, # screen entries for only those that will yield # distributions filtered = ( entry for entry in entries if dist_factory(path_item, entry, only) ) # scan for .egg and .egg-info in directory path_item_entries = _by_version_descending(filtered) for entry in path_item_entries: fullpath = os.path.join(path_item, entry) factory = dist_factory(path_item, entry, only) for dist in factory(fullpath): yield dist
[ "def", "find_on_path", "(", "importer", ",", "path_item", ",", "only", "=", "False", ")", ":", "path_item", "=", "_normalize_cached", "(", "path_item", ")", "if", "_is_unpacked_egg", "(", "path_item", ")", ":", "yield", "Distribution", ".", "from_filename", "(", "path_item", ",", "metadata", "=", "PathMetadata", "(", "path_item", ",", "os", ".", "path", ".", "join", "(", "path_item", ",", "'EGG-INFO'", ")", ")", ")", "return", "entries", "=", "safe_listdir", "(", "path_item", ")", "# for performance, before sorting by version,", "# screen entries for only those that will yield", "# distributions", "filtered", "=", "(", "entry", "for", "entry", "in", "entries", "if", "dist_factory", "(", "path_item", ",", "entry", ",", "only", ")", ")", "# scan for .egg and .egg-info in directory", "path_item_entries", "=", "_by_version_descending", "(", "filtered", ")", "for", "entry", "in", "path_item_entries", ":", "fullpath", "=", "os", ".", "path", ".", "join", "(", "path_item", ",", "entry", ")", "factory", "=", "dist_factory", "(", "path_item", ",", "entry", ",", "only", ")", "for", "dist", "in", "factory", "(", "fullpath", ")", ":", "yield", "dist" ]
[ 2036, 0 ]
[ 2065, 22 ]
python
en
['en', 'en', 'en']
True
dist_factory
(path_item, entry, only)
Return a dist_factory for a path_item and entry
Return a dist_factory for a path_item and entry
def dist_factory(path_item, entry, only): """ Return a dist_factory for a path_item and entry """ lower = entry.lower() is_meta = any(map(lower.endswith, ('.egg-info', '.dist-info'))) return ( distributions_from_metadata if is_meta else find_distributions if not only and _is_egg_path(entry) else resolve_egg_link if not only and lower.endswith('.egg-link') else NoDists() )
[ "def", "dist_factory", "(", "path_item", ",", "entry", ",", "only", ")", ":", "lower", "=", "entry", ".", "lower", "(", ")", "is_meta", "=", "any", "(", "map", "(", "lower", ".", "endswith", ",", "(", "'.egg-info'", ",", "'.dist-info'", ")", ")", ")", "return", "(", "distributions_from_metadata", "if", "is_meta", "else", "find_distributions", "if", "not", "only", "and", "_is_egg_path", "(", "entry", ")", "else", "resolve_egg_link", "if", "not", "only", "and", "lower", ".", "endswith", "(", "'.egg-link'", ")", "else", "NoDists", "(", ")", ")" ]
[ 2068, 0 ]
[ 2082, 5 ]
python
en
['en', 'error', 'th']
False
safe_listdir
(path)
Attempt to list contents of path, but suppress some exceptions.
Attempt to list contents of path, but suppress some exceptions.
def safe_listdir(path): """ Attempt to list contents of path, but suppress some exceptions. """ try: return os.listdir(path) except (PermissionError, NotADirectoryError): pass except OSError as e: # Ignore the directory if does not exist, not a directory or # permission denied ignorable = ( e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT) # Python 2 on Windows needs to be handled this way :( or getattr(e, "winerror", None) == 267 ) if not ignorable: raise return ()
[ "def", "safe_listdir", "(", "path", ")", ":", "try", ":", "return", "os", ".", "listdir", "(", "path", ")", "except", "(", "PermissionError", ",", "NotADirectoryError", ")", ":", "pass", "except", "OSError", "as", "e", ":", "# Ignore the directory if does not exist, not a directory or", "# permission denied", "ignorable", "=", "(", "e", ".", "errno", "in", "(", "errno", ".", "ENOTDIR", ",", "errno", ".", "EACCES", ",", "errno", ".", "ENOENT", ")", "# Python 2 on Windows needs to be handled this way :(", "or", "getattr", "(", "e", ",", "\"winerror\"", ",", "None", ")", "==", "267", ")", "if", "not", "ignorable", ":", "raise", "return", "(", ")" ]
[ 2102, 0 ]
[ 2120, 13 ]
python
en
['en', 'error', 'th']
False
non_empty_lines
(path)
Yield non-empty lines from file at path
Yield non-empty lines from file at path
def non_empty_lines(path): """ Yield non-empty lines from file at path """ with open(path) as f: for line in f: line = line.strip() if line: yield line
[ "def", "non_empty_lines", "(", "path", ")", ":", "with", "open", "(", "path", ")", "as", "f", ":", "for", "line", "in", "f", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "line", ":", "yield", "line" ]
[ 2138, 0 ]
[ 2146, 26 ]
python
en
['en', 'error', 'th']
False
resolve_egg_link
(path)
Given a path to an .egg-link, resolve distributions present in the referenced path.
Given a path to an .egg-link, resolve distributions present in the referenced path.
def resolve_egg_link(path): """ Given a path to an .egg-link, resolve distributions present in the referenced path. """ referenced_paths = non_empty_lines(path) resolved_paths = ( os.path.join(os.path.dirname(path), ref) for ref in referenced_paths ) dist_groups = map(find_distributions, resolved_paths) return next(dist_groups, ())
[ "def", "resolve_egg_link", "(", "path", ")", ":", "referenced_paths", "=", "non_empty_lines", "(", "path", ")", "resolved_paths", "=", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "path", ")", ",", "ref", ")", "for", "ref", "in", "referenced_paths", ")", "dist_groups", "=", "map", "(", "find_distributions", ",", "resolved_paths", ")", "return", "next", "(", "dist_groups", ",", "(", ")", ")" ]
[ 2149, 0 ]
[ 2160, 32 ]
python
en
['en', 'error', 'th']
False
register_namespace_handler
(importer_type, namespace_handler)
Register `namespace_handler` to declare namespace packages `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item handler), and `namespace_handler` is a callable like this:: def namespace_handler(importer, path_entry, moduleName, module): # return a path_entry to use for child packages Namespace handlers are only called if the importer object has already agreed that it can handle the relevant path item, and they should only return a subpath if the module __path__ does not already contain an equivalent subpath. For an example namespace handler, see ``pkg_resources.file_ns_handler``.
Register `namespace_handler` to declare namespace packages
def register_namespace_handler(importer_type, namespace_handler): """Register `namespace_handler` to declare namespace packages `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item handler), and `namespace_handler` is a callable like this:: def namespace_handler(importer, path_entry, moduleName, module): # return a path_entry to use for child packages Namespace handlers are only called if the importer object has already agreed that it can handle the relevant path item, and they should only return a subpath if the module __path__ does not already contain an equivalent subpath. For an example namespace handler, see ``pkg_resources.file_ns_handler``. """ _namespace_handlers[importer_type] = namespace_handler
[ "def", "register_namespace_handler", "(", "importer_type", ",", "namespace_handler", ")", ":", "_namespace_handlers", "[", "importer_type", "]", "=", "namespace_handler" ]
[ 2172, 0 ]
[ 2187, 58 ]
python
en
['it', 'en', 'en']
True
_handle_ns
(packageName, path_item)
Ensure that named package includes a subpath of path_item (if needed)
Ensure that named package includes a subpath of path_item (if needed)
def _handle_ns(packageName, path_item): """Ensure that named package includes a subpath of path_item (if needed)""" importer = get_importer(path_item) if importer is None: return None # capture warnings due to #1111 with warnings.catch_warnings(): warnings.simplefilter("ignore") loader = importer.find_module(packageName) if loader is None: return None module = sys.modules.get(packageName) if module is None: module = sys.modules[packageName] = types.ModuleType(packageName) module.__path__ = [] _set_parent_ns(packageName) elif not hasattr(module, '__path__'): raise TypeError("Not a package:", packageName) handler = _find_adapter(_namespace_handlers, importer) subpath = handler(importer, path_item, packageName, module) if subpath is not None: path = module.__path__ path.append(subpath) loader.load_module(packageName) _rebuild_mod_path(path, packageName, module) return subpath
[ "def", "_handle_ns", "(", "packageName", ",", "path_item", ")", ":", "importer", "=", "get_importer", "(", "path_item", ")", "if", "importer", "is", "None", ":", "return", "None", "# capture warnings due to #1111", "with", "warnings", ".", "catch_warnings", "(", ")", ":", "warnings", ".", "simplefilter", "(", "\"ignore\"", ")", "loader", "=", "importer", ".", "find_module", "(", "packageName", ")", "if", "loader", "is", "None", ":", "return", "None", "module", "=", "sys", ".", "modules", ".", "get", "(", "packageName", ")", "if", "module", "is", "None", ":", "module", "=", "sys", ".", "modules", "[", "packageName", "]", "=", "types", ".", "ModuleType", "(", "packageName", ")", "module", ".", "__path__", "=", "[", "]", "_set_parent_ns", "(", "packageName", ")", "elif", "not", "hasattr", "(", "module", ",", "'__path__'", ")", ":", "raise", "TypeError", "(", "\"Not a package:\"", ",", "packageName", ")", "handler", "=", "_find_adapter", "(", "_namespace_handlers", ",", "importer", ")", "subpath", "=", "handler", "(", "importer", ",", "path_item", ",", "packageName", ",", "module", ")", "if", "subpath", "is", "not", "None", ":", "path", "=", "module", ".", "__path__", "path", ".", "append", "(", "subpath", ")", "loader", ".", "load_module", "(", "packageName", ")", "_rebuild_mod_path", "(", "path", ",", "packageName", ",", "module", ")", "return", "subpath" ]
[ 2190, 0 ]
[ 2218, 18 ]
python
en
['en', 'en', 'en']
True
_rebuild_mod_path
(orig_path, package_name, module)
Rebuild module.__path__ ensuring that all entries are ordered corresponding to their sys.path order
Rebuild module.__path__ ensuring that all entries are ordered corresponding to their sys.path order
def _rebuild_mod_path(orig_path, package_name, module): """ Rebuild module.__path__ ensuring that all entries are ordered corresponding to their sys.path order """ sys_path = [_normalize_cached(p) for p in sys.path] def safe_sys_path_index(entry): """ Workaround for #520 and #513. """ try: return sys_path.index(entry) except ValueError: return float('inf') def position_in_sys_path(path): """ Return the ordinal of the path based on its position in sys.path """ path_parts = path.split(os.sep) module_parts = package_name.count('.') + 1 parts = path_parts[:-module_parts] return safe_sys_path_index(_normalize_cached(os.sep.join(parts))) new_path = sorted(orig_path, key=position_in_sys_path) new_path = [_normalize_cached(p) for p in new_path] if isinstance(module.__path__, list): module.__path__[:] = new_path else: module.__path__ = new_path
[ "def", "_rebuild_mod_path", "(", "orig_path", ",", "package_name", ",", "module", ")", ":", "sys_path", "=", "[", "_normalize_cached", "(", "p", ")", "for", "p", "in", "sys", ".", "path", "]", "def", "safe_sys_path_index", "(", "entry", ")", ":", "\"\"\"\n Workaround for #520 and #513.\n \"\"\"", "try", ":", "return", "sys_path", ".", "index", "(", "entry", ")", "except", "ValueError", ":", "return", "float", "(", "'inf'", ")", "def", "position_in_sys_path", "(", "path", ")", ":", "\"\"\"\n Return the ordinal of the path based on its position in sys.path\n \"\"\"", "path_parts", "=", "path", ".", "split", "(", "os", ".", "sep", ")", "module_parts", "=", "package_name", ".", "count", "(", "'.'", ")", "+", "1", "parts", "=", "path_parts", "[", ":", "-", "module_parts", "]", "return", "safe_sys_path_index", "(", "_normalize_cached", "(", "os", ".", "sep", ".", "join", "(", "parts", ")", ")", ")", "new_path", "=", "sorted", "(", "orig_path", ",", "key", "=", "position_in_sys_path", ")", "new_path", "=", "[", "_normalize_cached", "(", "p", ")", "for", "p", "in", "new_path", "]", "if", "isinstance", "(", "module", ".", "__path__", ",", "list", ")", ":", "module", ".", "__path__", "[", ":", "]", "=", "new_path", "else", ":", "module", ".", "__path__", "=", "new_path" ]
[ 2221, 0 ]
[ 2252, 34 ]
python
en
['en', 'error', 'th']
False
declare_namespace
(packageName)
Declare that package 'packageName' is a namespace package
Declare that package 'packageName' is a namespace package
def declare_namespace(packageName): """Declare that package 'packageName' is a namespace package""" _imp.acquire_lock() try: if packageName in _namespace_packages: return path = sys.path parent, _, _ = packageName.rpartition('.') if parent: declare_namespace(parent) if parent not in _namespace_packages: __import__(parent) try: path = sys.modules[parent].__path__ except AttributeError: raise TypeError("Not a package:", parent) # Track what packages are namespaces, so when new path items are added, # they can be updated _namespace_packages.setdefault(parent or None, []).append(packageName) _namespace_packages.setdefault(packageName, []) for path_item in path: # Ensure all the parent's path items are reflected in the child, # if they apply _handle_ns(packageName, path_item) finally: _imp.release_lock()
[ "def", "declare_namespace", "(", "packageName", ")", ":", "_imp", ".", "acquire_lock", "(", ")", "try", ":", "if", "packageName", "in", "_namespace_packages", ":", "return", "path", "=", "sys", ".", "path", "parent", ",", "_", ",", "_", "=", "packageName", ".", "rpartition", "(", "'.'", ")", "if", "parent", ":", "declare_namespace", "(", "parent", ")", "if", "parent", "not", "in", "_namespace_packages", ":", "__import__", "(", "parent", ")", "try", ":", "path", "=", "sys", ".", "modules", "[", "parent", "]", ".", "__path__", "except", "AttributeError", ":", "raise", "TypeError", "(", "\"Not a package:\"", ",", "parent", ")", "# Track what packages are namespaces, so when new path items are added,", "# they can be updated", "_namespace_packages", ".", "setdefault", "(", "parent", "or", "None", ",", "[", "]", ")", ".", "append", "(", "packageName", ")", "_namespace_packages", ".", "setdefault", "(", "packageName", ",", "[", "]", ")", "for", "path_item", "in", "path", ":", "# Ensure all the parent's path items are reflected in the child,", "# if they apply", "_handle_ns", "(", "packageName", ",", "path_item", ")", "finally", ":", "_imp", ".", "release_lock", "(", ")" ]
[ 2255, 0 ]
[ 2286, 27 ]
python
en
['en', 'en', 'en']
True
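declare_namespace is the legacy pkg_resources-style namespace-package mechanism: each portion of the namespace ships an __init__.py containing only the declaration. A hedged sketch of that file follows; the package name is purely illustrative, and newer code would normally use PEP 420 implicit namespace packages instead.

# Sketch only: contents of mynamespace/__init__.py for the legacy
# pkg_resources-style namespace layout ('mynamespace' is illustrative).
__import__('pkg_resources').declare_namespace(__name__)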
fixup_namespace_packages
(path_item, parent=None)
Ensure that previously-declared namespace packages include path_item
Ensure that previously-declared namespace packages include path_item
def fixup_namespace_packages(path_item, parent=None): """Ensure that previously-declared namespace packages include path_item""" _imp.acquire_lock() try: for package in _namespace_packages.get(parent, ()): subpath = _handle_ns(package, path_item) if subpath: fixup_namespace_packages(subpath, package) finally: _imp.release_lock()
[ "def", "fixup_namespace_packages", "(", "path_item", ",", "parent", "=", "None", ")", ":", "_imp", ".", "acquire_lock", "(", ")", "try", ":", "for", "package", "in", "_namespace_packages", ".", "get", "(", "parent", ",", "(", ")", ")", ":", "subpath", "=", "_handle_ns", "(", "package", ",", "path_item", ")", "if", "subpath", ":", "fixup_namespace_packages", "(", "subpath", ",", "package", ")", "finally", ":", "_imp", ".", "release_lock", "(", ")" ]
[ 2289, 0 ]
[ 2298, 27 ]
python
en
['en', 'en', 'en']
True
file_ns_handler
(importer, path_item, packageName, module)
Compute an ns-package subpath for a filesystem or zipfile importer
Compute an ns-package subpath for a filesystem or zipfile importer
def file_ns_handler(importer, path_item, packageName, module): """Compute an ns-package subpath for a filesystem or zipfile importer""" subpath = os.path.join(path_item, packageName.split('.')[-1]) normalized = _normalize_cached(subpath) for item in module.__path__: if _normalize_cached(item) == normalized: break else: # Only return the path if it's not already there return subpath
[ "def", "file_ns_handler", "(", "importer", ",", "path_item", ",", "packageName", ",", "module", ")", ":", "subpath", "=", "os", ".", "path", ".", "join", "(", "path_item", ",", "packageName", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", ")", "normalized", "=", "_normalize_cached", "(", "subpath", ")", "for", "item", "in", "module", ".", "__path__", ":", "if", "_normalize_cached", "(", "item", ")", "==", "normalized", ":", "break", "else", ":", "# Only return the path if it's not already there", "return", "subpath" ]
[ 2301, 0 ]
[ 2311, 22 ]
python
en
['en', 'en', 'en']
True
normalize_path
(filename)
Normalize a file/dir name for comparison purposes
Normalize a file/dir name for comparison purposes
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
[ "def", "normalize_path", "(", "filename", ")", ":", "return", "os", ".", "path", ".", "normcase", "(", "os", ".", "path", ".", "realpath", "(", "os", ".", "path", ".", "normpath", "(", "_cygwin_patch", "(", "filename", ")", ")", ")", ")" ]
[ 2328, 0 ]
[ 2330, 88 ]
python
en
['en', 'it', 'en']
True
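Usage sketch (editorial; the paths are illustrative): normalize_path makes two spellings of the same location compare equal by resolving '.', '..', symlinks and case.

import pkg_resources

a = pkg_resources.normalize_path('/tmp/./pkgs/../pkgs')
b = pkg_resources.normalize_path('/tmp/pkgs')
print(a == b)  # True on most systems once the redundant components are resolved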
_cygwin_patch
(filename)
Contrary to POSIX 2008, on Cygwin, getcwd (3) contains symlink components. Using os.path.abspath() works around this limitation. A fix in os.getcwd() would probably better, in Cygwin even more so, except that this seems to be by design...
Contrary to POSIX 2008, on Cygwin, getcwd (3) contains symlink components. Using os.path.abspath() works around this limitation. A fix in os.getcwd() would probably better, in Cygwin even more so, except that this seems to be by design...
def _cygwin_patch(filename):  # pragma: nocover
    """
    Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
    symlink components. Using
    os.path.abspath() works around this limitation. A fix in os.getcwd()
    would probably better, in Cygwin even more so, except
    that this seems to be by design...
    """
    return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
[ "def", "_cygwin_patch", "(", "filename", ")", ":", "# pragma: nocover", "return", "os", ".", "path", ".", "abspath", "(", "filename", ")", "if", "sys", ".", "platform", "==", "'cygwin'", "else", "filename" ]
[ 2333, 0 ]
[ 2341, 78 ]
python
en
['en', 'error', 'th']
False
_is_egg_path
(path)
Determine if given path appears to be an egg.
Determine if given path appears to be an egg.
def _is_egg_path(path):
    """
    Determine if given path appears to be an egg.
    """
    return path.lower().endswith('.egg')
[ "def", "_is_egg_path", "(", "path", ")", ":", "return", "path", ".", "lower", "(", ")", ".", "endswith", "(", "'.egg'", ")" ]
[ 2352, 0 ]
[ 2356, 40 ]
python
en
['en', 'error', 'th']
False
_is_unpacked_egg
(path)
Determine if given path appears to be an unpacked egg.
Determine if given path appears to be an unpacked egg.
def _is_unpacked_egg(path):
    """
    Determine if given path appears to be an unpacked egg.
    """
    return (
        _is_egg_path(path) and
        os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO'))
    )
[ "def", "_is_unpacked_egg", "(", "path", ")", ":", "return", "(", "_is_egg_path", "(", "path", ")", "and", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "path", ",", "'EGG-INFO'", ",", "'PKG-INFO'", ")", ")", ")" ]
[ 2359, 0 ]
[ 2366, 5 ]
python
en
['en', 'error', 'th']
False
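Usage sketch (editorial): _is_egg_path and _is_unpacked_egg are private helpers, so calling them directly is for illustration only; the paths are invented.

import pkg_resources

print(pkg_resources._is_egg_path('/eggs/example-1.0-py3.8.egg'))      # True: the name ends in .egg
print(pkg_resources._is_unpacked_egg('/eggs/example-1.0-py3.8.egg'))  # True only if EGG-INFO/PKG-INFO exists on disk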
yield_lines
(strs)
Yield non-empty/non-comment lines of a string or sequence
Yield non-empty/non-comment lines of a string or sequence
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a string or sequence"""
    if isinstance(strs, six.string_types):
        for s in strs.splitlines():
            s = s.strip()
            # skip blank lines/comments
            if s and not s.startswith('#'):
                yield s
    else:
        for ss in strs:
            for s in yield_lines(ss):
                yield s
[ "def", "yield_lines", "(", "strs", ")", ":", "if", "isinstance", "(", "strs", ",", "six", ".", "string_types", ")", ":", "for", "s", "in", "strs", ".", "splitlines", "(", ")", ":", "s", "=", "s", ".", "strip", "(", ")", "# skip blank lines/comments", "if", "s", "and", "not", "s", ".", "startswith", "(", "'#'", ")", ":", "yield", "s", "else", ":", "for", "ss", "in", "strs", ":", "for", "s", "in", "yield_lines", "(", "ss", ")", ":", "yield", "s" ]
[ 2377, 0 ]
[ 2388, 23 ]
python
en
['en', 'en', 'en']
True
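Usage sketch (editorial; the input text is invented): yield_lines drops blanks and '#' comments from a string or a nested sequence of strings.

import pkg_resources

text = """
# a comment
requests>=2.0

six
"""
print(list(pkg_resources.yield_lines(text)))  # ['requests>=2.0', 'six']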
_version_from_file
(lines)
Given an iterable of lines from a Metadata file, return the value of the Version field, if present, or None otherwise.
Given an iterable of lines from a Metadata file, return the value of the Version field, if present, or None otherwise.
def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    def is_version_line(line):
        return line.lower().startswith('version:')
    version_lines = filter(is_version_line, lines)
    line = next(iter(version_lines), '')
    _, _, value = line.partition(':')
    return safe_version(value.strip()) or None
[ "def", "_version_from_file", "(", "lines", ")", ":", "def", "is_version_line", "(", "line", ")", ":", "return", "line", ".", "lower", "(", ")", ".", "startswith", "(", "'version:'", ")", "version_lines", "=", "filter", "(", "is_version_line", ",", "lines", ")", "line", "=", "next", "(", "iter", "(", "version_lines", ")", ",", "''", ")", "_", ",", "_", ",", "value", "=", "line", ".", "partition", "(", "':'", ")", "return", "safe_version", "(", "value", ".", "strip", "(", ")", ")", "or", "None" ]
[ 2547, 0 ]
[ 2557, 46 ]
python
en
['en', 'error', 'th']
False
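Usage sketch (editorial): _version_from_file is private, so the direct call is illustrative only; the metadata lines are made up.

import pkg_resources

metadata_lines = [
    'Metadata-Version: 2.1',
    'Name: example',
    'Version: 1.2.post1 ',
]
print(pkg_resources._version_from_file(metadata_lines))  # '1.2.post1'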
VersionConflict.with_context
(self, required_by)
If required_by is non-empty, return a version of self that is a ContextualVersionConflict.
If required_by is non-empty, return a version of self that is a ContextualVersionConflict.
def with_context(self, required_by):
    """
    If required_by is non-empty, return a version of self that is a
    ContextualVersionConflict.
    """
    if not required_by:
        return self
    args = self.args + (required_by,)
    return ContextualVersionConflict(*args)
[ "def", "with_context", "(", "self", ",", "required_by", ")", ":", "if", "not", "required_by", ":", "return", "self", "args", "=", "self", ".", "args", "+", "(", "required_by", ",", ")", "return", "ContextualVersionConflict", "(", "*", "args", ")" ]
[ 278, 4 ]
[ 286, 47 ]
python
en
['en', 'error', 'th']
False
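Usage sketch (editorial; the requirement and the requirer name are invented): with_context upgrades a plain VersionConflict to a ContextualVersionConflict once the requirers are known.

import pkg_resources

dist = pkg_resources.get_distribution('setuptools')      # any installed project works
req = pkg_resources.Requirement.parse('setuptools<0.1')  # deliberately unsatisfiable

err = pkg_resources.VersionConflict(dist, req)
contextual = err.with_context({'example-plugin'})        # set of requirer names, made up
print(type(contextual).__name__)       # ContextualVersionConflict
print(err.with_context(None) is err)   # True: an empty context returns self unchanged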
IMetadataProvider.has_metadata
(name)
Does the package's distribution contain the named metadata?
Does the package's distribution contain the named metadata?
def has_metadata(name):
    """Does the package's distribution contain the named metadata?"""
[ "def", "has_metadata", "(", "name", ")", ":" ]
[ 502, 4 ]
[ 503, 73 ]
python
en
['en', 'en', 'en']
True
IMetadataProvider.get_metadata
(name)
The named metadata resource as a string
The named metadata resource as a string
def get_metadata(name):
    """The named metadata resource as a string"""
[ "def", "get_metadata", "(", "name", ")", ":" ]
[ 505, 4 ]
[ 506, 53 ]
python
en
['en', 'en', 'en']
True
IMetadataProvider.get_metadata_lines
(name)
Yield named metadata resource as list of non-blank non-comment lines Leading and trailing whitespace is stripped from each line, and lines with ``#`` as the first non-blank character are omitted.
Yield named metadata resource as list of non-blank non-comment lines
def get_metadata_lines(name):
    """Yield named metadata resource as list of non-blank non-comment lines

    Leading and trailing whitespace is stripped from each line, and lines
    with ``#`` as the first non-blank character are omitted."""
[ "def", "get_metadata_lines", "(", "name", ")", ":" ]
[ 508, 4 ]
[ 512, 66 ]
python
en
['en', 'en', 'en']
True
IMetadataProvider.metadata_isdir
(name)
Is the named metadata a directory? (like ``os.path.isdir()``)
Is the named metadata a directory? (like ``os.path.isdir()``)
def metadata_isdir(name):
    """Is the named metadata a directory? (like ``os.path.isdir()``)"""
[ "def", "metadata_isdir", "(", "name", ")", ":" ]
[ 514, 4 ]
[ 515, 76 ]
python
en
['en', 'en', 'en']
True
IMetadataProvider.metadata_listdir
(name)
List of metadata names in the directory (like ``os.listdir()``)
List of metadata names in the directory (like ``os.listdir()``)
def metadata_listdir(name):
    """List of metadata names in the directory (like ``os.listdir()``)"""
[ "def", "metadata_listdir", "(", "name", ")", ":" ]
[ 517, 4 ]
[ 518, 77 ]
python
en
['en', 'en', 'en']
True
IMetadataProvider.run_script
(script_name, namespace)
Execute the named script in the supplied namespace dictionary
Execute the named script in the supplied namespace dictionary
def run_script(script_name, namespace):
    """Execute the named script in the supplied namespace dictionary"""
[ "def", "run_script", "(", "script_name", ",", "namespace", ")", ":" ]
[ 520, 4 ]
[ 521, 75 ]
python
en
['en', 'en', 'en']
True
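Usage sketch (editorial): the IMetadataProvider methods above are interface stubs; a concrete provider can be obtained via the public get_provider(), shown here against 'setuptools' as an arbitrary installed example.

import pkg_resources

provider = pkg_resources.get_provider(pkg_resources.Requirement.parse('setuptools'))

# dist-info installs name the core metadata file 'METADATA', egg-info
# installs name it 'PKG-INFO'; probe both so the sketch works for either.
for name in ('METADATA', 'PKG-INFO'):
    if provider.has_metadata(name):
        print(list(provider.get_metadata_lines(name))[:3])
        break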
IResourceProvider.get_resource_filename
(manager, resource_name)
Return a true filesystem path for `resource_name` `manager` must be an ``IResourceManager``
Return a true filesystem path for `resource_name`
def get_resource_filename(manager, resource_name):
    """Return a true filesystem path for `resource_name`

    `manager` must be an ``IResourceManager``"""
[ "def", "get_resource_filename", "(", "manager", ",", "resource_name", ")", ":" ]
[ 527, 4 ]
[ 530, 52 ]
python
en
['en', 'en', 'en']
True
IResourceProvider.get_resource_stream
(manager, resource_name)
Return a readable file-like object for `resource_name` `manager` must be an ``IResourceManager``
Return a readable file-like object for `resource_name`
def get_resource_stream(manager, resource_name):
    """Return a readable file-like object for `resource_name`

    `manager` must be an ``IResourceManager``"""
[ "def", "get_resource_stream", "(", "manager", ",", "resource_name", ")", ":" ]
[ 532, 4 ]
[ 535, 52 ]
python
en
['en', 'en', 'en']
True
IResourceProvider.get_resource_string
(manager, resource_name)
Return a string containing the contents of `resource_name` `manager` must be an ``IResourceManager``
Return a string containing the contents of `resource_name`
def get_resource_string(manager, resource_name):
    """Return a string containing the contents of `resource_name`

    `manager` must be an ``IResourceManager``"""
[ "def", "get_resource_string", "(", "manager", ",", "resource_name", ")", ":" ]
[ 537, 4 ]
[ 540, 52 ]
python
en
['en', 'en', 'en']
True
IResourceProvider.has_resource
(resource_name)
Does the package contain the named resource?
Does the package contain the named resource?
def has_resource(resource_name):
    """Does the package contain the named resource?"""
[ "def", "has_resource", "(", "resource_name", ")", ":" ]
[ 542, 4 ]
[ 543, 58 ]
python
en
['en', 'en', 'en']
True
IResourceProvider.resource_isdir
(resource_name)
Is the named resource a directory? (like ``os.path.isdir()``)
Is the named resource a directory? (like ``os.path.isdir()``)
def resource_isdir(resource_name):
    """Is the named resource a directory? (like ``os.path.isdir()``)"""
[ "def", "resource_isdir", "(", "resource_name", ")", ":" ]
[ 545, 4 ]
[ 546, 76 ]
python
en
['en', 'en', 'en']
True
IResourceProvider.resource_listdir
(resource_name)
List of resource names in the directory (like ``os.listdir()``)
List of resource names in the directory (like ``os.listdir()``)
def resource_listdir(resource_name):
    """List of resource names in the directory (like ``os.listdir()``)"""
[ "def", "resource_listdir", "(", "resource_name", ")", ":" ]
[ 548, 4 ]
[ 549, 77 ]
python
en
['en', 'en', 'en']
True
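Usage sketch (editorial): the IResourceProvider methods above are interface stubs; the module-level resource_* helpers route through a concrete provider, using pkg_resources itself as a convenient example package.

import pkg_resources

if pkg_resources.resource_exists('pkg_resources', '__init__.py'):
    data = pkg_resources.resource_string('pkg_resources', '__init__.py')
    print(len(data))   # size of the resource in bytes

# May be False on setuptools versions that no longer vendor dependencies.
print(pkg_resources.resource_isdir('pkg_resources', '_vendor'))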
WorkingSet.__init__
(self, entries=None)
Create working set from list of path entries (default=sys.path)
Create working set from list of path entries (default=sys.path)
def __init__(self, entries=None):
    """Create working set from list of path entries (default=sys.path)"""
    self.entries = []
    self.entry_keys = {}
    self.by_key = {}
    self.callbacks = []

    if entries is None:
        entries = sys.path

    for entry in entries:
        self.add_entry(entry)
[ "def", "__init__", "(", "self", ",", "entries", "=", "None", ")", ":", "self", ".", "entries", "=", "[", "]", "self", ".", "entry_keys", "=", "{", "}", "self", ".", "by_key", "=", "{", "}", "self", ".", "callbacks", "=", "[", "]", "if", "entries", "is", "None", ":", "entries", "=", "sys", ".", "path", "for", "entry", "in", "entries", ":", "self", ".", "add_entry", "(", "entry", ")" ]
[ 555, 4 ]
[ 566, 33 ]
python
en
['en', 'en', 'en']
True
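Usage sketch (editorial): a WorkingSet can be built from an explicit list of path entries instead of the default sys.path.

import sys
import pkg_resources

ws = pkg_resources.WorkingSet([])        # start from an empty path list
for entry in sys.path:                   # then mirror the interpreter's path
    ws.add_entry(entry)

print(len(ws.entries) == len(sys.path))  # True: entries record what was added, in order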
WorkingSet._build_master
(cls)
Prepare the master working set.
Prepare the master working set.
def _build_master(cls):
    """
    Prepare the master working set.
    """
    ws = cls()
    try:
        from __main__ import __requires__
    except ImportError:
        # The main program does not list any requirements
        return ws

    # ensure the requirements are met
    try:
        ws.require(__requires__)
    except VersionConflict:
        return cls._build_from_requirements(__requires__)

    return ws
[ "def", "_build_master", "(", "cls", ")", ":", "ws", "=", "cls", "(", ")", "try", ":", "from", "__main__", "import", "__requires__", "except", "ImportError", ":", "# The main program does not list any requirements", "return", "ws", "# ensure the requirements are met", "try", ":", "ws", ".", "require", "(", "__requires__", ")", "except", "VersionConflict", ":", "return", "cls", ".", "_build_from_requirements", "(", "__requires__", ")", "return", "ws" ]
[ 569, 4 ]
[ 586, 17 ]
python
en
['en', 'error', 'th']
False
WorkingSet._build_from_requirements
(cls, req_spec)
Build a working set from a requirement spec. Rewrites sys.path.
Build a working set from a requirement spec. Rewrites sys.path.
def _build_from_requirements(cls, req_spec):
    """
    Build a working set from a requirement spec. Rewrites sys.path.
    """
    # try it without defaults already on sys.path
    # by starting with an empty path
    ws = cls([])
    reqs = parse_requirements(req_spec)
    dists = ws.resolve(reqs, Environment())
    for dist in dists:
        ws.add(dist)

    # add any missing entries from sys.path
    for entry in sys.path:
        if entry not in ws.entries:
            ws.add_entry(entry)

    # then copy back to sys.path
    sys.path[:] = ws.entries
    return ws
[ "def", "_build_from_requirements", "(", "cls", ",", "req_spec", ")", ":", "# try it without defaults already on sys.path", "# by starting with an empty path", "ws", "=", "cls", "(", "[", "]", ")", "reqs", "=", "parse_requirements", "(", "req_spec", ")", "dists", "=", "ws", ".", "resolve", "(", "reqs", ",", "Environment", "(", ")", ")", "for", "dist", "in", "dists", ":", "ws", ".", "add", "(", "dist", ")", "# add any missing entries from sys.path", "for", "entry", "in", "sys", ".", "path", ":", "if", "entry", "not", "in", "ws", ".", "entries", ":", "ws", ".", "add_entry", "(", "entry", ")", "# then copy back to sys.path", "sys", ".", "path", "[", ":", "]", "=", "ws", ".", "entries", "return", "ws" ]
[ 589, 4 ]
[ 608, 17 ]
python
en
['en', 'error', 'th']
False
WorkingSet.add_entry
(self, entry)
Add a path item to ``.entries``, finding any distributions on it ``find_distributions(entry, True)`` is used to find distributions corresponding to the path entry, and they are added. `entry` is always appended to ``.entries``, even if it is already present. (This is because ``sys.path`` can contain the same value more than once, and the ``.entries`` of the ``sys.path`` WorkingSet should always equal ``sys.path``.)
Add a path item to ``.entries``, finding any distributions on it
def add_entry(self, entry):
    """Add a path item to ``.entries``, finding any distributions on it

    ``find_distributions(entry, True)`` is used to find distributions
    corresponding to the path entry, and they are added.  `entry` is
    always appended to ``.entries``, even if it is already present.
    (This is because ``sys.path`` can contain the same value more than
    once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
    equal ``sys.path``.)
    """
    self.entry_keys.setdefault(entry, [])
    self.entries.append(entry)
    for dist in find_distributions(entry, True):
        self.add(dist, entry, False)
[ "def", "add_entry", "(", "self", ",", "entry", ")", ":", "self", ".", "entry_keys", ".", "setdefault", "(", "entry", ",", "[", "]", ")", "self", ".", "entries", ".", "append", "(", "entry", ")", "for", "dist", "in", "find_distributions", "(", "entry", ",", "True", ")", ":", "self", ".", "add", "(", "dist", ",", "entry", ",", "False", ")" ]
[ 610, 4 ]
[ 623, 40 ]
python
en
['en', 'en', 'en']
True
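Usage sketch (editorial; the directory is invented): add_entry appends even when the entry is already present, mirroring the fact that sys.path may contain duplicates.

import pkg_resources

ws = pkg_resources.WorkingSet([])
ws.add_entry('/opt/example/libs')
ws.add_entry('/opt/example/libs')
print(ws.entries.count('/opt/example/libs'))  # 2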
WorkingSet.__contains__
(self, dist)
True if `dist` is the active distribution for its project
True if `dist` is the active distribution for its project
def __contains__(self, dist):
    """True if `dist` is the active distribution for its project"""
    return self.by_key.get(dist.key) == dist
[ "def", "__contains__", "(", "self", ",", "dist", ")", ":", "return", "self", ".", "by_key", ".", "get", "(", "dist", ".", "key", ")", "==", "dist" ]
[ 625, 4 ]
[ 627, 48 ]
python
en
['en', 'en', 'en']
True
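Usage sketch (editorial): membership against the global working set tells whether a distribution is the active one for its project; 'setuptools' is just a commonly installed example.

import pkg_resources

dist = pkg_resources.get_distribution('setuptools')
print(dist in pkg_resources.working_set)   # True when it is the active version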