Dataset columns:
- identifier: string (lengths 1 to 155)
- parameters: string (lengths 2 to 6.09k)
- docstring: string (lengths 11 to 63.4k)
- docstring_summary: string (lengths 0 to 63.4k)
- function: string (lengths 29 to 99.8k)
- function_tokens: sequence
- start_point: sequence
- end_point: sequence
- language: string class (1 value)
- docstring_language: string (lengths 2 to 7)
- docstring_language_predictions: string (lengths 18 to 23)
- is_langid_reliable: string class (2 values)
getTreeWalker
(treeType, implementation=None, **kwargs)
Get a TreeWalker class for various types of tree with built-in support :arg str treeType: the name of the tree type required (case-insensitive). Supported values are: * "dom": The xml.dom.minidom DOM implementation * "etree": A generic walker for tree implementations exposing an elementtree-like interface (known to work with ElementTree, cElementTree and lxml.etree). * "lxml": Optimized walker for lxml.etree * "genshi": a Genshi stream :arg implementation: A module implementing the tree type e.g. xml.etree.ElementTree or cElementTree (Currently applies to the "etree" tree type only). :arg kwargs: keyword arguments passed to the etree walker--for other walkers, this has no effect :returns: a TreeWalker class
Get a TreeWalker class for various types of tree with built-in support
def getTreeWalker(treeType, implementation=None, **kwargs):
    """Get a TreeWalker class for various types of tree with built-in support

    :arg str treeType: the name of the tree type required (case-insensitive).
        Supported values are:

        * "dom": The xml.dom.minidom DOM implementation
        * "etree": A generic walker for tree implementations exposing an
          elementtree-like interface (known to work with ElementTree,
          cElementTree and lxml.etree).
        * "lxml": Optimized walker for lxml.etree
        * "genshi": a Genshi stream

    :arg implementation: A module implementing the tree type e.g.
        xml.etree.ElementTree or cElementTree (Currently applies to the "etree"
        tree type only).

    :arg kwargs: keyword arguments passed to the etree walker--for other
        walkers, this has no effect

    :returns: a TreeWalker class
    """
    treeType = treeType.lower()
    if treeType not in treeWalkerCache:
        if treeType == "dom":
            from . import dom
            treeWalkerCache[treeType] = dom.TreeWalker
        elif treeType == "genshi":
            from . import genshi
            treeWalkerCache[treeType] = genshi.TreeWalker
        elif treeType == "lxml":
            from . import etree_lxml
            treeWalkerCache[treeType] = etree_lxml.TreeWalker
        elif treeType == "etree":
            from . import etree
            if implementation is None:
                implementation = default_etree
            # XXX: NEVER cache here, caching is done in the etree submodule
            return etree.getETreeModule(implementation, **kwargs).TreeWalker
    return treeWalkerCache.get(treeType)
[ "def", "getTreeWalker", "(", "treeType", ",", "implementation", "=", "None", ",", "*", "*", "kwargs", ")", ":", "treeType", "=", "treeType", ".", "lower", "(", ")", "if", "treeType", "not", "in", "treeWalkerCache", ":", "if", "treeType", "==", "\"dom\"", ":", "from", ".", "import", "dom", "treeWalkerCache", "[", "treeType", "]", "=", "dom", ".", "TreeWalker", "elif", "treeType", "==", "\"genshi\"", ":", "from", ".", "import", "genshi", "treeWalkerCache", "[", "treeType", "]", "=", "genshi", ".", "TreeWalker", "elif", "treeType", "==", "\"lxml\"", ":", "from", ".", "import", "etree_lxml", "treeWalkerCache", "[", "treeType", "]", "=", "etree_lxml", ".", "TreeWalker", "elif", "treeType", "==", "\"etree\"", ":", "from", ".", "import", "etree", "if", "implementation", "is", "None", ":", "implementation", "=", "default_etree", "# XXX: NEVER cache here, caching is done in the etree submodule", "return", "etree", ".", "getETreeModule", "(", "implementation", ",", "*", "*", "kwargs", ")", ".", "TreeWalker", "return", "treeWalkerCache", ".", "get", "(", "treeType", ")" ]
[ 20, 0 ]
[ 61, 40 ]
python
en
['en', 'en', 'en']
True
pprint
(walker)
Pretty printer for tree walkers Takes a TreeWalker instance and pretty prints the output of walking the tree. :arg walker: a TreeWalker instance
Pretty printer for tree walkers
def pprint(walker):
    """Pretty printer for tree walkers

    Takes a TreeWalker instance and pretty prints the output of walking the tree.

    :arg walker: a TreeWalker instance
    """
    output = []
    indent = 0
    for token in concatenateCharacterTokens(walker):
        type = token["type"]
        if type in ("StartTag", "EmptyTag"):
            # tag name
            if token["namespace"] and token["namespace"] != constants.namespaces["html"]:
                if token["namespace"] in constants.prefixes:
                    ns = constants.prefixes[token["namespace"]]
                else:
                    ns = token["namespace"]
                name = "%s %s" % (ns, token["name"])
            else:
                name = token["name"]
            output.append("%s<%s>" % (" " * indent, name))
            indent += 2
            # attributes (sorted for consistent ordering)
            attrs = token["data"]
            for (namespace, localname), value in sorted(attrs.items()):
                if namespace:
                    if namespace in constants.prefixes:
                        ns = constants.prefixes[namespace]
                    else:
                        ns = namespace
                    name = "%s %s" % (ns, localname)
                else:
                    name = localname
                output.append("%s%s=\"%s\"" % (" " * indent, name, value))
            # self-closing
            if type == "EmptyTag":
                indent -= 2
        elif type == "EndTag":
            indent -= 2
        elif type == "Comment":
            output.append("%s<!-- %s -->" % (" " * indent, token["data"]))
        elif type == "Doctype":
            if token["name"]:
                if token["publicId"]:
                    output.append("""%s<!DOCTYPE %s "%s" "%s">""" %
                                  (" " * indent, token["name"],
                                   token["publicId"],
                                   token["systemId"] if token["systemId"] else ""))
                elif token["systemId"]:
                    output.append("""%s<!DOCTYPE %s "" "%s">""" %
                                  (" " * indent, token["name"],
                                   token["systemId"]))
                else:
                    output.append("%s<!DOCTYPE %s>" % (" " * indent,
                                                       token["name"]))
            else:
                output.append("%s<!DOCTYPE >" % (" " * indent,))
        elif type == "Characters":
            output.append("%s\"%s\"" % (" " * indent, token["data"]))
        elif type == "SpaceCharacters":
            assert False, "concatenateCharacterTokens should have got rid of all Space tokens"
        else:
            raise ValueError("Unknown token type, %s" % type)
    return "\n".join(output)
[ "def", "pprint", "(", "walker", ")", ":", "output", "=", "[", "]", "indent", "=", "0", "for", "token", "in", "concatenateCharacterTokens", "(", "walker", ")", ":", "type", "=", "token", "[", "\"type\"", "]", "if", "type", "in", "(", "\"StartTag\"", ",", "\"EmptyTag\"", ")", ":", "# tag name", "if", "token", "[", "\"namespace\"", "]", "and", "token", "[", "\"namespace\"", "]", "!=", "constants", ".", "namespaces", "[", "\"html\"", "]", ":", "if", "token", "[", "\"namespace\"", "]", "in", "constants", ".", "prefixes", ":", "ns", "=", "constants", ".", "prefixes", "[", "token", "[", "\"namespace\"", "]", "]", "else", ":", "ns", "=", "token", "[", "\"namespace\"", "]", "name", "=", "\"%s %s\"", "%", "(", "ns", ",", "token", "[", "\"name\"", "]", ")", "else", ":", "name", "=", "token", "[", "\"name\"", "]", "output", ".", "append", "(", "\"%s<%s>\"", "%", "(", "\" \"", "*", "indent", ",", "name", ")", ")", "indent", "+=", "2", "# attributes (sorted for consistent ordering)", "attrs", "=", "token", "[", "\"data\"", "]", "for", "(", "namespace", ",", "localname", ")", ",", "value", "in", "sorted", "(", "attrs", ".", "items", "(", ")", ")", ":", "if", "namespace", ":", "if", "namespace", "in", "constants", ".", "prefixes", ":", "ns", "=", "constants", ".", "prefixes", "[", "namespace", "]", "else", ":", "ns", "=", "namespace", "name", "=", "\"%s %s\"", "%", "(", "ns", ",", "localname", ")", "else", ":", "name", "=", "localname", "output", ".", "append", "(", "\"%s%s=\\\"%s\\\"\"", "%", "(", "\" \"", "*", "indent", ",", "name", ",", "value", ")", ")", "# self-closing", "if", "type", "==", "\"EmptyTag\"", ":", "indent", "-=", "2", "elif", "type", "==", "\"EndTag\"", ":", "indent", "-=", "2", "elif", "type", "==", "\"Comment\"", ":", "output", ".", "append", "(", "\"%s<!-- %s -->\"", "%", "(", "\" \"", "*", "indent", ",", "token", "[", "\"data\"", "]", ")", ")", "elif", "type", "==", "\"Doctype\"", ":", "if", "token", "[", "\"name\"", "]", ":", "if", "token", "[", "\"publicId\"", "]", ":", "output", ".", "append", "(", "\"\"\"%s<!DOCTYPE %s \"%s\" \"%s\">\"\"\"", "%", "(", "\" \"", "*", "indent", ",", "token", "[", "\"name\"", "]", ",", "token", "[", "\"publicId\"", "]", ",", "token", "[", "\"systemId\"", "]", "if", "token", "[", "\"systemId\"", "]", "else", "\"\"", ")", ")", "elif", "token", "[", "\"systemId\"", "]", ":", "output", ".", "append", "(", "\"\"\"%s<!DOCTYPE %s \"\" \"%s\">\"\"\"", "%", "(", "\" \"", "*", "indent", ",", "token", "[", "\"name\"", "]", ",", "token", "[", "\"systemId\"", "]", ")", ")", "else", ":", "output", ".", "append", "(", "\"%s<!DOCTYPE %s>\"", "%", "(", "\" \"", "*", "indent", ",", "token", "[", "\"name\"", "]", ")", ")", "else", ":", "output", ".", "append", "(", "\"%s<!DOCTYPE >\"", "%", "(", "\" \"", "*", "indent", ",", ")", ")", "elif", "type", "==", "\"Characters\"", ":", "output", ".", "append", "(", "\"%s\\\"%s\\\"\"", "%", "(", "\" \"", "*", "indent", ",", "token", "[", "\"data\"", "]", ")", ")", "elif", "type", "==", "\"SpaceCharacters\"", ":", "assert", "False", ",", "\"concatenateCharacterTokens should have got rid of all Space tokens\"", "else", ":", "raise", "ValueError", "(", "\"Unknown token type, %s\"", "%", "type", ")", "return", "\"\\n\"", ".", "join", "(", "output", ")" ]
[ 79, 0 ]
[ 153, 28 ]
python
en
['en', 'en', 'en']
True
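Both entries above appear to come from html5lib's treewalkers package. As a minimal usage sketch (assuming html5lib is installed and that these helpers are html5lib.treewalkers.getTreeWalker and html5lib.treewalkers.pprint), parsing a fragment and pretty-printing the walked tree looks like this:

import html5lib
from html5lib import treewalkers

# Parse with the default "etree" tree builder, fetch the matching walker class,
# then pretty-print the token stream produced by walking the tree.
document = html5lib.parse("<p class='note'>Hello <b>world</b></p>")
TreeWalker = treewalkers.getTreeWalker("etree")
print(treewalkers.pprint(TreeWalker(document)))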
Message._prepare
(self)
Prepare the message for serialization by forcing the ``message`` and ``extra_tags`` to str in case they are lazy translations.
Prepare the message for serialization by forcing the ``message`` and ``extra_tags`` to str in case they are lazy translations.
def _prepare(self):
    """
    Prepare the message for serialization by forcing the ``message``
    and ``extra_tags`` to str in case they are lazy translations.
    """
    self.message = str(self.message)
    self.extra_tags = str(self.extra_tags) if self.extra_tags is not None else None
[ "def", "_prepare", "(", "self", ")", ":", "self", ".", "message", "=", "str", "(", "self", ".", "message", ")", "self", ".", "extra_tags", "=", "str", "(", "self", ".", "extra_tags", ")", "if", "self", ".", "extra_tags", "is", "not", "None", "else", "None" ]
[ 18, 4 ]
[ 24, 87 ]
python
en
['en', 'error', 'th']
False
BaseStorage._loaded_messages
(self)
Return a list of loaded messages, retrieving them first if they have not been loaded yet.
Return a list of loaded messages, retrieving them first if they have not been loaded yet.
def _loaded_messages(self):
    """
    Return a list of loaded messages, retrieving them first if they have
    not been loaded yet.
    """
    if not hasattr(self, '_loaded_data'):
        messages, all_retrieved = self._get()
        self._loaded_data = messages or []
    return self._loaded_data
[ "def", "_loaded_messages", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'_loaded_data'", ")", ":", "messages", ",", "all_retrieved", "=", "self", ".", "_get", "(", ")", "self", ".", "_loaded_data", "=", "messages", "or", "[", "]", "return", "self", ".", "_loaded_data" ]
[ 72, 4 ]
[ 80, 32 ]
python
en
['en', 'error', 'th']
False
BaseStorage._get
(self, *args, **kwargs)
Retrieve a list of stored messages. Return a tuple of the messages and a flag indicating whether or not all the messages originally intended to be stored in this storage were, in fact, stored and retrieved; e.g., ``(messages, all_retrieved)``. **This method must be implemented by a subclass.** If it is possible to tell if the backend was not used (as opposed to just containing no messages) then ``None`` should be returned in place of ``messages``.
Retrieve a list of stored messages. Return a tuple of the messages and a flag indicating whether or not all the messages originally intended to be stored in this storage were, in fact, stored and retrieved; e.g., ``(messages, all_retrieved)``.
def _get(self, *args, **kwargs):
    """
    Retrieve a list of stored messages. Return a tuple of the messages
    and a flag indicating whether or not all the messages originally
    intended to be stored in this storage were, in fact, stored and
    retrieved; e.g., ``(messages, all_retrieved)``.

    **This method must be implemented by a subclass.**

    If it is possible to tell if the backend was not used (as opposed to
    just containing no messages) then ``None`` should be returned in
    place of ``messages``.
    """
    raise NotImplementedError('subclasses of BaseStorage must provide a _get() method')
[ "def", "_get", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "raise", "NotImplementedError", "(", "'subclasses of BaseStorage must provide a _get() method'", ")" ]
[ 82, 4 ]
[ 95, 91 ]
python
en
['en', 'error', 'th']
False
BaseStorage._store
(self, messages, response, *args, **kwargs)
Store a list of messages and return a list of any messages which could not be stored. One type of object must be able to be stored, ``Message``. **This method must be implemented by a subclass.**
Store a list of messages and return a list of any messages which could not be stored.
def _store(self, messages, response, *args, **kwargs):
    """
    Store a list of messages and return a list of any messages which could
    not be stored.

    One type of object must be able to be stored, ``Message``.

    **This method must be implemented by a subclass.**
    """
    raise NotImplementedError('subclasses of BaseStorage must provide a _store() method')
[ "def", "_store", "(", "self", ",", "messages", ",", "response", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "raise", "NotImplementedError", "(", "'subclasses of BaseStorage must provide a _store() method'", ")" ]
[ 97, 4 ]
[ 106, 93 ]
python
en
['en', 'error', 'th']
False
BaseStorage._prepare_messages
(self, messages)
Prepare a list of messages for storage.
Prepare a list of messages for storage.
def _prepare_messages(self, messages):
    """
    Prepare a list of messages for storage.
    """
    for message in messages:
        message._prepare()
[ "def", "_prepare_messages", "(", "self", ",", "messages", ")", ":", "for", "message", "in", "messages", ":", "message", ".", "_prepare", "(", ")" ]
[ 108, 4 ]
[ 113, 30 ]
python
en
['en', 'error', 'th']
False
BaseStorage.update
(self, response)
Store all unread messages. If the backend has yet to be iterated, store previously stored messages again. Otherwise, only store messages added after the last iteration.
Store all unread messages.
def update(self, response):
    """
    Store all unread messages.

    If the backend has yet to be iterated, store previously stored messages
    again. Otherwise, only store messages added after the last iteration.
    """
    self._prepare_messages(self._queued_messages)
    if self.used:
        return self._store(self._queued_messages, response)
    elif self.added_new:
        messages = self._loaded_messages + self._queued_messages
        return self._store(messages, response)
[ "def", "update", "(", "self", ",", "response", ")", ":", "self", ".", "_prepare_messages", "(", "self", ".", "_queued_messages", ")", "if", "self", ".", "used", ":", "return", "self", ".", "_store", "(", "self", ".", "_queued_messages", ",", "response", ")", "elif", "self", ".", "added_new", ":", "messages", "=", "self", ".", "_loaded_messages", "+", "self", ".", "_queued_messages", "return", "self", ".", "_store", "(", "messages", ",", "response", ")" ]
[ 115, 4 ]
[ 127, 50 ]
python
en
['en', 'error', 'th']
False
BaseStorage.add
(self, level, message, extra_tags='')
Queue a message to be stored. The message is only queued if it contained something and its level is not less than the recording level (``self.level``).
Queue a message to be stored.
def add(self, level, message, extra_tags=''):
    """
    Queue a message to be stored.

    The message is only queued if it contained something and its level is
    not less than the recording level (``self.level``).
    """
    if not message:
        return
    # Check that the message level is not less than the recording level.
    level = int(level)
    if level < self.level:
        return
    # Add the message.
    self.added_new = True
    message = Message(level, message, extra_tags=extra_tags)
    self._queued_messages.append(message)
[ "def", "add", "(", "self", ",", "level", ",", "message", ",", "extra_tags", "=", "''", ")", ":", "if", "not", "message", ":", "return", "# Check that the message level is not less than the recording level.", "level", "=", "int", "(", "level", ")", "if", "level", "<", "self", ".", "level", ":", "return", "# Add the message.", "self", ".", "added_new", "=", "True", "message", "=", "Message", "(", "level", ",", "message", ",", "extra_tags", "=", "extra_tags", ")", "self", ".", "_queued_messages", ".", "append", "(", "message", ")" ]
[ 129, 4 ]
[ 145, 45 ]
python
en
['en', 'error', 'th']
False
BaseStorage._get_level
(self)
Return the minimum recorded level. The default level is the ``MESSAGE_LEVEL`` setting. If this is not found, the ``INFO`` level is used.
Return the minimum recorded level.
def _get_level(self):
    """
    Return the minimum recorded level.

    The default level is the ``MESSAGE_LEVEL`` setting. If this is not
    found, the ``INFO`` level is used.
    """
    if not hasattr(self, '_level'):
        self._level = getattr(settings, 'MESSAGE_LEVEL', constants.INFO)
    return self._level
[ "def", "_get_level", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'_level'", ")", ":", "self", ".", "_level", "=", "getattr", "(", "settings", ",", "'MESSAGE_LEVEL'", ",", "constants", ".", "INFO", ")", "return", "self", ".", "_level" ]
[ 147, 4 ]
[ 156, 26 ]
python
en
['en', 'error', 'th']
False
BaseStorage._set_level
(self, value=None)
Set a custom minimum recorded level. If set to ``None``, the default level will be used (see the ``_get_level`` method).
Set a custom minimum recorded level.
def _set_level(self, value=None):
    """
    Set a custom minimum recorded level.

    If set to ``None``, the default level will be used (see the
    ``_get_level`` method).
    """
    if value is None and hasattr(self, '_level'):
        del self._level
    else:
        self._level = int(value)
[ "def", "_set_level", "(", "self", ",", "value", "=", "None", ")", ":", "if", "value", "is", "None", "and", "hasattr", "(", "self", ",", "'_level'", ")", ":", "del", "self", ".", "_level", "else", ":", "self", ".", "_level", "=", "int", "(", "value", ")" ]
[ 158, 4 ]
[ 168, 36 ]
python
en
['en', 'error', 'th']
False
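The _get and _store docstrings above both state that a subclass must implement them. As an illustration only (this is not one of Django's shipped backends), a minimal in-memory subclass satisfying that contract might look like the sketch below; the MemoryStorage name and its _saved list are hypothetical.

from django.contrib.messages.storage.base import BaseStorage


class MemoryStorage(BaseStorage):
    """Hypothetical message backend that keeps messages in a plain list."""

    _saved = []  # class-level list standing in for a real backend

    def _get(self, *args, **kwargs):
        # Return the stored messages and report that all of them were retrieved.
        return list(self._saved), True

    def _store(self, messages, response, *args, **kwargs):
        # Keep every message; nothing is left over, so return an empty list.
        type(self)._saved = list(messages)
        return []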
encrypt_int
(message, ekey, n)
Encrypts a message using encryption key 'ekey', working modulo n
Encrypts a message using encryption key 'ekey', working modulo n
def encrypt_int(message, ekey, n):
    """Encrypts a message using encryption key 'ekey', working modulo n"""

    assert_int(message, 'message')
    assert_int(ekey, 'ekey')
    assert_int(n, 'n')

    if message < 0:
        raise ValueError('Only non-negative numbers are supported')

    if message > n:
        raise OverflowError("The message %i is too long for n=%i" % (message, n))

    return pow(message, ekey, n)
[ "def", "encrypt_int", "(", "message", ",", "ekey", ",", "n", ")", ":", "assert_int", "(", "message", ",", "'message'", ")", "assert_int", "(", "ekey", ",", "'ekey'", ")", "assert_int", "(", "n", ",", "'n'", ")", "if", "message", "<", "0", ":", "raise", "ValueError", "(", "'Only non-negative numbers are supported'", ")", "if", "message", ">", "n", ":", "raise", "OverflowError", "(", "\"The message %i is too long for n=%i\"", "%", "(", "message", ",", "n", ")", ")", "return", "pow", "(", "message", ",", "ekey", ",", "n", ")" ]
[ 32, 0 ]
[ 45, 32 ]
python
en
['en', 'cy', 'en']
True
decrypt_int
(cyphertext, dkey, n)
Decrypts a cypher text using the decryption key 'dkey', working modulo n
Decrypts a cypher text using the decryption key 'dkey', working modulo n
def decrypt_int(cyphertext, dkey, n):
    """Decrypts a cypher text using the decryption key 'dkey', working modulo n"""

    assert_int(cyphertext, 'cyphertext')
    assert_int(dkey, 'dkey')
    assert_int(n, 'n')

    message = pow(cyphertext, dkey, n)
    return message
[ "def", "decrypt_int", "(", "cyphertext", ",", "dkey", ",", "n", ")", ":", "assert_int", "(", "cyphertext", ",", "'cyphertext'", ")", "assert_int", "(", "dkey", ",", "'dkey'", ")", "assert_int", "(", "n", ",", "'n'", ")", "message", "=", "pow", "(", "cyphertext", ",", "dkey", ",", "n", ")", "return", "message" ]
[ 48, 0 ]
[ 56, 18 ]
python
en
['en', 'cy', 'en']
True
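encrypt_int and decrypt_int are plain textbook RSA, i.e. modular exponentiation in both directions (padding is handled elsewhere in the library). A round trip with the classic toy key below shows how the two functions relate; these numbers are illustrative only and far too small to be secure.

# Toy RSA key: n = 61 * 53 = 3233, e = 17, d = 413 (the inverse of e mod lcm(60, 52)).
n, e, d = 3233, 17, 413

message = 65
cyphertext = pow(message, e, n)    # what encrypt_int(message, e, n) computes
recovered = pow(cyphertext, d, n)  # what decrypt_int(cyphertext, d, n) computes

assert recovered == message
print(cyphertext, recovered)       # 2790 65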
show_actual_vendor_versions
(vendor_txt_versions: Dict[str, str])
Log the actual version and print extra info if there is a conflict or if the actual version could not be imported.
Log the actual version and print extra info if there is a conflict or if the actual version could not be imported.
def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
    """Log the actual version and print extra info if there is
    a conflict or if the actual version could not be imported.
    """
    for module_name, expected_version in vendor_txt_versions.items():
        extra_message = ''
        actual_version = get_vendor_version_from_module(module_name)
        if not actual_version:
            extra_message = ' (Unable to locate actual module version, using'\
                            ' vendor.txt specified version)'
            actual_version = expected_version
        elif parse_version(actual_version) != parse_version(expected_version):
            extra_message = ' (CONFLICT: vendor.txt suggests version should'\
                            ' be {})'.format(expected_version)
        logger.info('%s==%s%s', module_name, actual_version, extra_message)
[ "def", "show_actual_vendor_versions", "(", "vendor_txt_versions", ":", "Dict", "[", "str", ",", "str", "]", ")", "->", "None", ":", "for", "module_name", ",", "expected_version", "in", "vendor_txt_versions", ".", "items", "(", ")", ":", "extra_message", "=", "''", "actual_version", "=", "get_vendor_version_from_module", "(", "module_name", ")", "if", "not", "actual_version", ":", "extra_message", "=", "' (Unable to locate actual module version, using'", "' vendor.txt specified version)'", "actual_version", "=", "expected_version", "elif", "parse_version", "(", "actual_version", ")", "!=", "parse_version", "(", "expected_version", ")", ":", "extra_message", "=", "' (CONFLICT: vendor.txt suggests version should'", "' be {})'", ".", "format", "(", "expected_version", ")", "logger", ".", "info", "(", "'%s==%s%s'", ",", "module_name", ",", "actual_version", ",", "extra_message", ")" ]
[ 83, 0 ]
[ 97, 75 ]
python
en
['en', 'en', 'en']
True
before_sleep_nothing
(retry_state: "RetryCallState")
Before call strategy that does nothing.
Before call strategy that does nothing.
def before_sleep_nothing(retry_state: "RetryCallState") -> None:
    """Before call strategy that does nothing."""
[ "def", "before_sleep_nothing", "(", "retry_state", ":", "\"RetryCallState\"", ")", "->", "None", ":" ]
[ 26, 0 ]
[ 27, 49 ]
python
en
['en', 'en', 'en']
True
before_sleep_log
( logger: "logging.Logger", log_level: int, exc_info: bool = False, )
Before call strategy that logs to some logger the attempt.
Before call strategy that logs to some logger the attempt.
def before_sleep_log(
    logger: "logging.Logger",
    log_level: int,
    exc_info: bool = False,
) -> typing.Callable[["RetryCallState"], None]:
    """Before call strategy that logs to some logger the attempt."""

    def log_it(retry_state: "RetryCallState") -> None:
        if retry_state.outcome.failed:
            ex = retry_state.outcome.exception()
            verb, value = "raised", f"{ex.__class__.__name__}: {ex}"

            if exc_info:
                local_exc_info = retry_state.outcome.exception()
            else:
                local_exc_info = False
        else:
            verb, value = "returned", retry_state.outcome.result()
            local_exc_info = False  # exc_info does not apply when no exception

        logger.log(
            log_level,
            f"Retrying {_utils.get_callback_name(retry_state.fn)} "
            f"in {retry_state.next_action.sleep} seconds as it {verb} {value}.",
            exc_info=local_exc_info,
        )

    return log_it
[ "def", "before_sleep_log", "(", "logger", ":", "\"logging.Logger\"", ",", "log_level", ":", "int", ",", "exc_info", ":", "bool", "=", "False", ",", ")", "->", "typing", ".", "Callable", "[", "[", "\"RetryCallState\"", "]", ",", "None", "]", ":", "def", "log_it", "(", "retry_state", ":", "\"RetryCallState\"", ")", "->", "None", ":", "if", "retry_state", ".", "outcome", ".", "failed", ":", "ex", "=", "retry_state", ".", "outcome", ".", "exception", "(", ")", "verb", ",", "value", "=", "\"raised\"", ",", "f\"{ex.__class__.__name__}: {ex}\"", "if", "exc_info", ":", "local_exc_info", "=", "retry_state", ".", "outcome", ".", "exception", "(", ")", "else", ":", "local_exc_info", "=", "False", "else", ":", "verb", ",", "value", "=", "\"returned\"", ",", "retry_state", ".", "outcome", ".", "result", "(", ")", "local_exc_info", "=", "False", "# exc_info does not apply when no exception", "logger", ".", "log", "(", "log_level", ",", "f\"Retrying {_utils.get_callback_name(retry_state.fn)} \"", "f\"in {retry_state.next_action.sleep} seconds as it {verb} {value}.\"", ",", "exc_info", "=", "local_exc_info", ",", ")", "return", "log_it" ]
[ 30, 0 ]
[ 57, 17 ]
python
en
['en', 'en', 'en']
True
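before_sleep_log is designed to be passed to tenacity's retry decorator through its before_sleep hook. A small sketch (assuming the tenacity package is installed) could be:

import logging

from tenacity import retry, stop_after_attempt, wait_fixed, before_sleep_log

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


@retry(
    wait=wait_fixed(1),
    stop=stop_after_attempt(3),
    before_sleep=before_sleep_log(logger, logging.INFO),
)
def flaky():
    # Always fails, just to show the log line emitted before each sleep.
    raise RuntimeError("boom")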
UserAttributeKNN.__init__
(self, train_file=None, test_file=None, output_file=None, metadata_file=None, similarity_file=None, k_neighbors=30, rank_length=10, as_binary=False, as_similar_first=True, metadata_as_binary=False, metadata_similarity_sep='\t', similarity_metric="cosine", sep='\t', output_sep='\t')
User Attribute KNN for Item Recommendation This algorithm predicts a rank for each user based on the similar items that his neighbors (similar users) consumed, using a metadata or similarity pre-computed file Usage:: >> UserAttributeKNN(train, test, similarity_file=sim_matrix, as_similar_first=True).compute() >> UserAttributeKNN(train, test, metadata_file=metadata, as_similar_first=True).compute() :param train_file: File which contains the train set. This file needs to have at least 3 columns (user item feedback_value). :type train_file: str :param test_file: File which contains the test set. This file needs to have at least 3 columns (user item feedback_value). :type test_file: str, default None :param output_file: File with dir to write the final predictions :type output_file: str, default None :param metadata_file: File which contains the metadata set. This file needs to have at least 2 columns (user metadata). :type metadata_file: str, default None :param similarity_file: File which contains the similarity set. This file needs to have at least 3 columns (user user similarity). :type similarity_file: str, default None :param k_neighbors: Number of neighbors to use. If None, k_neighbor = int(sqrt(n_users)) :type k_neighbors: int, default None :param rank_length: Size of the rank that must be generated by the predictions of the recommender algorithm :type rank_length: int, default 10 :param as_binary: If True, the explicit feedback will be transformed to binary :type as_binary: bool, default False :param as_similar_first: If True, for each unknown item, which will be predicted, we first look for its k most similar users and then take the intersection with the users that have seen that item. :type as_similar_first: bool, default True :param metadata_as_binary: If True, the explicit value will be transformed to binary :type metadata_as_binary: bool, default False :param metadata_similarity_sep: Delimiter for similarity or metadata file :type metadata_similarity_sep: str, default '\t' :param similarity_metric: :type similarity_metric: str, default cosine :param sep: Delimiter for input files :type sep: str, default '\t' :param output_sep: Delimiter for output file :type output_sep: str, default '\t'
User Attribute KNN for Item Recommendation
def __init__(self, train_file=None, test_file=None, output_file=None, metadata_file=None, similarity_file=None,
             k_neighbors=30, rank_length=10, as_binary=False, as_similar_first=True, metadata_as_binary=False,
             metadata_similarity_sep='\t', similarity_metric="cosine", sep='\t', output_sep='\t'):
    """
    User Attribute KNN for Item Recommendation

    This algorithm predicts a rank for each user based on the similar items that his neighbors
    (similar users) consumed, using a metadata or similarity pre-computed file

    Usage::

        >> UserAttributeKNN(train, test, similarity_file=sim_matrix, as_similar_first=True).compute()
        >> UserAttributeKNN(train, test, metadata_file=metadata, as_similar_first=True).compute()

    :param train_file: File which contains the train set. This file needs to have at least 3 columns
        (user item feedback_value).
    :type train_file: str
    :param test_file: File which contains the test set. This file needs to have at least 3 columns
        (user item feedback_value).
    :type test_file: str, default None
    :param output_file: File with dir to write the final predictions
    :type output_file: str, default None
    :param metadata_file: File which contains the metadata set. This file needs to have at least 2 columns
        (user metadata).
    :type metadata_file: str, default None
    :param similarity_file: File which contains the similarity set. This file needs to have at least 3 columns
        (user user similarity).
    :type similarity_file: str, default None
    :param k_neighbors: Number of neighbors to use. If None, k_neighbor = int(sqrt(n_users))
    :type k_neighbors: int, default None
    :param rank_length: Size of the rank that must be generated by the predictions of the recommender algorithm
    :type rank_length: int, default 10
    :param as_binary: If True, the explicit feedback will be transformed to binary
    :type as_binary: bool, default False
    :param as_similar_first: If True, for each unknown item, which will be predicted, we first look for its k
        most similar users and then take the intersection with the users that have seen that item.
    :type as_similar_first: bool, default True
    :param metadata_as_binary: If True, the explicit value will be transformed to binary
    :type metadata_as_binary: bool, default False
    :param metadata_similarity_sep: Delimiter for similarity or metadata file
    :type metadata_similarity_sep: str, default '\t'
    :param similarity_metric:
    :type similarity_metric: str, default cosine
    :param sep: Delimiter for input files
    :type sep: str, default '\t'
    :param output_sep: Delimiter for output file
    :type output_sep: str, default '\t'
    """

    super(UserAttributeKNN, self).__init__(train_file=train_file, test_file=test_file, output_file=output_file,
                                           k_neighbors=k_neighbors, rank_length=rank_length, as_binary=as_binary,
                                           as_similar_first=as_similar_first, similarity_metric=similarity_metric,
                                           sep=sep, output_sep=output_sep)

    self.recommender_name = 'User Attribute KNN Algorithm'

    self.metadata_file = metadata_file
    self.similarity_file = similarity_file
    self.metadata_as_binary = metadata_as_binary
    self.metadata_similarity_sep = metadata_similarity_sep
[ "def", "__init__", "(", "self", ",", "train_file", "=", "None", ",", "test_file", "=", "None", ",", "output_file", "=", "None", ",", "metadata_file", "=", "None", ",", "similarity_file", "=", "None", ",", "k_neighbors", "=", "30", ",", "rank_length", "=", "10", ",", "as_binary", "=", "False", ",", "as_similar_first", "=", "True", ",", "metadata_as_binary", "=", "False", ",", "metadata_similarity_sep", "=", "'\\t'", ",", "similarity_metric", "=", "\"cosine\"", ",", "sep", "=", "'\\t'", ",", "output_sep", "=", "'\\t'", ")", ":", "super", "(", "UserAttributeKNN", ",", "self", ")", ".", "__init__", "(", "train_file", "=", "train_file", ",", "test_file", "=", "test_file", ",", "output_file", "=", "output_file", ",", "k_neighbors", "=", "k_neighbors", ",", "rank_length", "=", "rank_length", ",", "as_binary", "=", "as_binary", ",", "as_similar_first", "=", "as_similar_first", ",", "similarity_metric", "=", "similarity_metric", ",", "sep", "=", "sep", ",", "output_sep", "=", "output_sep", ")", "self", ".", "recommender_name", "=", "'User Attribute KNN Algorithm'", "self", ".", "metadata_file", "=", "metadata_file", "self", ".", "similarity_file", "=", "similarity_file", "self", ".", "metadata_as_binary", "=", "metadata_as_binary", "self", ".", "metadata_similarity_sep", "=", "metadata_similarity_sep" ]
[ 23, 4 ]
[ 95, 62 ]
python
en
['en', 'error', 'th']
False
UserAttributeKNN.init_model
(self)
Method to fit the model. Create and calculate a similarity matrix by metadata file or a pre-computed similarity matrix
Method to fit the model. Create and calculate a similarity matrix by metadata file or a pre-computed similarity matrix
def init_model(self):
    """
    Method to fit the model. Create and calculate a similarity matrix by metadata file
    or a pre-computed similarity matrix
    """
    for item in self.items:
        for user in self.train_set['users_viewed_item'].get(item, []):
            self.users_id_viewed_item.setdefault(item, []).append(self.user_to_user_id[user])

    # Set the value for k
    if self.k_neighbors is None:
        self.k_neighbors = int(np.sqrt(len(self.users)))

    if self.metadata_file is not None:
        metadata = ReadFile(self.metadata_file, sep=self.metadata_similarity_sep,
                            as_binary=self.metadata_as_binary).read_metadata_or_similarity()

        self.matrix = np.zeros((len(self.users), len(metadata['col_2'])))

        meta_to_meta_id = {}
        for m, data in enumerate(metadata['col_2']):
            meta_to_meta_id[data] = m

        for user_m in metadata['col_1']:
            for m1 in metadata['dict'][user_m]:
                self.matrix[self.user_to_user_id[user_m], meta_to_meta_id[m1]] = metadata['dict'][user_m][m1]

        # create header info for metadata
        sparsity = (1 - (metadata['number_interactions'] / (len(metadata['col_1']) * len(metadata['col_2'])))) * 100

        self.extra_info_header = ">> metadata:: %d users and %d metadata (%d interactions) | sparsity:: %.2f%%" % \
                                 (len(metadata['col_1']), len(metadata['col_2']),
                                  metadata['number_interactions'], sparsity)

        # Create similarity matrix based on metadata or similarity file
        self.su_matrix = self.compute_similarity(transpose=False)

    elif self.similarity_file is not None:
        similarity = ReadFile(self.similarity_file, sep=self.metadata_similarity_sep,
                              as_binary=False).read_metadata_or_similarity()

        self.su_matrix = np.zeros((len(self.users), len(self.users)))

        # Fill similarity matrix
        for u in similarity['col_1']:
            for u_j in similarity['dict'][u]:
                self.su_matrix[self.user_to_user_id[u], self.user_to_user_id[int(u_j)]] = similarity['dict'][u][u_j]

        # Remove NaNs
        self.su_matrix[np.isnan(self.su_matrix)] = 0.0

    else:
        raise ValueError("This algorithm needs a similarity matrix or a metadata file!")

    # Create original matrix user x item for prediction process
    self.create_matrix()
[ "def", "init_model", "(", "self", ")", ":", "for", "item", "in", "self", ".", "items", ":", "for", "user", "in", "self", ".", "train_set", "[", "'users_viewed_item'", "]", ".", "get", "(", "item", ",", "[", "]", ")", ":", "self", ".", "users_id_viewed_item", ".", "setdefault", "(", "item", ",", "[", "]", ")", ".", "append", "(", "self", ".", "user_to_user_id", "[", "user", "]", ")", "# Set the value for k", "if", "self", ".", "k_neighbors", "is", "None", ":", "self", ".", "k_neighbors", "=", "int", "(", "np", ".", "sqrt", "(", "len", "(", "self", ".", "users", ")", ")", ")", "if", "self", ".", "metadata_file", "is", "not", "None", ":", "metadata", "=", "ReadFile", "(", "self", ".", "metadata_file", ",", "sep", "=", "self", ".", "metadata_similarity_sep", ",", "as_binary", "=", "self", ".", "metadata_as_binary", ")", ".", "read_metadata_or_similarity", "(", ")", "self", ".", "matrix", "=", "np", ".", "zeros", "(", "(", "len", "(", "self", ".", "users", ")", ",", "len", "(", "metadata", "[", "'col_2'", "]", ")", ")", ")", "meta_to_meta_id", "=", "{", "}", "for", "m", ",", "data", "in", "enumerate", "(", "metadata", "[", "'col_2'", "]", ")", ":", "meta_to_meta_id", "[", "data", "]", "=", "m", "for", "user_m", "in", "metadata", "[", "'col_1'", "]", ":", "for", "m1", "in", "metadata", "[", "'dict'", "]", "[", "user_m", "]", ":", "self", ".", "matrix", "[", "self", ".", "user_to_user_id", "[", "user_m", "]", ",", "meta_to_meta_id", "[", "m1", "]", "]", "=", "metadata", "[", "'dict'", "]", "[", "user_m", "]", "[", "m1", "]", "# create header info for metadata", "sparsity", "=", "(", "1", "-", "(", "metadata", "[", "'number_interactions'", "]", "/", "(", "len", "(", "metadata", "[", "'col_1'", "]", ")", "*", "len", "(", "metadata", "[", "'col_2'", "]", ")", ")", ")", ")", "*", "100", "self", ".", "extra_info_header", "=", "\">> metadata:: %d users and %d metadata (%d interactions) | sparsity:: %.2f%%\"", "%", "(", "len", "(", "metadata", "[", "'col_1'", "]", ")", ",", "len", "(", "metadata", "[", "'col_2'", "]", ")", ",", "metadata", "[", "'number_interactions'", "]", ",", "sparsity", ")", "# Create similarity matrix based on metadata or similarity file", "self", ".", "su_matrix", "=", "self", ".", "compute_similarity", "(", "transpose", "=", "False", ")", "elif", "self", ".", "similarity_file", "is", "not", "None", ":", "similarity", "=", "ReadFile", "(", "self", ".", "similarity_file", ",", "sep", "=", "self", ".", "metadata_similarity_sep", ",", "as_binary", "=", "False", ")", ".", "read_metadata_or_similarity", "(", ")", "self", ".", "su_matrix", "=", "np", ".", "zeros", "(", "(", "len", "(", "self", ".", "users", ")", ",", "len", "(", "self", ".", "users", ")", ")", ")", "# Fill similarity matrix", "for", "u", "in", "similarity", "[", "'col_1'", "]", ":", "for", "u_j", "in", "similarity", "[", "'dict'", "]", "[", "u", "]", ":", "self", ".", "su_matrix", "[", "self", ".", "user_to_user_id", "[", "u", "]", ",", "self", ".", "user_to_user_id", "[", "int", "(", "u_j", ")", "]", "]", "=", "similarity", "[", "'dict'", "]", "[", "u", "]", "[", "u_j", "]", "# Remove NaNs", "self", ".", "su_matrix", "[", "np", ".", "isnan", "(", "self", ".", "su_matrix", ")", "]", "=", "0.0", "else", ":", "raise", "ValueError", "(", "\"This algorithm needs a similarity matrix or a metadata file!\"", ")", "# Create original matrix user x item for prediction process", "self", ".", "create_matrix", "(", ")" ]
[ 97, 4 ]
[ 154, 28 ]
python
en
['en', 'error', 'th']
False
filter_known_solutions
(known_solutions: List[str], mode: str, task_tier: str)
Filter the list of known solutions according to the mode.
Filter the list of known solutions according to the mode.
def filter_known_solutions(known_solutions: List[str], mode: str,
                           task_tier: str) -> List[str]:
    """Filter the list of known solutions according to the mode."""
    if mode in (PROD_MODE, DEMO_MODE):
        # In prod and demo mode show only stable ball solutions.
        good_codes = [TIER_TO_CODE[t.lower()] for t in PROD_TIERS]
        known_solutions = [
            code for code in known_solutions if code in good_codes
        ]
        if mode == DEMO_MODE:
            # In demo mode show only one solution. In theory it should be a
            # solution for the tier. But if none exists, any solution will work.
            expected_code = TIER_TO_CODE[task_tier.lower()]
            if expected_code in known_solutions:
                return [expected_code]
            else:
                print(f'Warning! No {expected_code} solution found')
                return [known_solutions[0]]
        else:
            return known_solutions
    else:
        return known_solutions
[ "def", "filter_known_solutions", "(", "known_solutions", ":", "List", "[", "str", "]", ",", "mode", ":", "str", ",", "task_tier", ":", "str", ")", "->", "List", "[", "str", "]", ":", "if", "mode", "in", "(", "PROD_MODE", ",", "DEMO_MODE", ")", ":", "# In prod and demo mode show inly stable ball solutions.", "good_codes", "=", "[", "TIER_TO_CODE", "[", "t", ".", "lower", "(", ")", "]", "for", "t", "in", "PROD_TIERS", "]", "known_solutions", "=", "[", "code", "for", "code", "in", "known_solutions", "if", "code", "in", "good_codes", "]", "if", "mode", "==", "DEMO_MODE", ":", "# In demo mode show only one solution. In theory it should be a", "# solution for the tier. But none exists, any solution will work.", "expected_code", "=", "TIER_TO_CODE", "[", "task_tier", ".", "lower", "(", ")", "]", "if", "expected_code", "in", "known_solutions", ":", "return", "[", "expected_code", "]", "else", ":", "print", "(", "f'Warning! No {expected_code} solution found'", ")", "return", "[", "known_solutions", "[", "0", "]", "]", "else", ":", "return", "known_solutions", "else", ":", "return", "known_solutions" ]
[ 405, 0 ]
[ 426, 30 ]
python
en
['en', 'en', 'en']
True
ServiceHandler._initize_task_cache
(self)
Read task list from a pickle.
Read task list from a pickle.
def _initize_task_cache(self):
    """Read task list from a pickle."""
    if self._test_mode:
        self._last_read_timestamp = 0
        self._task_cache = {}
    logging.info('Reading all tasks from a pickle')
    self._task_cache = loader.load_compiled_task_dict()
    if self._config['mode'] != DEV_MODE:
        self._task_cache = {
            key: task
            for key, task in self._task_cache.items()
            if task.tier in PROD_TIERS
        }
    path = str(settings.TASK_DIR / settings.TASK_PICKLE_NAME)
    self._last_read_timestamp = os.path.getmtime(path)
[ "def", "_initize_task_cache", "(", "self", ")", ":", "if", "self", ".", "_test_mode", ":", "self", ".", "_last_read_timestamp", "=", "0", "self", ".", "_task_cache", "=", "{", "}", "logging", ".", "info", "(", "'Reading all tasks for a pickle'", ")", "self", ".", "_task_cache", "=", "loader", ".", "load_compiled_task_dict", "(", ")", "if", "self", ".", "_config", "[", "'mode'", "]", "!=", "DEV_MODE", ":", "self", ".", "_task_cache", "=", "{", "key", ":", "task", "for", "key", ",", "task", "in", "self", ".", "_task_cache", ".", "items", "(", ")", "if", "task", ".", "tier", "in", "PROD_TIERS", "}", "path", "=", "str", "(", "settings", ".", "TASK_DIR", "/", "settings", ".", "TASK_PICKLE_NAME", ")", "self", ".", "_last_read_timestamp", "=", "os", ".", "path", ".", "getmtime", "(", "path", ")" ]
[ 80, 4 ]
[ 94, 58 ]
python
en
['en', 'en', 'en']
True
VendorImporter.search_path
(self)
Search first the vendor package then as a natural package.
Search first the vendor package then as a natural package.
def search_path(self):
    """
    Search first the vendor package then as a natural package.
    """
    yield self.vendor_pkg + '.'
    yield ''
[ "def", "search_path", "(", "self", ")", ":", "yield", "self", ".", "vendor_pkg", "+", "'.'", "yield", "''" ]
[ 15, 4 ]
[ 20, 16 ]
python
en
['en', 'error', 'th']
False
VendorImporter.find_module
(self, fullname, path=None)
Return self when fullname starts with root_name and the target module is one vendored through this importer.
Return self when fullname starts with root_name and the target module is one vendored through this importer.
def find_module(self, fullname, path=None):
    """
    Return self when fullname starts with root_name and the
    target module is one vendored through this importer.
    """
    root, base, target = fullname.partition(self.root_name + '.')
    if root:
        return
    if not any(map(target.startswith, self.vendored_names)):
        return
    return self
[ "def", "find_module", "(", "self", ",", "fullname", ",", "path", "=", "None", ")", ":", "root", ",", "base", ",", "target", "=", "fullname", ".", "partition", "(", "self", ".", "root_name", "+", "'.'", ")", "if", "root", ":", "return", "if", "not", "any", "(", "map", "(", "target", ".", "startswith", ",", "self", ".", "vendored_names", ")", ")", ":", "return", "return", "self" ]
[ 22, 4 ]
[ 32, 19 ]
python
en
['en', 'error', 'th']
False
VendorImporter.load_module
(self, fullname)
Iterate over the search path to locate and load fullname.
Iterate over the search path to locate and load fullname.
def load_module(self, fullname):
    """
    Iterate over the search path to locate and load fullname.
    """
    root, base, target = fullname.partition(self.root_name + '.')
    for prefix in self.search_path:
        try:
            extant = prefix + target
            __import__(extant)
            mod = sys.modules[extant]
            sys.modules[fullname] = mod
            # mysterious hack:
            # Remove the reference to the extant package/module
            # on later Python versions to cause relative imports
            # in the vendor package to resolve the same modules
            # as those going through this importer.
            if sys.version_info > (3, 3):
                del sys.modules[extant]
            return mod
        except ImportError:
            pass
    else:
        raise ImportError(
            "The '{target}' package is required; "
            "normally this is bundled with this package so if you get "
            "this warning, consult the packager of your "
            "distribution.".format(**locals())
        )
[ "def", "load_module", "(", "self", ",", "fullname", ")", ":", "root", ",", "base", ",", "target", "=", "fullname", ".", "partition", "(", "self", ".", "root_name", "+", "'.'", ")", "for", "prefix", "in", "self", ".", "search_path", ":", "try", ":", "extant", "=", "prefix", "+", "target", "__import__", "(", "extant", ")", "mod", "=", "sys", ".", "modules", "[", "extant", "]", "sys", ".", "modules", "[", "fullname", "]", "=", "mod", "# mysterious hack:", "# Remove the reference to the extant package/module", "# on later Python versions to cause relative imports", "# in the vendor package to resolve the same modules", "# as those going through this importer.", "if", "sys", ".", "version_info", ">", "(", "3", ",", "3", ")", ":", "del", "sys", ".", "modules", "[", "extant", "]", "return", "mod", "except", "ImportError", ":", "pass", "else", ":", "raise", "ImportError", "(", "\"The '{target}' package is required; \"", "\"normally this is bundled with this package so if you get \"", "\"this warning, consult the packager of your \"", "\"distribution.\"", ".", "format", "(", "*", "*", "locals", "(", ")", ")", ")" ]
[ 34, 4 ]
[ 61, 13 ]
python
en
['en', 'error', 'th']
False
VendorImporter.install
(self)
Install this importer into sys.meta_path if not already present.
Install this importer into sys.meta_path if not already present.
def install(self):
    """
    Install this importer into sys.meta_path if not already present.
    """
    if self not in sys.meta_path:
        sys.meta_path.append(self)
[ "def", "install", "(", "self", ")", ":", "if", "self", "not", "in", "sys", ".", "meta_path", ":", "sys", ".", "meta_path", ".", "append", "(", "self", ")" ]
[ 63, 4 ]
[ 68, 38 ]
python
en
['en', 'error', 'th']
False
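The VendorImporter methods above follow the pattern that setuptools and pkg_resources use for their bundled _vendor packages. A hedged sketch of how such an importer is typically wired up is shown below; the package names and the constructor signature VendorImporter(root_name, vendored_names, vendor_pkg) are assumptions based on that pattern, not something stated in the records above.

# Hypothetical layout: mypkg/_vendor/ holds bundled copies of 'six' and
# 'packaging', and mypkg/extern/__init__.py defines VendorImporter as above.
names = 'packaging', 'six'
VendorImporter(__name__, names, 'mypkg._vendor').install()

# After install(), `import mypkg.extern.six` resolves to the vendored copy when
# present and falls back to a top-level 'six' installation otherwise.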
MLEngineHook.normalize_mlengine_job_id
(self, job_id)
Replaces invalid MLEngine job_id characters with '_'. This also adds a leading 'z' in case job_id starts with an invalid character. Args: job_id: A job_id str that may have invalid characters. Returns: A valid job_id representation.
Replaces invalid MLEngine job_id characters with '_'.
def normalize_mlengine_job_id(self, job_id):
    """Replaces invalid MLEngine job_id characters with '_'.

    This also adds a leading 'z' in case job_id starts with an invalid
    character.

    Args:
      job_id: A job_id str that may have invalid characters.

    Returns:
      A valid job_id representation.
    """
    match = re.search(r'\d', job_id)
    if match and match.start() == 0:
        job_id = 'z_{}'.format(job_id)
    return re.sub('[^0-9a-zA-Z]+', '_', job_id)
[ "def", "normalize_mlengine_job_id", "(", "self", ",", "job_id", ")", ":", "match", "=", "re", ".", "search", "(", "r'\\d'", ",", "job_id", ")", "if", "match", "and", "match", ".", "start", "(", ")", "is", "0", ":", "job_id", "=", "'z_{}'", ".", "format", "(", "job_id", ")", "return", "re", ".", "sub", "(", "'[^0-9a-zA-Z]+'", ",", "'_'", ",", "job_id", ")" ]
[ 38, 2 ]
[ 53, 47 ]
python
en
['en', 'en', 'en']
True
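The normalization above is only two regex operations, so its effect is easy to see on a couple of inputs. This standalone snippet reproduces the same logic outside the hook class:

import re


def normalize_job_id(job_id):
    # Prefix with 'z_' when the id starts with a digit, then squash every run of
    # characters outside [0-9a-zA-Z] into a single underscore.
    match = re.search(r'\d', job_id)
    if match and match.start() == 0:
        job_id = 'z_{}'.format(job_id)
    return re.sub('[^0-9a-zA-Z]+', '_', job_id)


print(normalize_job_id('3-training.job'))  # z_3_training_job
print(normalize_job_id('my job/2018'))     # my_job_2018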
MLEngineHook.get_conn
(self)
Returns a Google MLEngine service object.
Returns a Google MLEngine service object.
def get_conn(self):
    """Returns a Google MLEngine service object."""
    credentials = GoogleCredentials.get_application_default()
    return build('ml', 'v1', credentials=credentials)
[ "def", "get_conn", "(", "self", ")", ":", "credentials", "=", "GoogleCredentials", ".", "get_application_default", "(", ")", "return", "build", "(", "'ml'", ",", "'v1'", ",", "credentials", "=", "credentials", ")" ]
[ 55, 2 ]
[ 58, 53 ]
python
en
['en', 'cs', 'en']
True
MLEngineHook.create_job
(self, project_id, job, use_existing_job_fn=None)
Launches an MLEngine job and waits for it to reach a terminal state. Args: project_id: project id job: job name use_existing_job_fn: existing job to use Returns: The MLEngine job object if the job successfully reaches a terminal state (which might be FAILED or CANCELLED state).
Launches an MLEngine job and waits for it to reach a terminal state.
def create_job(self, project_id, job, use_existing_job_fn=None):
    """Launches an MLEngine job and waits for it to reach a terminal state.

    Args:
      project_id: project id
      job: job name
      use_existing_job_fn: existing job to use

    Returns:
      The MLEngine job object if the job successfully reaches a terminal
      state (which might be FAILED or CANCELLED state).
    """
    request = self._mlengine.projects().jobs().create(
        parent='projects/{}'.format(project_id),
        body=job)
    job_id = job['jobId']

    try:
        request.execute()
    except errors.HttpError as e:
        # 409 means there is an existing job with the same job ID.
        if e.resp.status == 409:
            if use_existing_job_fn is not None:
                existing_job = self._get_job(project_id, job_id)
                if not use_existing_job_fn(existing_job):
                    logging.error(
                        'Job with job_id %s already exists, but it does '
                        'not match our expectation: %s',
                        job_id, existing_job
                    )
                    raise
            logging.info(
                'Job with job_id %s already exists. Will wait for it to finish.',
                job_id
            )
        else:
            logging.error('Failed to create MLEngine job: %s', e)
            raise

    return self._wait_for_job_done(project_id, job_id)
[ "def", "create_job", "(", "self", ",", "project_id", ",", "job", ",", "use_existing_job_fn", "=", "None", ")", ":", "request", "=", "self", ".", "_mlengine", ".", "projects", "(", ")", ".", "jobs", "(", ")", ".", "create", "(", "parent", "=", "'projects/{}'", ".", "format", "(", "project_id", ")", ",", "body", "=", "job", ")", "job_id", "=", "job", "[", "'jobId'", "]", "try", ":", "request", ".", "execute", "(", ")", "except", "errors", ".", "HttpError", "as", "e", ":", "# 409 means there is an existing job with the same job ID.", "if", "e", ".", "resp", ".", "status", "==", "409", ":", "if", "use_existing_job_fn", "is", "not", "None", ":", "existing_job", "=", "self", ".", "_get_job", "(", "project_id", ",", "job_id", ")", "if", "not", "use_existing_job_fn", "(", "existing_job", ")", ":", "logging", ".", "error", "(", "'Job with job_id %s already exist, but it does '", "'not match our expectation: %s'", ",", "job_id", ",", "existing_job", ")", "raise", "logging", ".", "info", "(", "'Job with job_id %s already exist. Will waiting for it to finish'", ",", "job_id", ")", "else", ":", "logging", ".", "error", "(", "'Failed to create MLEngine job: %s'", ",", "e", ")", "raise", "return", "self", ".", "_wait_for_job_done", "(", "project_id", ",", "job_id", ")" ]
[ 60, 2 ]
[ 99, 54 ]
python
en
['en', 'en', 'en']
True
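Despite the docstring calling it a "job name", the job argument is the raw REST body of an AI Platform / ML Engine job (the code reads job['jobId'] and posts the whole dict as the request body). A hedged sketch of a training-job body follows; the field values are placeholders, and hook is assumed to be an already-constructed MLEngineHook.

training_job = {
    'jobId': 'my_model_train_20180101',
    'trainingInput': {
        'scaleTier': 'BASIC',
        'packageUris': ['gs://my-bucket/trainer-0.1.tar.gz'],
        'pythonModule': 'trainer.task',
        'region': 'us-central1',
    },
}

# hook = MLEngineHook(...)  # assumed to be set up elsewhere
finished_job = hook.create_job('my-gcp-project', training_job)
print(finished_job['state'])  # SUCCEEDED, FAILED or CANCELLED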
MLEngineHook._get_job
(self, project_id, job_id)
Gets an MLEngine job based on the job name. Args: project_id: project id job_id: job id Returns: MLEngine job object if it succeeds. Raises: apiclient.errors.HttpError: if HTTP error is returned from server
Gets an MLEngine job based on the job name.
def _get_job(self, project_id, job_id):
    """Gets an MLEngine job based on the job name.

    Args:
      project_id: project id
      job_id: job id

    Returns:
      MLEngine job object if it succeeds.

    Raises:
      apiclient.errors.HttpError: if HTTP error is returned from server
    """
    job_name = 'projects/{}/jobs/{}'.format(project_id, job_id)
    request = self._mlengine.projects().jobs().get(name=job_name)
    while True:
        try:
            return request.execute()
        except errors.HttpError as e:
            if e.resp.status == 429:
                # polling after 30 seconds when quota failure occurs
                time.sleep(30)
            else:
                logging.error('Failed to get MLEngine job: %s', e)
                raise
[ "def", "_get_job", "(", "self", ",", "project_id", ",", "job_id", ")", ":", "job_name", "=", "'projects/{}/jobs/{}'", ".", "format", "(", "project_id", ",", "job_id", ")", "request", "=", "self", ".", "_mlengine", ".", "projects", "(", ")", ".", "jobs", "(", ")", ".", "get", "(", "name", "=", "job_name", ")", "while", "True", ":", "try", ":", "return", "request", ".", "execute", "(", ")", "except", "errors", ".", "HttpError", "as", "e", ":", "if", "e", ".", "resp", ".", "status", "==", "429", ":", "# polling after 30 seconds when quota failure occurs", "time", ".", "sleep", "(", "30", ")", "else", ":", "logging", ".", "error", "(", "'Failed to get MLEngine job: %s'", ",", "e", ")", "raise" ]
[ 101, 2 ]
[ 125, 15 ]
python
en
['en', 'ceb', 'en']
True
MLEngineHook._wait_for_job_done
(self, project_id, job_id, interval=30)
Waits for the Job to reach a terminal state. This method will periodically check the job state until the job reaches a terminal state. Args: project_id: project id job_id: job id interval: check interval in seconds Returns: MLEngine job object if it succeeds. Raises: apiclient.errors.HttpError: if HTTP error is returned when getting the job
Waits for the Job to reach a terminal state.
def _wait_for_job_done(self, project_id, job_id, interval=30):
    """Waits for the Job to reach a terminal state.

    This method will periodically check the job state until the job reaches
    a terminal state.

    Args:
      project_id: project id
      job_id: job id
      interval: check interval in seconds

    Returns:
      MLEngine job object if it succeeds.

    Raises:
      apiclient.errors.HttpError: if HTTP error is returned when getting
      the job
    """
    assert interval > 0
    while True:
        job = self._get_job(project_id, job_id)
        if job['state'] in ['SUCCEEDED', 'FAILED', 'CANCELLED']:
            return job
        time.sleep(interval)
[ "def", "_wait_for_job_done", "(", "self", ",", "project_id", ",", "job_id", ",", "interval", "=", "30", ")", ":", "assert", "interval", ">", "0", "while", "True", ":", "job", "=", "self", ".", "_get_job", "(", "project_id", ",", "job_id", ")", "if", "job", "[", "'state'", "]", "in", "[", "'SUCCEEDED'", ",", "'FAILED'", ",", "'CANCELLED'", "]", ":", "return", "job", "time", ".", "sleep", "(", "interval", ")" ]
[ 127, 2 ]
[ 150, 26 ]
python
en
['en', 'en', 'en']
True
_subst_vars
(path, local_vars)
In the string `path`, replace tokens like {some.thing} with the corresponding value from the map `local_vars`. If there is no corresponding value, leave the token unchanged.
In the string `path`, replace tokens like {some.thing} with the corresponding value from the map `local_vars`.
def _subst_vars(path, local_vars):
    """In the string `path`, replace tokens like {some.thing} with the
    corresponding value from the map `local_vars`.

    If there is no corresponding value, leave the token unchanged.
    """
    def _replacer(matchobj):
        name = matchobj.group(1)
        if name in local_vars:
            return local_vars[name]
        elif name in os.environ:
            return os.environ[name]
        return matchobj.group(0)

    return _VAR_REPL.sub(_replacer, path)
[ "def", "_subst_vars", "(", "path", ",", "local_vars", ")", ":", "def", "_replacer", "(", "matchobj", ")", ":", "name", "=", "matchobj", ".", "group", "(", "1", ")", "if", "name", "in", "local_vars", ":", "return", "local_vars", "[", "name", "]", "elif", "name", "in", "os", ".", "environ", ":", "return", "os", ".", "environ", "[", "name", "]", "return", "matchobj", ".", "group", "(", "0", ")", "return", "_VAR_REPL", ".", "sub", "(", "_replacer", ",", "path", ")" ]
[ 130, 0 ]
[ 143, 41 ]
python
en
['en', 'en', 'en']
True
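_subst_vars relies on a module-level _VAR_REPL pattern that is not shown in this record. Assuming it matches {name} tokens, a self-contained approximation of the behaviour is:

import os
import re

_VAR_REPL = re.compile(r'\{([^{]*?)\}')  # assumed shape of the module-level pattern


def subst_vars(path, local_vars):
    def _replacer(matchobj):
        name = matchobj.group(1)
        if name in local_vars:
            return local_vars[name]
        elif name in os.environ:
            return os.environ[name]
        return matchobj.group(0)  # unknown token: leave it unchanged
    return _VAR_REPL.sub(_replacer, path)


print(subst_vars('{base}/lib/{py.version}', {'base': '/usr'}))
# -> /usr/lib/{py.version}  ({py.version} is not defined, so it is left as-is)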
_parse_makefile
(filename, vars=None)
Parse a Makefile-style file. A dictionary containing name/value pairs is returned. If an optional dictionary is passed in as the second argument, it is used instead of a new dictionary.
Parse a Makefile-style file.
def _parse_makefile(filename, vars=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned. If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    # Regexes needed for parsing Makefile (and similar syntaxes,
    # like old-style Setup files).
    _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
    _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
    _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")

    if vars is None:
        vars = {}
    done = {}
    notdone = {}

    with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f:
        lines = f.readlines()

    for line in lines:
        if line.startswith('#') or line.strip() == '':
            continue
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            # `$$' is a literal `$' in make
            tmpv = v.replace('$$', '')

            if "$" in tmpv:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    # insert literal `$'
                    done[n] = v.replace('$$', '$')
                else:
                    done[n] = v

    # do variable interpolation here
    variables = list(notdone.keys())

    # Variables with a 'PY_' prefix in the makefile. These need to
    # be made available without that prefix through sysconfig.
    # Special care is needed to ensure that variable expansion works, even
    # if the expansion uses the name without a prefix.
    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')

    while len(variables) > 0:
        for name in tuple(variables):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m is not None:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]
                elif n in renamed_variables:
                    if (name.startswith('PY_') and
                            name[3:] in renamed_variables):
                        item = ""
                    elif 'PY_' + n in notdone:
                        found = False
                    else:
                        item = str(done['PY_' + n])
                else:
                    done[n] = item = ""
                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        variables.remove(name)
                        if (name.startswith('PY_') and
                                name[3:] in renamed_variables):
                            name = name[3:]
                            if name not in done:
                                done[name] = value
            else:
                # bogus variable reference (e.g. "prefix=$/opt/python");
                # just drop it since we can't deal
                done[name] = value
                variables.remove(name)

    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()

    # save the results in the global dictionary
    vars.update(done)
    return vars
[ "def", "_parse_makefile", "(", "filename", ",", "vars", "=", "None", ")", ":", "# Regexes needed for parsing Makefile (and similar syntaxes,", "# like old-style Setup files).", "_variable_rx", "=", "re", ".", "compile", "(", "r\"([a-zA-Z][a-zA-Z0-9_]+)\\s*=\\s*(.*)\"", ")", "_findvar1_rx", "=", "re", ".", "compile", "(", "r\"\\$\\(([A-Za-z][A-Za-z0-9_]*)\\)\"", ")", "_findvar2_rx", "=", "re", ".", "compile", "(", "r\"\\${([A-Za-z][A-Za-z0-9_]*)}\"", ")", "if", "vars", "is", "None", ":", "vars", "=", "{", "}", "done", "=", "{", "}", "notdone", "=", "{", "}", "with", "codecs", ".", "open", "(", "filename", ",", "encoding", "=", "'utf-8'", ",", "errors", "=", "\"surrogateescape\"", ")", "as", "f", ":", "lines", "=", "f", ".", "readlines", "(", ")", "for", "line", "in", "lines", ":", "if", "line", ".", "startswith", "(", "'#'", ")", "or", "line", ".", "strip", "(", ")", "==", "''", ":", "continue", "m", "=", "_variable_rx", ".", "match", "(", "line", ")", "if", "m", ":", "n", ",", "v", "=", "m", ".", "group", "(", "1", ",", "2", ")", "v", "=", "v", ".", "strip", "(", ")", "# `$$' is a literal `$' in make", "tmpv", "=", "v", ".", "replace", "(", "'$$'", ",", "''", ")", "if", "\"$\"", "in", "tmpv", ":", "notdone", "[", "n", "]", "=", "v", "else", ":", "try", ":", "v", "=", "int", "(", "v", ")", "except", "ValueError", ":", "# insert literal `$'", "done", "[", "n", "]", "=", "v", ".", "replace", "(", "'$$'", ",", "'$'", ")", "else", ":", "done", "[", "n", "]", "=", "v", "# do variable interpolation here", "variables", "=", "list", "(", "notdone", ".", "keys", "(", ")", ")", "# Variables with a 'PY_' prefix in the makefile. These need to", "# be made available without that prefix through sysconfig.", "# Special care is needed to ensure that variable expansion works, even", "# if the expansion uses the name without a prefix.", "renamed_variables", "=", "(", "'CFLAGS'", ",", "'LDFLAGS'", ",", "'CPPFLAGS'", ")", "while", "len", "(", "variables", ")", ">", "0", ":", "for", "name", "in", "tuple", "(", "variables", ")", ":", "value", "=", "notdone", "[", "name", "]", "m", "=", "_findvar1_rx", ".", "search", "(", "value", ")", "or", "_findvar2_rx", ".", "search", "(", "value", ")", "if", "m", "is", "not", "None", ":", "n", "=", "m", ".", "group", "(", "1", ")", "found", "=", "True", "if", "n", "in", "done", ":", "item", "=", "str", "(", "done", "[", "n", "]", ")", "elif", "n", "in", "notdone", ":", "# get it on a subsequent round", "found", "=", "False", "elif", "n", "in", "os", ".", "environ", ":", "# do it like make: fall back to environment", "item", "=", "os", ".", "environ", "[", "n", "]", "elif", "n", "in", "renamed_variables", ":", "if", "(", "name", ".", "startswith", "(", "'PY_'", ")", "and", "name", "[", "3", ":", "]", "in", "renamed_variables", ")", ":", "item", "=", "\"\"", "elif", "'PY_'", "+", "n", "in", "notdone", ":", "found", "=", "False", "else", ":", "item", "=", "str", "(", "done", "[", "'PY_'", "+", "n", "]", ")", "else", ":", "done", "[", "n", "]", "=", "item", "=", "\"\"", "if", "found", ":", "after", "=", "value", "[", "m", ".", "end", "(", ")", ":", "]", "value", "=", "value", "[", ":", "m", ".", "start", "(", ")", "]", "+", "item", "+", "after", "if", "\"$\"", "in", "after", ":", "notdone", "[", "name", "]", "=", "value", "else", ":", "try", ":", "value", "=", "int", "(", "value", ")", "except", "ValueError", ":", "done", "[", "name", "]", "=", "value", ".", "strip", "(", ")", "else", ":", "done", "[", "name", "]", "=", "value", "variables", ".", "remove", "(", "name", ")", 
"if", "(", "name", ".", "startswith", "(", "'PY_'", ")", "and", "name", "[", "3", ":", "]", "in", "renamed_variables", ")", ":", "name", "=", "name", "[", "3", ":", "]", "if", "name", "not", "in", "done", ":", "done", "[", "name", "]", "=", "value", "else", ":", "# bogus variable reference (e.g. \"prefix=$/opt/python\");", "# just drop it since we can't deal", "done", "[", "name", "]", "=", "value", "variables", ".", "remove", "(", "name", ")", "# strip spurious spaces", "for", "k", ",", "v", "in", "done", ".", "items", "(", ")", ":", "if", "isinstance", "(", "v", ",", "str", ")", ":", "done", "[", "k", "]", "=", "v", ".", "strip", "(", ")", "# save the results in the global dictionary", "vars", ".", "update", "(", "done", ")", "return", "vars" ]
[ 212, 0 ]
[ 327, 15 ]
python
en
['en', 'en', 'en']
True
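A short usage sketch for the `_parse_makefile` record above; the function is private, so this assumes the definition is in scope. It shows `$(VAR)` interpolation and the `$$` escape producing plain name/value pairs.

import tempfile
import textwrap

# Assuming _parse_makefile from the record above is defined or importable in scope.
with tempfile.NamedTemporaryFile('w', suffix='.mk', delete=False) as f:
    f.write(textwrap.dedent("""\
        BASECFLAGS = -Wall
        CFLAGS = $(BASECFLAGS) -O2
        PRICE = $$5
    """))
    makefile_path = f.name

print(_parse_makefile(makefile_path))
# Expected: {'BASECFLAGS': '-Wall', 'CFLAGS': '-Wall -O2', 'PRICE': '$5'}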
get_makefile_filename
()
Return the path of the Makefile.
Return the path of the Makefile.
def get_makefile_filename(): """Return the path of the Makefile.""" if _PYTHON_BUILD: return os.path.join(_PROJECT_BASE, "Makefile") if hasattr(sys, 'abiflags'): config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags) else: config_dir_name = 'config' return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')
[ "def", "get_makefile_filename", "(", ")", ":", "if", "_PYTHON_BUILD", ":", "return", "os", ".", "path", ".", "join", "(", "_PROJECT_BASE", ",", "\"Makefile\"", ")", "if", "hasattr", "(", "sys", ",", "'abiflags'", ")", ":", "config_dir_name", "=", "'config-%s%s'", "%", "(", "_PY_VERSION_SHORT", ",", "sys", ".", "abiflags", ")", "else", ":", "config_dir_name", "=", "'config'", "return", "os", ".", "path", ".", "join", "(", "get_path", "(", "'stdlib'", ")", ",", "config_dir_name", ",", "'Makefile'", ")" ]
[ 330, 0 ]
[ 338, 72 ]
python
en
['en', 'en', 'en']
True
_init_posix
(vars)
Initialize the module as appropriate for POSIX systems.
Initialize the module as appropriate for POSIX systems.
def _init_posix(vars): """Initialize the module as appropriate for POSIX systems.""" # load the installed Makefile: makefile = get_makefile_filename() try: _parse_makefile(makefile, vars) except IOError as e: msg = "invalid Python installation: unable to open %s" % makefile if hasattr(e, "strerror"): msg = msg + " (%s)" % e.strerror raise IOError(msg) # load the installed pyconfig.h: config_h = get_config_h_filename() try: with open(config_h) as f: parse_config_h(f, vars) except IOError as e: msg = "invalid Python installation: unable to open %s" % config_h if hasattr(e, "strerror"): msg = msg + " (%s)" % e.strerror raise IOError(msg) # On AIX, there are wrong paths to the linker scripts in the Makefile # -- these paths are relative to the Python source, but when installed # the scripts are in another directory. if _PYTHON_BUILD: vars['LDSHARED'] = vars['BLDSHARED']
[ "def", "_init_posix", "(", "vars", ")", ":", "# load the installed Makefile:", "makefile", "=", "get_makefile_filename", "(", ")", "try", ":", "_parse_makefile", "(", "makefile", ",", "vars", ")", "except", "IOError", "as", "e", ":", "msg", "=", "\"invalid Python installation: unable to open %s\"", "%", "makefile", "if", "hasattr", "(", "e", ",", "\"strerror\"", ")", ":", "msg", "=", "msg", "+", "\" (%s)\"", "%", "e", ".", "strerror", "raise", "IOError", "(", "msg", ")", "# load the installed pyconfig.h:", "config_h", "=", "get_config_h_filename", "(", ")", "try", ":", "with", "open", "(", "config_h", ")", "as", "f", ":", "parse_config_h", "(", "f", ",", "vars", ")", "except", "IOError", "as", "e", ":", "msg", "=", "\"invalid Python installation: unable to open %s\"", "%", "config_h", "if", "hasattr", "(", "e", ",", "\"strerror\"", ")", ":", "msg", "=", "msg", "+", "\" (%s)\"", "%", "e", ".", "strerror", "raise", "IOError", "(", "msg", ")", "# On AIX, there are wrong paths to the linker scripts in the Makefile", "# -- these paths are relative to the Python source, but when installed", "# the scripts are in another directory.", "if", "_PYTHON_BUILD", ":", "vars", "[", "'LDSHARED'", "]", "=", "vars", "[", "'BLDSHARED'", "]" ]
[ 341, 0 ]
[ 366, 44 ]
python
en
['en', 'en', 'en']
True
_init_non_posix
(vars)
Initialize the module as appropriate for NT
Initialize the module as appropriate for NT
def _init_non_posix(vars): """Initialize the module as appropriate for NT""" # set basic install directories vars['LIBDEST'] = get_path('stdlib') vars['BINLIBDEST'] = get_path('platstdlib') vars['INCLUDEPY'] = get_path('include') vars['SO'] = '.pyd' vars['EXE'] = '.exe' vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))
[ "def", "_init_non_posix", "(", "vars", ")", ":", "# set basic install directories", "vars", "[", "'LIBDEST'", "]", "=", "get_path", "(", "'stdlib'", ")", "vars", "[", "'BINLIBDEST'", "]", "=", "get_path", "(", "'platstdlib'", ")", "vars", "[", "'INCLUDEPY'", "]", "=", "get_path", "(", "'include'", ")", "vars", "[", "'SO'", "]", "=", "'.pyd'", "vars", "[", "'EXE'", "]", "=", "'.exe'", "vars", "[", "'VERSION'", "]", "=", "_PY_VERSION_SHORT_NO_DOT", "vars", "[", "'BINDIR'", "]", "=", "os", ".", "path", ".", "dirname", "(", "_safe_realpath", "(", "sys", ".", "executable", ")", ")" ]
[ 369, 0 ]
[ 378, 68 ]
python
en
['en', 'en', 'en']
True
parse_config_h
(fp, vars=None)
Parse a config.h-style file. A dictionary containing name/value pairs is returned. If an optional dictionary is passed in as the second argument, it is used instead of a new dictionary.
Parse a config.h-style file.
def parse_config_h(fp, vars=None): """Parse a config.h-style file. A dictionary containing name/value pairs is returned. If an optional dictionary is passed in as the second argument, it is used instead of a new dictionary. """ if vars is None: vars = {} define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n") undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n") while True: line = fp.readline() if not line: break m = define_rx.match(line) if m: n, v = m.group(1, 2) try: v = int(v) except ValueError: pass vars[n] = v else: m = undef_rx.match(line) if m: vars[m.group(1)] = 0 return vars
[ "def", "parse_config_h", "(", "fp", ",", "vars", "=", "None", ")", ":", "if", "vars", "is", "None", ":", "vars", "=", "{", "}", "define_rx", "=", "re", ".", "compile", "(", "\"#define ([A-Z][A-Za-z0-9_]+) (.*)\\n\"", ")", "undef_rx", "=", "re", ".", "compile", "(", "\"/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\\n\"", ")", "while", "True", ":", "line", "=", "fp", ".", "readline", "(", ")", "if", "not", "line", ":", "break", "m", "=", "define_rx", ".", "match", "(", "line", ")", "if", "m", ":", "n", ",", "v", "=", "m", ".", "group", "(", "1", ",", "2", ")", "try", ":", "v", "=", "int", "(", "v", ")", "except", "ValueError", ":", "pass", "vars", "[", "n", "]", "=", "v", "else", ":", "m", "=", "undef_rx", ".", "match", "(", "line", ")", "if", "m", ":", "vars", "[", "m", ".", "group", "(", "1", ")", "]", "=", "0", "return", "vars" ]
[ 385, 0 ]
[ 413, 15 ]
python
en
['es', 'en', 'en']
True
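`parse_config_h` has a public counterpart in the standard library's `sysconfig` module; a quick check against the running interpreter's own pyconfig.h (guarded, since some builds ship without one) looks like this.

import sysconfig

try:
    with open(sysconfig.get_config_h_filename()) as fp:
        config = sysconfig.parse_config_h(fp)
    # "#define NAME value" lines become name/value pairs; "#undef NAME" lines become 0.
    print(config.get('HAVE_UNISTD_H'))
except OSError:
    print('no pyconfig.h available for this build')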
get_config_h_filename
()
Return the path of pyconfig.h.
Return the path of pyconfig.h.
def get_config_h_filename(): """Return the path of pyconfig.h.""" if _PYTHON_BUILD: if os.name == "nt": inc_dir = os.path.join(_PROJECT_BASE, "PC") else: inc_dir = _PROJECT_BASE else: inc_dir = get_path('platinclude') return os.path.join(inc_dir, 'pyconfig.h')
[ "def", "get_config_h_filename", "(", ")", ":", "if", "_PYTHON_BUILD", ":", "if", "os", ".", "name", "==", "\"nt\"", ":", "inc_dir", "=", "os", ".", "path", ".", "join", "(", "_PROJECT_BASE", ",", "\"PC\"", ")", "else", ":", "inc_dir", "=", "_PROJECT_BASE", "else", ":", "inc_dir", "=", "get_path", "(", "'platinclude'", ")", "return", "os", ".", "path", ".", "join", "(", "inc_dir", ",", "'pyconfig.h'", ")" ]
[ 416, 0 ]
[ 425, 46 ]
python
en
['en', 'en', 'en']
True
get_scheme_names
()
Return a tuple containing the schemes names.
Return a tuple containing the schemes names.
def get_scheme_names(): """Return a tuple containing the schemes names.""" return tuple(sorted(_SCHEMES.sections()))
[ "def", "get_scheme_names", "(", ")", ":", "return", "tuple", "(", "sorted", "(", "_SCHEMES", ".", "sections", "(", ")", ")", ")" ]
[ 428, 0 ]
[ 430, 45 ]
python
en
['en', 'en', 'en']
True
get_path_names
()
Return a tuple containing the paths names.
Return a tuple containing the paths names.
def get_path_names(): """Return a tuple containing the paths names.""" # xxx see if we want a static list return _SCHEMES.options('posix_prefix')
[ "def", "get_path_names", "(", ")", ":", "# xxx see if we want a static list", "return", "_SCHEMES", ".", "options", "(", "'posix_prefix'", ")" ]
[ 433, 0 ]
[ 436, 43 ]
python
en
['en', 'en', 'en']
True
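The two records above (`get_scheme_names` and `get_path_names`) are thin introspection helpers; the stdlib `sysconfig` module exposes the same pair, so a quick probe is:

import sysconfig

print(sysconfig.get_scheme_names())  # e.g. ('nt', 'posix_home', 'posix_prefix', ...)
print(sysconfig.get_path_names())    # e.g. ('stdlib', 'platstdlib', 'purelib', 'platlib', ...)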
get_paths
(scheme=_get_default_scheme(), vars=None, expand=True)
Return a mapping containing an install scheme. ``scheme`` is the install scheme name. If not provided, it will return the default scheme for the current platform.
Return a mapping containing an install scheme.
def get_paths(scheme=_get_default_scheme(), vars=None, expand=True): """Return a mapping containing an install scheme. ``scheme`` is the install scheme name. If not provided, it will return the default scheme for the current platform. """ _ensure_cfg_read() if expand: return _expand_vars(scheme, vars) else: return dict(_SCHEMES.items(scheme))
[ "def", "get_paths", "(", "scheme", "=", "_get_default_scheme", "(", ")", ",", "vars", "=", "None", ",", "expand", "=", "True", ")", ":", "_ensure_cfg_read", "(", ")", "if", "expand", ":", "return", "_expand_vars", "(", "scheme", ",", "vars", ")", "else", ":", "return", "dict", "(", "_SCHEMES", ".", "items", "(", "scheme", ")", ")" ]
[ 439, 0 ]
[ 449, 43 ]
python
en
['en', 'en', 'en']
True
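For the `get_paths` record, the stdlib `sysconfig` module exposes the same call; a small sketch showing expanded versus unexpanded scheme entries:

import sysconfig

paths = sysconfig.get_paths()            # default scheme, variables expanded
print(paths['purelib'])                  # e.g. the site-packages directory

raw = sysconfig.get_paths(expand=False)  # templates such as '{base}/lib/...'
print(raw['stdlib'])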
get_path
(name, scheme=_get_default_scheme(), vars=None, expand=True)
Return a path corresponding to the scheme. ``scheme`` is the install scheme name.
Return a path corresponding to the scheme.
def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True): """Return a path corresponding to the scheme. ``scheme`` is the install scheme name. """ return get_paths(scheme, vars, expand)[name]
[ "def", "get_path", "(", "name", ",", "scheme", "=", "_get_default_scheme", "(", ")", ",", "vars", "=", "None", ",", "expand", "=", "True", ")", ":", "return", "get_paths", "(", "scheme", ",", "vars", ",", "expand", ")", "[", "name", "]" ]
[ 452, 0 ]
[ 457, 48 ]
python
en
['en', 'el-Latn', 'en']
True
get_config_vars
(*args)
With no arguments, return a dictionary of all configuration variables relevant for the current platform. On Unix, this means every variable defined in Python's installed Makefile; On Windows and Mac OS it's a much smaller set. With arguments, return a list of values that result from looking up each argument in the configuration variable dictionary.
With no arguments, return a dictionary of all configuration variables relevant for the current platform.
def get_config_vars(*args): """With no arguments, return a dictionary of all configuration variables relevant for the current platform. On Unix, this means every variable defined in Python's installed Makefile; On Windows and Mac OS it's a much smaller set. With arguments, return a list of values that result from looking up each argument in the configuration variable dictionary. """ global _CONFIG_VARS if _CONFIG_VARS is None: _CONFIG_VARS = {} # Normalized versions of prefix and exec_prefix are handy to have; # in fact, these are the standard versions used most places in the # distutils2 module. _CONFIG_VARS['prefix'] = _PREFIX _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX _CONFIG_VARS['py_version'] = _PY_VERSION _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2] _CONFIG_VARS['base'] = _PREFIX _CONFIG_VARS['platbase'] = _EXEC_PREFIX _CONFIG_VARS['projectbase'] = _PROJECT_BASE try: _CONFIG_VARS['abiflags'] = sys.abiflags except AttributeError: # sys.abiflags may not be defined on all platforms. _CONFIG_VARS['abiflags'] = '' if os.name in ('nt', 'os2'): _init_non_posix(_CONFIG_VARS) if os.name == 'posix': _init_posix(_CONFIG_VARS) # Setting 'userbase' is done below the call to the # init function to enable using 'get_config_var' in # the init-function. if sys.version >= '2.6': _CONFIG_VARS['userbase'] = _getuserbase() if 'srcdir' not in _CONFIG_VARS: _CONFIG_VARS['srcdir'] = _PROJECT_BASE else: _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir']) # Convert srcdir into an absolute path if it appears necessary. # Normally it is relative to the build directory. However, during # testing, for example, we might be running a non-installed python # from a different directory. if _PYTHON_BUILD and os.name == "posix": base = _PROJECT_BASE try: cwd = os.getcwd() except OSError: cwd = None if (not os.path.isabs(_CONFIG_VARS['srcdir']) and base != cwd): # srcdir is relative and we are not in the same directory # as the executable. Assume executable is in the build # directory and make srcdir absolute. srcdir = os.path.join(base, _CONFIG_VARS['srcdir']) _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir) if sys.platform == 'darwin': kernel_version = os.uname()[2] # Kernel version (8.4.3) major_version = int(kernel_version.split('.')[0]) if major_version < 8: # On Mac OS X before 10.4, check if -arch and -isysroot # are in CFLAGS or LDFLAGS and remove them if they are. # This is needed when building extensions on a 10.3 system # using a universal build of python. for key in ('LDFLAGS', 'BASECFLAGS', # a number of derived variables. These need to be # patched up as well. 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): flags = _CONFIG_VARS[key] flags = re.sub(r'-arch\s+\w+\s', ' ', flags) flags = re.sub('-isysroot [^ \t]*', ' ', flags) _CONFIG_VARS[key] = flags else: # Allow the user to override the architecture flags using # an environment variable. # NOTE: This name was introduced by Apple in OSX 10.5 and # is used by several scripting languages distributed with # that OS release. if 'ARCHFLAGS' in os.environ: arch = os.environ['ARCHFLAGS'] for key in ('LDFLAGS', 'BASECFLAGS', # a number of derived variables. These need to be # patched up as well. 
'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): flags = _CONFIG_VARS[key] flags = re.sub(r'-arch\s+\w+\s', ' ', flags) flags = flags + ' ' + arch _CONFIG_VARS[key] = flags # If we're on OSX 10.5 or later and the user tries to # compiles an extension using an SDK that is not present # on the current machine it is better to not use an SDK # than to fail. # # The major usecase for this is users using a Python.org # binary installer on OSX 10.6: that installer uses # the 10.4u SDK, but that SDK is not installed by default # when you install Xcode. # CFLAGS = _CONFIG_VARS.get('CFLAGS', '') m = re.search(r'-isysroot\s+(\S+)', CFLAGS) if m is not None: sdk = m.group(1) if not os.path.exists(sdk): for key in ('LDFLAGS', 'BASECFLAGS', # a number of derived variables. These need to be # patched up as well. 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): flags = _CONFIG_VARS[key] flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags) _CONFIG_VARS[key] = flags if args: vals = [] for name in args: vals.append(_CONFIG_VARS.get(name)) return vals else: return _CONFIG_VARS
[ "def", "get_config_vars", "(", "*", "args", ")", ":", "global", "_CONFIG_VARS", "if", "_CONFIG_VARS", "is", "None", ":", "_CONFIG_VARS", "=", "{", "}", "# Normalized versions of prefix and exec_prefix are handy to have;", "# in fact, these are the standard versions used most places in the", "# distutils2 module.", "_CONFIG_VARS", "[", "'prefix'", "]", "=", "_PREFIX", "_CONFIG_VARS", "[", "'exec_prefix'", "]", "=", "_EXEC_PREFIX", "_CONFIG_VARS", "[", "'py_version'", "]", "=", "_PY_VERSION", "_CONFIG_VARS", "[", "'py_version_short'", "]", "=", "_PY_VERSION_SHORT", "_CONFIG_VARS", "[", "'py_version_nodot'", "]", "=", "_PY_VERSION", "[", "0", "]", "+", "_PY_VERSION", "[", "2", "]", "_CONFIG_VARS", "[", "'base'", "]", "=", "_PREFIX", "_CONFIG_VARS", "[", "'platbase'", "]", "=", "_EXEC_PREFIX", "_CONFIG_VARS", "[", "'projectbase'", "]", "=", "_PROJECT_BASE", "try", ":", "_CONFIG_VARS", "[", "'abiflags'", "]", "=", "sys", ".", "abiflags", "except", "AttributeError", ":", "# sys.abiflags may not be defined on all platforms.", "_CONFIG_VARS", "[", "'abiflags'", "]", "=", "''", "if", "os", ".", "name", "in", "(", "'nt'", ",", "'os2'", ")", ":", "_init_non_posix", "(", "_CONFIG_VARS", ")", "if", "os", ".", "name", "==", "'posix'", ":", "_init_posix", "(", "_CONFIG_VARS", ")", "# Setting 'userbase' is done below the call to the", "# init function to enable using 'get_config_var' in", "# the init-function.", "if", "sys", ".", "version", ">=", "'2.6'", ":", "_CONFIG_VARS", "[", "'userbase'", "]", "=", "_getuserbase", "(", ")", "if", "'srcdir'", "not", "in", "_CONFIG_VARS", ":", "_CONFIG_VARS", "[", "'srcdir'", "]", "=", "_PROJECT_BASE", "else", ":", "_CONFIG_VARS", "[", "'srcdir'", "]", "=", "_safe_realpath", "(", "_CONFIG_VARS", "[", "'srcdir'", "]", ")", "# Convert srcdir into an absolute path if it appears necessary.", "# Normally it is relative to the build directory. However, during", "# testing, for example, we might be running a non-installed python", "# from a different directory.", "if", "_PYTHON_BUILD", "and", "os", ".", "name", "==", "\"posix\"", ":", "base", "=", "_PROJECT_BASE", "try", ":", "cwd", "=", "os", ".", "getcwd", "(", ")", "except", "OSError", ":", "cwd", "=", "None", "if", "(", "not", "os", ".", "path", ".", "isabs", "(", "_CONFIG_VARS", "[", "'srcdir'", "]", ")", "and", "base", "!=", "cwd", ")", ":", "# srcdir is relative and we are not in the same directory", "# as the executable. Assume executable is in the build", "# directory and make srcdir absolute.", "srcdir", "=", "os", ".", "path", ".", "join", "(", "base", ",", "_CONFIG_VARS", "[", "'srcdir'", "]", ")", "_CONFIG_VARS", "[", "'srcdir'", "]", "=", "os", ".", "path", ".", "normpath", "(", "srcdir", ")", "if", "sys", ".", "platform", "==", "'darwin'", ":", "kernel_version", "=", "os", ".", "uname", "(", ")", "[", "2", "]", "# Kernel version (8.4.3)", "major_version", "=", "int", "(", "kernel_version", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", "if", "major_version", "<", "8", ":", "# On Mac OS X before 10.4, check if -arch and -isysroot", "# are in CFLAGS or LDFLAGS and remove them if they are.", "# This is needed when building extensions on a 10.3 system", "# using a universal build of python.", "for", "key", "in", "(", "'LDFLAGS'", ",", "'BASECFLAGS'", ",", "# a number of derived variables. 
These need to be", "# patched up as well.", "'CFLAGS'", ",", "'PY_CFLAGS'", ",", "'BLDSHARED'", ")", ":", "flags", "=", "_CONFIG_VARS", "[", "key", "]", "flags", "=", "re", ".", "sub", "(", "r'-arch\\s+\\w+\\s'", ",", "' '", ",", "flags", ")", "flags", "=", "re", ".", "sub", "(", "'-isysroot [^ \\t]*'", ",", "' '", ",", "flags", ")", "_CONFIG_VARS", "[", "key", "]", "=", "flags", "else", ":", "# Allow the user to override the architecture flags using", "# an environment variable.", "# NOTE: This name was introduced by Apple in OSX 10.5 and", "# is used by several scripting languages distributed with", "# that OS release.", "if", "'ARCHFLAGS'", "in", "os", ".", "environ", ":", "arch", "=", "os", ".", "environ", "[", "'ARCHFLAGS'", "]", "for", "key", "in", "(", "'LDFLAGS'", ",", "'BASECFLAGS'", ",", "# a number of derived variables. These need to be", "# patched up as well.", "'CFLAGS'", ",", "'PY_CFLAGS'", ",", "'BLDSHARED'", ")", ":", "flags", "=", "_CONFIG_VARS", "[", "key", "]", "flags", "=", "re", ".", "sub", "(", "r'-arch\\s+\\w+\\s'", ",", "' '", ",", "flags", ")", "flags", "=", "flags", "+", "' '", "+", "arch", "_CONFIG_VARS", "[", "key", "]", "=", "flags", "# If we're on OSX 10.5 or later and the user tries to", "# compiles an extension using an SDK that is not present", "# on the current machine it is better to not use an SDK", "# than to fail.", "#", "# The major usecase for this is users using a Python.org", "# binary installer on OSX 10.6: that installer uses", "# the 10.4u SDK, but that SDK is not installed by default", "# when you install Xcode.", "#", "CFLAGS", "=", "_CONFIG_VARS", ".", "get", "(", "'CFLAGS'", ",", "''", ")", "m", "=", "re", ".", "search", "(", "r'-isysroot\\s+(\\S+)'", ",", "CFLAGS", ")", "if", "m", "is", "not", "None", ":", "sdk", "=", "m", ".", "group", "(", "1", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "sdk", ")", ":", "for", "key", "in", "(", "'LDFLAGS'", ",", "'BASECFLAGS'", ",", "# a number of derived variables. These need to be", "# patched up as well.", "'CFLAGS'", ",", "'PY_CFLAGS'", ",", "'BLDSHARED'", ")", ":", "flags", "=", "_CONFIG_VARS", "[", "key", "]", "flags", "=", "re", ".", "sub", "(", "r'-isysroot\\s+\\S+(\\s|$)'", ",", "' '", ",", "flags", ")", "_CONFIG_VARS", "[", "key", "]", "=", "flags", "if", "args", ":", "vals", "=", "[", "]", "for", "name", "in", "args", ":", "vals", ".", "append", "(", "_CONFIG_VARS", ".", "get", "(", "name", ")", ")", "return", "vals", "else", ":", "return", "_CONFIG_VARS" ]
[ 460, 0 ]
[ 588, 27 ]
python
en
['en', 'en', 'en']
True
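A brief illustration of the two calling conventions of `get_config_vars` (whole dictionary versus positional lookups), using the stdlib `sysconfig` equivalent:

import sysconfig

cvars = sysconfig.get_config_vars()      # full mapping for this platform
print(cvars['prefix'])

prefix, cc = sysconfig.get_config_vars('prefix', 'CC')
print(prefix, cc)                        # 'CC' is None where no Makefile exists (e.g. Windows)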
get_config_var
(name)
Return the value of a single variable using the dictionary returned by 'get_config_vars()'. Equivalent to get_config_vars().get(name)
Return the value of a single variable using the dictionary returned by 'get_config_vars()'.
def get_config_var(name): """Return the value of a single variable using the dictionary returned by 'get_config_vars()'. Equivalent to get_config_vars().get(name) """ return get_config_vars().get(name)
[ "def", "get_config_var", "(", "name", ")", ":", "return", "get_config_vars", "(", ")", ".", "get", "(", "name", ")" ]
[ 591, 0 ]
[ 597, 38 ]
python
en
['en', 'en', 'en']
True
get_platform
()
Return a string that identifies the current platform. This is used mainly to distinguish platform-specific build directories and platform-specific built distributions. Typically includes the OS name and version and the architecture (as supplied by 'os.uname()'), although the exact information included depends on the OS; eg. for IRIX the architecture isn't particularly important (IRIX only runs on SGI hardware), but for Linux the kernel version isn't particularly important. Examples of returned values: linux-i586 linux-alpha (?) solaris-2.6-sun4u irix-5.3 irix64-6.2 Windows will return one of: win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) win-ia64 (64bit Windows on Itanium) win32 (all others - specifically, sys.platform is returned) For other non-POSIX platforms, currently just returns 'sys.platform'.
Return a string that identifies the current platform.
def get_platform(): """Return a string that identifies the current platform. This is used mainly to distinguish platform-specific build directories and platform-specific built distributions. Typically includes the OS name and version and the architecture (as supplied by 'os.uname()'), although the exact information included depends on the OS; eg. for IRIX the architecture isn't particularly important (IRIX only runs on SGI hardware), but for Linux the kernel version isn't particularly important. Examples of returned values: linux-i586 linux-alpha (?) solaris-2.6-sun4u irix-5.3 irix64-6.2 Windows will return one of: win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) win-ia64 (64bit Windows on Itanium) win32 (all others - specifically, sys.platform is returned) For other non-POSIX platforms, currently just returns 'sys.platform'. """ if os.name == 'nt': # sniff sys.version for architecture. prefix = " bit (" i = sys.version.find(prefix) if i == -1: return sys.platform j = sys.version.find(")", i) look = sys.version[i+len(prefix):j].lower() if look == 'amd64': return 'win-amd64' if look == 'itanium': return 'win-ia64' return sys.platform if os.name != "posix" or not hasattr(os, 'uname'): # XXX what about the architecture? NT is Intel or Alpha, # Mac OS is M68k or PPC, etc. return sys.platform # Try to distinguish various flavours of Unix osname, host, release, version, machine = os.uname() # Convert the OS name to lowercase, remove '/' characters # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh") osname = osname.lower().replace('/', '') machine = machine.replace(' ', '_') machine = machine.replace('/', '-') if osname[:5] == "linux": # At least on Linux/Intel, 'machine' is the processor -- # i386, etc. # XXX what about Alpha, SPARC, etc? return "%s-%s" % (osname, machine) elif osname[:5] == "sunos": if release[0] >= "5": # SunOS 5 == Solaris 2 osname = "solaris" release = "%d.%s" % (int(release[0]) - 3, release[2:]) # fall through to standard osname-release-machine representation elif osname[:4] == "irix": # could be "irix64"! return "%s-%s" % (osname, release) elif osname[:3] == "aix": return "%s-%s.%s" % (osname, version, release) elif osname[:6] == "cygwin": osname = "cygwin" rel_re = re.compile(r'[\d.]+') m = rel_re.match(release) if m: release = m.group() elif osname[:6] == "darwin": # # For our purposes, we'll assume that the system version from # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set # to. This makes the compatibility story a bit more sane because the # machine is going to compile and link as if it were # MACOSX_DEPLOYMENT_TARGET. cfgvars = get_config_vars() macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET') if True: # Always calculate the release of the running machine, # needed to determine if we can build fat binaries or not. macrelease = macver # Get the system version. Reading this plist is a documented # way to get the system version (see the documentation for # the Gestalt Manager) try: f = open('/System/Library/CoreServices/SystemVersion.plist') except IOError: # We're on a plain darwin box, fall back to the default # behaviour. pass else: try: m = re.search(r'<key>ProductUserVisibleVersion</key>\s*' r'<string>(.*?)</string>', f.read()) finally: f.close() if m is not None: macrelease = '.'.join(m.group(1).split('.')[:2]) # else: fall back to the default behaviour if not macver: macver = macrelease if macver: release = macver osname = "macosx" if ((macrelease + '.') >= '10.4.' 
and '-arch' in get_config_vars().get('CFLAGS', '').strip()): # The universal build will build fat binaries, but not on # systems before 10.4 # # Try to detect 4-way universal builds, those have machine-type # 'universal' instead of 'fat'. machine = 'fat' cflags = get_config_vars().get('CFLAGS') archs = re.findall(r'-arch\s+(\S+)', cflags) archs = tuple(sorted(set(archs))) if len(archs) == 1: machine = archs[0] elif archs == ('i386', 'ppc'): machine = 'fat' elif archs == ('i386', 'x86_64'): machine = 'intel' elif archs == ('i386', 'ppc', 'x86_64'): machine = 'fat3' elif archs == ('ppc64', 'x86_64'): machine = 'fat64' elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'): machine = 'universal' else: raise ValueError( "Don't know machine value for archs=%r" % (archs,)) elif machine == 'i386': # On OSX the machine type returned by uname is always the # 32-bit variant, even if the executable architecture is # the 64-bit variant if sys.maxsize >= 2**32: machine = 'x86_64' elif machine in ('PowerPC', 'Power_Macintosh'): # Pick a sane name for the PPC architecture. # See 'i386' case if sys.maxsize >= 2**32: machine = 'ppc64' else: machine = 'ppc' return "%s-%s-%s" % (osname, release, machine)
[ "def", "get_platform", "(", ")", ":", "if", "os", ".", "name", "==", "'nt'", ":", "# sniff sys.version for architecture.", "prefix", "=", "\" bit (\"", "i", "=", "sys", ".", "version", ".", "find", "(", "prefix", ")", "if", "i", "==", "-", "1", ":", "return", "sys", ".", "platform", "j", "=", "sys", ".", "version", ".", "find", "(", "\")\"", ",", "i", ")", "look", "=", "sys", ".", "version", "[", "i", "+", "len", "(", "prefix", ")", ":", "j", "]", ".", "lower", "(", ")", "if", "look", "==", "'amd64'", ":", "return", "'win-amd64'", "if", "look", "==", "'itanium'", ":", "return", "'win-ia64'", "return", "sys", ".", "platform", "if", "os", ".", "name", "!=", "\"posix\"", "or", "not", "hasattr", "(", "os", ",", "'uname'", ")", ":", "# XXX what about the architecture? NT is Intel or Alpha,", "# Mac OS is M68k or PPC, etc.", "return", "sys", ".", "platform", "# Try to distinguish various flavours of Unix", "osname", ",", "host", ",", "release", ",", "version", ",", "machine", "=", "os", ".", "uname", "(", ")", "# Convert the OS name to lowercase, remove '/' characters", "# (to accommodate BSD/OS), and translate spaces (for \"Power Macintosh\")", "osname", "=", "osname", ".", "lower", "(", ")", ".", "replace", "(", "'/'", ",", "''", ")", "machine", "=", "machine", ".", "replace", "(", "' '", ",", "'_'", ")", "machine", "=", "machine", ".", "replace", "(", "'/'", ",", "'-'", ")", "if", "osname", "[", ":", "5", "]", "==", "\"linux\"", ":", "# At least on Linux/Intel, 'machine' is the processor --", "# i386, etc.", "# XXX what about Alpha, SPARC, etc?", "return", "\"%s-%s\"", "%", "(", "osname", ",", "machine", ")", "elif", "osname", "[", ":", "5", "]", "==", "\"sunos\"", ":", "if", "release", "[", "0", "]", ">=", "\"5\"", ":", "# SunOS 5 == Solaris 2", "osname", "=", "\"solaris\"", "release", "=", "\"%d.%s\"", "%", "(", "int", "(", "release", "[", "0", "]", ")", "-", "3", ",", "release", "[", "2", ":", "]", ")", "# fall through to standard osname-release-machine representation", "elif", "osname", "[", ":", "4", "]", "==", "\"irix\"", ":", "# could be \"irix64\"!", "return", "\"%s-%s\"", "%", "(", "osname", ",", "release", ")", "elif", "osname", "[", ":", "3", "]", "==", "\"aix\"", ":", "return", "\"%s-%s.%s\"", "%", "(", "osname", ",", "version", ",", "release", ")", "elif", "osname", "[", ":", "6", "]", "==", "\"cygwin\"", ":", "osname", "=", "\"cygwin\"", "rel_re", "=", "re", ".", "compile", "(", "r'[\\d.]+'", ")", "m", "=", "rel_re", ".", "match", "(", "release", ")", "if", "m", ":", "release", "=", "m", ".", "group", "(", ")", "elif", "osname", "[", ":", "6", "]", "==", "\"darwin\"", ":", "#", "# For our purposes, we'll assume that the system version from", "# distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set", "# to. This makes the compatibility story a bit more sane because the", "# machine is going to compile and link as if it were", "# MACOSX_DEPLOYMENT_TARGET.", "cfgvars", "=", "get_config_vars", "(", ")", "macver", "=", "cfgvars", ".", "get", "(", "'MACOSX_DEPLOYMENT_TARGET'", ")", "if", "True", ":", "# Always calculate the release of the running machine,", "# needed to determine if we can build fat binaries or not.", "macrelease", "=", "macver", "# Get the system version. 
Reading this plist is a documented", "# way to get the system version (see the documentation for", "# the Gestalt Manager)", "try", ":", "f", "=", "open", "(", "'/System/Library/CoreServices/SystemVersion.plist'", ")", "except", "IOError", ":", "# We're on a plain darwin box, fall back to the default", "# behaviour.", "pass", "else", ":", "try", ":", "m", "=", "re", ".", "search", "(", "r'<key>ProductUserVisibleVersion</key>\\s*'", "r'<string>(.*?)</string>'", ",", "f", ".", "read", "(", ")", ")", "finally", ":", "f", ".", "close", "(", ")", "if", "m", "is", "not", "None", ":", "macrelease", "=", "'.'", ".", "join", "(", "m", ".", "group", "(", "1", ")", ".", "split", "(", "'.'", ")", "[", ":", "2", "]", ")", "# else: fall back to the default behaviour", "if", "not", "macver", ":", "macver", "=", "macrelease", "if", "macver", ":", "release", "=", "macver", "osname", "=", "\"macosx\"", "if", "(", "(", "macrelease", "+", "'.'", ")", ">=", "'10.4.'", "and", "'-arch'", "in", "get_config_vars", "(", ")", ".", "get", "(", "'CFLAGS'", ",", "''", ")", ".", "strip", "(", ")", ")", ":", "# The universal build will build fat binaries, but not on", "# systems before 10.4", "#", "# Try to detect 4-way universal builds, those have machine-type", "# 'universal' instead of 'fat'.", "machine", "=", "'fat'", "cflags", "=", "get_config_vars", "(", ")", ".", "get", "(", "'CFLAGS'", ")", "archs", "=", "re", ".", "findall", "(", "r'-arch\\s+(\\S+)'", ",", "cflags", ")", "archs", "=", "tuple", "(", "sorted", "(", "set", "(", "archs", ")", ")", ")", "if", "len", "(", "archs", ")", "==", "1", ":", "machine", "=", "archs", "[", "0", "]", "elif", "archs", "==", "(", "'i386'", ",", "'ppc'", ")", ":", "machine", "=", "'fat'", "elif", "archs", "==", "(", "'i386'", ",", "'x86_64'", ")", ":", "machine", "=", "'intel'", "elif", "archs", "==", "(", "'i386'", ",", "'ppc'", ",", "'x86_64'", ")", ":", "machine", "=", "'fat3'", "elif", "archs", "==", "(", "'ppc64'", ",", "'x86_64'", ")", ":", "machine", "=", "'fat64'", "elif", "archs", "==", "(", "'i386'", ",", "'ppc'", ",", "'ppc64'", ",", "'x86_64'", ")", ":", "machine", "=", "'universal'", "else", ":", "raise", "ValueError", "(", "\"Don't know machine value for archs=%r\"", "%", "(", "archs", ",", ")", ")", "elif", "machine", "==", "'i386'", ":", "# On OSX the machine type returned by uname is always the", "# 32-bit variant, even if the executable architecture is", "# the 64-bit variant", "if", "sys", ".", "maxsize", ">=", "2", "**", "32", ":", "machine", "=", "'x86_64'", "elif", "machine", "in", "(", "'PowerPC'", ",", "'Power_Macintosh'", ")", ":", "# Pick a sane name for the PPC architecture.", "# See 'i386' case", "if", "sys", ".", "maxsize", ">=", "2", "**", "32", ":", "machine", "=", "'ppc64'", "else", ":", "machine", "=", "'ppc'", "return", "\"%s-%s-%s\"", "%", "(", "osname", ",", "release", ",", "machine", ")" ]
[ 600, 0 ]
[ 759, 50 ]
python
en
['en', 'en', 'en']
True
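The `get_platform` record predates some renaming in the standard library, so exact strings may differ between this backport and the current `sysconfig.get_platform`; a one-line probe:

import sysconfig

print(sysconfig.get_platform())
# typical values: 'linux-x86_64', 'macosx-11.0-arm64', 'win-amd64'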
_main
()
Display all information sysconfig detains.
Display all information sysconfig detains.
def _main(): """Display all information sysconfig detains.""" print('Platform: "%s"' % get_platform()) print('Python version: "%s"' % get_python_version()) print('Current installation scheme: "%s"' % _get_default_scheme()) print() _print_dict('Paths', get_paths()) print() _print_dict('Variables', get_config_vars())
[ "def", "_main", "(", ")", ":", "print", "(", "'Platform: \"%s\"'", "%", "get_platform", "(", ")", ")", "print", "(", "'Python version: \"%s\"'", "%", "get_python_version", "(", ")", ")", "print", "(", "'Current installation scheme: \"%s\"'", "%", "_get_default_scheme", "(", ")", ")", "print", "(", ")", "_print_dict", "(", "'Paths'", ",", "get_paths", "(", ")", ")", "print", "(", ")", "_print_dict", "(", "'Variables'", ",", "get_config_vars", "(", ")", ")" ]
[ 773, 0 ]
[ 781, 47 ]
python
en
['en', 'en', 'en']
True
messages
(request)
Return a lazy 'messages' context variable as well as 'DEFAULT_MESSAGE_LEVELS'.
Return a lazy 'messages' context variable as well as 'DEFAULT_MESSAGE_LEVELS'.
def messages(request): """ Return a lazy 'messages' context variable as well as 'DEFAULT_MESSAGE_LEVELS'. """ return { 'messages': get_messages(request), 'DEFAULT_MESSAGE_LEVELS': DEFAULT_LEVELS, }
[ "def", "messages", "(", "request", ")", ":", "return", "{", "'messages'", ":", "get_messages", "(", "request", ")", ",", "'DEFAULT_MESSAGE_LEVELS'", ":", "DEFAULT_LEVELS", ",", "}" ]
[ 4, 0 ]
[ 12, 5 ]
python
en
['en', 'error', 'th']
False
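The `messages` record is Django's context processor. The typical wiring is sketched below, assuming a standard Django settings module; the app and middleware entries shown are the usual prerequisites and are not part of the record above.

# settings.py (sketch)
INSTALLED_APPS = [
    # ...
    'django.contrib.messages',
]
MIDDLEWARE = [
    # ...
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
]
TEMPLATES = [{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'APP_DIRS': True,
    'OPTIONS': {
        'context_processors': [
            'django.contrib.messages.context_processors.messages',
        ],
    },
}]

With this in place, templates can iterate over the lazy variable directly, e.g. {% for message in messages %}...{% endfor %}.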
raise_option_error
(parser: OptionParser, option: Option, msg: str)
Raise an option parsing error using parser.error(). Args: parser: an OptionParser instance. option: an Option instance. msg: the error text.
Raise an option parsing error using parser.error().
def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None: """ Raise an option parsing error using parser.error(). Args: parser: an OptionParser instance. option: an Option instance. msg: the error text. """ msg = f"{option} error: {msg}" msg = textwrap.fill(" ".join(msg.split())) parser.error(msg)
[ "def", "raise_option_error", "(", "parser", ":", "OptionParser", ",", "option", ":", "Option", ",", "msg", ":", "str", ")", "->", "None", ":", "msg", "=", "f\"{option} error: {msg}\"", "msg", "=", "textwrap", ".", "fill", "(", "\" \"", ".", "join", "(", "msg", ".", "split", "(", ")", ")", ")", "parser", ".", "error", "(", "msg", ")" ]
[ 33, 0 ]
[ 44, 21 ]
python
en
['en', 'error', 'th']
False
make_option_group
(group: Dict[str, Any], parser: ConfigOptionParser)
Return an OptionGroup object group -- assumed to be dict with 'name' and 'options' keys parser -- an optparse Parser
Return an OptionGroup object group -- assumed to be dict with 'name' and 'options' keys parser -- an optparse Parser
def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup: """ Return an OptionGroup object group -- assumed to be dict with 'name' and 'options' keys parser -- an optparse Parser """ option_group = OptionGroup(parser, group["name"]) for option in group["options"]: option_group.add_option(option()) return option_group
[ "def", "make_option_group", "(", "group", ":", "Dict", "[", "str", ",", "Any", "]", ",", "parser", ":", "ConfigOptionParser", ")", "->", "OptionGroup", ":", "option_group", "=", "OptionGroup", "(", "parser", ",", "group", "[", "\"name\"", "]", ")", "for", "option", "in", "group", "[", "\"options\"", "]", ":", "option_group", ".", "add_option", "(", "option", "(", ")", ")", "return", "option_group" ]
[ 47, 0 ]
[ 56, 23 ]
python
en
['en', 'error', 'th']
False
check_install_build_global
( options: Values, check_options: Optional[Values] = None )
Disable wheels if per-setup.py call options are set. :param options: The OptionParser options to update. :param check_options: The options to check, if not supplied defaults to options.
Disable wheels if per-setup.py call options are set.
def check_install_build_global( options: Values, check_options: Optional[Values] = None ) -> None: """Disable wheels if per-setup.py call options are set. :param options: The OptionParser options to update. :param check_options: The options to check, if not supplied defaults to options. """ if check_options is None: check_options = options def getname(n: str) -> Optional[Any]: return getattr(check_options, n, None) names = ["build_options", "global_options", "install_options"] if any(map(getname, names)): control = options.format_control control.disallow_binaries() warnings.warn( "Disabling all use of wheels due to the use of --build-option " "/ --global-option / --install-option.", stacklevel=2, )
[ "def", "check_install_build_global", "(", "options", ":", "Values", ",", "check_options", ":", "Optional", "[", "Values", "]", "=", "None", ")", "->", "None", ":", "if", "check_options", "is", "None", ":", "check_options", "=", "options", "def", "getname", "(", "n", ":", "str", ")", "->", "Optional", "[", "Any", "]", ":", "return", "getattr", "(", "check_options", ",", "n", ",", "None", ")", "names", "=", "[", "\"build_options\"", ",", "\"global_options\"", ",", "\"install_options\"", "]", "if", "any", "(", "map", "(", "getname", ",", "names", ")", ")", ":", "control", "=", "options", ".", "format_control", "control", ".", "disallow_binaries", "(", ")", "warnings", ".", "warn", "(", "\"Disabling all use of wheels due to the use of --build-option \"", "\"/ --global-option / --install-option.\"", ",", "stacklevel", "=", "2", ",", ")" ]
[ 59, 0 ]
[ 82, 9 ]
python
en
['en', 'en', 'en']
True
check_dist_restriction
(options: Values, check_target: bool = False)
Function for determining if custom platform options are allowed. :param options: The OptionParser options. :param check_target: Whether or not to check if --target is being used.
Function for determining if custom platform options are allowed.
def check_dist_restriction(options: Values, check_target: bool = False) -> None: """Function for determining if custom platform options are allowed. :param options: The OptionParser options. :param check_target: Whether or not to check if --target is being used. """ dist_restriction_set = any( [ options.python_version, options.platforms, options.abis, options.implementation, ] ) binary_only = FormatControl(set(), {":all:"}) sdist_dependencies_allowed = ( options.format_control != binary_only and not options.ignore_dependencies ) # Installations or downloads using dist restrictions must not combine # source distributions and dist-specific wheels, as they are not # guaranteed to be locally compatible. if dist_restriction_set and sdist_dependencies_allowed: raise CommandError( "When restricting platform and interpreter constraints using " "--python-version, --platform, --abi, or --implementation, " "either --no-deps must be set, or --only-binary=:all: must be " "set and --no-binary must not be set (or must be set to " ":none:)." ) if check_target: if dist_restriction_set and not options.target_dir: raise CommandError( "Can not use any platform or abi specific options unless " "installing via '--target'" )
[ "def", "check_dist_restriction", "(", "options", ":", "Values", ",", "check_target", ":", "bool", "=", "False", ")", "->", "None", ":", "dist_restriction_set", "=", "any", "(", "[", "options", ".", "python_version", ",", "options", ".", "platforms", ",", "options", ".", "abis", ",", "options", ".", "implementation", ",", "]", ")", "binary_only", "=", "FormatControl", "(", "set", "(", ")", ",", "{", "\":all:\"", "}", ")", "sdist_dependencies_allowed", "=", "(", "options", ".", "format_control", "!=", "binary_only", "and", "not", "options", ".", "ignore_dependencies", ")", "# Installations or downloads using dist restrictions must not combine", "# source distributions and dist-specific wheels, as they are not", "# guaranteed to be locally compatible.", "if", "dist_restriction_set", "and", "sdist_dependencies_allowed", ":", "raise", "CommandError", "(", "\"When restricting platform and interpreter constraints using \"", "\"--python-version, --platform, --abi, or --implementation, \"", "\"either --no-deps must be set, or --only-binary=:all: must be \"", "\"set and --no-binary must not be set (or must be set to \"", "\":none:).\"", ")", "if", "check_target", ":", "if", "dist_restriction_set", "and", "not", "options", ".", "target_dir", ":", "raise", "CommandError", "(", "\"Can not use any platform or abi specific options unless \"", "\"installing via '--target'\"", ")" ]
[ 85, 0 ]
[ 122, 13 ]
python
en
['en', 'en', 'en']
True
_get_format_control
(values: Values, option: Option)
Get a format_control object.
Get a format_control object.
def _get_format_control(values: Values, option: Option) -> Any: """Get a format_control object.""" return getattr(values, option.dest)
[ "def", "_get_format_control", "(", "values", ":", "Values", ",", "option", ":", "Option", ")", "->", "Any", ":", "return", "getattr", "(", "values", ",", "option", ".", "dest", ")" ]
[ 454, 0 ]
[ 456, 39 ]
python
en
['en', 'en', 'en']
True
_convert_python_version
(value: str)
Convert a version string like "3", "37", or "3.7.3" into a tuple of ints. :return: A 2-tuple (version_info, error_msg), where `error_msg` is non-None if and only if there was a parsing error.
Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]: """ Convert a version string like "3", "37", or "3.7.3" into a tuple of ints. :return: A 2-tuple (version_info, error_msg), where `error_msg` is non-None if and only if there was a parsing error. """ if not value: # The empty string is the same as not providing a value. return (None, None) parts = value.split(".") if len(parts) > 3: return ((), "at most three version parts are allowed") if len(parts) == 1: # Then we are in the case of "3" or "37". value = parts[0] if len(value) > 1: parts = [value[0], value[1:]] try: version_info = tuple(int(part) for part in parts) except ValueError: return ((), "each version part must be an integer") return (version_info, None)
[ "def", "_convert_python_version", "(", "value", ":", "str", ")", "->", "Tuple", "[", "Tuple", "[", "int", ",", "...", "]", ",", "Optional", "[", "str", "]", "]", ":", "if", "not", "value", ":", "# The empty string is the same as not providing a value.", "return", "(", "None", ",", "None", ")", "parts", "=", "value", ".", "split", "(", "\".\"", ")", "if", "len", "(", "parts", ")", ">", "3", ":", "return", "(", "(", ")", ",", "\"at most three version parts are allowed\"", ")", "if", "len", "(", "parts", ")", "==", "1", ":", "# Then we are in the case of \"3\" or \"37\".", "value", "=", "parts", "[", "0", "]", "if", "len", "(", "value", ")", ">", "1", ":", "parts", "=", "[", "value", "[", "0", "]", ",", "value", "[", "1", ":", "]", "]", "try", ":", "version_info", "=", "tuple", "(", "int", "(", "part", ")", "for", "part", "in", "parts", ")", "except", "ValueError", ":", "return", "(", "(", ")", ",", "\"each version part must be an integer\"", ")", "return", "(", "version_info", ",", "None", ")" ]
[ 533, 0 ]
[ 559, 31 ]
python
en
['en', 'error', 'th']
False
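Expected behaviour of `_convert_python_version` for a few representative inputs, read straight off the logic in the record above (and assuming that definition is in scope):

# Assuming _convert_python_version from the record above is in scope.
print(_convert_python_version('3'))       # ((3,), None)
print(_convert_python_version('37'))      # ((3, 7), None)
print(_convert_python_version('3.7.3'))   # ((3, 7, 3), None)
print(_convert_python_version(''))        # (None, None)
print(_convert_python_version('3.x'))     # ((), 'each version part must be an integer')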
_handle_python_version
( option: Option, opt_str: str, value: str, parser: OptionParser )
Handle a provided --python-version value.
Handle a provided --python-version value.
def _handle_python_version( option: Option, opt_str: str, value: str, parser: OptionParser ) -> None: """ Handle a provided --python-version value. """ version_info, error_msg = _convert_python_version(value) if error_msg is not None: msg = "invalid --python-version value: {!r}: {}".format( value, error_msg, ) raise_option_error(parser, option=option, msg=msg) parser.values.python_version = version_info
[ "def", "_handle_python_version", "(", "option", ":", "Option", ",", "opt_str", ":", "str", ",", "value", ":", "str", ",", "parser", ":", "OptionParser", ")", "->", "None", ":", "version_info", ",", "error_msg", "=", "_convert_python_version", "(", "value", ")", "if", "error_msg", "is", "not", "None", ":", "msg", "=", "\"invalid --python-version value: {!r}: {}\"", ".", "format", "(", "value", ",", "error_msg", ",", ")", "raise_option_error", "(", "parser", ",", "option", "=", "option", ",", "msg", "=", "msg", ")", "parser", ".", "values", ".", "python_version", "=", "version_info" ]
[ 562, 0 ]
[ 576, 47 ]
python
en
['en', 'error', 'th']
False
_handle_no_cache_dir
( option: Option, opt: str, value: str, parser: OptionParser )
Process a value provided for the --no-cache-dir option. This is an optparse.Option callback for the --no-cache-dir option.
Process a value provided for the --no-cache-dir option.
def _handle_no_cache_dir( option: Option, opt: str, value: str, parser: OptionParser ) -> None: """ Process a value provided for the --no-cache-dir option. This is an optparse.Option callback for the --no-cache-dir option. """ # The value argument will be None if --no-cache-dir is passed via the # command-line, since the option doesn't accept arguments. However, # the value can be non-None if the option is triggered e.g. by an # environment variable, like PIP_NO_CACHE_DIR=true. if value is not None: # Then parse the string value to get argument error-checking. try: strtobool(value) except ValueError as exc: raise_option_error(parser, option=option, msg=str(exc)) # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool() # converted to 0 (like "false" or "no") caused cache_dir to be disabled # rather than enabled (logic would say the latter). Thus, we disable # the cache directory not just on values that parse to True, but (for # backwards compatibility reasons) also on values that parse to False. # In other words, always set it to False if the option is provided in # some (valid) form. parser.values.cache_dir = False
[ "def", "_handle_no_cache_dir", "(", "option", ":", "Option", ",", "opt", ":", "str", ",", "value", ":", "str", ",", "parser", ":", "OptionParser", ")", "->", "None", ":", "# The value argument will be None if --no-cache-dir is passed via the", "# command-line, since the option doesn't accept arguments. However,", "# the value can be non-None if the option is triggered e.g. by an", "# environment variable, like PIP_NO_CACHE_DIR=true.", "if", "value", "is", "not", "None", ":", "# Then parse the string value to get argument error-checking.", "try", ":", "strtobool", "(", "value", ")", "except", "ValueError", "as", "exc", ":", "raise_option_error", "(", "parser", ",", "option", "=", "option", ",", "msg", "=", "str", "(", "exc", ")", ")", "# Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()", "# converted to 0 (like \"false\" or \"no\") caused cache_dir to be disabled", "# rather than enabled (logic would say the latter). Thus, we disable", "# the cache directory not just on values that parse to True, but (for", "# backwards compatibility reasons) also on values that parse to False.", "# In other words, always set it to False if the option is provided in", "# some (valid) form.", "parser", ".", "values", ".", "cache_dir", "=", "False" ]
[ 673, 0 ]
[ 699, 35 ]
python
en
['en', 'error', 'th']
False
_handle_no_use_pep517
( option: Option, opt: str, value: str, parser: OptionParser )
Process a value provided for the --no-use-pep517 option. This is an optparse.Option callback for the no_use_pep517 option.
Process a value provided for the --no-use-pep517 option.
def _handle_no_use_pep517( option: Option, opt: str, value: str, parser: OptionParser ) -> None: """ Process a value provided for the --no-use-pep517 option. This is an optparse.Option callback for the no_use_pep517 option. """ # Since --no-use-pep517 doesn't accept arguments, the value argument # will be None if --no-use-pep517 is passed via the command-line. # However, the value can be non-None if the option is triggered e.g. # by an environment variable, for example "PIP_NO_USE_PEP517=true". if value is not None: msg = """A value was passed for --no-use-pep517, probably using either the PIP_NO_USE_PEP517 environment variable or the "no-use-pep517" config file option. Use an appropriate value of the PIP_USE_PEP517 environment variable or the "use-pep517" config file option instead. """ raise_option_error(parser, option=option, msg=msg) # Otherwise, --no-use-pep517 was passed via the command-line. parser.values.use_pep517 = False
[ "def", "_handle_no_use_pep517", "(", "option", ":", "Option", ",", "opt", ":", "str", ",", "value", ":", "str", ",", "parser", ":", "OptionParser", ")", "->", "None", ":", "# Since --no-use-pep517 doesn't accept arguments, the value argument", "# will be None if --no-use-pep517 is passed via the command-line.", "# However, the value can be non-None if the option is triggered e.g.", "# by an environment variable, for example \"PIP_NO_USE_PEP517=true\".", "if", "value", "is", "not", "None", ":", "msg", "=", "\"\"\"A value was passed for --no-use-pep517,\n probably using either the PIP_NO_USE_PEP517 environment variable\n or the \"no-use-pep517\" config file option. Use an appropriate value\n of the PIP_USE_PEP517 environment variable or the \"use-pep517\"\n config file option instead.\n \"\"\"", "raise_option_error", "(", "parser", ",", "option", "=", "option", ",", "msg", "=", "msg", ")", "# Otherwise, --no-use-pep517 was passed via the command-line.", "parser", ".", "values", ".", "use_pep517", "=", "False" ]
[ 753, 0 ]
[ 775, 36 ]
python
en
['en', 'error', 'th']
False
_handle_merge_hash
( option: Option, opt_str: str, value: str, parser: OptionParser )
Given a value spelled "algo:digest", append the digest to a list pointed to in a dict by the algo name.
Given a value spelled "algo:digest", append the digest to a list pointed to in a dict by the algo name.
def _handle_merge_hash( option: Option, opt_str: str, value: str, parser: OptionParser ) -> None: """Given a value spelled "algo:digest", append the digest to a list pointed to in a dict by the algo name.""" if not parser.values.hashes: parser.values.hashes = {} try: algo, digest = value.split(":", 1) except ValueError: parser.error( "Arguments to {} must be a hash name " # noqa "followed by a value, like --hash=sha256:" "abcde...".format(opt_str) ) if algo not in STRONG_HASHES: parser.error( "Allowed hash algorithms for {} are {}.".format( # noqa opt_str, ", ".join(STRONG_HASHES) ) ) parser.values.hashes.setdefault(algo, []).append(digest)
[ "def", "_handle_merge_hash", "(", "option", ":", "Option", ",", "opt_str", ":", "str", ",", "value", ":", "str", ",", "parser", ":", "OptionParser", ")", "->", "None", ":", "if", "not", "parser", ".", "values", ".", "hashes", ":", "parser", ".", "values", ".", "hashes", "=", "{", "}", "try", ":", "algo", ",", "digest", "=", "value", ".", "split", "(", "\":\"", ",", "1", ")", "except", "ValueError", ":", "parser", ".", "error", "(", "\"Arguments to {} must be a hash name \"", "# noqa", "\"followed by a value, like --hash=sha256:\"", "\"abcde...\"", ".", "format", "(", "opt_str", ")", ")", "if", "algo", "not", "in", "STRONG_HASHES", ":", "parser", ".", "error", "(", "\"Allowed hash algorithms for {} are {}.\"", ".", "format", "(", "# noqa", "opt_str", ",", "\", \"", ".", "join", "(", "STRONG_HASHES", ")", ")", ")", "parser", ".", "values", ".", "hashes", ".", "setdefault", "(", "algo", ",", "[", "]", ")", ".", "append", "(", "digest", ")" ]
[ 858, 0 ]
[ 879, 60 ]
python
en
['en', 'en', 'en']
True
preprocess_train_and_deploy
( project='ai-analytics-solutions', bucket='ai-analytics-solutions-kfpdemo', start_year='2000' )
End-to-end Pipeline to train and deploy babyweight model
End-to-end Pipeline to train and deploy babyweight model
def preprocess_train_and_deploy( project='ai-analytics-solutions', bucket='ai-analytics-solutions-kfpdemo', start_year='2000' ): """End-to-end Pipeline to train and deploy babyweight model""" # Step 1: create training dataset using Apache Beam on Cloud Dataflow preprocess = dsl.ContainerOp( name='preprocess', # image needs to be a compile-time string image='gcr.io/ai-analytics-solutions/babyweight-pipeline-bqtocsv:latest', arguments=[ '--project', project, '--mode', 'cloud', '--bucket', bucket, '--start_year', start_year ], file_outputs={'bucket': '/output.txt'} ).apply(use_gcp_secret('user-gcp-sa')) # Step 2: Do hyperparameter tuning of the model on Cloud ML Engine hparam_train = dsl.ContainerOp( name='hypertrain', # image needs to be a compile-time string image='gcr.io/ai-analytics-solutions/babyweight-pipeline-hypertrain:latest', arguments=[ preprocess.outputs['bucket'] ], file_outputs={'jobname': '/output.txt'} ).apply(use_gcp_secret('user-gcp-sa')) # core ML part of pipeline deploy_cmle = train_and_deploy_helper(preprocess, hparam_train) # Step 5: Deploy web app deploy_app = dsl.ContainerOp( name='deployapp', # image needs to be a compile-time string image='gcr.io/ai-analytics-solutions/babyweight-pipeline-deployapp:latest', arguments=[ deploy_cmle.outputs['model'], deploy_cmle.outputs['version'] ], file_outputs={ 'appurl': '/appurl.txt' } ).apply(use_gcp_secret('user-gcp-sa'))
[ "def", "preprocess_train_and_deploy", "(", "project", "=", "'ai-analytics-solutions'", ",", "bucket", "=", "'ai-analytics-solutions-kfpdemo'", ",", "start_year", "=", "'2000'", ")", ":", "# Step 1: create training dataset using Apache Beam on Cloud Dataflow", "preprocess", "=", "dsl", ".", "ContainerOp", "(", "name", "=", "'preprocess'", ",", "# image needs to be a compile-time string", "image", "=", "'gcr.io/ai-analytics-solutions/babyweight-pipeline-bqtocsv:latest'", ",", "arguments", "=", "[", "'--project'", ",", "project", ",", "'--mode'", ",", "'cloud'", ",", "'--bucket'", ",", "bucket", ",", "'--start_year'", ",", "start_year", "]", ",", "file_outputs", "=", "{", "'bucket'", ":", "'/output.txt'", "}", ")", ".", "apply", "(", "use_gcp_secret", "(", "'user-gcp-sa'", ")", ")", "# Step 2: Do hyperparameter tuning of the model on Cloud ML Engine", "hparam_train", "=", "dsl", ".", "ContainerOp", "(", "name", "=", "'hypertrain'", ",", "# image needs to be a compile-time string", "image", "=", "'gcr.io/ai-analytics-solutions/babyweight-pipeline-hypertrain:latest'", ",", "arguments", "=", "[", "preprocess", ".", "outputs", "[", "'bucket'", "]", "]", ",", "file_outputs", "=", "{", "'jobname'", ":", "'/output.txt'", "}", ")", ".", "apply", "(", "use_gcp_secret", "(", "'user-gcp-sa'", ")", ")", "# core ML part of pipeline", "deploy_cmle", "=", "train_and_deploy_helper", "(", "preprocess", ",", "hparam_train", ")", "# Step 5: Deploy web app", "deploy_app", "=", "dsl", ".", "ContainerOp", "(", "name", "=", "'deployapp'", ",", "# image needs to be a compile-time string", "image", "=", "'gcr.io/ai-analytics-solutions/babyweight-pipeline-deployapp:latest'", ",", "arguments", "=", "[", "deploy_cmle", ".", "outputs", "[", "'model'", "]", ",", "deploy_cmle", ".", "outputs", "[", "'version'", "]", "]", ",", "file_outputs", "=", "{", "'appurl'", ":", "'/appurl.txt'", "}", ")", ".", "apply", "(", "use_gcp_secret", "(", "'user-gcp-sa'", ")", ")" ]
[ 36, 0 ]
[ 83, 46 ]
python
en
['en', 'en', 'en']
True
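A hedged sketch of how a pipeline function like preprocess_train_and_deploy is typically compiled and launched with the KFP v1 SDK; the module name, host URL and argument values are placeholders, not taken from the record.
import kfp
import kfp.compiler as compiler

from pipeline import preprocess_train_and_deploy        # hypothetical module holding the function above

# Compile to an archive that can be uploaded through the Pipelines UI ...
compiler.Compiler().compile(preprocess_train_and_deploy, 'babyweight.tar.gz')

# ... or submit a run directly against a Pipelines endpoint.
client = kfp.Client(host='https://<pipelines-host>')    # placeholder host
client.create_run_from_pipeline_func(
    preprocess_train_and_deploy,
    arguments={'project': 'my-project', 'bucket': 'my-bucket', 'start_year': '2005'})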
train_and_deploy
( project='ai-analytics-solutions', bucket='ai-analytics-solutions-kfpdemo', start_year='2000' )
Pipeline to retrain and deploy babyweight ML model only
Pipeline to retrain and deploy babyweight ML model only
def train_and_deploy( project='ai-analytics-solutions', bucket='ai-analytics-solutions-kfpdemo', start_year='2000' ): """Pipeline to retrain and deploy babyweight ML model only""" # Create dictionaries that correspond to output of previous steps preprocess = ObjectDict({ 'outputs': { 'bucket': bucket } }) # Step 2: hyperparam train hparam_train = ObjectDict({ 'outputs': { 'jobname': os.environ.get('HPARAM_JOB', 'babyweight_200207_231639') } }) # actual pipeline we want to run deploy_cmle = train_and_deploy_helper(preprocess, hparam_train)
[ "def", "train_and_deploy", "(", "project", "=", "'ai-analytics-solutions'", ",", "bucket", "=", "'ai-analytics-solutions-kfpdemo'", ",", "start_year", "=", "'2000'", ")", ":", "# Create dictionaries that correspond to output of previous steps", "preprocess", "=", "ObjectDict", "(", "{", "'outputs'", ":", "{", "'bucket'", ":", "bucket", "}", "}", ")", "# Step 2: hyperparam train", "hparam_train", "=", "ObjectDict", "(", "{", "'outputs'", ":", "{", "'jobname'", ":", "os", ".", "environ", ".", "get", "(", "'HPARAM_JOB'", ",", "'babyweight_200207_231639'", ")", "}", "}", ")", "# actual pipeline we want to run", "deploy_cmle", "=", "train_and_deploy_helper", "(", "preprocess", ",", "hparam_train", ")" ]
[ 92, 0 ]
[ 113, 67 ]
python
en
['en', 'en', 'en']
True
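ObjectDict is referenced but not defined in the train_and_deploy record; a minimal stand-in that would let preprocess.outputs['bucket'] be read the way train_and_deploy_helper expects might look like this (an assumption, not the original class).
class ObjectDict(dict):
    """Dict whose top-level keys are also readable as attributes (stand-in, assumed)."""
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

preprocess = ObjectDict({'outputs': {'bucket': 'ai-analytics-solutions-kfpdemo'}})
print(preprocess.outputs['bucket'])   # ai-analytics-solutions-kfpdemo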
train_and_deploy_helper
(preprocess, hparam_train)
Helper function called from the two pipeline functions
Helper function called from the two pipeline functions
def train_and_deploy_helper(preprocess, hparam_train): """Helper function called from the two pipeline functions""" # Step 3: Train the model some more, but on the pipelines cluster itself train_tuned = dsl.ContainerOp( name='traintuned', # image needs to be a compile-time string image='gcr.io/ai-analytics-solutions/babyweight-pipeline-traintuned:latest', arguments=[ hparam_train.outputs['jobname'], preprocess.outputs['bucket'] ], file_outputs={'train': '/output.txt'} ).apply(use_gcp_secret('user-gcp-sa')) train_tuned.set_memory_request('2G') train_tuned.set_cpu_request('1') # Step 4: Deploy the trained model to Cloud ML Engine deploy_cmle = dsl.ContainerOp( name='deploycmle', # image needs to be a compile-time string image='gcr.io/ai-analytics-solutions/babyweight-pipeline-deploycmle:latest', arguments=[ train_tuned.outputs['train'], # modeldir 'babyweight', 'mlp' ], file_outputs={ 'model': '/model.txt', 'version': '/version.txt' } ).apply(use_gcp_secret('user-gcp-sa')) return deploy_cmle
[ "def", "train_and_deploy_helper", "(", "preprocess", ",", "hparam_train", ")", ":", "# Step 3: Train the model some more, but on the pipelines cluster itself", "train_tuned", "=", "dsl", ".", "ContainerOp", "(", "name", "=", "'traintuned'", ",", "# image needs to be a compile-time string", "image", "=", "'gcr.io/ai-analytics-solutions/babyweight-pipeline-traintuned:latest'", ",", "arguments", "=", "[", "hparam_train", ".", "outputs", "[", "'jobname'", "]", ",", "preprocess", ".", "outputs", "[", "'bucket'", "]", "]", ",", "file_outputs", "=", "{", "'train'", ":", "'/output.txt'", "}", ")", ".", "apply", "(", "use_gcp_secret", "(", "'user-gcp-sa'", ")", ")", "train_tuned", ".", "set_memory_request", "(", "'2G'", ")", "train_tuned", ".", "set_cpu_request", "(", "'1'", ")", "# Step 4: Deploy the trained model to Cloud ML Engine", "deploy_cmle", "=", "dsl", ".", "ContainerOp", "(", "name", "=", "'deploycmle'", ",", "# image needs to be a compile-time string", "image", "=", "'gcr.io/ai-analytics-solutions/babyweight-pipeline-deploycmle:latest'", ",", "arguments", "=", "[", "train_tuned", ".", "outputs", "[", "'train'", "]", ",", "# modeldir", "'babyweight'", ",", "'mlp'", "]", ",", "file_outputs", "=", "{", "'model'", ":", "'/model.txt'", ",", "'version'", ":", "'/version.txt'", "}", ")", ".", "apply", "(", "use_gcp_secret", "(", "'user-gcp-sa'", ")", ")", "return", "deploy_cmle" ]
[ 119, 0 ]
[ 152, 22 ]
python
en
['en', 'en', 'en']
True
finetune_and_deploy
(filename)
invoked from a Cloud Function or a Cloud Run, it launches a Pipeline on kfp
invoked from a Cloud Function or a Cloud Run, it launches a Pipeline on kfp
def finetune_and_deploy(filename): """invoked from a Cloud Function or a Cloud Run, it launches a Pipeline on kfp""" import kfp import sys if 'babyweight/preproc/train' in filename: PIPELINES_HOST = os.environ.get('PIPELINES_HOST', "Environment variable PIPELINES_HOST not set") PROJECT = os.environ.get('PROJECT', "Environment variable PROJECT not set") BUCKET = os.environ.get('BUCKET', "Environment variable BUCKET not set") print("New file {}: Launching ML pipeline on {} to finetune model in {}".format( filename, PIPELINES_HOST, BUCKET)) sys.stdout.flush() client = kfp.Client(host=PIPELINES_HOST) args = { 'project' : PROJECT, 'bucket' : BUCKET, } pipeline = client.create_run_from_pipeline_func(train_and_deploy, args) return 'Fine tuning job Launched!'
[ "def", "finetune_and_deploy", "(", "filename", ")", ":", "import", "kfp", "import", "sys", "if", "'babyweight/preproc/train'", "in", "filename", ":", "PIPELINES_HOST", "=", "os", ".", "environ", ".", "get", "(", "'PIPELINES_HOST'", ",", "\"Environment variable PIPELINES_HOST not set\"", ")", "PROJECT", "=", "os", ".", "environ", ".", "get", "(", "'PROJECT'", ",", "\"Environment variable PROJECT not set\"", ")", "BUCKET", "=", "os", ".", "environ", ".", "get", "(", "'BUCKET'", ",", "\"Environment variable BUCKET not set\"", ")", "print", "(", "\"New file {}: Launching ML pipeline on {} to finetune model in {}\"", ".", "format", "(", "filename", ",", "PIPELINES_HOST", ",", "BUCKET", ")", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "client", "=", "kfp", ".", "Client", "(", "host", "=", "PIPELINES_HOST", ")", "args", "=", "{", "'project'", ":", "PROJECT", ",", "'bucket'", ":", "BUCKET", ",", "}", "pipeline", "=", "client", ".", "create_run_from_pipeline_func", "(", "train_and_deploy", ",", "args", ")", "return", "'Fine tuning job Launched!'" ]
[ 155, 0 ]
[ 173, 42 ]
python
en
['en', 'en', 'en']
True
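A hedged sketch of wiring finetune_and_deploy up as a GCS-triggered background Cloud Function; the wrapper name and the module it imports from are hypothetical, only the (data, context) entry-point signature is the standard one.
from main import finetune_and_deploy      # hypothetical module containing the function above

def handle_new_file(data, context):
    """Entry point for a google.storage.object.finalize trigger (sketch)."""
    # For GCS background events, data['name'] is the object path within the bucket.
    return finetune_and_deploy(data['name'])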
SessionStorage._get
(self, *args, **kwargs)
Retrieve a list of messages from the request's session. This storage always stores everything it is given, so return True for the all_retrieved flag.
Retrieve a list of messages from the request's session. This storage always stores everything it is given, so return True for the all_retrieved flag.
def _get(self, *args, **kwargs): """ Retrieve a list of messages from the request's session. This storage always stores everything it is given, so return True for the all_retrieved flag. """ return self.deserialize_messages(self.request.session.get(self.session_key)), True
[ "def", "_get", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "deserialize_messages", "(", "self", ".", "request", ".", "session", ".", "get", "(", "self", ".", "session_key", ")", ")", ",", "True" ]
[ 21, 4 ]
[ 27, 90 ]
python
en
['en', 'error', 'th']
False
SessionStorage._store
(self, messages, response, *args, **kwargs)
Store a list of messages to the request's session.
Store a list of messages to the request's session.
def _store(self, messages, response, *args, **kwargs): """ Store a list of messages to the request's session. """ if messages: self.request.session[self.session_key] = self.serialize_messages(messages) else: self.request.session.pop(self.session_key, None) return []
[ "def", "_store", "(", "self", ",", "messages", ",", "response", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "messages", ":", "self", ".", "request", ".", "session", "[", "self", ".", "session_key", "]", "=", "self", ".", "serialize_messages", "(", "messages", ")", "else", ":", "self", ".", "request", ".", "session", ".", "pop", "(", "self", ".", "session_key", ",", "None", ")", "return", "[", "]" ]
[ 29, 4 ]
[ 37, 17 ]
python
en
['en', 'error', 'th']
False
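For context, SessionStorage is normally selected through Django settings and exercised indirectly via the messages API rather than instantiated by hand; a hedged usage sketch with illustrative view code follows.
# settings.py
MESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage'

# views.py
from django.contrib import messages
from django.http import HttpResponse

def save_profile(request):
    messages.success(request, 'Profile saved.')   # ends up in request.session under the storage's session_key
    return HttpResponse('ok')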
create_confusion_matrix_thresh_vars
(scope, var_name, size)
Creates confusion matrix threshold variables. Given variable scope, name, and size, create and return confusion matrix threshold variables for true positives, false negatives, false positives, true negatives. Args: scope: String of variable scope name. var_name: String denoting which set of variables to create. Values are "time" and "feat". size: The size of the variable, number of time/feature thresholds. Returns: Confusion matrix threshold variables for true positives, false negatives, false positives, true negatives.
Creates confusion matrix threshold variables.
def create_confusion_matrix_thresh_vars(scope, var_name, size): """Creates confusion matrix threshold variables. Given variable scope, name, and size, create and return confusion matrix threshold variables for true positives, false negatives, false positives, true negatives. Args: scope: String of variable scope name. var_name: String denoting which set of variables to create. Values are "time" and "feat". size: The size of the variable, number of time/feature thresholds. Returns: Confusion matrix threshold variables for true positives, false negatives, false positives, true negatives. """ with tf.variable_scope( name_or_scope=scope, reuse=tf.AUTO_REUSE): tp_thresh_var = tf.get_variable( name="tp_thresh_{0}_var".format(var_name), dtype=tf.int64, initializer=tf.zeros( shape=size, dtype=tf.int64), trainable=False) fn_thresh_var = tf.get_variable( name="fn_thresh_{0}_var".format(var_name), dtype=tf.int64, initializer=tf.zeros( shape=size, dtype=tf.int64), trainable=False) fp_thresh_var = tf.get_variable( name="fp_thresh_{0}_var".format(var_name), dtype=tf.int64, initializer=tf.zeros( shape=size, dtype=tf.int64), trainable=False) tn_thresh_var = tf.get_variable( name="tn_thresh_{0}_var".format(var_name), dtype=tf.int64, initializer=tf.zeros( shape=size, dtype=tf.int64), trainable=False) return (tp_thresh_var, fn_thresh_var, fp_thresh_var, tn_thresh_var)
[ "def", "create_confusion_matrix_thresh_vars", "(", "scope", ",", "var_name", ",", "size", ")", ":", "with", "tf", ".", "variable_scope", "(", "name_or_scope", "=", "scope", ",", "reuse", "=", "tf", ".", "AUTO_REUSE", ")", ":", "tp_thresh_var", "=", "tf", ".", "get_variable", "(", "name", "=", "\"tp_thresh_{0}_var\"", ".", "format", "(", "var_name", ")", ",", "dtype", "=", "tf", ".", "int64", ",", "initializer", "=", "tf", ".", "zeros", "(", "shape", "=", "size", ",", "dtype", "=", "tf", ".", "int64", ")", ",", "trainable", "=", "False", ")", "fn_thresh_var", "=", "tf", ".", "get_variable", "(", "name", "=", "\"fn_thresh_{0}_var\"", ".", "format", "(", "var_name", ")", ",", "dtype", "=", "tf", ".", "int64", ",", "initializer", "=", "tf", ".", "zeros", "(", "shape", "=", "size", ",", "dtype", "=", "tf", ".", "int64", ")", ",", "trainable", "=", "False", ")", "fp_thresh_var", "=", "tf", ".", "get_variable", "(", "name", "=", "\"fp_thresh_{0}_var\"", ".", "format", "(", "var_name", ")", ",", "dtype", "=", "tf", ".", "int64", ",", "initializer", "=", "tf", ".", "zeros", "(", "shape", "=", "size", ",", "dtype", "=", "tf", ".", "int64", ")", ",", "trainable", "=", "False", ")", "tn_thresh_var", "=", "tf", ".", "get_variable", "(", "name", "=", "\"tn_thresh_{0}_var\"", ".", "format", "(", "var_name", ")", ",", "dtype", "=", "tf", ".", "int64", ",", "initializer", "=", "tf", ".", "zeros", "(", "shape", "=", "size", ",", "dtype", "=", "tf", ".", "int64", ")", ",", "trainable", "=", "False", ")", "return", "(", "tp_thresh_var", ",", "fn_thresh_var", ",", "fp_thresh_var", ",", "tn_thresh_var", ")" ]
[ 3, 0 ]
[ 53, 26 ]
python
en
['en', 'fil', 'en']
True
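A hedged usage sketch, assuming TensorFlow 1.x (where tf.variable_scope and tf.get_variable exist) and that the function above is importable; the module name, scope name and size are illustrative.
import tensorflow as tf                                   # TF 1.x API assumed
from anomaly_detection import create_confusion_matrix_thresh_vars  # hypothetical module

tp_var, fn_var, fp_var, tn_var = create_confusion_matrix_thresh_vars(
    scope='anomaly_thresholds', var_name='time', size=[100])

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(tp_var).shape)                         # (100,)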
create_both_confusion_matrix_thresh_vars
( scope, time_thresh_size, feat_thresh_size)
Creates both time & feature major confusion matrix threshold variables. Given variable scope and sizes, create and return confusion matrix threshold variables for true positives, false negatives, false positives, and true negatives for both time and feature major representations. Args: scope: String of variable scope name. time_thresh_size: Variable size of number of time major thresholds. feat_thresh_size: Variable size of number of feature major thresholds. Returns: Confusion matrix threshold variables for true positives, false negatives, false positives, true negatives for both time and feature major representations.
Creates both time & feature major confusion matrix threshold variables.
def create_both_confusion_matrix_thresh_vars( scope, time_thresh_size, feat_thresh_size): """Creates both time & feature major confusion matrix threshold variables. Given variable scope and sizes, create and return confusion matrix threshold variables for true positives, false negatives, false positives, and true negatives for both time and feature major representations. Args: scope: String of variable scope name. time_thresh_size: Variable size of number of time major thresholds. feat_thresh_size: Variable size of number of feature major thresholds. Returns: Confusion matrix threshold variables for true positives, false negatives, false positives, true negatives for both time and feature major representations. """ # Time based (tp_thresh_time_var, fn_thresh_time_var, fp_thresh_time_var, tn_thresh_time_var) = create_confusion_matrix_thresh_vars( scope=scope, var_name="time", size=time_thresh_size) # Features based (tp_thresh_feat_var, fn_thresh_feat_var, fp_thresh_feat_var, tn_thresh_feat_var) = create_confusion_matrix_thresh_vars( scope=scope, var_name="feat", size=feat_thresh_size) return (tp_thresh_time_var, fn_thresh_time_var, fp_thresh_time_var, tn_thresh_time_var, tp_thresh_feat_var, fn_thresh_feat_var, fp_thresh_feat_var, tn_thresh_feat_var)
[ "def", "create_both_confusion_matrix_thresh_vars", "(", "scope", ",", "time_thresh_size", ",", "feat_thresh_size", ")", ":", "# Time based", "(", "tp_thresh_time_var", ",", "fn_thresh_time_var", ",", "fp_thresh_time_var", ",", "tn_thresh_time_var", ")", "=", "create_confusion_matrix_thresh_vars", "(", "scope", "=", "scope", ",", "var_name", "=", "\"time\"", ",", "size", "=", "time_thresh_size", ")", "# Features based", "(", "tp_thresh_feat_var", ",", "fn_thresh_feat_var", ",", "fp_thresh_feat_var", ",", "tn_thresh_feat_var", ")", "=", "create_confusion_matrix_thresh_vars", "(", "scope", "=", "scope", ",", "var_name", "=", "\"feat\"", ",", "size", "=", "feat_thresh_size", ")", "return", "(", "tp_thresh_time_var", ",", "fn_thresh_time_var", ",", "fp_thresh_time_var", ",", "tn_thresh_time_var", ",", "tp_thresh_feat_var", ",", "fn_thresh_feat_var", ",", "fp_thresh_feat_var", ",", "tn_thresh_feat_var", ")" ]
[ 56, 0 ]
[ 96, 29 ]
python
en
['en', 'en', 'en']
True
create_mahalanobis_unsupervised_thresh_vars
(scope, var_name)
Creates mahalanobis unsupervised threshold variables. Given variable scope and name, create and return mahalanobis unsupervised threshold variables of mean and standard deviation. Args: scope: String of variable scope name. var_name: String denoting which set of variables to create. Values are "time" and "feat". Returns: Mahalanobis unsupervised threshold variables of count, mean, and standard deviation.
Creates mahalanobis unsupervised threshold variables.
def create_mahalanobis_unsupervised_thresh_vars(scope, var_name): """Creates mahalanobis unsupervised threshold variables. Given variable scope and name, create and return mahalanobis unsupervised threshold variables of mean and standard deviation. Args: scope: String of variable scope name. var_name: String denoting which set of variables to create. Values are "time" and "feat". Returns: Mahalanobis unsupervised threshold variables of count, mean, and standard deviation. """ with tf.variable_scope( name_or_scope=scope, reuse=tf.AUTO_REUSE): count_thresh_var = tf.get_variable( name="count_thresh_{0}_var".format(var_name), dtype=tf.int64, initializer=tf.zeros( shape=[], dtype=tf.int64), trainable=False) mean_thresh_var = tf.get_variable( name="mean_thresh_{0}_var".format(var_name), dtype=tf.float64, initializer=tf.zeros( shape=[], dtype=tf.float64), trainable=False) var_thresh_var = tf.get_variable( name="var_thresh_{0}_var".format(var_name), dtype=tf.float64, initializer=tf.zeros( shape=[], dtype=tf.float64), trainable=False) return (count_thresh_var, mean_thresh_var, var_thresh_var)
[ "def", "create_mahalanobis_unsupervised_thresh_vars", "(", "scope", ",", "var_name", ")", ":", "with", "tf", ".", "variable_scope", "(", "name_or_scope", "=", "scope", ",", "reuse", "=", "tf", ".", "AUTO_REUSE", ")", ":", "count_thresh_var", "=", "tf", ".", "get_variable", "(", "name", "=", "\"count_thresh_{0}_var\"", ".", "format", "(", "var_name", ")", ",", "dtype", "=", "tf", ".", "int64", ",", "initializer", "=", "tf", ".", "zeros", "(", "shape", "=", "[", "]", ",", "dtype", "=", "tf", ".", "int64", ")", ",", "trainable", "=", "False", ")", "mean_thresh_var", "=", "tf", ".", "get_variable", "(", "name", "=", "\"mean_thresh_{0}_var\"", ".", "format", "(", "var_name", ")", ",", "dtype", "=", "tf", ".", "float64", ",", "initializer", "=", "tf", ".", "zeros", "(", "shape", "=", "[", "]", ",", "dtype", "=", "tf", ".", "float64", ")", ",", "trainable", "=", "False", ")", "var_thresh_var", "=", "tf", ".", "get_variable", "(", "name", "=", "\"var_thresh_{0}_var\"", ".", "format", "(", "var_name", ")", ",", "dtype", "=", "tf", ".", "float64", ",", "initializer", "=", "tf", ".", "zeros", "(", "shape", "=", "[", "]", ",", "dtype", "=", "tf", ".", "float64", ")", ",", "trainable", "=", "False", ")", "return", "(", "count_thresh_var", ",", "mean_thresh_var", ",", "var_thresh_var", ")" ]
[ 99, 0 ]
[ 139, 27 ]
python
en
['en', 'et', 'en']
True
create_both_mahalanobis_unsupervised_thresh_vars
(scope)
Creates time & feature mahalanobis unsupervised threshold variables. Given variable scope, create and return mahalanobis unsupervised threshold variables of mean and standard deviation for both time and feature major representations. Args: scope: String of variable scope name. Returns: Mahalanobis unsupervised threshold variables of mean and standard deviation for both time and feature major representations.
Creates time & feature mahalanobis unsupervised threshold variables.
def create_both_mahalanobis_unsupervised_thresh_vars(scope): """Creates time & feature mahalanobis unsupervised threshold variables. Given variable scope, create and return mahalanobis unsupervised threshold variables of mean and standard deviation for both time and feature major representations. Args: scope: String of variable scope name. Returns: Mahalanobis unsupervised threshold variables of mean and standard deviation for both time and feature major representations. """ # Time based (count_thresh_time_var, mean_thresh_time_var, var_thresh_time_var) = create_mahalanobis_unsupervised_thresh_vars( scope=scope, var_name="time") # Features based (count_thresh_feat_var, mean_thresh_feat_var, var_thresh_feat_var) = create_mahalanobis_unsupervised_thresh_vars( scope=scope, var_name="feat") return (count_thresh_time_var, mean_thresh_time_var, var_thresh_time_var, count_thresh_feat_var, mean_thresh_feat_var, var_thresh_feat_var)
[ "def", "create_both_mahalanobis_unsupervised_thresh_vars", "(", "scope", ")", ":", "# Time based", "(", "count_thresh_time_var", ",", "mean_thresh_time_var", ",", "var_thresh_time_var", ")", "=", "create_mahalanobis_unsupervised_thresh_vars", "(", "scope", "=", "scope", ",", "var_name", "=", "\"time\"", ")", "# Features based", "(", "count_thresh_feat_var", ",", "mean_thresh_feat_var", ",", "var_thresh_feat_var", ")", "=", "create_mahalanobis_unsupervised_thresh_vars", "(", "scope", "=", "scope", ",", "var_name", "=", "\"feat\"", ")", "return", "(", "count_thresh_time_var", ",", "mean_thresh_time_var", ",", "var_thresh_time_var", ",", "count_thresh_feat_var", ",", "mean_thresh_feat_var", ",", "var_thresh_feat_var", ")" ]
[ 142, 0 ]
[ 173, 30 ]
python
en
['en', 'et', 'en']
True
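The unsupervised variant keeps scalar running statistics (count, mean, variance) rather than per-threshold counts; a hedged usage sketch under the same TF 1.x assumption, with a hypothetical module name.
import tensorflow as tf                                   # TF 1.x API assumed
from anomaly_detection import create_both_mahalanobis_unsupervised_thresh_vars  # hypothetical module

(count_time, mean_time, var_time,
 count_feat, mean_feat, var_feat) = create_both_mahalanobis_unsupervised_thresh_vars(
    scope='mahalanobis_thresholds')

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run([count_time, mean_time, var_time]))    # [0, 0.0, 0.0]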
check_err
(code, cpl=False)
Check the given CPL/OGRERR and raise an exception where appropriate.
Check the given CPL/OGRERR and raise an exception where appropriate.
def check_err(code, cpl=False): """ Check the given CPL/OGRERR and raise an exception where appropriate. """ err_dict = CPLERR_DICT if cpl else OGRERR_DICT if code == ERR_NONE: return elif code in err_dict: e, msg = err_dict[code] raise e(msg) else: raise GDALException('Unknown error code: "%s"' % code)
[ "def", "check_err", "(", "code", ",", "cpl", "=", "False", ")", ":", "err_dict", "=", "CPLERR_DICT", "if", "cpl", "else", "OGRERR_DICT", "if", "code", "==", "ERR_NONE", ":", "return", "elif", "code", "in", "err_dict", ":", "e", ",", "msg", "=", "err_dict", "[", "code", "]", "raise", "e", "(", "msg", ")", "else", ":", "raise", "GDALException", "(", "'Unknown error code: \"%s\"'", "%", "code", ")" ]
[ 48, 0 ]
[ 60, 62 ]
python
en
['en', 'error', 'th']
False
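A hedged usage sketch for check_err: it lives in django.contrib.gis.gdal.error next to the error tables it reads, and per the branch shown above an unrecognised code raises the generic GDALException.
from django.contrib.gis.gdal.error import GDALException, check_err  # assumes Django is installed

check_err(0)                     # ERR_NONE: returns silently
try:
    check_err(99)                # code not present in OGRERR_DICT
except GDALException as exc:
    print(exc)                   # Unknown error code: "99"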
open
(filename)
Load texture from a Quake2 WAL texture file. By default, a Quake2 standard palette is attached to the texture. To override the palette, use the :py:func:`PIL.Image.Image.putpalette()` method. :param filename: WAL file name, or an opened file handle. :returns: An image instance.
Load texture from a Quake2 WAL texture file.
def open(filename): """ Load texture from a Quake2 WAL texture file. By default, a Quake2 standard palette is attached to the texture. To override the palette, use the :py:func:`PIL.Image.Image.putpalette()` method. :param filename: WAL file name, or an opened file handle. :returns: An image instance. """ # FIXME: modify to return a WalImageFile instance instead of # plain Image object ? def imopen(fp): # read header fields header = fp.read(32 + 24 + 32 + 12) size = i32(header, 32), i32(header, 36) offset = i32(header, 40) # load pixel data fp.seek(offset) Image._decompression_bomb_check(size) im = Image.frombytes("P", size, fp.read(size[0] * size[1])) im.putpalette(quake2palette) im.format = "WAL" im.format_description = "Quake2 Texture" # strings are null-terminated im.info["name"] = header[:32].split(b"\0", 1)[0] next_name = header[56 : 56 + 32].split(b"\0", 1)[0] if next_name: im.info["next_name"] = next_name return im if hasattr(filename, "read"): return imopen(filename) else: with builtins.open(filename, "rb") as fp: return imopen(fp)
[ "def", "open", "(", "filename", ")", ":", "# FIXME: modify to return a WalImageFile instance instead of", "# plain Image object ?", "def", "imopen", "(", "fp", ")", ":", "# read header fields", "header", "=", "fp", ".", "read", "(", "32", "+", "24", "+", "32", "+", "12", ")", "size", "=", "i32", "(", "header", ",", "32", ")", ",", "i32", "(", "header", ",", "36", ")", "offset", "=", "i32", "(", "header", ",", "40", ")", "# load pixel data", "fp", ".", "seek", "(", "offset", ")", "Image", ".", "_decompression_bomb_check", "(", "size", ")", "im", "=", "Image", ".", "frombytes", "(", "\"P\"", ",", "size", ",", "fp", ".", "read", "(", "size", "[", "0", "]", "*", "size", "[", "1", "]", ")", ")", "im", ".", "putpalette", "(", "quake2palette", ")", "im", ".", "format", "=", "\"WAL\"", "im", ".", "format_description", "=", "\"Quake2 Texture\"", "# strings are null-terminated", "im", ".", "info", "[", "\"name\"", "]", "=", "header", "[", ":", "32", "]", ".", "split", "(", "b\"\\0\"", ",", "1", ")", "[", "0", "]", "next_name", "=", "header", "[", "56", ":", "56", "+", "32", "]", ".", "split", "(", "b\"\\0\"", ",", "1", ")", "[", "0", "]", "if", "next_name", ":", "im", ".", "info", "[", "\"next_name\"", "]", "=", "next_name", "return", "im", "if", "hasattr", "(", "filename", ",", "\"read\"", ")", ":", "return", "imopen", "(", "filename", ")", "else", ":", "with", "builtins", ".", "open", "(", "filename", ",", "\"rb\"", ")", "as", "fp", ":", "return", "imopen", "(", "fp", ")" ]
[ 31, 0 ]
[ 72, 29 ]
python
en
['en', 'error', 'th']
False
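A hedged usage sketch via Pillow's WalImageFile plugin module; the texture path is a placeholder.
from PIL import WalImageFile

im = WalImageFile.open('textures/e1u1/floor1_1.wal')      # placeholder path
print(im.size, im.info.get('name'))
im.convert('RGB').save('floor1_1.png')                    # uses the default Quake2 palette attached above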
rehash
(path, blocksize=1 << 20)
Return (encoded_digest, length) for path using hashlib.sha256()
Return (encoded_digest, length) for path using hashlib.sha256()
def rehash(path, blocksize=1 << 20): # type: (str, int) -> Tuple[str, str] """Return (encoded_digest, length) for path using hashlib.sha256()""" h, length = hash_file(path, blocksize) digest = 'sha256=' + urlsafe_b64encode( h.digest() ).decode('latin1').rstrip('=') return (digest, str(length))
[ "def", "rehash", "(", "path", ",", "blocksize", "=", "1", "<<", "20", ")", ":", "# type: (str, int) -> Tuple[str, str]", "h", ",", "length", "=", "hash_file", "(", "path", ",", "blocksize", ")", "digest", "=", "'sha256='", "+", "urlsafe_b64encode", "(", "h", ".", "digest", "(", ")", ")", ".", "decode", "(", "'latin1'", ")", ".", "rstrip", "(", "'='", ")", "return", "(", "digest", ",", "str", "(", "length", ")", ")" ]
[ 76, 0 ]
[ 83, 32 ]
python
cy
['da', 'cy', 'en']
False
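hash_file is a pip-internal helper, so the standalone sketch below re-creates the same digest/length computation with only the standard library; the function name is illustrative.
import hashlib
from base64 import urlsafe_b64encode

def rehash_sketch(path, blocksize=1 << 20):
    """Return ('sha256=<urlsafe b64 digest>', str(length)) for a file (stand-in for pip's rehash)."""
    h, length = hashlib.sha256(), 0
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(blocksize), b''):
            h.update(block)
            length += len(block)
    digest = 'sha256=' + urlsafe_b64encode(h.digest()).decode('latin1').rstrip('=')
    return digest, str(length)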
csv_io_kwargs
(mode)
Return keyword arguments to properly open a CSV file in the given mode.
Return keyword arguments to properly open a CSV file in the given mode.
def csv_io_kwargs(mode): # type: (str) -> Dict[str, Any] """Return keyword arguments to properly open a CSV file in the given mode. """ return {'mode': mode, 'newline': '', 'encoding': 'utf-8'}
[ "def", "csv_io_kwargs", "(", "mode", ")", ":", "# type: (str) -> Dict[str, Any]", "return", "{", "'mode'", ":", "mode", ",", "'newline'", ":", "''", ",", "'encoding'", ":", "'utf-8'", "}" ]
[ 86, 0 ]
[ 91, 61 ]
python
en
['en', 'en', 'en']
True
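Usage sketch: the returned kwargs are meant to be splatted into open() so the RECORD CSV is written without newline translation; since csv_io_kwargs is pip-internal, its return value is simply inlined here.
import csv
import os
import tempfile

kwargs = {'mode': 'w', 'newline': '', 'encoding': 'utf-8'}   # == csv_io_kwargs('w')
record_path = os.path.join(tempfile.mkdtemp(), 'RECORD')
with open(record_path, **kwargs) as f:
    csv.writer(f).writerow(['pkg/__init__.py', 'sha256=abc', '42'])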
fix_script
(path)
Replace #!python with #!/path/to/python Return True if file was changed.
Replace #!python with #!/path/to/python Return True if file was changed.
def fix_script(path): # type: (str) -> bool """Replace #!python with #!/path/to/python Return True if file was changed. """ # XXX RECORD hashes will need to be updated assert os.path.isfile(path) with open(path, 'rb') as script: firstline = script.readline() if not firstline.startswith(b'#!python'): return False exename = sys.executable.encode(sys.getfilesystemencoding()) firstline = b'#!' + exename + os.linesep.encode("ascii") rest = script.read() with open(path, 'wb') as script: script.write(firstline) script.write(rest) return True
[ "def", "fix_script", "(", "path", ")", ":", "# type: (str) -> bool", "# XXX RECORD hashes will need to be updated", "assert", "os", ".", "path", ".", "isfile", "(", "path", ")", "with", "open", "(", "path", ",", "'rb'", ")", "as", "script", ":", "firstline", "=", "script", ".", "readline", "(", ")", "if", "not", "firstline", ".", "startswith", "(", "b'#!python'", ")", ":", "return", "False", "exename", "=", "sys", ".", "executable", ".", "encode", "(", "sys", ".", "getfilesystemencoding", "(", ")", ")", "firstline", "=", "b'#!'", "+", "exename", "+", "os", ".", "linesep", ".", "encode", "(", "\"ascii\"", ")", "rest", "=", "script", ".", "read", "(", ")", "with", "open", "(", "path", ",", "'wb'", ")", "as", "script", ":", "script", ".", "write", "(", "firstline", ")", "script", ".", "write", "(", "rest", ")", "return", "True" ]
[ 94, 0 ]
[ 112, 15 ]
python
en
['en', 'lt', 'en']
True
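A standalone illustration of the same shebang rewrite applied by hand to a throwaway file; this is not pip's code path, just the documented transformation.
import os
import sys
import tempfile

path = os.path.join(tempfile.mkdtemp(), 'demo-script')
with open(path, 'wb') as f:
    f.write(b'#!python\nprint("hello")\n')

with open(path, 'rb') as f:
    firstline, rest = f.readline(), f.read()
if firstline.startswith(b'#!python'):
    exename = sys.executable.encode(sys.getfilesystemencoding())
    with open(path, 'wb') as f:
        f.write(b'#!' + exename + os.linesep.encode('ascii'))
        f.write(rest)
with open(path, 'rb') as f:
    print(f.readline())              # b'#!<path to the running interpreter>...'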
message_about_scripts_not_on_PATH
(scripts)
Determine if any scripts are not on PATH and format a warning. Returns a warning message if one or more scripts are not on PATH, otherwise None.
Determine if any scripts are not on PATH and format a warning. Returns a warning message if one or more scripts are not on PATH, otherwise None.
def message_about_scripts_not_on_PATH(scripts): # type: (Sequence[str]) -> Optional[str] """Determine if any scripts are not on PATH and format a warning. Returns a warning message if one or more scripts are not on PATH, otherwise None. """ if not scripts: return None # Group scripts by the path they were installed in grouped_by_dir = collections.defaultdict(set) # type: Dict[str, Set[str]] for destfile in scripts: parent_dir = os.path.dirname(destfile) script_name = os.path.basename(destfile) grouped_by_dir[parent_dir].add(script_name) # We don't want to warn for directories that are on PATH. not_warn_dirs = [ os.path.normcase(i).rstrip(os.sep) for i in os.environ.get("PATH", "").split(os.pathsep) ] # If an executable sits with sys.executable, we don't warn for it. # This covers the case of venv invocations without activating the venv. not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) warn_for = { parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items() if os.path.normcase(parent_dir) not in not_warn_dirs } # type: Dict[str, Set[str]] if not warn_for: return None # Format a message msg_lines = [] for parent_dir, dir_scripts in warn_for.items(): sorted_scripts = sorted(dir_scripts) # type: List[str] if len(sorted_scripts) == 1: start_text = "script {} is".format(sorted_scripts[0]) else: start_text = "scripts {} are".format( ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] ) msg_lines.append( "The {} installed in '{}' which is not on PATH." .format(start_text, parent_dir) ) last_line_fmt = ( "Consider adding {} to PATH or, if you prefer " "to suppress this warning, use --no-warn-script-location." ) if len(msg_lines) == 1: msg_lines.append(last_line_fmt.format("this directory")) else: msg_lines.append(last_line_fmt.format("these directories")) # Add a note if any directory starts with ~ warn_for_tilde = any( i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i ) if warn_for_tilde: tilde_warning_msg = ( "NOTE: The current PATH contains path(s) starting with `~`, " "which may not be expanded by all applications." ) msg_lines.append(tilde_warning_msg) # Returns the formatted multiline message return "\n".join(msg_lines)
[ "def", "message_about_scripts_not_on_PATH", "(", "scripts", ")", ":", "# type: (Sequence[str]) -> Optional[str]", "if", "not", "scripts", ":", "return", "None", "# Group scripts by the path they were installed in", "grouped_by_dir", "=", "collections", ".", "defaultdict", "(", "set", ")", "# type: Dict[str, Set[str]]", "for", "destfile", "in", "scripts", ":", "parent_dir", "=", "os", ".", "path", ".", "dirname", "(", "destfile", ")", "script_name", "=", "os", ".", "path", ".", "basename", "(", "destfile", ")", "grouped_by_dir", "[", "parent_dir", "]", ".", "add", "(", "script_name", ")", "# We don't want to warn for directories that are on PATH.", "not_warn_dirs", "=", "[", "os", ".", "path", ".", "normcase", "(", "i", ")", ".", "rstrip", "(", "os", ".", "sep", ")", "for", "i", "in", "os", ".", "environ", ".", "get", "(", "\"PATH\"", ",", "\"\"", ")", ".", "split", "(", "os", ".", "pathsep", ")", "]", "# If an executable sits with sys.executable, we don't warn for it.", "# This covers the case of venv invocations without activating the venv.", "not_warn_dirs", ".", "append", "(", "os", ".", "path", ".", "normcase", "(", "os", ".", "path", ".", "dirname", "(", "sys", ".", "executable", ")", ")", ")", "warn_for", "=", "{", "parent_dir", ":", "scripts", "for", "parent_dir", ",", "scripts", "in", "grouped_by_dir", ".", "items", "(", ")", "if", "os", ".", "path", ".", "normcase", "(", "parent_dir", ")", "not", "in", "not_warn_dirs", "}", "# type: Dict[str, Set[str]]", "if", "not", "warn_for", ":", "return", "None", "# Format a message", "msg_lines", "=", "[", "]", "for", "parent_dir", ",", "dir_scripts", "in", "warn_for", ".", "items", "(", ")", ":", "sorted_scripts", "=", "sorted", "(", "dir_scripts", ")", "# type: List[str]", "if", "len", "(", "sorted_scripts", ")", "==", "1", ":", "start_text", "=", "\"script {} is\"", ".", "format", "(", "sorted_scripts", "[", "0", "]", ")", "else", ":", "start_text", "=", "\"scripts {} are\"", ".", "format", "(", "\", \"", ".", "join", "(", "sorted_scripts", "[", ":", "-", "1", "]", ")", "+", "\" and \"", "+", "sorted_scripts", "[", "-", "1", "]", ")", "msg_lines", ".", "append", "(", "\"The {} installed in '{}' which is not on PATH.\"", ".", "format", "(", "start_text", ",", "parent_dir", ")", ")", "last_line_fmt", "=", "(", "\"Consider adding {} to PATH or, if you prefer \"", "\"to suppress this warning, use --no-warn-script-location.\"", ")", "if", "len", "(", "msg_lines", ")", "==", "1", ":", "msg_lines", ".", "append", "(", "last_line_fmt", ".", "format", "(", "\"this directory\"", ")", ")", "else", ":", "msg_lines", ".", "append", "(", "last_line_fmt", ".", "format", "(", "\"these directories\"", ")", ")", "# Add a note if any directory starts with ~", "warn_for_tilde", "=", "any", "(", "i", "[", "0", "]", "==", "\"~\"", "for", "i", "in", "os", ".", "environ", ".", "get", "(", "\"PATH\"", ",", "\"\"", ")", ".", "split", "(", "os", ".", "pathsep", ")", "if", "i", ")", "if", "warn_for_tilde", ":", "tilde_warning_msg", "=", "(", "\"NOTE: The current PATH contains path(s) starting with `~`, \"", "\"which may not be expanded by all applications.\"", ")", "msg_lines", ".", "append", "(", "tilde_warning_msg", ")", "# Returns the formatted multiline message", "return", "\"\\n\"", ".", "join", "(", "msg_lines", ")" ]
[ 131, 0 ]
[ 199, 31 ]
python
en
['en', 'en', 'en']
True
_normalized_outrows
(outrows)
Normalize the given rows of a RECORD file. Items in each row are converted into str. Rows are then sorted to make the value more predictable for tests. Each row is a 3-tuple (path, hash, size) and corresponds to a record of a RECORD file (see PEP 376 and PEP 427 for details). For the rows passed to this function, the size can be an integer as an int or string, or the empty string.
Normalize the given rows of a RECORD file.
def _normalized_outrows(outrows): # type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]] """Normalize the given rows of a RECORD file. Items in each row are converted into str. Rows are then sorted to make the value more predictable for tests. Each row is a 3-tuple (path, hash, size) and corresponds to a record of a RECORD file (see PEP 376 and PEP 427 for details). For the rows passed to this function, the size can be an integer as an int or string, or the empty string. """ # Normally, there should only be one row per path, in which case the # second and third elements don't come into play when sorting. # However, in cases in the wild where a path might happen to occur twice, # we don't want the sort operation to trigger an error (but still want # determinism). Since the third element can be an int or string, we # coerce each element to a string to avoid a TypeError in this case. # For additional background, see-- # https://github.com/pypa/pip/issues/5868 return sorted( (ensure_str(record_path, encoding='utf-8'), hash_, str(size)) for record_path, hash_, size in outrows )
[ "def", "_normalized_outrows", "(", "outrows", ")", ":", "# type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]]", "# Normally, there should only be one row per path, in which case the", "# second and third elements don't come into play when sorting.", "# However, in cases in the wild where a path might happen to occur twice,", "# we don't want the sort operation to trigger an error (but still want", "# determinism). Since the third element can be an int or string, we", "# coerce each element to a string to avoid a TypeError in this case.", "# For additional background, see--", "# https://github.com/pypa/pip/issues/5868", "return", "sorted", "(", "(", "ensure_str", "(", "record_path", ",", "encoding", "=", "'utf-8'", ")", ",", "hash_", ",", "str", "(", "size", ")", ")", "for", "record_path", ",", "hash_", ",", "size", "in", "outrows", ")" ]
[ 202, 0 ]
[ 225, 5 ]
python
en
['en', 'en', 'en']
True
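A small illustration of the normalisation described above; plain str stands in for the vendored ensure_str, and the rows are illustrative.
outrows = [('b/b.py', 'sha256=xyz', 10), ('a/a.py', 'sha256=abc', '7')]
normalized = sorted((str(p), h, str(size)) for p, h, size in outrows)
print(normalized)
# [('a/a.py', 'sha256=abc', '7'), ('b/b.py', 'sha256=xyz', '10')]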
get_csv_rows_for_installed
( old_csv_rows, # type: List[List[str]] installed, # type: Dict[RecordPath, RecordPath] changed, # type: Set[RecordPath] generated, # type: List[str] lib_dir, # type: str )
:param installed: A map from archive RECORD path to installation RECORD path.
:param installed: A map from archive RECORD path to installation RECORD path.
def get_csv_rows_for_installed( old_csv_rows, # type: List[List[str]] installed, # type: Dict[RecordPath, RecordPath] changed, # type: Set[RecordPath] generated, # type: List[str] lib_dir, # type: str ): # type: (...) -> List[InstalledCSVRow] """ :param installed: A map from archive RECORD path to installation RECORD path. """ installed_rows = [] # type: List[InstalledCSVRow] for row in old_csv_rows: if len(row) > 3: logger.warning('RECORD line has more than three elements: %s', row) old_record_path = _parse_record_path(row[0]) new_record_path = installed.pop(old_record_path, old_record_path) if new_record_path in changed: digest, length = rehash(_record_to_fs_path(new_record_path)) else: digest = row[1] if len(row) > 1 else '' length = row[2] if len(row) > 2 else '' installed_rows.append((new_record_path, digest, length)) for f in generated: path = _fs_to_record_path(f, lib_dir) digest, length = rehash(f) installed_rows.append((path, digest, length)) for installed_record_path in installed.values(): installed_rows.append((installed_record_path, '', '')) return installed_rows
[ "def", "get_csv_rows_for_installed", "(", "old_csv_rows", ",", "# type: List[List[str]]", "installed", ",", "# type: Dict[RecordPath, RecordPath]", "changed", ",", "# type: Set[RecordPath]", "generated", ",", "# type: List[str]", "lib_dir", ",", "# type: str", ")", ":", "# type: (...) -> List[InstalledCSVRow]", "installed_rows", "=", "[", "]", "# type: List[InstalledCSVRow]", "for", "row", "in", "old_csv_rows", ":", "if", "len", "(", "row", ")", ">", "3", ":", "logger", ".", "warning", "(", "'RECORD line has more than three elements: %s'", ",", "row", ")", "old_record_path", "=", "_parse_record_path", "(", "row", "[", "0", "]", ")", "new_record_path", "=", "installed", ".", "pop", "(", "old_record_path", ",", "old_record_path", ")", "if", "new_record_path", "in", "changed", ":", "digest", ",", "length", "=", "rehash", "(", "_record_to_fs_path", "(", "new_record_path", ")", ")", "else", ":", "digest", "=", "row", "[", "1", "]", "if", "len", "(", "row", ")", ">", "1", "else", "''", "length", "=", "row", "[", "2", "]", "if", "len", "(", "row", ")", ">", "2", "else", "''", "installed_rows", ".", "append", "(", "(", "new_record_path", ",", "digest", ",", "length", ")", ")", "for", "f", "in", "generated", ":", "path", "=", "_fs_to_record_path", "(", "f", ",", "lib_dir", ")", "digest", ",", "length", "=", "rehash", "(", "f", ")", "installed_rows", ".", "append", "(", "(", "path", ",", "digest", ",", "length", ")", ")", "for", "installed_record_path", "in", "installed", ".", "values", "(", ")", ":", "installed_rows", ".", "append", "(", "(", "installed_record_path", ",", "''", ",", "''", ")", ")", "return", "installed_rows" ]
[ 251, 0 ]
[ 281, 25 ]
python
en
['en', 'error', 'th']
False
get_console_script_specs
(console)
Given the mapping from entrypoint name to callable, return the relevant console script specs.
Given the mapping from entrypoint name to callable, return the relevant console script specs.
def get_console_script_specs(console): # type: (Dict[str, str]) -> List[str] """ Given the mapping from entrypoint name to callable, return the relevant console script specs. """ # Don't mutate caller's version console = console.copy() scripts_to_generate = [] # Special case pip and setuptools to generate versioned wrappers # # The issue is that some projects (specifically, pip and setuptools) use # code in setup.py to create "versioned" entry points - pip2.7 on Python # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into # the wheel metadata at build time, and so if the wheel is installed with # a *different* version of Python the entry points will be wrong. The # correct fix for this is to enhance the metadata to be able to describe # such versioned entry points, but that won't happen till Metadata 2.0 is # available. # In the meantime, projects using versioned entry points will either have # incorrect versioned entry points, or they will not be able to distribute # "universal" wheels (i.e., they will need a wheel per Python version). # # Because setuptools and pip are bundled with _ensurepip and virtualenv, # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we # override the versioned entry points in the wheel and generate the # correct ones. This code is purely a short-term measure until Metadata 2.0 # is available. # # To add the level of hack in this section of code, in order to support # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment # variable which will control which version scripts get installed. # # ENSUREPIP_OPTIONS=altinstall # - Only pipX.Y and easy_install-X.Y will be generated and installed # ENSUREPIP_OPTIONS=install # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note # that this option is technically if ENSUREPIP_OPTIONS is set and is # not altinstall # DEFAULT # - The default behavior is to install pip, pipX, pipX.Y, easy_install # and easy_install-X.Y. pip_script = console.pop('pip', None) if pip_script: if "ENSUREPIP_OPTIONS" not in os.environ: scripts_to_generate.append('pip = ' + pip_script) if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": scripts_to_generate.append( 'pip{} = {}'.format(sys.version_info[0], pip_script) ) scripts_to_generate.append( f'pip{get_major_minor_version()} = {pip_script}' ) # Delete any other versioned pip entry points pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] for k in pip_ep: del console[k] easy_install_script = console.pop('easy_install', None) if easy_install_script: if "ENSUREPIP_OPTIONS" not in os.environ: scripts_to_generate.append( 'easy_install = ' + easy_install_script ) scripts_to_generate.append( 'easy_install-{} = {}'.format( get_major_minor_version(), easy_install_script ) ) # Delete any other versioned easy_install entry points easy_install_ep = [ k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) ] for k in easy_install_ep: del console[k] # Generate the console entry points specified in the wheel scripts_to_generate.extend(starmap('{} = {}'.format, console.items())) return scripts_to_generate
[ "def", "get_console_script_specs", "(", "console", ")", ":", "# type: (Dict[str, str]) -> List[str]", "# Don't mutate caller's version", "console", "=", "console", ".", "copy", "(", ")", "scripts_to_generate", "=", "[", "]", "# Special case pip and setuptools to generate versioned wrappers", "#", "# The issue is that some projects (specifically, pip and setuptools) use", "# code in setup.py to create \"versioned\" entry points - pip2.7 on Python", "# 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into", "# the wheel metadata at build time, and so if the wheel is installed with", "# a *different* version of Python the entry points will be wrong. The", "# correct fix for this is to enhance the metadata to be able to describe", "# such versioned entry points, but that won't happen till Metadata 2.0 is", "# available.", "# In the meantime, projects using versioned entry points will either have", "# incorrect versioned entry points, or they will not be able to distribute", "# \"universal\" wheels (i.e., they will need a wheel per Python version).", "#", "# Because setuptools and pip are bundled with _ensurepip and virtualenv,", "# we need to use universal wheels. So, as a stopgap until Metadata 2.0, we", "# override the versioned entry points in the wheel and generate the", "# correct ones. This code is purely a short-term measure until Metadata 2.0", "# is available.", "#", "# To add the level of hack in this section of code, in order to support", "# ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment", "# variable which will control which version scripts get installed.", "#", "# ENSUREPIP_OPTIONS=altinstall", "# - Only pipX.Y and easy_install-X.Y will be generated and installed", "# ENSUREPIP_OPTIONS=install", "# - pipX.Y, pipX, easy_install-X.Y will be generated and installed. 
Note", "# that this option is technically if ENSUREPIP_OPTIONS is set and is", "# not altinstall", "# DEFAULT", "# - The default behavior is to install pip, pipX, pipX.Y, easy_install", "# and easy_install-X.Y.", "pip_script", "=", "console", ".", "pop", "(", "'pip'", ",", "None", ")", "if", "pip_script", ":", "if", "\"ENSUREPIP_OPTIONS\"", "not", "in", "os", ".", "environ", ":", "scripts_to_generate", ".", "append", "(", "'pip = '", "+", "pip_script", ")", "if", "os", ".", "environ", ".", "get", "(", "\"ENSUREPIP_OPTIONS\"", ",", "\"\"", ")", "!=", "\"altinstall\"", ":", "scripts_to_generate", ".", "append", "(", "'pip{} = {}'", ".", "format", "(", "sys", ".", "version_info", "[", "0", "]", ",", "pip_script", ")", ")", "scripts_to_generate", ".", "append", "(", "f'pip{get_major_minor_version()} = {pip_script}'", ")", "# Delete any other versioned pip entry points", "pip_ep", "=", "[", "k", "for", "k", "in", "console", "if", "re", ".", "match", "(", "r'pip(\\d(\\.\\d)?)?$'", ",", "k", ")", "]", "for", "k", "in", "pip_ep", ":", "del", "console", "[", "k", "]", "easy_install_script", "=", "console", ".", "pop", "(", "'easy_install'", ",", "None", ")", "if", "easy_install_script", ":", "if", "\"ENSUREPIP_OPTIONS\"", "not", "in", "os", ".", "environ", ":", "scripts_to_generate", ".", "append", "(", "'easy_install = '", "+", "easy_install_script", ")", "scripts_to_generate", ".", "append", "(", "'easy_install-{} = {}'", ".", "format", "(", "get_major_minor_version", "(", ")", ",", "easy_install_script", ")", ")", "# Delete any other versioned easy_install entry points", "easy_install_ep", "=", "[", "k", "for", "k", "in", "console", "if", "re", ".", "match", "(", "r'easy_install(-\\d\\.\\d)?$'", ",", "k", ")", "]", "for", "k", "in", "easy_install_ep", ":", "del", "console", "[", "k", "]", "# Generate the console entry points specified in the wheel", "scripts_to_generate", ".", "extend", "(", "starmap", "(", "'{} = {}'", ".", "format", ",", "console", ".", "items", "(", ")", ")", ")", "return", "scripts_to_generate" ]
[ 284, 0 ]
[ 367, 30 ]
python
en
['en', 'error', 'th']
False
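A hedged illustration of the versioned wrappers the default branch (no ENSUREPIP_OPTIONS set) produces for the 'pip' entry point; this recreates only the naming scheme, not pip's implementation.
import sys

pip_script = 'pip._internal.cli.main:main'
specs = [
    'pip = ' + pip_script,
    'pip{} = {}'.format(sys.version_info[0], pip_script),
    'pip{}.{} = {}'.format(sys.version_info[0], sys.version_info[1], pip_script),
]
print(specs)   # e.g. ['pip = ...', 'pip3 = ...', 'pip3.11 = ...']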
_install_wheel
( name, # type: str wheel_zip, # type: ZipFile wheel_path, # type: str scheme, # type: Scheme pycompile=True, # type: bool warn_script_location=True, # type: bool direct_url=None, # type: Optional[DirectUrl] requested=False, # type: bool )
Install a wheel. :param name: Name of the project to install :param wheel_zip: open ZipFile for wheel being installed :param scheme: Distutils scheme dictating the install directories :param req_description: String used in place of the requirement, for logging :param pycompile: Whether to byte-compile installed Python files :param warn_script_location: Whether to check that scripts are installed into a directory on PATH :raises UnsupportedWheel: * when the directory holds an unpacked wheel with incompatible Wheel-Version * when the .dist-info dir does not match the wheel
Install a wheel.
def _install_wheel( name, # type: str wheel_zip, # type: ZipFile wheel_path, # type: str scheme, # type: Scheme pycompile=True, # type: bool warn_script_location=True, # type: bool direct_url=None, # type: Optional[DirectUrl] requested=False, # type: bool ): # type: (...) -> None """Install a wheel. :param name: Name of the project to install :param wheel_zip: open ZipFile for wheel being installed :param scheme: Distutils scheme dictating the install directories :param req_description: String used in place of the requirement, for logging :param pycompile: Whether to byte-compile installed Python files :param warn_script_location: Whether to check that scripts are installed into a directory on PATH :raises UnsupportedWheel: * when the directory holds an unpacked wheel with incompatible Wheel-Version * when the .dist-info dir does not match the wheel """ info_dir, metadata = parse_wheel(wheel_zip, name) if wheel_root_is_purelib(metadata): lib_dir = scheme.purelib else: lib_dir = scheme.platlib # Record details of the files moved # installed = files copied from the wheel to the destination # changed = files changed while installing (scripts #! line typically) # generated = files newly generated during the install (script wrappers) installed = {} # type: Dict[RecordPath, RecordPath] changed = set() # type: Set[RecordPath] generated = [] # type: List[str] def record_installed(srcfile, destfile, modified=False): # type: (RecordPath, str, bool) -> None """Map archive RECORD paths to installation RECORD paths.""" newpath = _fs_to_record_path(destfile, lib_dir) installed[srcfile] = newpath if modified: changed.add(_fs_to_record_path(destfile)) def all_paths(): # type: () -> Iterable[RecordPath] names = wheel_zip.namelist() # If a flag is set, names may be unicode in Python 2. We convert to # text explicitly so these are valid for lookup in RECORD. decoded_names = map(ensure_text, names) for name in decoded_names: yield cast("RecordPath", name) def is_dir_path(path): # type: (RecordPath) -> bool return path.endswith("/") def assert_no_path_traversal(dest_dir_path, target_path): # type: (str, str) -> None if not is_within_directory(dest_dir_path, target_path): message = ( "The wheel {!r} has a file {!r} trying to install" " outside the target directory {!r}" ) raise InstallationError( message.format(wheel_path, target_path, dest_dir_path) ) def root_scheme_file_maker(zip_file, dest): # type: (ZipFile, str) -> Callable[[RecordPath], File] def make_root_scheme_file(record_path): # type: (RecordPath) -> File normed_path = os.path.normpath(record_path) dest_path = os.path.join(dest, normed_path) assert_no_path_traversal(dest, dest_path) return ZipBackedFile(record_path, dest_path, zip_file) return make_root_scheme_file def data_scheme_file_maker(zip_file, scheme): # type: (ZipFile, Scheme) -> Callable[[RecordPath], File] scheme_paths = {} for key in SCHEME_KEYS: encoded_key = ensure_text(key) scheme_paths[encoded_key] = ensure_text( getattr(scheme, key), encoding=sys.getfilesystemencoding() ) def make_data_scheme_file(record_path): # type: (RecordPath) -> File normed_path = os.path.normpath(record_path) try: _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2) except ValueError: message = ( "Unexpected file in {}: {!r}. .data directory contents" " should be named like: '<scheme key>/<path>'." ).format(wheel_path, record_path) raise InstallationError(message) try: scheme_path = scheme_paths[scheme_key] except KeyError: valid_scheme_keys = ", ".join(sorted(scheme_paths)) message = ( "Unknown scheme key used in {}: {} (for file {!r}). .data" " directory contents should be in subdirectories named" " with a valid scheme key ({})" ).format( wheel_path, scheme_key, record_path, valid_scheme_keys ) raise InstallationError(message) dest_path = os.path.join(scheme_path, dest_subpath) assert_no_path_traversal(scheme_path, dest_path) return ZipBackedFile(record_path, dest_path, zip_file) return make_data_scheme_file def is_data_scheme_path(path): # type: (RecordPath) -> bool return path.split("/", 1)[0].endswith(".data") paths = all_paths() file_paths = filterfalse(is_dir_path, paths) root_scheme_paths, data_scheme_paths = partition( is_data_scheme_path, file_paths ) make_root_scheme_file = root_scheme_file_maker( wheel_zip, ensure_text(lib_dir, encoding=sys.getfilesystemencoding()), ) files = map(make_root_scheme_file, root_scheme_paths) def is_script_scheme_path(path): # type: (RecordPath) -> bool parts = path.split("/", 2) return ( len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts" ) other_scheme_paths, script_scheme_paths = partition( is_script_scheme_path, data_scheme_paths ) make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme) other_scheme_files = map(make_data_scheme_file, other_scheme_paths) files = chain(files, other_scheme_files) # Get the defined entry points distribution = get_wheel_distribution(wheel_path, canonicalize_name(name)) console, gui = get_entrypoints(distribution) def is_entrypoint_wrapper(file): # type: (File) -> bool # EP, EP.exe and EP-script.py are scripts generated for # entry point EP by setuptools path = file.dest_path name = os.path.basename(path) if name.lower().endswith('.exe'): matchname = name[:-4] elif name.lower().endswith('-script.py'): matchname = name[:-10] elif name.lower().endswith(".pya"): matchname = name[:-4] else: matchname = name # Ignore setuptools-generated scripts return (matchname in console or matchname in gui) script_scheme_files = map(make_data_scheme_file, script_scheme_paths) script_scheme_files = filterfalse( is_entrypoint_wrapper, script_scheme_files ) script_scheme_files = map(ScriptFile, script_scheme_files) files = chain(files, script_scheme_files) for file in files: file.save() record_installed(file.src_record_path, file.dest_path, file.changed) def pyc_source_file_paths(): # type: () -> Iterator[str] # We de-duplicate installation paths, since there can be overlap (e.g. # file in .data maps to same location as file in wheel root). # Sorting installation paths makes it easier to reproduce and debug # issues related to permissions on existing files. for installed_path in sorted(set(installed.values())): full_installed_path = os.path.join(lib_dir, installed_path) if not os.path.isfile(full_installed_path): continue if not full_installed_path.endswith('.py'): continue yield full_installed_path def pyc_output_path(path): # type: (str) -> str """Return the path the pyc file would have been written to. """ return importlib.util.cache_from_source(path) # Compile all of the pyc files for the installed files if pycompile: with captured_stdout() as stdout: with warnings.catch_warnings(): warnings.filterwarnings('ignore') for path in pyc_source_file_paths(): # Python 2's `compileall.compile_file` requires a str in # error cases, so we must convert to the native type. path_arg = ensure_str( path, encoding=sys.getfilesystemencoding() ) success = compileall.compile_file( path_arg, force=True, quiet=True ) if success: pyc_path = pyc_output_path(path) assert os.path.exists(pyc_path) pyc_record_path = cast( "RecordPath", pyc_path.replace(os.path.sep, "/") ) record_installed(pyc_record_path, pyc_path) logger.debug(stdout.getvalue()) maker = PipScriptMaker(None, scheme.scripts) # Ensure old scripts are overwritten. # See https://github.com/pypa/pip/issues/1800 maker.clobber = True # Ensure we don't generate any variants for scripts because this is almost # never what somebody wants. # See https://bitbucket.org/pypa/distlib/issue/35/ maker.variants = {''} # This is required because otherwise distlib creates scripts that are not # executable. # See https://bitbucket.org/pypa/distlib/issue/32/ maker.set_mode = True # Generate the console and GUI entry points specified in the wheel scripts_to_generate = get_console_script_specs(console) gui_scripts_to_generate = list(starmap('{} = {}'.format, gui.items())) generated_console_scripts = maker.make_multiple(scripts_to_generate) generated.extend(generated_console_scripts) generated.extend( maker.make_multiple(gui_scripts_to_generate, {'gui': True}) ) if warn_script_location: msg = message_about_scripts_not_on_PATH(generated_console_scripts) if msg is not None: logger.warning(msg) generated_file_mode = 0o666 & ~current_umask() @contextlib.contextmanager def _generate_file(path, **kwargs): # type: (str, **Any) -> Iterator[BinaryIO] with adjacent_tmp_file(path, **kwargs) as f: yield f os.chmod(f.name, generated_file_mode) replace(f.name, path) dest_info_dir = os.path.join(lib_dir, info_dir) # Record pip as the installer installer_path = os.path.join(dest_info_dir, 'INSTALLER') with _generate_file(installer_path) as installer_file: installer_file.write(b'pip\n') generated.append(installer_path) # Record the PEP 610 direct URL reference if direct_url is not None: direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME) with _generate_file(direct_url_path) as direct_url_file: direct_url_file.write(direct_url.to_json().encode("utf-8")) generated.append(direct_url_path) # Record the REQUESTED file if requested: requested_path = os.path.join(dest_info_dir, 'REQUESTED') with open(requested_path, "wb"): pass generated.append(requested_path) record_text = distribution.read_text('RECORD') record_rows = list(csv.reader(record_text.splitlines())) rows = get_csv_rows_for_installed( record_rows, installed=installed, changed=changed, generated=generated, lib_dir=lib_dir) # Record details of all files installed record_path = os.path.join(dest_info_dir, 'RECORD') with _generate_file(record_path, **csv_io_kwargs('w')) as record_file: # The type mypy infers for record_file is different for Python 3 # (typing.IO[Any]) and Python 2 (typing.BinaryIO). We explicitly # cast to typing.IO[str] as a workaround. writer = csv.writer(cast('IO[str]', record_file)) writer.writerows(_normalized_outrows(rows))
[ "def", "_install_wheel", "(", "name", ",", "# type: str", "wheel_zip", ",", "# type: ZipFile", "wheel_path", ",", "# type: str", "scheme", ",", "# type: Scheme", "pycompile", "=", "True", ",", "# type: bool", "warn_script_location", "=", "True", ",", "# type: bool", "direct_url", "=", "None", ",", "# type: Optional[DirectUrl]", "requested", "=", "False", ",", "# type: bool", ")", ":", "# type: (...) -> None", "info_dir", ",", "metadata", "=", "parse_wheel", "(", "wheel_zip", ",", "name", ")", "if", "wheel_root_is_purelib", "(", "metadata", ")", ":", "lib_dir", "=", "scheme", ".", "purelib", "else", ":", "lib_dir", "=", "scheme", ".", "platlib", "# Record details of the files moved", "# installed = files copied from the wheel to the destination", "# changed = files changed while installing (scripts #! line typically)", "# generated = files newly generated during the install (script wrappers)", "installed", "=", "{", "}", "# type: Dict[RecordPath, RecordPath]", "changed", "=", "set", "(", ")", "# type: Set[RecordPath]", "generated", "=", "[", "]", "# type: List[str]", "def", "record_installed", "(", "srcfile", ",", "destfile", ",", "modified", "=", "False", ")", ":", "# type: (RecordPath, str, bool) -> None", "\"\"\"Map archive RECORD paths to installation RECORD paths.\"\"\"", "newpath", "=", "_fs_to_record_path", "(", "destfile", ",", "lib_dir", ")", "installed", "[", "srcfile", "]", "=", "newpath", "if", "modified", ":", "changed", ".", "add", "(", "_fs_to_record_path", "(", "destfile", ")", ")", "def", "all_paths", "(", ")", ":", "# type: () -> Iterable[RecordPath]", "names", "=", "wheel_zip", ".", "namelist", "(", ")", "# If a flag is set, names may be unicode in Python 2. We convert to", "# text explicitly so these are valid for lookup in RECORD.", "decoded_names", "=", "map", "(", "ensure_text", ",", "names", ")", "for", "name", "in", "decoded_names", ":", "yield", "cast", "(", "\"RecordPath\"", ",", "name", ")", "def", "is_dir_path", "(", "path", ")", ":", "# type: (RecordPath) -> bool", "return", "path", ".", "endswith", "(", "\"/\"", ")", "def", "assert_no_path_traversal", "(", "dest_dir_path", ",", "target_path", ")", ":", "# type: (str, str) -> None", "if", "not", "is_within_directory", "(", "dest_dir_path", ",", "target_path", ")", ":", "message", "=", "(", "\"The wheel {!r} has a file {!r} trying to install\"", "\" outside the target directory {!r}\"", ")", "raise", "InstallationError", "(", "message", ".", "format", "(", "wheel_path", ",", "target_path", ",", "dest_dir_path", ")", ")", "def", "root_scheme_file_maker", "(", "zip_file", ",", "dest", ")", ":", "# type: (ZipFile, str) -> Callable[[RecordPath], File]", "def", "make_root_scheme_file", "(", "record_path", ")", ":", "# type: (RecordPath) -> File", "normed_path", "=", "os", ".", "path", ".", "normpath", "(", "record_path", ")", "dest_path", "=", "os", ".", "path", ".", "join", "(", "dest", ",", "normed_path", ")", "assert_no_path_traversal", "(", "dest", ",", "dest_path", ")", "return", "ZipBackedFile", "(", "record_path", ",", "dest_path", ",", "zip_file", ")", "return", "make_root_scheme_file", "def", "data_scheme_file_maker", "(", "zip_file", ",", "scheme", ")", ":", "# type: (ZipFile, Scheme) -> Callable[[RecordPath], File]", "scheme_paths", "=", "{", "}", "for", "key", "in", "SCHEME_KEYS", ":", "encoded_key", "=", "ensure_text", "(", "key", ")", "scheme_paths", "[", "encoded_key", "]", "=", "ensure_text", "(", "getattr", "(", "scheme", ",", "key", ")", ",", "encoding", "=", "sys", ".", "getfilesystemencoding", 
"(", ")", ")", "def", "make_data_scheme_file", "(", "record_path", ")", ":", "# type: (RecordPath) -> File", "normed_path", "=", "os", ".", "path", ".", "normpath", "(", "record_path", ")", "try", ":", "_", ",", "scheme_key", ",", "dest_subpath", "=", "normed_path", ".", "split", "(", "os", ".", "path", ".", "sep", ",", "2", ")", "except", "ValueError", ":", "message", "=", "(", "\"Unexpected file in {}: {!r}. .data directory contents\"", "\" should be named like: '<scheme key>/<path>'.\"", ")", ".", "format", "(", "wheel_path", ",", "record_path", ")", "raise", "InstallationError", "(", "message", ")", "try", ":", "scheme_path", "=", "scheme_paths", "[", "scheme_key", "]", "except", "KeyError", ":", "valid_scheme_keys", "=", "\", \"", ".", "join", "(", "sorted", "(", "scheme_paths", ")", ")", "message", "=", "(", "\"Unknown scheme key used in {}: {} (for file {!r}). .data\"", "\" directory contents should be in subdirectories named\"", "\" with a valid scheme key ({})\"", ")", ".", "format", "(", "wheel_path", ",", "scheme_key", ",", "record_path", ",", "valid_scheme_keys", ")", "raise", "InstallationError", "(", "message", ")", "dest_path", "=", "os", ".", "path", ".", "join", "(", "scheme_path", ",", "dest_subpath", ")", "assert_no_path_traversal", "(", "scheme_path", ",", "dest_path", ")", "return", "ZipBackedFile", "(", "record_path", ",", "dest_path", ",", "zip_file", ")", "return", "make_data_scheme_file", "def", "is_data_scheme_path", "(", "path", ")", ":", "# type: (RecordPath) -> bool", "return", "path", ".", "split", "(", "\"/\"", ",", "1", ")", "[", "0", "]", ".", "endswith", "(", "\".data\"", ")", "paths", "=", "all_paths", "(", ")", "file_paths", "=", "filterfalse", "(", "is_dir_path", ",", "paths", ")", "root_scheme_paths", ",", "data_scheme_paths", "=", "partition", "(", "is_data_scheme_path", ",", "file_paths", ")", "make_root_scheme_file", "=", "root_scheme_file_maker", "(", "wheel_zip", ",", "ensure_text", "(", "lib_dir", ",", "encoding", "=", "sys", ".", "getfilesystemencoding", "(", ")", ")", ",", ")", "files", "=", "map", "(", "make_root_scheme_file", ",", "root_scheme_paths", ")", "def", "is_script_scheme_path", "(", "path", ")", ":", "# type: (RecordPath) -> bool", "parts", "=", "path", ".", "split", "(", "\"/\"", ",", "2", ")", "return", "(", "len", "(", "parts", ")", ">", "2", "and", "parts", "[", "0", "]", ".", "endswith", "(", "\".data\"", ")", "and", "parts", "[", "1", "]", "==", "\"scripts\"", ")", "other_scheme_paths", ",", "script_scheme_paths", "=", "partition", "(", "is_script_scheme_path", ",", "data_scheme_paths", ")", "make_data_scheme_file", "=", "data_scheme_file_maker", "(", "wheel_zip", ",", "scheme", ")", "other_scheme_files", "=", "map", "(", "make_data_scheme_file", ",", "other_scheme_paths", ")", "files", "=", "chain", "(", "files", ",", "other_scheme_files", ")", "# Get the defined entry points", "distribution", "=", "get_wheel_distribution", "(", "wheel_path", ",", "canonicalize_name", "(", "name", ")", ")", "console", ",", "gui", "=", "get_entrypoints", "(", "distribution", ")", "def", "is_entrypoint_wrapper", "(", "file", ")", ":", "# type: (File) -> bool", "# EP, EP.exe and EP-script.py are scripts generated for", "# entry point EP by setuptools", "path", "=", "file", ".", "dest_path", "name", "=", "os", ".", "path", ".", "basename", "(", "path", ")", "if", "name", ".", "lower", "(", ")", ".", "endswith", "(", "'.exe'", ")", ":", "matchname", "=", "name", "[", ":", "-", "4", "]", "elif", "name", ".", "lower", "(", ")", ".", "endswith", "(", 
"'-script.py'", ")", ":", "matchname", "=", "name", "[", ":", "-", "10", "]", "elif", "name", ".", "lower", "(", ")", ".", "endswith", "(", "\".pya\"", ")", ":", "matchname", "=", "name", "[", ":", "-", "4", "]", "else", ":", "matchname", "=", "name", "# Ignore setuptools-generated scripts", "return", "(", "matchname", "in", "console", "or", "matchname", "in", "gui", ")", "script_scheme_files", "=", "map", "(", "make_data_scheme_file", ",", "script_scheme_paths", ")", "script_scheme_files", "=", "filterfalse", "(", "is_entrypoint_wrapper", ",", "script_scheme_files", ")", "script_scheme_files", "=", "map", "(", "ScriptFile", ",", "script_scheme_files", ")", "files", "=", "chain", "(", "files", ",", "script_scheme_files", ")", "for", "file", "in", "files", ":", "file", ".", "save", "(", ")", "record_installed", "(", "file", ".", "src_record_path", ",", "file", ".", "dest_path", ",", "file", ".", "changed", ")", "def", "pyc_source_file_paths", "(", ")", ":", "# type: () -> Iterator[str]", "# We de-duplicate installation paths, since there can be overlap (e.g.", "# file in .data maps to same location as file in wheel root).", "# Sorting installation paths makes it easier to reproduce and debug", "# issues related to permissions on existing files.", "for", "installed_path", "in", "sorted", "(", "set", "(", "installed", ".", "values", "(", ")", ")", ")", ":", "full_installed_path", "=", "os", ".", "path", ".", "join", "(", "lib_dir", ",", "installed_path", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "full_installed_path", ")", ":", "continue", "if", "not", "full_installed_path", ".", "endswith", "(", "'.py'", ")", ":", "continue", "yield", "full_installed_path", "def", "pyc_output_path", "(", "path", ")", ":", "# type: (str) -> str", "\"\"\"Return the path the pyc file would have been written to.\n \"\"\"", "return", "importlib", ".", "util", ".", "cache_from_source", "(", "path", ")", "# Compile all of the pyc files for the installed files", "if", "pycompile", ":", "with", "captured_stdout", "(", ")", "as", "stdout", ":", "with", "warnings", ".", "catch_warnings", "(", ")", ":", "warnings", ".", "filterwarnings", "(", "'ignore'", ")", "for", "path", "in", "pyc_source_file_paths", "(", ")", ":", "# Python 2's `compileall.compile_file` requires a str in", "# error cases, so we must convert to the native type.", "path_arg", "=", "ensure_str", "(", "path", ",", "encoding", "=", "sys", ".", "getfilesystemencoding", "(", ")", ")", "success", "=", "compileall", ".", "compile_file", "(", "path_arg", ",", "force", "=", "True", ",", "quiet", "=", "True", ")", "if", "success", ":", "pyc_path", "=", "pyc_output_path", "(", "path", ")", "assert", "os", ".", "path", ".", "exists", "(", "pyc_path", ")", "pyc_record_path", "=", "cast", "(", "\"RecordPath\"", ",", "pyc_path", ".", "replace", "(", "os", ".", "path", ".", "sep", ",", "\"/\"", ")", ")", "record_installed", "(", "pyc_record_path", ",", "pyc_path", ")", "logger", ".", "debug", "(", "stdout", ".", "getvalue", "(", ")", ")", "maker", "=", "PipScriptMaker", "(", "None", ",", "scheme", ".", "scripts", ")", "# Ensure old scripts are overwritten.", "# See https://github.com/pypa/pip/issues/1800", "maker", ".", "clobber", "=", "True", "# Ensure we don't generate any variants for scripts because this is almost", "# never what somebody wants.", "# See https://bitbucket.org/pypa/distlib/issue/35/", "maker", ".", "variants", "=", "{", "''", "}", "# This is required because otherwise distlib creates scripts that are not", "# executable.", "# See 
https://bitbucket.org/pypa/distlib/issue/32/", "maker", ".", "set_mode", "=", "True", "# Generate the console and GUI entry points specified in the wheel", "scripts_to_generate", "=", "get_console_script_specs", "(", "console", ")", "gui_scripts_to_generate", "=", "list", "(", "starmap", "(", "'{} = {}'", ".", "format", ",", "gui", ".", "items", "(", ")", ")", ")", "generated_console_scripts", "=", "maker", ".", "make_multiple", "(", "scripts_to_generate", ")", "generated", ".", "extend", "(", "generated_console_scripts", ")", "generated", ".", "extend", "(", "maker", ".", "make_multiple", "(", "gui_scripts_to_generate", ",", "{", "'gui'", ":", "True", "}", ")", ")", "if", "warn_script_location", ":", "msg", "=", "message_about_scripts_not_on_PATH", "(", "generated_console_scripts", ")", "if", "msg", "is", "not", "None", ":", "logger", ".", "warning", "(", "msg", ")", "generated_file_mode", "=", "0o666", "&", "~", "current_umask", "(", ")", "@", "contextlib", ".", "contextmanager", "def", "_generate_file", "(", "path", ",", "*", "*", "kwargs", ")", ":", "# type: (str, **Any) -> Iterator[BinaryIO]", "with", "adjacent_tmp_file", "(", "path", ",", "*", "*", "kwargs", ")", "as", "f", ":", "yield", "f", "os", ".", "chmod", "(", "f", ".", "name", ",", "generated_file_mode", ")", "replace", "(", "f", ".", "name", ",", "path", ")", "dest_info_dir", "=", "os", ".", "path", ".", "join", "(", "lib_dir", ",", "info_dir", ")", "# Record pip as the installer", "installer_path", "=", "os", ".", "path", ".", "join", "(", "dest_info_dir", ",", "'INSTALLER'", ")", "with", "_generate_file", "(", "installer_path", ")", "as", "installer_file", ":", "installer_file", ".", "write", "(", "b'pip\\n'", ")", "generated", ".", "append", "(", "installer_path", ")", "# Record the PEP 610 direct URL reference", "if", "direct_url", "is", "not", "None", ":", "direct_url_path", "=", "os", ".", "path", ".", "join", "(", "dest_info_dir", ",", "DIRECT_URL_METADATA_NAME", ")", "with", "_generate_file", "(", "direct_url_path", ")", "as", "direct_url_file", ":", "direct_url_file", ".", "write", "(", "direct_url", ".", "to_json", "(", ")", ".", "encode", "(", "\"utf-8\"", ")", ")", "generated", ".", "append", "(", "direct_url_path", ")", "# Record the REQUESTED file", "if", "requested", ":", "requested_path", "=", "os", ".", "path", ".", "join", "(", "dest_info_dir", ",", "'REQUESTED'", ")", "with", "open", "(", "requested_path", ",", "\"wb\"", ")", ":", "pass", "generated", ".", "append", "(", "requested_path", ")", "record_text", "=", "distribution", ".", "read_text", "(", "'RECORD'", ")", "record_rows", "=", "list", "(", "csv", ".", "reader", "(", "record_text", ".", "splitlines", "(", ")", ")", ")", "rows", "=", "get_csv_rows_for_installed", "(", "record_rows", ",", "installed", "=", "installed", ",", "changed", "=", "changed", ",", "generated", "=", "generated", ",", "lib_dir", "=", "lib_dir", ")", "# Record details of all files installed", "record_path", "=", "os", ".", "path", ".", "join", "(", "dest_info_dir", ",", "'RECORD'", ")", "with", "_generate_file", "(", "record_path", ",", "*", "*", "csv_io_kwargs", "(", "'w'", ")", ")", "as", "record_file", ":", "# The type mypy infers for record_file is different for Python 3", "# (typing.IO[Any]) and Python 2 (typing.BinaryIO). We explicitly", "# cast to typing.IO[str] as a workaround.", "writer", "=", "csv", ".", "writer", "(", "cast", "(", "'IO[str]'", ",", "record_file", ")", ")", "writer", ".", "writerows", "(", "_normalized_outrows", "(", "rows", ")", ")" ]
[ 450, 0 ]
[ 765, 51 ]
python
en
['en', 'cy', 'en']
True
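The generated_file_mode logic in the record above combines 0o666 with the inverse of the process umask. A minimal sketch of that permission calculation, using a hypothetical current_umask helper rather than pip's internal one:

import os

def current_umask():
    # Reading the umask requires setting it, so set it to 0 and restore it immediately.
    mask = os.umask(0)
    os.umask(mask)
    return mask

generated_file_mode = 0o666 & ~current_umask()
print(oct(generated_file_mode))  # e.g. 0o644 under the common umask 0o022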
validate_twilio_request
(f)
Validates that incoming requests genuinely originated from Twilio
Validates that incoming requests genuinely originated from Twilio
def validate_twilio_request(f): """Validates that incoming requests genuinely originated from Twilio""" @wraps(f) def decorated_function(request, *args, **kwargs): # Create an instance of the RequestValidator class validator = RequestValidator(os.environ.get('TWILIO_AUTH_TOKEN')) # Validate the request using its URL, POST data, # and X-TWILIO-SIGNATURE header request_valid = validator.validate( request.build_absolute_uri(), request.POST, request.META.get('HTTP_X_TWILIO_SIGNATURE', '')) # Continue processing the request if it's valid, return a 403 error if # it's not if request_valid: return f(request, *args, **kwargs) else: return HttpResponseForbidden() return decorated_function
[ "def", "validate_twilio_request", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "decorated_function", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# Create an instance of the RequestValidator class", "validator", "=", "RequestValidator", "(", "os", ".", "environ", ".", "get", "(", "'TWILIO_AUTH_TOKEN'", ")", ")", "# Validate the request using its URL, POST data,", "# and X-TWILIO-SIGNATURE header", "request_valid", "=", "validator", ".", "validate", "(", "request", ".", "build_absolute_uri", "(", ")", ",", "request", ".", "POST", ",", "request", ".", "META", ".", "get", "(", "'HTTP_X_TWILIO_SIGNATURE'", ",", "''", ")", ")", "# Continue processing the request if it's valid, return a 403 error if", "# it's not", "if", "request_valid", ":", "return", "f", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "return", "HttpResponseForbidden", "(", ")", "return", "decorated_function" ]
[ 8, 0 ]
[ 28, 29 ]
python
en
['en', 'en', 'en']
True
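A hedged usage sketch for the decorator above, applied to a hypothetical Django view; the view name and response body are illustrative and not part of the record:

from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt

@csrf_exempt
@validate_twilio_request  # decorator from the record above, assumed to be importable
def incoming_sms(request):
    # Reached only when the X-Twilio-Signature check passes; otherwise a 403 is returned.
    return HttpResponse("<Response></Response>", content_type="text/xml")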
build_estimator
(model_dir, nbuckets, hidden_units)
Build an estimator starting from INPUT COLUMNS. These include feature transformations and synthetic features. The model is a wide-and-deep model.
Build an estimator starting from INPUT COLUMNS. These include feature transformations and synthetic features. The model is a wide-and-deep model.
def build_estimator(model_dir, nbuckets, hidden_units): """ Build an estimator starting from INPUT COLUMNS. These include feature transformations and synthetic features. The model is a wide-and-deep model. """ # Input columns (dayofweek, hourofday, latdiff, londiff, euclidean, plon, plat, dlon, dlat, pcount) = INPUT_COLUMNS # Bucketize the lats & lons latbuckets = np.linspace(0, 1.0, nbuckets).tolist() lonbuckets = np.linspace(0, 1.0, nbuckets).tolist() b_plat = tf.feature_column.bucketized_column(plat, latbuckets) b_dlat = tf.feature_column.bucketized_column(dlat, latbuckets) b_plon = tf.feature_column.bucketized_column(plon, lonbuckets) b_dlon = tf.feature_column.bucketized_column(dlon, lonbuckets) # Feature cross ploc = tf.feature_column.crossed_column([b_plat, b_plon], nbuckets * nbuckets) dloc = tf.feature_column.crossed_column([b_dlat, b_dlon], nbuckets * nbuckets) pd_pair = tf.feature_column.crossed_column([ploc, dloc], nbuckets ** 4 ) day_hr = tf.feature_column.crossed_column([dayofweek, hourofday], 24 * 7) # Wide columns and deep columns. wide_columns = [ # Feature crosses dloc, ploc, pd_pair, day_hr, # Sparse columns dayofweek, hourofday, # Anything with a linear relationship pcount ] deep_columns = [ # Embedding_column to "group" together ... tf.feature_column.embedding_column(pd_pair, 10), tf.feature_column.embedding_column(day_hr, 10), # Numeric columns plat, plon, dlat, dlon, latdiff, londiff, euclidean ] return tf.estimator.DNNLinearCombinedRegressor( model_dir = model_dir, linear_feature_columns = wide_columns, dnn_feature_columns = deep_columns, dnn_hidden_units = hidden_units)
[ "def", "build_estimator", "(", "model_dir", ",", "nbuckets", ",", "hidden_units", ")", ":", "# Input columns", "(", "dayofweek", ",", "hourofday", ",", "latdiff", ",", "londiff", ",", "euclidean", ",", "plon", ",", "plat", ",", "dlon", ",", "dlat", ",", "pcount", ")", "=", "INPUT_COLUMNS", "# Bucketize the lats & lons", "latbuckets", "=", "np", ".", "linspace", "(", "0", ",", "1.0", ",", "nbuckets", ")", ".", "tolist", "(", ")", "lonbuckets", "=", "np", ".", "linspace", "(", "0", ",", "1.0", ",", "nbuckets", ")", ".", "tolist", "(", ")", "b_plat", "=", "tf", ".", "feature_column", ".", "bucketized_column", "(", "plat", ",", "latbuckets", ")", "b_dlat", "=", "tf", ".", "feature_column", ".", "bucketized_column", "(", "dlat", ",", "latbuckets", ")", "b_plon", "=", "tf", ".", "feature_column", ".", "bucketized_column", "(", "plon", ",", "lonbuckets", ")", "b_dlon", "=", "tf", ".", "feature_column", ".", "bucketized_column", "(", "dlon", ",", "lonbuckets", ")", "# Feature cross", "ploc", "=", "tf", ".", "feature_column", ".", "crossed_column", "(", "[", "b_plat", ",", "b_plon", "]", ",", "nbuckets", "*", "nbuckets", ")", "dloc", "=", "tf", ".", "feature_column", ".", "crossed_column", "(", "[", "b_dlat", ",", "b_dlon", "]", ",", "nbuckets", "*", "nbuckets", ")", "pd_pair", "=", "tf", ".", "feature_column", ".", "crossed_column", "(", "[", "ploc", ",", "dloc", "]", ",", "nbuckets", "**", "4", ")", "day_hr", "=", "tf", ".", "feature_column", ".", "crossed_column", "(", "[", "dayofweek", ",", "hourofday", "]", ",", "24", "*", "7", ")", "# Wide columns and deep columns.", "wide_columns", "=", "[", "# Feature crosses", "dloc", ",", "ploc", ",", "pd_pair", ",", "day_hr", ",", "# Sparse columns", "dayofweek", ",", "hourofday", ",", "# Anything with a linear relationship", "pcount", "]", "deep_columns", "=", "[", "# Embedding_column to \"group\" together ...", "tf", ".", "feature_column", ".", "embedding_column", "(", "pd_pair", ",", "10", ")", ",", "tf", ".", "feature_column", ".", "embedding_column", "(", "day_hr", ",", "10", ")", ",", "# Numeric columns", "plat", ",", "plon", ",", "dlat", ",", "dlon", ",", "latdiff", ",", "londiff", ",", "euclidean", "]", "return", "tf", ".", "estimator", ".", "DNNLinearCombinedRegressor", "(", "model_dir", "=", "model_dir", ",", "linear_feature_columns", "=", "wide_columns", ",", "dnn_feature_columns", "=", "deep_columns", ",", "dnn_hidden_units", "=", "hidden_units", ")" ]
[ 53, 0 ]
[ 104, 40 ]
python
en
['en', 'error', 'th']
False
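A hedged call sketch for the estimator builder above; the model directory, bucket count, and layer sizes are illustrative values, and INPUT_COLUMNS is assumed to be defined as in the source module:

estimator = build_estimator(
    model_dir="taxi_trained",      # hypothetical output directory
    nbuckets=16,                   # lat/lon bucketization granularity
    hidden_units=[64, 32, 8],      # DNN tower layer sizes
)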
_mixed_join
(iterable, sentinel)
concatenate any string type in an intelligent way.
concatenate any string type in an intelligent way.
def _mixed_join(iterable, sentinel): """concatenate any string type in an intelligent way.""" iterator = iter(iterable) first_item = next(iterator, sentinel) if isinstance(first_item, bytes): return first_item + b"".join(iterator) return first_item + u"".join(iterator)
[ "def", "_mixed_join", "(", "iterable", ",", "sentinel", ")", ":", "iterator", "=", "iter", "(", "iterable", ")", "first_item", "=", "next", "(", "iterator", ",", "sentinel", ")", "if", "isinstance", "(", "first_item", ",", "bytes", ")", ":", "return", "first_item", "+", "b\"\"", ".", "join", "(", "iterator", ")", "return", "first_item", "+", "u\"\"", ".", "join", "(", "iterator", ")" ]
[ 58, 0 ]
[ 64, 42 ]
python
en
['en', 'en', 'en']
True
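A small sketch of what the helper above does with bytes versus text input, assuming _mixed_join is in scope:

assert _mixed_join([b"foo", b"bar"], b"") == b"foobar"   # bytes chunks joined with b""
assert _mixed_join(["foo", "bar"], u"") == u"foobar"     # text chunks joined with u""
assert _mixed_join([], b"fallback") == b"fallback"       # sentinel returned when empty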
IterO._buf_append
(self, string)
Replace string directly without appending to an empty string, avoiding type issues.
Replace string directly without appending to an empty string, avoiding type issues.
def _buf_append(self, string): """Replace string directly without appending to an empty string, avoiding type issues.""" if not self._buf: self._buf = string else: self._buf += string
[ "def", "_buf_append", "(", "self", ",", "string", ")", ":", "if", "not", "self", ".", "_buf", ":", "self", ".", "_buf", "=", "string", "else", ":", "self", ".", "_buf", "+=", "string" ]
[ 239, 4 ]
[ 245, 31 ]
python
en
['en', 'en', 'en']
True
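The replace-then-append behaviour above can be illustrated outside the IterO class; this is a standalone sketch, not the class itself:

buf = ""  # stand-in for self._buf, which starts out falsy
for chunk in [b"abc", b"def"]:
    buf = chunk if not buf else buf + chunk
# buf == b"abcdef": the first chunk replaces the empty buffer, so the buffer
# takes on the chunk's own type (bytes here) instead of mixing str and bytes.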
calculate_threshold_confusion_matrix
(labels_mask, preds, num_thresh)
Calculates confusion matrix based on thresholds. Given labels mask, predictions, and number of thresholds, returns count for cell in confusion matrix. Args: labels_mask: tf.bool vector tensor that is true when the label was normal or anomalous. preds: Predicted anomaly labels. num_thresh: Number of anomaly thresholds to try in parallel grid search. Returns: Count for cell in confusion matrix.
Calculates confusion matrix based on thresholds.
def calculate_threshold_confusion_matrix(labels_mask, preds, num_thresh): """Calculates confusion matrix based on thresholds. Given labels mask, predictions, and number of thresholds, returns count for cell in confusion matrix. Args: labels_mask: tf.bool vector tensor when label was normal or anomalous. preds: Predicted anomaly labels. num_thresh: Number of anomaly thresholds to try in parallel grid search. Returns: Count for cell in confusion matrix. """ count = tf.reduce_sum( input_tensor=tf.cast( x=tf.map_fn( fn=lambda threshold: tf.logical_and( x=labels_mask, y=preds[threshold, :]), elems=tf.range(start=0, limit=num_thresh, dtype=tf.int64), dtype=tf.bool), dtype=tf.int64), axis=1) return count
[ "def", "calculate_threshold_confusion_matrix", "(", "labels_mask", ",", "preds", ",", "num_thresh", ")", ":", "count", "=", "tf", ".", "reduce_sum", "(", "input_tensor", "=", "tf", ".", "cast", "(", "x", "=", "tf", ".", "map_fn", "(", "fn", "=", "lambda", "threshold", ":", "tf", ".", "logical_and", "(", "x", "=", "labels_mask", ",", "y", "=", "preds", "[", "threshold", ",", ":", "]", ")", ",", "elems", "=", "tf", ".", "range", "(", "start", "=", "0", ",", "limit", "=", "num_thresh", ",", "dtype", "=", "tf", ".", "int64", ")", ",", "dtype", "=", "tf", ".", "bool", ")", ",", "dtype", "=", "tf", ".", "int64", ")", ",", "axis", "=", "1", ")", "return", "count" ]
[ 3, 0 ]
[ 29, 14 ]
python
en
['en', 'en', 'en']
True
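A NumPy stand-in (not the TF1 code in the record) that illustrates what the count means: for each threshold row in preds, how many positions are both selected by the mask and predicted positive:

import numpy as np

labels_mask = np.array([True, False, True, True])            # e.g. "label was anomalous"
preds = np.array([[True, True, False, True],                  # predictions at threshold 0
                  [False, False, False, True]])               # predictions at threshold 1
count = np.logical_and(labels_mask, preds).sum(axis=1)        # -> array([2, 1])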
update_anom_thresh_vars
( labels_norm_mask, labels_anom_mask, num_thresh, anom_thresh, mahalanobis_dist, tp_at_thresh_var, fn_at_thresh_var, fp_at_thresh_var, tn_at_thresh_var, mode)
Updates anomaly threshold variables. Given masks for when labels are normal and anomalous, the number of anomaly thresholds and the thresholds themselves, the mahalanobis distance, variables for the confusion matrix, and the current Estimator mode, returns the updated variables for the confusion matrix. Args: labels_norm_mask: tf.bool vector tensor that is true when label was normal. labels_anom_mask: tf.bool vector tensor that is true when label was anomalous. num_thresh: Number of anomaly thresholds to try in parallel grid search. anom_thresh: tf.float64 vector tensor of grid of anomaly thresholds to try. mahalanobis_dist: tf.float64 matrix tensor of mahalanobis distances across batch. tp_at_thresh_var: tf.int64 variable tracking number of true positives at each possible anomaly threshold. fn_at_thresh_var: tf.int64 variable tracking number of false negatives at each possible anomaly threshold. fp_at_thresh_var: tf.int64 variable tracking number of false positives at each possible anomaly threshold. tn_at_thresh_var: tf.int64 variable tracking number of true negatives at each possible anomaly threshold. mode: Estimator ModeKeys, can take values of TRAIN and EVAL. Returns: Updated confusion matrix variables.
Updates anomaly threshold variables.
def update_anom_thresh_vars( labels_norm_mask, labels_anom_mask, num_thresh, anom_thresh, mahalanobis_dist, tp_at_thresh_var, fn_at_thresh_var, fp_at_thresh_var, tn_at_thresh_var, mode): """Updates anomaly threshold variables. Given masks for when labels are normal and anomalous, the number of anomaly thresholds and the thresholds themselves, the mahalanobis distance, variables for the confusion matrix, and the current Estimator mode, returns the updated variables for the confusion matrix. Args: labels_norm_mask: tf.bool vector tensor that is true when label was normal. labels_anom_mask: tf.bool vector tensor that is true when label was anomalous. num_thresh: Number of anomaly thresholds to try in parallel grid search. anom_thresh: tf.float64 vector tensor of grid of anomaly thresholds to try. mahalanobis_dist: tf.float64 matrix tensor of mahalanobis distances across batch. tp_at_thresh_var: tf.int64 variable tracking number of true positives at each possible anomaly threshold. fn_at_thresh_var: tf.int64 variable tracking number of false negatives at each possible anomaly threshold. fp_at_thresh_var: tf.int64 variable tracking number of false positives at each possible anomaly threshold. tn_at_thresh_var: tf.int64 variable tracking number of true negatives at each possible anomaly threshold. mode: Estimator ModeKeys, can take values of TRAIN and EVAL. Returns: Updated confusion matrix variables. """ if mode == tf.estimator.ModeKeys.TRAIN: # time_shape = (num_time_anom_thresh, cur_batch_size, seq_len) # feat_shape = (num_feat_anom_thresh, cur_batch_size, num_feat) mahalanobis_dist_over_thresh = tf.map_fn( fn=lambda anom_threshold: mahalanobis_dist > anom_threshold, elems=anom_thresh, dtype=tf.bool) else: # time_shape = (cur_batch_size, seq_len) # feat_shape = (cur_batch_size, num_feat) mahalanobis_dist_over_thresh = mahalanobis_dist > anom_thresh # time_shape = (num_time_anom_thresh, cur_batch_size) # feat_shape = (num_feat_anom_thresh, cur_batch_size) mahalanobis_dist_any_over_thresh = tf.reduce_any( input_tensor=mahalanobis_dist_over_thresh, axis=-1) if mode == tf.estimator.ModeKeys.EVAL: # time_shape = (1, cur_batch_size) # feat_shape = (1, cur_batch_size) mahalanobis_dist_any_over_thresh = tf.expand_dims( input=mahalanobis_dist_any_over_thresh, axis=0) # time_shape = (num_time_anom_thresh, cur_batch_size) # feat_shape = (num_feat_anom_thresh, cur_batch_size) predicted_normals = tf.equal( x=mahalanobis_dist_any_over_thresh, y=False) # time_shape = (num_time_anom_thresh, cur_batch_size) # feat_shape = (num_feat_anom_thresh, cur_batch_size) predicted_anomalies = tf.equal( x=mahalanobis_dist_any_over_thresh, y=True) # Calculate confusion matrix of current batch # time_shape = (num_time_anom_thresh,) # feat_shape = (num_feat_anom_thresh,) tp = calculate_threshold_confusion_matrix( labels_anom_mask, predicted_anomalies, num_thresh) fn = calculate_threshold_confusion_matrix( labels_anom_mask, predicted_normals, num_thresh) fp = calculate_threshold_confusion_matrix( labels_norm_mask, predicted_anomalies, num_thresh) tn = calculate_threshold_confusion_matrix( labels_norm_mask, predicted_normals, num_thresh) if mode == tf.estimator.ModeKeys.EVAL: # shape = () tp = tf.squeeze(input=tp) fn = tf.squeeze(input=fn) fp = tf.squeeze(input=fp) tn = tf.squeeze(input=tn) with tf.control_dependencies( control_inputs=[tf.assign_add(ref=tp_at_thresh_var, value=tp), tf.assign_add(ref=fn_at_thresh_var, value=fn), tf.assign_add(ref=fp_at_thresh_var, value=fp), tf.assign_add(ref=tn_at_thresh_var, value=tn)]): 
return (tf.identity(input=tp_at_thresh_var), tf.identity(input=fn_at_thresh_var), tf.identity(input=fp_at_thresh_var), tf.identity(input=tn_at_thresh_var))
[ "def", "update_anom_thresh_vars", "(", "labels_norm_mask", ",", "labels_anom_mask", ",", "num_thresh", ",", "anom_thresh", ",", "mahalanobis_dist", ",", "tp_at_thresh_var", ",", "fn_at_thresh_var", ",", "fp_at_thresh_var", ",", "tn_at_thresh_var", ",", "mode", ")", ":", "if", "mode", "==", "tf", ".", "estimator", ".", "ModeKeys", ".", "TRAIN", ":", "# time_shape = (num_time_anom_thresh, cur_batch_size, seq_len)", "# feat_shape = (num_feat_anom_thresh, cur_batch_size, num_feat)", "mahalanobis_dist_over_thresh", "=", "tf", ".", "map_fn", "(", "fn", "=", "lambda", "anom_threshold", ":", "mahalanobis_dist", ">", "anom_threshold", ",", "elems", "=", "anom_thresh", ",", "dtype", "=", "tf", ".", "bool", ")", "else", ":", "# time_shape = (cur_batch_size, seq_len)", "# feat_shape = (cur_batch_size, num_feat)", "mahalanobis_dist_over_thresh", "=", "mahalanobis_dist", ">", "anom_thresh", "# time_shape = (num_time_anom_thresh, cur_batch_size)", "# feat_shape = (num_feat_anom_thresh, cur_batch_size)", "mahalanobis_dist_any_over_thresh", "=", "tf", ".", "reduce_any", "(", "input_tensor", "=", "mahalanobis_dist_over_thresh", ",", "axis", "=", "-", "1", ")", "if", "mode", "==", "tf", ".", "estimator", ".", "ModeKeys", ".", "EVAL", ":", "# time_shape = (1, cur_batch_size)", "# feat_shape = (1, cur_batch_size)", "mahalanobis_dist_any_over_thresh", "=", "tf", ".", "expand_dims", "(", "input", "=", "mahalanobis_dist_any_over_thresh", ",", "axis", "=", "0", ")", "# time_shape = (num_time_anom_thresh, cur_batch_size)", "# feat_shape = (num_feat_anom_thresh, cur_batch_size)", "predicted_normals", "=", "tf", ".", "equal", "(", "x", "=", "mahalanobis_dist_any_over_thresh", ",", "y", "=", "False", ")", "# time_shape = (num_time_anom_thresh, cur_batch_size)", "# feat_shape = (num_feat_anom_thresh, cur_batch_size)", "predicted_anomalies", "=", "tf", ".", "equal", "(", "x", "=", "mahalanobis_dist_any_over_thresh", ",", "y", "=", "True", ")", "# Calculate confusion matrix of current batch", "# time_shape = (num_time_anom_thresh,)", "# feat_shape = (num_feat_anom_thresh,)", "tp", "=", "calculate_threshold_confusion_matrix", "(", "labels_anom_mask", ",", "predicted_anomalies", ",", "num_thresh", ")", "fn", "=", "calculate_threshold_confusion_matrix", "(", "labels_anom_mask", ",", "predicted_normals", ",", "num_thresh", ")", "fp", "=", "calculate_threshold_confusion_matrix", "(", "labels_norm_mask", ",", "predicted_anomalies", ",", "num_thresh", ")", "tn", "=", "calculate_threshold_confusion_matrix", "(", "labels_norm_mask", ",", "predicted_normals", ",", "num_thresh", ")", "if", "mode", "==", "tf", ".", "estimator", ".", "ModeKeys", ".", "EVAL", ":", "# shape = ()", "tp", "=", "tf", ".", "squeeze", "(", "input", "=", "tp", ")", "fn", "=", "tf", ".", "squeeze", "(", "input", "=", "fn", ")", "fp", "=", "tf", ".", "squeeze", "(", "input", "=", "fp", ")", "tn", "=", "tf", ".", "squeeze", "(", "input", "=", "tn", ")", "with", "tf", ".", "control_dependencies", "(", "control_inputs", "=", "[", "tf", ".", "assign_add", "(", "ref", "=", "tp_at_thresh_var", ",", "value", "=", "tp", ")", ",", "tf", ".", "assign_add", "(", "ref", "=", "fn_at_thresh_var", ",", "value", "=", "fn", ")", ",", "tf", ".", "assign_add", "(", "ref", "=", "fp_at_thresh_var", ",", "value", "=", "fp", ")", ",", "tf", ".", "assign_add", "(", "ref", "=", "tn_at_thresh_var", ",", "value", "=", "tn", ")", "]", ")", ":", "return", "(", "tf", ".", "identity", "(", "input", "=", "tp_at_thresh_var", ")", ",", "tf", ".", "identity", "(", "input", "=", 
"fn_at_thresh_var", ")", ",", "tf", ".", "identity", "(", "input", "=", "fp_at_thresh_var", ")", ",", "tf", ".", "identity", "(", "input", "=", "tn_at_thresh_var", ")", ")" ]
[ 32, 0 ]
[ 135, 48 ]
python
en
['en', 'en', 'en']
True
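A NumPy sketch of the TRAIN-mode comparison at the top of the function above: one boolean grid per candidate threshold, reduced with "any" over the last axis:

import numpy as np

mahalanobis_dist = np.array([[1.0, 3.0],
                             [2.0, 5.0]])                      # (cur_batch_size, seq_len)
anom_thresh = np.array([1.5, 4.0])                             # candidate thresholds
over_thresh = mahalanobis_dist[None, :, :] > anom_thresh[:, None, None]
any_over = over_thresh.any(axis=-1)                            # (num_thresh, cur_batch_size)
# any_over == [[True, True], [False, True]]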
calculate_composite_classification_metrics
(tp, fn, fp, tn, f_score_beta)
Calculates composite classification metrics from the confusion matrix. Given variables for the confusion matrix and the value of beta for f-beta score, returns accuracy, precision, recall, and f-beta score composite metrics. Args: tp: tf.int64 variable tracking number of true positives at each possible anomaly threshold. fn: tf.int64 variable tracking number of false negatives at each possible anomaly threshold. fp: tf.int64 variable tracking number of false positives at each possible anomaly threshold. tn: tf.int64 variable tracking number of true negatives at each possible anomaly threshold. f_score_beta: Value of beta for f-beta score. Returns: Accuracy, precision, recall, and f-beta score composite metric tensors.
Calculates composite classification metrics from the confusion matrix.
def calculate_composite_classification_metrics(tp, fn, fp, tn, f_score_beta): """Calculates compositive classification metrics from the confusion matrix. Given variables for the confusion matrix and the value of beta for f-beta score, returns accuracy, precision, recall, and f-beta score composite metrics. Args: tp: tf.int64 variable tracking number of true positives at each possible anomaly threshold. fn: tf.int64 variable tracking number of false negatives at each possible anomaly threshold. fp: tf.int64 variable tracking number of false positives at each possible anomaly threshold. tn: tf.int64 variable tracking number of true negatives at each possible anomaly threshold. f_score_beta: Value of beta for f-beta score. Returns: Accuracy, precision, recall, and f-beta score composite metric tensors. """ # time_shape = (num_time_anom_thresh,) # feat_shape = (num_feat_anom_thresh,) acc = tf.cast(x=tp + tn, dtype=tf.float64) \ / tf.cast(x=tp + fn + fp + tn, dtype=tf.float64) tp_float64 = tf.cast(x=tp, dtype=tf.float64) pre = tp_float64 / tf.cast(x=tp + fp, dtype=tf.float64) rec = tp_float64 / tf.cast(x=tp + fn, dtype=tf.float64) f_beta_numerator = (1.0 + f_score_beta ** 2) * (pre * rec) f_beta_score = f_beta_numerator / (f_score_beta ** 2 * pre + rec) return acc, pre, rec, f_beta_score
[ "def", "calculate_composite_classification_metrics", "(", "tp", ",", "fn", ",", "fp", ",", "tn", ",", "f_score_beta", ")", ":", "# time_shape = (num_time_anom_thresh,)", "# feat_shape = (num_feat_anom_thresh,)", "acc", "=", "tf", ".", "cast", "(", "x", "=", "tp", "+", "tn", ",", "dtype", "=", "tf", ".", "float64", ")", "/", "tf", ".", "cast", "(", "x", "=", "tp", "+", "fn", "+", "fp", "+", "tn", ",", "dtype", "=", "tf", ".", "float64", ")", "tp_float64", "=", "tf", ".", "cast", "(", "x", "=", "tp", ",", "dtype", "=", "tf", ".", "float64", ")", "pre", "=", "tp_float64", "/", "tf", ".", "cast", "(", "x", "=", "tp", "+", "fp", ",", "dtype", "=", "tf", ".", "float64", ")", "rec", "=", "tp_float64", "/", "tf", ".", "cast", "(", "x", "=", "tp", "+", "fn", ",", "dtype", "=", "tf", ".", "float64", ")", "f_beta_numerator", "=", "(", "1.0", "+", "f_score_beta", "**", "2", ")", "*", "(", "pre", "*", "rec", ")", "f_beta_score", "=", "f_beta_numerator", "/", "(", "f_score_beta", "**", "2", "*", "pre", "+", "rec", ")", "return", "acc", ",", "pre", ",", "rec", ",", "f_beta_score" ]
[ 138, 0 ]
[ 169, 36 ]
python
en
['en', 'en', 'en']
True
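A plain-arithmetic sketch of the composite metrics computed above; the counts and beta value are made up for illustration:

tp, fn, fp, tn, beta = 8.0, 2.0, 4.0, 6.0, 2.0
acc = (tp + tn) / (tp + fn + fp + tn)                               # 0.70
pre = tp / (tp + fp)                                                # ~0.667
rec = tp / (tp + fn)                                                # 0.80
f_beta = (1.0 + beta ** 2) * (pre * rec) / (beta ** 2 * pre + rec)  # ~0.769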
find_best_anom_thresh
( anom_threshs, f_beta_score, anom_thresh_var)
Find best anomaly threshold to use for anomaly classification. Given vector of anomaly thresholds and the value of beta for f-beta score, returns updated variable that stores the best anomaly threshold value. Args: anom_threshs: tf.float64 vector tensor of grid of anomaly thresholds to try. f_beta_score: tf.float64 vector tensor of f-beta scores for each anomaly threshold. anom_thresh_var: tf.float64 variable that stores anomaly threshold value. Returns: Updated variable that stores the anomaly threshold value.
Find best anomaly threshold to use for anomaly classification.
def find_best_anom_thresh( anom_threshs, f_beta_score, anom_thresh_var): """Find best anomaly threshold to use for anomaly classification. Given vector of anomaly thresholds and the value of beta for f-beta score, returns updated variable that stores the best anomaly threshold value. Args: anom_threshs: tf.float64 vector tensor of grid of anomaly thresholds to try. f_beta_score: tf.float64 vector tensor of f-beta scores for each anomaly threshold. anom_thresh_var: tf.float64 variable that stores anomaly threshold value. Returns: Updated variable that stores the anomaly threshold value. """ # shape = () best_anom_thresh = tf.gather( params=anom_threshs, indices=tf.argmax(input=f_beta_score, axis=0)) with tf.control_dependencies( control_inputs=[tf.assign( ref=anom_thresh_var, value=best_anom_thresh)]): return tf.identity(input=anom_thresh_var)
[ "def", "find_best_anom_thresh", "(", "anom_threshs", ",", "f_beta_score", ",", "anom_thresh_var", ")", ":", "# shape = ()", "best_anom_thresh", "=", "tf", ".", "gather", "(", "params", "=", "anom_threshs", ",", "indices", "=", "tf", ".", "argmax", "(", "input", "=", "f_beta_score", ",", "axis", "=", "0", ")", ")", "with", "tf", ".", "control_dependencies", "(", "control_inputs", "=", "[", "tf", ".", "assign", "(", "ref", "=", "anom_thresh_var", ",", "value", "=", "best_anom_thresh", ")", "]", ")", ":", "return", "tf", ".", "identity", "(", "input", "=", "anom_thresh_var", ")" ]
[ 172, 0 ]
[ 196, 45 ]
python
en
['en', 'en', 'en']
True
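A NumPy sketch of the selection step above: gather the threshold at the argmax of the f-beta scores (illustrative values):

import numpy as np

anom_threshs = np.linspace(1.0, 10.0, 10)
f_beta_score = np.array([0.2, 0.5, 0.7, 0.9, 0.85, 0.6, 0.4, 0.3, 0.2, 0.1])
best_anom_thresh = anom_threshs[np.argmax(f_beta_score)]       # -> 4.0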
optimize_anomaly_theshold
( var_name, labels_norm_mask, labels_anom_mask, mahalanobis_dist, tp_thresh_var, fn_thresh_var, fp_thresh_var, tn_thresh_var, params, mode, anom_thresh_var)
Optimizes anomaly threshold for anomaly classification. Given variable name, label masks, mahalanobis distance, variables for confusion matrix, and dictionary of parameters, returns accuracy, precision, recall, and f-beta score composite metrics. Args: var_name: String denoting which set of variables to use. Values are "time" and "feat". labels_norm_mask: tf.bool vector mask of labels for normals. labels_anom_mask: tf.bool vector mask of labels for anomalies. mahalanobis_dist: Mahalanobis distance of reconstruction error. tp_thresh_var: tf.int64 variable to track number of true positives wrt thresholds. fn_thresh_var: tf.int64 variable to track number of false negatives wrt thresholds. fp_thresh_var: tf.int64 variable to track number of false positives wrt thresholds. tn_thresh_var: tf.int64 variable to track number of true negatives wrt thresholds. params: Dictionary of parameters. mode: Estimator ModeKeys, can take values of TRAIN and EVAL. anom_thresh_var: tf.float64 variable that stores anomaly threshold value. Returns: Updated variable that stores the anomaly threshold value
Optimizes anomaly threshold for anomaly classification.
def optimize_anomaly_theshold( var_name, labels_norm_mask, labels_anom_mask, mahalanobis_dist, tp_thresh_var, fn_thresh_var, fp_thresh_var, tn_thresh_var, params, mode, anom_thresh_var): """Optimizes anomaly threshold for anomaly classification. Given variable name, label masks, mahalanobis distance, variables for confusion matrix, and dictionary of parameters, returns accuracy, precision, recall, and f-beta score composite metrics. Args: var_name: String denoting which set of variables to use. Values are "time" and "feat". labels_norm_mask: tf.bool vector mask of labels for normals. labels_anom_mask: tf.bool vector mask of labels for anomalies. mahalanobis_dist: Mahalanobis distance of reconstruction error. tp_thresh_var: tf.int64 variable to track number of true positives wrt thresholds. fn_thresh_var: tf.int64 variable to track number of false negatives wrt thresholds. fp_thresh_var: tf.int64 variable to track number of false positives wrt thresholds. tn_thresh_var: tf.int64 variable to track number of true negatives wrt thresholds. params: Dictionary of parameters. mode: Estimator ModeKeys, can take values of TRAIN and EVAL. anom_thresh_var: tf.float64 variable that stores anomaly threshold value. Returns: Updated variable that stores the anomaly threshold value """ # shape = (num_anom_thresh,) anom_threshs = tf.linspace( start=tf.constant( value=params["min_{}_anom_thresh".format(var_name)], dtype=tf.float64), stop=tf.constant( value=params["max_{}_anom_thresh".format(var_name)], dtype=tf.float64), num=params["num_{}_anom_thresh".format(var_name)]) with tf.variable_scope( name_or_scope="mahalanobis_dist_thresh_vars", reuse=tf.AUTO_REUSE): (tp_update_op, fn_update_op, fp_update_op, tn_update_op) = \ update_anom_thresh_vars( labels_norm_mask, labels_anom_mask, params["num_{}_anom_thresh".format(var_name)], anom_threshs, mahalanobis_dist, tp_thresh_var, fn_thresh_var, fp_thresh_var, tn_thresh_var, mode) with tf.control_dependencies( control_inputs=[ tp_update_op, fn_update_op, fp_update_op, tn_update_op]): _, pre, rec, f_beta = \ calculate_composite_classification_metrics( tp_thresh_var, fn_thresh_var, fp_thresh_var, tn_thresh_var, params["f_score_beta"]) with tf.control_dependencies(control_inputs=[pre, rec]): with tf.control_dependencies(control_inputs=[f_beta]): best_anom_thresh = find_best_anom_thresh( anom_threshs, f_beta, anom_thresh_var) with tf.control_dependencies(control_inputs=[best_anom_thresh]): return tf.identity(input=anom_thresh_var)
[ "def", "optimize_anomaly_theshold", "(", "var_name", ",", "labels_norm_mask", ",", "labels_anom_mask", ",", "mahalanobis_dist", ",", "tp_thresh_var", ",", "fn_thresh_var", ",", "fp_thresh_var", ",", "tn_thresh_var", ",", "params", ",", "mode", ",", "anom_thresh_var", ")", ":", "# shape = (num_anom_thresh,)", "anom_threshs", "=", "tf", ".", "linspace", "(", "start", "=", "tf", ".", "constant", "(", "value", "=", "params", "[", "\"min_{}_anom_thresh\"", ".", "format", "(", "var_name", ")", "]", ",", "dtype", "=", "tf", ".", "float64", ")", ",", "stop", "=", "tf", ".", "constant", "(", "value", "=", "params", "[", "\"max_{}_anom_thresh\"", ".", "format", "(", "var_name", ")", "]", ",", "dtype", "=", "tf", ".", "float64", ")", ",", "num", "=", "params", "[", "\"num_{}_anom_thresh\"", ".", "format", "(", "var_name", ")", "]", ")", "with", "tf", ".", "variable_scope", "(", "name_or_scope", "=", "\"mahalanobis_dist_thresh_vars\"", ",", "reuse", "=", "tf", ".", "AUTO_REUSE", ")", ":", "(", "tp_update_op", ",", "fn_update_op", ",", "fp_update_op", ",", "tn_update_op", ")", "=", "update_anom_thresh_vars", "(", "labels_norm_mask", ",", "labels_anom_mask", ",", "params", "[", "\"num_{}_anom_thresh\"", ".", "format", "(", "var_name", ")", "]", ",", "anom_threshs", ",", "mahalanobis_dist", ",", "tp_thresh_var", ",", "fn_thresh_var", ",", "fp_thresh_var", ",", "tn_thresh_var", ",", "mode", ")", "with", "tf", ".", "control_dependencies", "(", "control_inputs", "=", "[", "tp_update_op", ",", "fn_update_op", ",", "fp_update_op", ",", "tn_update_op", "]", ")", ":", "_", ",", "pre", ",", "rec", ",", "f_beta", "=", "calculate_composite_classification_metrics", "(", "tp_thresh_var", ",", "fn_thresh_var", ",", "fp_thresh_var", ",", "tn_thresh_var", ",", "params", "[", "\"f_score_beta\"", "]", ")", "with", "tf", ".", "control_dependencies", "(", "control_inputs", "=", "[", "pre", ",", "rec", "]", ")", ":", "with", "tf", ".", "control_dependencies", "(", "control_inputs", "=", "[", "f_beta", "]", ")", ":", "best_anom_thresh", "=", "find_best_anom_thresh", "(", "anom_threshs", ",", "f_beta", ",", "anom_thresh_var", ")", "with", "tf", ".", "control_dependencies", "(", "control_inputs", "=", "[", "best_anom_thresh", "]", ")", ":", "return", "tf", ".", "identity", "(", "input", "=", "anom_thresh_var", ")" ]
[ 199, 0 ]
[ 288, 51 ]
python
en
['en', 'en', 'en']
True
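The grid of candidate thresholds at the top of the function above comes from a min/max/count triple in params; a NumPy stand-in for that tf.linspace call, with illustrative parameter values:

import numpy as np

params = {"min_time_anom_thresh": 1.0,
          "max_time_anom_thresh": 100.0,
          "num_time_anom_thresh": 50}
anom_threshs = np.linspace(params["min_time_anom_thresh"],
                           params["max_time_anom_thresh"],
                           params["num_time_anom_thresh"])     # 50 evenly spaced candidates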
set_anom_thresh
(user_passed_anom_thresh, anom_thresh_var)
Set anomaly threshold to use for anomaly classification from user input. Given user passed anomaly threshold returns updated variable that stores the anomaly threshold value. Args: user_passed_anom_thresh: User passed anomaly threshold that overrides the threshold optimization. anom_thresh_var: tf.float64 variable that stores anomaly threshold value. Returns: Updated variable that stores the anomaly threshold value.
Set anomaly threshold to use for anomaly classification from user input.
def set_anom_thresh(user_passed_anom_thresh, anom_thresh_var): """Set anomaly threshold to use for anomaly classification from user input. Given user passed anomaly threshold returns updated variable that stores the anomaly threshold value. Args: user_passed_anom_thresh: User passed anomaly threshold that overrides the threshold optimization. anom_thresh_var: tf.float64 variable that stores anomaly threshold value. Returns: Updated variable that stores the anomaly threshold value. """ with tf.control_dependencies( control_inputs=[tf.assign( ref=anom_thresh_var, value=user_passed_anom_thresh)]): return tf.identity(input=anom_thresh_var)
[ "def", "set_anom_thresh", "(", "user_passed_anom_thresh", ",", "anom_thresh_var", ")", ":", "with", "tf", ".", "control_dependencies", "(", "control_inputs", "=", "[", "tf", ".", "assign", "(", "ref", "=", "anom_thresh_var", ",", "value", "=", "user_passed_anom_thresh", ")", "]", ")", ":", "return", "tf", ".", "identity", "(", "input", "=", "anom_thresh_var", ")" ]
[ 291, 0 ]
[ 309, 45 ]
python
en
['en', 'en', 'en']
True
tune_anomaly_thresholds_supervised_training
( labels_norm_mask, labels_anom_mask, mahalanobis_dist_time, tp_thresh_time_var, fn_thresh_time_var, fp_thresh_time_var, tn_thresh_time_var, time_anom_thresh_var, mahalanobis_dist_feat, tp_thresh_feat_var, fn_thresh_feat_var, fp_thresh_feat_var, tn_thresh_feat_var, feat_anom_thresh_var, params, mode, dummy_var)
Tunes anomaly thresholds during supervised training mode. Given label masks, mahalanobis distances, confusion matrices, and anomaly thresholds, returns loss and train_op. Args: labels_norm_mask: tf.bool vector mask of labels for normals. labels_anom_mask: tf.bool vector mask of labels for anomalies. mahalanobis_dist_time: Mahalanobis distance, time major. tp_thresh_time_var: tf.int64 variable to track number of true positives wrt thresholds for time major case. fn_thresh_time_var: tf.int64 variable to track number of false negatives wrt thresholds for time major case. fp_thresh_time_var: tf.int64 variable to track number of false positives wrt thresholds for time major case. tn_thresh_time_var: tf.int64 variable to track number of true negatives wrt thresholds for time major case. time_anom_thresh_var: tf.float64 variable to hold the set time anomaly threshold. mahalanobis_dist_feat: Mahalanobis distance, features major. tp_thresh_feat_var: tf.int64 variable to track number of true positives wrt thresholds for feat major case. fn_thresh_feat_var: tf.int64 variable to track number of false negatives wrt thresholds for feat major case. fp_thresh_feat_var: tf.int64 variable to track number of false positives wrt thresholds for feat major case. tn_thresh_feat_var: tf.int64 variable to track number of true negatives wrt thresholds for feat major case. feat_anom_thresh_var: tf.float64 variable to hold the set feat anomaly threshold. params: Dictionary of parameters. mode: Estimator ModeKeys. Can take value of only TRAIN. dummy_var: Dummy variable used to allow training mode to happen since it requires a gradient to tie back to the graph dependency. Returns: loss: The scalar loss to tie our updates back to Estimator graph. train_op: The train operation to tie our updates back to Estimator graph.
Tunes anomaly thresholds during supervised training mode.
def tune_anomaly_thresholds_supervised_training( labels_norm_mask, labels_anom_mask, mahalanobis_dist_time, tp_thresh_time_var, fn_thresh_time_var, fp_thresh_time_var, tn_thresh_time_var, time_anom_thresh_var, mahalanobis_dist_feat, tp_thresh_feat_var, fn_thresh_feat_var, fp_thresh_feat_var, tn_thresh_feat_var, feat_anom_thresh_var, params, mode, dummy_var): """Tunes anomaly thresholds during supervised training mode. Given label masks, mahalanobis distances, confusion matrices, and anomaly thresholds, returns loss and train_op. Args: labels_norm_mask: tf.bool vector mask of labels for normals. labels_anom_mask: tf.bool vector mask of labels for anomalies. mahalanobis_dist_time: Mahalanobis distance, time major. tp_thresh_time_var: tf.int64 variable to track number of true positives wrt thresholds for time major case. fn_thresh_time_var: tf.int64 variable to track number of false negatives wrt thresholds for time major case. fp_thresh_time_var: tf.int64 variable to track number of false positives wrt thresholds for time major case. tn_thresh_time_var: tf.int64 variable to track number of true negatives wrt thresholds for time major case. time_anom_thresh_var: tf.float64 variable to hold the set time anomaly threshold. mahalanobis_dist_feat: Mahalanobis distance, features major. tp_thresh_feat_var: tf.int64 variable to track number of true positives wrt thresholds for feat major case. fn_thresh_feat_var: tf.int64 variable to track number of false negatives wrt thresholds for feat major case. fp_thresh_feat_var: tf.int64 variable to track number of false positives wrt thresholds for feat major case. tn_thresh_feat_var: tf.int64 variable to track number of true negatives wrt thresholds for feat major case. feat_anom_thresh_var: tf.float64 variable to hold the set feat anomaly threshold. params: Dictionary of parameters. mode: Estimator ModeKeys. Can take value of only TRAIN. dummy_var: Dummy variable used to allow training mode to happen since it requires a gradient to tie back to the graph dependency. Returns: loss: The scalar loss to tie our updates back to Estimator graph. train_op: The train operation to tie our updates back to Estimator graph. """ # Time based if params["time_anom_thresh"] is None: best_anom_thresh_time = optimize_anomaly_theshold( "time", labels_norm_mask, labels_anom_mask, mahalanobis_dist_time, tp_thresh_time_var, fn_thresh_time_var, fp_thresh_time_var, tn_thresh_time_var, params, mode, time_anom_thresh_var) else: best_anom_thresh_time = set_anom_thresh( params["time_anom_thresh"], time_anom_thresh_var) # Features based if params["feat_anom_thresh"] is None: best_anom_thresh_feat = optimize_anomaly_theshold( "feat", labels_norm_mask, labels_anom_mask, mahalanobis_dist_feat, tp_thresh_feat_var, fn_thresh_feat_var, fp_thresh_feat_var, tn_thresh_feat_var, params, mode, feat_anom_thresh_var) else: best_anom_thresh_feat = set_anom_thresh( params["feat_anom_thresh"], feat_anom_thresh_var) with tf.control_dependencies( control_inputs=[best_anom_thresh_time, best_anom_thresh_feat]): loss = tf.reduce_sum( input_tensor=tf.zeros( shape=(), dtype=tf.float64) * dummy_var) train_op = tf.contrib.layers.optimize_loss( loss=loss, global_step=tf.train.get_global_step(), learning_rate=params["learning_rate"], optimizer="SGD") return loss, train_op
[ "def", "tune_anomaly_thresholds_supervised_training", "(", "labels_norm_mask", ",", "labels_anom_mask", ",", "mahalanobis_dist_time", ",", "tp_thresh_time_var", ",", "fn_thresh_time_var", ",", "fp_thresh_time_var", ",", "tn_thresh_time_var", ",", "time_anom_thresh_var", ",", "mahalanobis_dist_feat", ",", "tp_thresh_feat_var", ",", "fn_thresh_feat_var", ",", "fp_thresh_feat_var", ",", "tn_thresh_feat_var", ",", "feat_anom_thresh_var", ",", "params", ",", "mode", ",", "dummy_var", ")", ":", "# Time based", "if", "params", "[", "\"time_anom_thresh\"", "]", "is", "None", ":", "best_anom_thresh_time", "=", "optimize_anomaly_theshold", "(", "\"time\"", ",", "labels_norm_mask", ",", "labels_anom_mask", ",", "mahalanobis_dist_time", ",", "tp_thresh_time_var", ",", "fn_thresh_time_var", ",", "fp_thresh_time_var", ",", "tn_thresh_time_var", ",", "params", ",", "mode", ",", "time_anom_thresh_var", ")", "else", ":", "best_anom_thresh_time", "=", "set_anom_thresh", "(", "params", "[", "\"time_anom_thresh\"", "]", ",", "time_anom_thresh_var", ")", "# Features based", "if", "params", "[", "\"feat_anom_thresh\"", "]", "is", "None", ":", "best_anom_thresh_feat", "=", "optimize_anomaly_theshold", "(", "\"feat\"", ",", "labels_norm_mask", ",", "labels_anom_mask", ",", "mahalanobis_dist_feat", ",", "tp_thresh_feat_var", ",", "fn_thresh_feat_var", ",", "fp_thresh_feat_var", ",", "tn_thresh_feat_var", ",", "params", ",", "mode", ",", "feat_anom_thresh_var", ")", "else", ":", "best_anom_thresh_feat", "=", "set_anom_thresh", "(", "params", "[", "\"feat_anom_thresh\"", "]", ",", "feat_anom_thresh_var", ")", "with", "tf", ".", "control_dependencies", "(", "control_inputs", "=", "[", "best_anom_thresh_time", ",", "best_anom_thresh_feat", "]", ")", ":", "loss", "=", "tf", ".", "reduce_sum", "(", "input_tensor", "=", "tf", ".", "zeros", "(", "shape", "=", "(", ")", ",", "dtype", "=", "tf", ".", "float64", ")", "*", "dummy_var", ")", "train_op", "=", "tf", ".", "contrib", ".", "layers", ".", "optimize_loss", "(", "loss", "=", "loss", ",", "global_step", "=", "tf", ".", "train", ".", "get_global_step", "(", ")", ",", "learning_rate", "=", "params", "[", "\"learning_rate\"", "]", ",", "optimizer", "=", "\"SGD\"", ")", "return", "loss", ",", "train_op" ]
[ 312, 0 ]
[ 418, 25 ]
python
en
['en', 'zu', 'en']
True
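The zero loss tied to dummy_var in the function above exists only to give the Estimator's train_op a gradient path; a minimal TF1-style sketch of that trick (assuming graph mode, as in the record):

import tensorflow.compat.v1 as tf

tf.disable_eager_execution()
dummy_var = tf.get_variable(
    name="dummy_var", shape=(), dtype=tf.float64,
    initializer=tf.zeros_initializer())
# Multiplying a zero by the variable keeps the loss at 0.0 while still being
# differentiable with respect to dummy_var, so an optimizer can "train" it.
loss = tf.reduce_sum(tf.zeros(shape=(), dtype=tf.float64) * dummy_var)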
tune_anomaly_thresholds_supervised_eval
( labels_norm_mask, labels_anom_mask, time_anom_thresh_var, mahalanobis_dist_time, tp_thresh_eval_time_var, fn_thresh_eval_time_var, fp_thresh_eval_time_var, tn_thresh_eval_time_var, feat_anom_thresh_var, mahalanobis_dist_feat, tp_thresh_eval_feat_var, fn_thresh_eval_feat_var, fp_thresh_eval_feat_var, tn_thresh_eval_feat_var, params, mode)
Checks tuned anomaly thresholds during supervised evaluation mode. Given label masks, mahalanobis distances, confusion matrices, and anomaly thresholds, returns loss and eval_metric_ops. Args: labels_norm_mask: tf.bool vector mask of labels for normals. labels_anom_mask: tf.bool vector mask of labels for anomalies. time_anom_thresh_var: tf.float64 scalar time anomaly threshold value. mahalanobis_dist_time: Mahalanobis distance, time major. tp_thresh_eval_time_var: tf.int64 variable to track number of true positives wrt thresholds for time major case for evaluation. fn_thresh_eval_time_var: tf.int64 variable to track number of false negatives wrt thresholds for time major case for evaluation. fp_thresh_eval_time_var: tf.int64 variable to track number of false positives wrt thresholds for time major case for evaluation. tn_thresh_eval_time_var: tf.int64 variable to track number of true negatives wrt thresholds for time major case for evaluation. feat_anom_thresh_var: tf.float64 scalar feature anomaly threshold value. mahalanobis_dist_feat: Mahalanobis distance, features major. tp_thresh_eval_feat_var: tf.int64 variable to track number of true positives wrt thresholds for feat major case for evaluation. fn_thresh_eval_feat_var: tf.int64 variable to track number of false negatives wrt thresholds for feat major case for evaluation. fp_thresh_eval_feat_var: tf.int64 variable to track number of false positives wrt thresholds for feat major case for evaluation. tn_thresh_eval_feat_var: tf.int64 variable to track number of true negatives wrt thresholds for feat major case for evaluation. params: Dictionary of parameters. mode: Estimator ModeKeys. Can take value of only EVAL. Returns: loss: Scalar reconstruction loss. eval_metric_ops: Evaluation metrics of threshold tuning.
Checks tuned anomaly thresholds during supervised evaluation mode.
def tune_anomaly_thresholds_supervised_eval(
    labels_norm_mask, labels_anom_mask,
    time_anom_thresh_var, mahalanobis_dist_time,
    tp_thresh_eval_time_var, fn_thresh_eval_time_var,
    fp_thresh_eval_time_var, tn_thresh_eval_time_var,
    feat_anom_thresh_var, mahalanobis_dist_feat,
    tp_thresh_eval_feat_var, fn_thresh_eval_feat_var,
    fp_thresh_eval_feat_var, tn_thresh_eval_feat_var,
    params, mode):
  """Checks tuned anomaly thresholds during supervised evaluation mode.

  Given label masks, mahalanobis distances, confusion matrices, and
  anomaly thresholds, returns loss and eval_metric_ops.

  Args:
    labels_norm_mask: tf.bool vector mask of labels for normals.
    labels_anom_mask: tf.bool vector mask of labels for anomalies.
    time_anom_thresh_var: tf.float64 scalar time anomaly threshold value.
    mahalanobis_dist_time: Mahalanobis distance, time major.
    tp_thresh_eval_time_var: tf.int64 variable to track number of true
      positives wrt thresholds for time major case for evaluation.
    fn_thresh_eval_time_var: tf.int64 variable to track number of false
      negatives wrt thresholds for time major case for evaluation.
    fp_thresh_eval_time_var: tf.int64 variable to track number of false
      positives wrt thresholds for time major case for evaluation.
    tn_thresh_eval_time_var: tf.int64 variable to track number of true
      negatives wrt thresholds for time major case for evaluation.
    feat_anom_thresh_var: tf.float64 scalar feature anomaly threshold value.
    mahalanobis_dist_feat: Mahalanobis distance, features major.
    tp_thresh_eval_feat_var: tf.int64 variable to track number of true
      positives wrt thresholds for feat major case for evaluation.
    fn_thresh_eval_feat_var: tf.int64 variable to track number of false
      negatives wrt thresholds for feat major case for evaluation.
    fp_thresh_eval_feat_var: tf.int64 variable to track number of false
      positives wrt thresholds for feat major case for evaluation.
    tn_thresh_eval_feat_var: tf.int64 variable to track number of true
      negatives wrt thresholds for feat major case for evaluation.
    params: Dictionary of parameters.
    mode: Estimator ModeKeys. Can take value of only EVAL.

  Returns:
    loss: Scalar reconstruction loss.
    eval_metric_ops: Evaluation metrics of threshold tuning.
  """
  with tf.variable_scope(
      name_or_scope="anom_thresh_eval_vars", reuse=tf.AUTO_REUSE):
    # Time based
    (tp_time_update_op, fn_time_update_op,
     fp_time_update_op, tn_time_update_op) = \
        update_anom_thresh_vars(
            labels_norm_mask, labels_anom_mask, 1,
            time_anom_thresh_var, mahalanobis_dist_time,
            tp_thresh_eval_time_var, fn_thresh_eval_time_var,
            fp_thresh_eval_time_var, tn_thresh_eval_time_var, mode)

    # Features based
    (tp_feat_update_op, fn_feat_update_op,
     fp_feat_update_op, tn_feat_update_op) = \
        update_anom_thresh_vars(
            labels_norm_mask, labels_anom_mask, 1,
            feat_anom_thresh_var, mahalanobis_dist_feat,
            tp_thresh_eval_feat_var, fn_thresh_eval_feat_var,
            fp_thresh_eval_feat_var, tn_thresh_eval_feat_var, mode)

  with tf.variable_scope(
      name_or_scope="anom_thresh_eval_vars", reuse=tf.AUTO_REUSE):
    # Time based
    (acc_time_update_op, pre_time_update_op,
     rec_time_update_op, f_beta_time_update_op) = \
        calculate_composite_classification_metrics(
            tp_thresh_eval_time_var, fn_thresh_eval_time_var,
            fp_thresh_eval_time_var, tn_thresh_eval_time_var,
            params["f_score_beta"])

    # Features based
    (acc_feat_update_op, pre_feat_update_op,
     rec_feat_update_op, f_beta_feat_update_op) = \
        calculate_composite_classification_metrics(
            tp_thresh_eval_feat_var, fn_thresh_eval_feat_var,
            fp_thresh_eval_feat_var, tn_thresh_eval_feat_var,
            params["f_score_beta"])

  loss = tf.zeros(shape=[], dtype=tf.float64)

  # Time based
  acc_trues = tf.cast(
      x=tp_thresh_eval_time_var + tn_thresh_eval_time_var, dtype=tf.float64)
  acc_falses = tf.cast(
      x=fp_thresh_eval_time_var + fn_thresh_eval_time_var, dtype=tf.float64)
  acc_thresh_eval_time_var = acc_trues / (acc_trues + acc_falses)

  tp_float = tf.cast(x=tp_thresh_eval_time_var, dtype=tf.float64)
  pre_denominator = tf.cast(
      x=tp_thresh_eval_time_var + fp_thresh_eval_time_var, dtype=tf.float64)
  pre_thresh_eval_time_var = tp_float / pre_denominator

  rec_denominator = tf.cast(
      x=tp_thresh_eval_time_var + fn_thresh_eval_time_var, dtype=tf.float64)
  rec_thresh_eval_time_var = tp_float / rec_denominator

  f_beta_numerator = (1.0 + params["f_score_beta"] ** 2)
  f_beta_numerator *= pre_thresh_eval_time_var
  f_beta_numerator *= rec_thresh_eval_time_var
  f_beta_denominator = params["f_score_beta"] ** 2
  f_beta_denominator *= pre_thresh_eval_time_var
  f_beta_denominator += rec_thresh_eval_time_var
  f_beta_thresh_eval_time_var = f_beta_numerator / f_beta_denominator

  # Features based
  acc_trues = tf.cast(
      x=tp_thresh_eval_feat_var + tn_thresh_eval_feat_var, dtype=tf.float64)
  acc_falses = tf.cast(
      x=fp_thresh_eval_feat_var + fn_thresh_eval_feat_var, dtype=tf.float64)
  acc_thresh_eval_feat_var = acc_trues / (acc_trues + acc_falses)

  tp_float = tf.cast(x=tp_thresh_eval_feat_var, dtype=tf.float64)
  pre_denominator = tf.cast(
      x=tp_thresh_eval_feat_var + fp_thresh_eval_feat_var, dtype=tf.float64)
  pre_thresh_eval_feat_var = tp_float / pre_denominator

  rec_denominator = tf.cast(
      x=tp_thresh_eval_feat_var + fn_thresh_eval_feat_var, dtype=tf.float64)
  rec_thresh_eval_feat_var = tp_float / rec_denominator

  f_beta_numerator = (1.0 + params["f_score_beta"] ** 2)
  f_beta_numerator *= pre_thresh_eval_feat_var
  f_beta_numerator *= rec_thresh_eval_feat_var
  f_beta_denominator = params["f_score_beta"] ** 2
  f_beta_denominator *= pre_thresh_eval_feat_var
  f_beta_denominator += rec_thresh_eval_feat_var
  f_beta_thresh_eval_feat_var = f_beta_numerator / f_beta_denominator

  # Anomaly detection eval metrics
  eval_metric_ops = {
      # Time based
      "time_anom_tp": (tp_thresh_eval_time_var, tp_time_update_op),
      "time_anom_fn": (fn_thresh_eval_time_var, fn_time_update_op),
      "time_anom_fp": (fp_thresh_eval_time_var, fp_time_update_op),
      "time_anom_tn": (tn_thresh_eval_time_var, tn_time_update_op),
      "time_anom_acc": (acc_thresh_eval_time_var, acc_time_update_op),
      "time_anom_pre": (pre_thresh_eval_time_var, pre_time_update_op),
      "time_anom_rec": (rec_thresh_eval_time_var, rec_time_update_op),
      "time_anom_f_beta": (f_beta_thresh_eval_time_var, f_beta_time_update_op),
      # Features based
      "feat_anom_tp": (tp_thresh_eval_feat_var, tp_feat_update_op),
      "feat_anom_fn": (fn_thresh_eval_feat_var, fn_feat_update_op),
      "feat_anom_fp": (fp_thresh_eval_feat_var, fp_feat_update_op),
      "feat_anom_tn": (tn_thresh_eval_feat_var, tn_feat_update_op),
      "feat_anom_acc": (acc_thresh_eval_feat_var, acc_feat_update_op),
      "feat_anom_pre": (pre_thresh_eval_feat_var, pre_feat_update_op),
      "feat_anom_rec": (rec_thresh_eval_feat_var, rec_feat_update_op),
      "feat_anom_f_beta": (f_beta_thresh_eval_feat_var, f_beta_feat_update_op)
  }

  return loss, eval_metric_ops
[ "def", "tune_anomaly_thresholds_supervised_eval", "(", "labels_norm_mask", ",", "labels_anom_mask", ",", "time_anom_thresh_var", ",", "mahalanobis_dist_time", ",", "tp_thresh_eval_time_var", ",", "fn_thresh_eval_time_var", ",", "fp_thresh_eval_time_var", ",", "tn_thresh_eval_time_var", ",", "feat_anom_thresh_var", ",", "mahalanobis_dist_feat", ",", "tp_thresh_eval_feat_var", ",", "fn_thresh_eval_feat_var", ",", "fp_thresh_eval_feat_var", ",", "tn_thresh_eval_feat_var", ",", "params", ",", "mode", ")", ":", "with", "tf", ".", "variable_scope", "(", "name_or_scope", "=", "\"anom_thresh_eval_vars\"", ",", "reuse", "=", "tf", ".", "AUTO_REUSE", ")", ":", "# Time based", "(", "tp_time_update_op", ",", "fn_time_update_op", ",", "fp_time_update_op", ",", "tn_time_update_op", ")", "=", "update_anom_thresh_vars", "(", "labels_norm_mask", ",", "labels_anom_mask", ",", "1", ",", "time_anom_thresh_var", ",", "mahalanobis_dist_time", ",", "tp_thresh_eval_time_var", ",", "fn_thresh_eval_time_var", ",", "fp_thresh_eval_time_var", ",", "tn_thresh_eval_time_var", ",", "mode", ")", "# Features based", "(", "tp_feat_update_op", ",", "fn_feat_update_op", ",", "fp_feat_update_op", ",", "tn_feat_update_op", ")", "=", "update_anom_thresh_vars", "(", "labels_norm_mask", ",", "labels_anom_mask", ",", "1", ",", "feat_anom_thresh_var", ",", "mahalanobis_dist_feat", ",", "tp_thresh_eval_feat_var", ",", "fn_thresh_eval_feat_var", ",", "fp_thresh_eval_feat_var", ",", "tn_thresh_eval_feat_var", ",", "mode", ")", "with", "tf", ".", "variable_scope", "(", "name_or_scope", "=", "\"anom_thresh_eval_vars\"", ",", "reuse", "=", "tf", ".", "AUTO_REUSE", ")", ":", "# Time based", "(", "acc_time_update_op", ",", "pre_time_update_op", ",", "rec_time_update_op", ",", "f_beta_time_update_op", ")", "=", "calculate_composite_classification_metrics", "(", "tp_thresh_eval_time_var", ",", "fn_thresh_eval_time_var", ",", "fp_thresh_eval_time_var", ",", "tn_thresh_eval_time_var", ",", "params", "[", "\"f_score_beta\"", "]", ")", "# Features based", "(", "acc_feat_update_op", ",", "pre_feat_update_op", ",", "rec_feat_update_op", ",", "f_beta_feat_update_op", ")", "=", "calculate_composite_classification_metrics", "(", "tp_thresh_eval_feat_var", ",", "fn_thresh_eval_feat_var", ",", "fp_thresh_eval_feat_var", ",", "tn_thresh_eval_feat_var", ",", "params", "[", "\"f_score_beta\"", "]", ")", "loss", "=", "tf", ".", "zeros", "(", "shape", "=", "[", "]", ",", "dtype", "=", "tf", ".", "float64", ")", "# Time based", "acc_trues", "=", "tf", ".", "cast", "(", "x", "=", "tp_thresh_eval_time_var", "+", "tn_thresh_eval_time_var", ",", "dtype", "=", "tf", ".", "float64", ")", "acc_falses", "=", "tf", ".", "cast", "(", "x", "=", "fp_thresh_eval_time_var", "+", "fn_thresh_eval_time_var", ",", "dtype", "=", "tf", ".", "float64", ")", "acc_thresh_eval_time_var", "=", "acc_trues", "/", "(", "acc_trues", "+", "acc_falses", ")", "tp_float", "=", "tf", ".", "cast", "(", "x", "=", "tp_thresh_eval_time_var", ",", "dtype", "=", "tf", ".", "float64", ")", "pre_denominator", "=", "tf", ".", "cast", "(", "x", "=", "tp_thresh_eval_time_var", "+", "fp_thresh_eval_time_var", ",", "dtype", "=", "tf", ".", "float64", ")", "pre_thresh_eval_time_var", "=", "tp_float", "/", "pre_denominator", "rec_denominator", "=", "tf", ".", "cast", "(", "x", "=", "tp_thresh_eval_time_var", "+", "fn_thresh_eval_time_var", ",", "dtype", "=", "tf", ".", "float64", ")", "rec_thresh_eval_time_var", "=", "tp_float", "/", "rec_denominator", "f_beta_numerator", "=", "(", "1.0", "+", "params", 
"[", "\"f_score_beta\"", "]", "**", "2", ")", "f_beta_numerator", "*=", "pre_thresh_eval_time_var", "f_beta_numerator", "*=", "rec_thresh_eval_time_var", "f_beta_denominator", "=", "params", "[", "\"f_score_beta\"", "]", "**", "2", "f_beta_denominator", "*=", "pre_thresh_eval_time_var", "f_beta_denominator", "+=", "rec_thresh_eval_time_var", "f_beta_thresh_eval_time_var", "=", "f_beta_numerator", "/", "f_beta_denominator", "# Features based", "acc_trues", "=", "tf", ".", "cast", "(", "x", "=", "tp_thresh_eval_feat_var", "+", "tn_thresh_eval_feat_var", ",", "dtype", "=", "tf", ".", "float64", ")", "acc_falses", "=", "tf", ".", "cast", "(", "x", "=", "fp_thresh_eval_feat_var", "+", "fn_thresh_eval_feat_var", ",", "dtype", "=", "tf", ".", "float64", ")", "acc_thresh_eval_feat_var", "=", "acc_trues", "/", "(", "acc_trues", "+", "acc_falses", ")", "tp_float", "=", "tf", ".", "cast", "(", "x", "=", "tp_thresh_eval_feat_var", ",", "dtype", "=", "tf", ".", "float64", ")", "pre_denominator", "=", "tf", ".", "cast", "(", "x", "=", "tp_thresh_eval_feat_var", "+", "fp_thresh_eval_feat_var", ",", "dtype", "=", "tf", ".", "float64", ")", "pre_thresh_eval_feat_var", "=", "tp_float", "/", "pre_denominator", "rec_denominator", "=", "tf", ".", "cast", "(", "x", "=", "tp_thresh_eval_feat_var", "+", "fn_thresh_eval_feat_var", ",", "dtype", "=", "tf", ".", "float64", ")", "rec_thresh_eval_feat_var", "=", "tp_float", "/", "rec_denominator", "f_beta_numerator", "=", "(", "1.0", "+", "params", "[", "\"f_score_beta\"", "]", "**", "2", ")", "f_beta_numerator", "*=", "pre_thresh_eval_feat_var", "f_beta_numerator", "*=", "rec_thresh_eval_feat_var", "f_beta_denominator", "=", "params", "[", "\"f_score_beta\"", "]", "**", "2", "f_beta_denominator", "*=", "pre_thresh_eval_feat_var", "f_beta_denominator", "+=", "rec_thresh_eval_feat_var", "f_beta_thresh_eval_feat_var", "=", "f_beta_numerator", "/", "f_beta_denominator", "# Anomaly detection eval metrics", "eval_metric_ops", "=", "{", "# Time based", "\"time_anom_tp\"", ":", "(", "tp_thresh_eval_time_var", ",", "tp_time_update_op", ")", ",", "\"time_anom_fn\"", ":", "(", "fn_thresh_eval_time_var", ",", "fn_time_update_op", ")", ",", "\"time_anom_fp\"", ":", "(", "fp_thresh_eval_time_var", ",", "fp_time_update_op", ")", ",", "\"time_anom_tn\"", ":", "(", "tn_thresh_eval_time_var", ",", "tn_time_update_op", ")", ",", "\"time_anom_acc\"", ":", "(", "acc_thresh_eval_time_var", ",", "acc_time_update_op", ")", ",", "\"time_anom_pre\"", ":", "(", "pre_thresh_eval_time_var", ",", "pre_time_update_op", ")", ",", "\"time_anom_rec\"", ":", "(", "rec_thresh_eval_time_var", ",", "rec_time_update_op", ")", ",", "\"time_anom_f_beta\"", ":", "(", "f_beta_thresh_eval_time_var", ",", "f_beta_time_update_op", ")", ",", "# Features based", "\"feat_anom_tp\"", ":", "(", "tp_thresh_eval_feat_var", ",", "tp_feat_update_op", ")", ",", "\"feat_anom_fn\"", ":", "(", "fn_thresh_eval_feat_var", ",", "fn_feat_update_op", ")", ",", "\"feat_anom_fp\"", ":", "(", "fp_thresh_eval_feat_var", ",", "fp_feat_update_op", ")", ",", "\"feat_anom_tn\"", ":", "(", "tn_thresh_eval_feat_var", ",", "tn_feat_update_op", ")", ",", "\"feat_anom_acc\"", ":", "(", "acc_thresh_eval_feat_var", ",", "acc_feat_update_op", ")", ",", "\"feat_anom_pre\"", ":", "(", "pre_thresh_eval_feat_var", ",", "pre_feat_update_op", ")", ",", "\"feat_anom_rec\"", ":", "(", "rec_thresh_eval_feat_var", ",", "rec_feat_update_op", ")", ",", "\"feat_anom_f_beta\"", ":", "(", "f_beta_thresh_eval_feat_var", ",", "f_beta_feat_update_op", ")", "}", 
"return", "loss", ",", "eval_metric_ops" ]
[ 421, 0 ]
[ 622, 30 ]
python
en
['en', 'en', 'en']
True
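The composite metrics this function wires into the graph reduce to standard confusion-matrix arithmetic. The sketch below is not part of the record and not the TensorFlow graph version; it is a plain-Python illustration of the same accuracy/precision/recall/F-beta formulas, using made-up counts and an assumed value for params["f_score_beta"].

# Illustrative confusion-matrix counts (not from any real evaluation run).
tp, fn, fp, tn = 40, 10, 5, 45
f_score_beta = 2.0  # assumed stand-in for params["f_score_beta"]

acc = (tp + tn) / (tp + tn + fp + fn)
pre = tp / (tp + fp)
rec = tp / (tp + fn)
f_beta = (1.0 + f_score_beta ** 2) * pre * rec / (f_score_beta ** 2 * pre + rec)
print(acc, pre, rec, f_beta)  # approx. 0.85 0.889 0.80 0.816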
str_cast
(maybe_bytes, encoding='utf-8')
Converts any bytes-like input to a string-like output, with respect to python version Parameters ---------- maybe_bytes : if this is a bytes-like object, it will be converted to a string encoding : str, default='utf-8' encoding to be used when decoding bytes
Converts any bytes-like input to a string-like output, with respect to python version
def str_cast(maybe_bytes, encoding='utf-8'):
    """
    Converts any bytes-like input to a string-like output, with respect to python version

    Parameters
    ----------
    maybe_bytes : if this is a bytes-like object, it will be converted to a string
    encoding : str, default='utf-8'
        encoding to be used when decoding bytes
    """
    if isinstance(maybe_bytes, bytes_):
        return maybe_bytes.decode(encoding)
    else:
        return maybe_bytes
[ "def", "str_cast", "(", "maybe_bytes", ",", "encoding", "=", "'utf-8'", ")", ":", "if", "isinstance", "(", "maybe_bytes", ",", "bytes_", ")", ":", "return", "maybe_bytes", ".", "decode", "(", "encoding", ")", "else", ":", "return", "maybe_bytes" ]
[ 23, 0 ]
[ 37, 26 ]
python
en
['en', 'error', 'th']
False
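A quick usage sketch (assuming the module's bytes_ alias resolves to the built-in bytes type on Python 3):

assert str_cast(b'caf\xc3\xa9') == 'café'                    # bytes are decoded with UTF-8 by default
assert str_cast('already text') == 'already text'            # non-bytes input passes through unchanged
assert str_cast(b'caf\xe9', encoding='latin-1') == 'café'    # alternative encodings can be supplied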
bytes_cast
(maybe_str, encoding='utf-8')
Converts any string-like input to a bytes-like output, with respect to python version Parameters ---------- maybe_str : if this is a string-like object, it will be converted to bytes encoding : str, default='utf-8' encoding to be used when encoding string
Converts any string-like input to a bytes-like output, with respect to python version
def bytes_cast(maybe_str, encoding='utf-8'):
    """
    Converts any string-like input to a bytes-like output, with respect to python version

    Parameters
    ----------
    maybe_str : if this is a string-like object, it will be converted to bytes
    encoding : str, default='utf-8'
        encoding to be used when encoding string
    """
    if isinstance(maybe_str, unicode_):
        return maybe_str.encode(encoding)
    else:
        return maybe_str
[ "def", "bytes_cast", "(", "maybe_str", ",", "encoding", "=", "'utf-8'", ")", ":", "if", "isinstance", "(", "maybe_str", ",", "unicode_", ")", ":", "return", "maybe_str", ".", "encode", "(", "encoding", ")", "else", ":", "return", "maybe_str" ]
[ 40, 0 ]
[ 54, 24 ]
python
en
['en', 'error', 'th']
False
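The mirror-image usage (assuming the unicode_ alias resolves to str on Python 3):

assert bytes_cast('café') == b'caf\xc3\xa9'                  # str is encoded with UTF-8 by default
assert bytes_cast(b'already bytes') == b'already bytes'      # bytes input passes through unchanged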
str_list_cast
(list_, **kwargs)
Converts any bytes-like items in input list to string-like values, with respect to python version Parameters ---------- list_ : list any bytes-like objects contained in the list will be converted to strings kwargs: encoding: str, default: 'utf-8' encoding to be used when decoding bytes
Converts any bytes-like items in input list to string-like values, with respect to python version
def str_list_cast(list_, **kwargs):
    """
    Converts any bytes-like items in input list to string-like values, with respect to python version

    Parameters
    ----------
    list_ : list
        any bytes-like objects contained in the list will be converted to strings
    kwargs:
        encoding: str, default: 'utf-8'
            encoding to be used when decoding bytes
    """
    return [str_cast(elem, **kwargs) for elem in list_]
[ "def", "str_list_cast", "(", "list_", ",", "*", "*", "kwargs", ")", ":", "return", "[", "str_cast", "(", "elem", ",", "*", "*", "kwargs", ")", "for", "elem", "in", "list_", "]" ]
[ 57, 0 ]
[ 71, 55 ]
python
en
['en', 'error', 'th']
False
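Usage sketch on a mixed list; items that are already strings are left untouched:

assert str_list_cast([b'one', 'two', b'three']) == ['one', 'two', 'three']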
bytes_list_cast
(list_, **kwargs)
Converts any string-like items in input list to bytes-like values, with respect to python version Parameters ---------- list_ : list any string-like objects contained in the list will be converted to bytes kwargs: encoding: str, default: 'utf-8' encoding to be used when encoding string
Converts any string-like items in input list to bytes-like values, with respect to python version
def bytes_list_cast(list_, **kwargs):
    """
    Converts any string-like items in input list to bytes-like values, with respect to python version

    Parameters
    ----------
    list_ : list
        any string-like objects contained in the list will be converted to bytes
    kwargs:
        encoding: str, default: 'utf-8'
            encoding to be used when encoding string
    """
    return [bytes_cast(elem, **kwargs) for elem in list_]
[ "def", "bytes_list_cast", "(", "list_", ",", "*", "*", "kwargs", ")", ":", "return", "[", "bytes_cast", "(", "elem", ",", "*", "*", "kwargs", ")", "for", "elem", "in", "list_", "]" ]
[ 74, 0 ]
[ 87, 57 ]
python
en
['en', 'error', 'th']
False
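And the corresponding bytes-direction sketch:

assert bytes_list_cast(['one', b'two']) == [b'one', b'two']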
str_dict_cast
(dict_, include_keys=True, include_vals=True, **kwargs)
Converts any bytes-like items in input dict to string-like values, with respect to python version Parameters ---------- dict_ : dict any bytes-like objects contained in the dict will be converted to a string include_keys : bool, default=True if True, cast keys to a string, else ignore include_vals : bool, default=True if True, cast values to a string, else ignore kwargs: encoding: str, default: 'utf-8' encoding to be used when decoding bytes
Converts any bytes-like items in input dict to string-like values, with respect to python version
def str_dict_cast(dict_, include_keys=True, include_vals=True, **kwargs):
    """
    Converts any bytes-like items in input dict to string-like values, with respect to python version

    Parameters
    ----------
    dict_ : dict
        any bytes-like objects contained in the dict will be converted to a string
    include_keys : bool, default=True
        if True, cast keys to a string, else ignore
    include_vals : bool, default=True
        if True, cast values to a string, else ignore
    kwargs:
        encoding: str, default: 'utf-8'
            encoding to be used when decoding bytes
    """
    new_keys = str_list_cast(dict_.keys(), **kwargs) if include_keys else dict_.keys()
    new_vals = str_list_cast(dict_.values(), **kwargs) if include_vals else dict_.values()
    new_dict = dict(zip_(new_keys, new_vals))
    return new_dict
[ "def", "str_dict_cast", "(", "dict_", ",", "include_keys", "=", "True", ",", "include_vals", "=", "True", ",", "*", "*", "kwargs", ")", ":", "new_keys", "=", "str_list_cast", "(", "dict_", ".", "keys", "(", ")", ",", "*", "*", "kwargs", ")", "if", "include_keys", "else", "dict_", ".", "keys", "(", ")", "new_vals", "=", "str_list_cast", "(", "dict_", ".", "values", "(", ")", ",", "*", "*", "kwargs", ")", "if", "include_vals", "else", "dict_", ".", "values", "(", ")", "new_dict", "=", "dict", "(", "zip_", "(", "new_keys", ",", "new_vals", ")", ")", "return", "new_dict" ]
[ 90, 0 ]
[ 111, 19 ]
python
en
['en', 'error', 'th']
False
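Usage sketch showing the include_keys / include_vals switches:

d = {b'key': b'value'}
assert str_dict_cast(d) == {'key': 'value'}
assert str_dict_cast(d, include_vals=False) == {'key': b'value'}   # keys decoded, values left as bytes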
bytes_dict_cast
(dict_, include_keys=True, include_vals=True, **kwargs)
Converts any string-like items in input dict to bytes-like values, with respect to python version Parameters ---------- dict_ : dict any string-like objects contained in the dict will be converted to bytes include_keys : bool, default=True if True, cast keys to bytes, else ignore include_vals : bool, default=True if True, cast values to bytes, else ignore kwargs: encoding: str, default: 'utf-8' encoding to be used when encoding string
Converts any string-like items in input dict to bytes-like values, with respect to python version
def bytes_dict_cast(dict_, include_keys=True, include_vals=True, **kwargs):
    """
    Converts any string-like items in input dict to bytes-like values, with respect to python version

    Parameters
    ----------
    dict_ : dict
        any string-like objects contained in the dict will be converted to bytes
    include_keys : bool, default=True
        if True, cast keys to bytes, else ignore
    include_vals : bool, default=True
        if True, cast values to bytes, else ignore
    kwargs:
        encoding: str, default: 'utf-8'
            encoding to be used when encoding string
    """
    new_keys = bytes_list_cast(dict_.keys(), **kwargs) if include_keys else dict_.keys()
    new_vals = bytes_list_cast(dict_.values(), **kwargs) if include_vals else dict_.values()
    new_dict = dict(zip_(new_keys, new_vals))
    return new_dict
[ "def", "bytes_dict_cast", "(", "dict_", ",", "include_keys", "=", "True", ",", "include_vals", "=", "True", ",", "*", "*", "kwargs", ")", ":", "new_keys", "=", "bytes_list_cast", "(", "dict_", ".", "keys", "(", ")", ",", "*", "*", "kwargs", ")", "if", "include_keys", "else", "dict_", ".", "keys", "(", ")", "new_vals", "=", "bytes_list_cast", "(", "dict_", ".", "values", "(", ")", ",", "*", "*", "kwargs", ")", "if", "include_vals", "else", "dict_", ".", "values", "(", ")", "new_dict", "=", "dict", "(", "zip_", "(", "new_keys", ",", "new_vals", ")", ")", "return", "new_dict" ]
[ 114, 0 ]
[ 134, 19 ]
python
en
['en', 'error', 'th']
False
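The bytes-direction counterpart behaves symmetrically:

assert bytes_dict_cast({'key': 'value'}) == {b'key': b'value'}
assert bytes_dict_cast({'key': 'value'}, include_keys=False) == {'key': b'value'}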
str_block_cast
(block, include_text=True, include_link_tokens=True, include_css=True, include_features=True, **kwargs)
Converts any bytes-like items in input Block object to string-like values, with respect to python version Parameters ---------- block : blocks.Block any bytes-like objects contained in the block object will be converted to a string include_text : bool, default=True if True, cast text to a string, else ignore include_link_tokens : bool, default=True if True, cast link_tokens to a string, else ignore include_css : bool, default=True if True, cast css to a string, else ignore include_features : bool, default=True if True, cast features to a string, else ignore kwargs: encoding: str, default: 'utf-8' encoding to be used when decoding bytes
Converts any bytes-like items in input Block object to string-like values, with respect to python version
def str_block_cast(block, include_text=True, include_link_tokens=True,
                   include_css=True, include_features=True, **kwargs):
    """
    Converts any bytes-like items in input Block object to string-like values, with respect to python version

    Parameters
    ----------
    block : blocks.Block
        any bytes-like objects contained in the block object will be converted to a string
    include_text : bool, default=True
        if True, cast text to a string, else ignore
    include_link_tokens : bool, default=True
        if True, cast link_tokens to a string, else ignore
    include_css : bool, default=True
        if True, cast css to a string, else ignore
    include_features : bool, default=True
        if True, cast features to a string, else ignore
    kwargs:
        encoding: str, default: 'utf-8'
            encoding to be used when decoding bytes
    """
    if include_text:
        block.text = str_cast(block.text, **kwargs)
    if include_link_tokens:
        block.link_tokens = str_list_cast(block.link_tokens, **kwargs)
    if include_css:
        block.css = str_dict_cast(block.css, **kwargs)
    if include_features:
        block.features = str_dict_cast(block.features, **kwargs)
    return block
[ "def", "str_block_cast", "(", "block", ",", "include_text", "=", "True", ",", "include_link_tokens", "=", "True", ",", "include_css", "=", "True", ",", "include_features", "=", "True", ",", "*", "*", "kwargs", ")", ":", "if", "include_text", ":", "block", ".", "text", "=", "str_cast", "(", "block", ".", "text", ",", "*", "*", "kwargs", ")", "if", "include_link_tokens", ":", "block", ".", "link_tokens", "=", "str_list_cast", "(", "block", ".", "link_tokens", ",", "*", "*", "kwargs", ")", "if", "include_css", ":", "block", ".", "css", "=", "str_dict_cast", "(", "block", ".", "css", ",", "*", "*", "kwargs", ")", "if", "include_features", ":", "block", ".", "features", "=", "str_dict_cast", "(", "block", ".", "features", ",", "*", "*", "kwargs", ")", "return", "block" ]
[ 137, 0 ]
[ 172, 16 ]
python
en
['en', 'error', 'th']
False
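Since the function only reads and reassigns the four attributes, any object exposing them can stand in for the real blocks.Block class in a quick check; the SimpleNamespace below is an illustrative stand-in, not the library's type.

from types import SimpleNamespace

block = SimpleNamespace(
    text=b'hello world',
    link_tokens=[b'a', b'b'],
    css={b'id': b'main'},
    features={b'weight': b'3'},
)
block = str_block_cast(block)
assert block.text == 'hello world' and block.css == {'id': 'main'}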
bytes_block_cast
(block, include_text=True, include_link_tokens=True, include_css=True, include_features=True, **kwargs)
Converts any string-like items in input Block object to bytes-like values, with respect to python version Parameters ---------- block : blocks.Block any string-like objects contained in the block object will be converted to bytes include_text : bool, default=True if True, cast text to bytes, else ignore include_link_tokens : bool, default=True if True, cast link_tokens to bytes, else ignore include_css : bool, default=True if True, cast css to bytes, else ignore include_features : bool, default=True if True, cast features to bytes, else ignore kwargs: encoding: str, default: 'utf-8' encoding to be used when encoding string
Converts any string-like items in input Block object to bytes-like values, with respect to python version
def bytes_block_cast(block, include_text=True, include_link_tokens=True,
                     include_css=True, include_features=True, **kwargs):
    """
    Converts any string-like items in input Block object to bytes-like values, with respect to python version

    Parameters
    ----------
    block : blocks.Block
        any string-like objects contained in the block object will be converted to bytes
    include_text : bool, default=True
        if True, cast text to bytes, else ignore
    include_link_tokens : bool, default=True
        if True, cast link_tokens to bytes, else ignore
    include_css : bool, default=True
        if True, cast css to bytes, else ignore
    include_features : bool, default=True
        if True, cast features to bytes, else ignore
    kwargs:
        encoding: str, default: 'utf-8'
            encoding to be used when encoding string
    """
    if include_text:
        block.text = bytes_cast(block.text, **kwargs)
    if include_link_tokens:
        block.link_tokens = bytes_list_cast(block.link_tokens, **kwargs)
    if include_css:
        block.css = bytes_dict_cast(block.css, **kwargs)
    if include_features:
        block.features = bytes_dict_cast(block.features, **kwargs)
    return block
[ "def", "bytes_block_cast", "(", "block", ",", "include_text", "=", "True", ",", "include_link_tokens", "=", "True", ",", "include_css", "=", "True", ",", "include_features", "=", "True", ",", "*", "*", "kwargs", ")", ":", "if", "include_text", ":", "block", ".", "text", "=", "bytes_cast", "(", "block", ".", "text", ",", "*", "*", "kwargs", ")", "if", "include_link_tokens", ":", "block", ".", "link_tokens", "=", "bytes_list_cast", "(", "block", ".", "link_tokens", ",", "*", "*", "kwargs", ")", "if", "include_css", ":", "block", ".", "css", "=", "bytes_dict_cast", "(", "block", ".", "css", ",", "*", "*", "kwargs", ")", "if", "include_features", ":", "block", ".", "features", "=", "bytes_dict_cast", "(", "block", ".", "features", ",", "*", "*", "kwargs", ")", "return", "block" ]
[ 175, 0 ]
[ 210, 16 ]
python
en
['en', 'error', 'th']
False
str_block_list_cast
(blocks, **kwargs)
Converts any bytes-like items in input lxml.Blocks to string-like values, with respect to python version Parameters ---------- blocks : list[lxml.Block] any bytes-like objects contained in the block object will be converted to a string kwargs: include_text : bool, default=True if True, cast text to a string, else ignore include_link_tokens : bool, default=True if True, cast link_tokens to a string, else ignore include_css : bool, default=True if True, cast css to a string, else ignore include_features : bool, default=True if True, cast features to a string, else ignore encoding: str, default: 'utf-8' encoding to be used when decoding bytes
Converts any bytes-like items in input lxml.Blocks to string-like values, with respect to python version
def str_block_list_cast(blocks, **kwargs):
    """
    Converts any bytes-like items in input lxml.Blocks to string-like values, with respect to python version

    Parameters
    ----------
    blocks : list[lxml.Block]
        any bytes-like objects contained in the block object will be converted to a string
    kwargs:
        include_text : bool, default=True
            if True, cast text to a string, else ignore
        include_link_tokens : bool, default=True
            if True, cast link_tokens to a string, else ignore
        include_css : bool, default=True
            if True, cast css to a string, else ignore
        include_features : bool, default=True
            if True, cast features to a string, else ignore
        encoding: str, default: 'utf-8'
            encoding to be used when decoding bytes
    """
    return [str_block_cast(block, **kwargs) for block in blocks]
[ "def", "str_block_list_cast", "(", "blocks", ",", "*", "*", "kwargs", ")", ":", "return", "[", "str_block_cast", "(", "block", ",", "*", "*", "kwargs", ")", "for", "block", "in", "blocks", "]" ]
[ 213, 0 ]
[ 235, 64 ]
python
en
['en', 'error', 'th']
False
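Typical call shape, with the keyword switches forwarded to str_block_cast for every element (raw_blocks is an assumed list of Block objects):

decoded_blocks = str_block_list_cast(raw_blocks, include_css=False, encoding='utf-8')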
bytes_block_list_cast
(blocks, **kwargs)
Converts any string-like items in input lxml.Blocks to bytes-like values, with respect to python version Parameters ---------- blocks : list[lxml.Block] any string-like objects contained in the block object will be converted to bytes kwargs: include_text : bool, default=True if True, cast text to bytes, else ignore include_link_tokens : bool, default=True if True, cast link_tokens to bytes, else ignore include_css : bool, default=True if True, cast css to bytes, else ignore include_features : bool, default=True if True, cast features to bytes, else ignore encoding: str, default: 'utf-8' encoding to be used when encoding string
Converts any string-like items in input lxml.Blocks to bytes-like values, with respect to python version
def bytes_block_list_cast(blocks, **kwargs):
    """
    Converts any string-like items in input lxml.Blocks to bytes-like values, with respect to python version

    Parameters
    ----------
    blocks : list[lxml.Block]
        any string-like objects contained in the block object will be converted to bytes
    kwargs:
        include_text : bool, default=True
            if True, cast text to bytes, else ignore
        include_link_tokens : bool, default=True
            if True, cast link_tokens to bytes, else ignore
        include_css : bool, default=True
            if True, cast css to bytes, else ignore
        include_features : bool, default=True
            if True, cast features to bytes, else ignore
        encoding: str, default: 'utf-8'
            encoding to be used when encoding string
    """
    return [bytes_block_cast(block, **kwargs) for block in blocks]
[ "def", "bytes_block_list_cast", "(", "blocks", ",", "*", "*", "kwargs", ")", ":", "return", "[", "bytes_block_cast", "(", "block", ",", "*", "*", "kwargs", ")", "for", "block", "in", "blocks", "]" ]
[ 238, 0 ]
[ 260, 66 ]
python
en
['en', 'error', 'th']
False
gaussian_2d
(shape, centre, sigma=1.0)
Generate heatmap with single 2D gaussian.
Generate heatmap with single 2D gaussian.
def gaussian_2d(shape, centre, sigma=1.0):
    """Generate heatmap with single 2D gaussian."""
    xs = np.arange(0.5, shape[1] + 0.5, step=1.0, dtype=np.float32)
    ys = np.expand_dims(np.arange(0.5, shape[0] + 0.5, step=1.0, dtype=np.float32), -1)
    alpha = -0.5 / (sigma**2)
    heatmap = np.exp(alpha * ((xs - centre[0])**2 + (ys - centre[1])**2))
    return heatmap
[ "def", "gaussian_2d", "(", "shape", ",", "centre", ",", "sigma", "=", "1.0", ")", ":", "xs", "=", "np", ".", "arange", "(", "0.5", ",", "shape", "[", "1", "]", "+", "0.5", ",", "step", "=", "1.0", ",", "dtype", "=", "np", ".", "float32", ")", "ys", "=", "np", ".", "expand_dims", "(", "np", ".", "arange", "(", "0.5", ",", "shape", "[", "0", "]", "+", "0.5", ",", "step", "=", "1.0", ",", "dtype", "=", "np", ".", "float32", ")", ",", "-", "1", ")", "alpha", "=", "-", "0.5", "/", "(", "sigma", "**", "2", ")", "heatmap", "=", "np", ".", "exp", "(", "alpha", "*", "(", "(", "xs", "-", "centre", "[", "0", "]", ")", "**", "2", "+", "(", "ys", "-", "centre", "[", "1", "]", ")", "**", "2", ")", ")", "return", "heatmap" ]
[ 30, 0 ]
[ 36, 18 ]
python
en
['en', 'fy', 'en']
True
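Usage sketch: shape is (height, width), centre is (x, y) in pixel coordinates, and the peak lands on the pixel whose centre is closest to centre.

import numpy as np

heatmap = gaussian_2d((5, 7), centre=(3.5, 2.5), sigma=1.0)
assert heatmap.shape == (5, 7)                                        # rows = height, cols = width
assert np.unravel_index(heatmap.argmax(), heatmap.shape) == (2, 3)    # peak at row 2 (y), column 3 (x)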
generate_test_image
(test_img, net_input_shape, batchSize=1, numSlices=1, subSampAmt=0, stride=1, downSampAmt=1)
test_img: numpy.array of image data, (height, width, channels)
test_img: numpy.array of image data, (height, width, channels)
def generate_test_image(test_img, net_input_shape, batchSize=1, numSlices=1, subSampAmt=0,
                        stride=1, downSampAmt=1):
    '''
    test_img: numpy.array of image data, (height, width, channels)
    '''
    # Create placeholders for testing
    logging.info('\nload_2D_data.generate_test_image')

    # Convert image to 4 dimensions
    test_img = convert_img_data(test_img, 4)

    yield (test_img)
[ "def", "generate_test_image", "(", "test_img", ",", "net_input_shape", ",", "batchSize", "=", "1", ",", "numSlices", "=", "1", ",", "subSampAmt", "=", "0", ",", "stride", "=", "1", ",", "downSampAmt", "=", "1", ")", ":", "# Create placeholders for testing", "logging", ".", "info", "(", "'\\nload_2D_data.generate_test_image'", ")", "# Convert image to 4 dimensions", "test_img", "=", "convert_img_data", "(", "test_img", ",", "4", ")", "yield", "(", "test_img", ")" ]
[ 308, 0 ]
[ 319, 20 ]
python
en
['en', 'error', 'th']
False
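A hedged usage sketch: the generator yields the converted image exactly once, so it can be consumed with next() or passed to a Keras-style predict generator. It assumes the module's convert_img_data helper is importable alongside the function; the array shape below is illustrative only.

import numpy as np

img = np.random.rand(512, 512, 1).astype(np.float32)    # hypothetical (height, width, channels) input
gen = generate_test_image(img, net_input_shape=(512, 512, 1))
batch = next(gen)    # single 4-D array produced by convert_img_data(test_img, 4)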
Loader.get_template
(self, template_name, skip=None)
Call self.get_template_sources() and return a Template object for the first template matching template_name. If skip is provided, ignore template origins in skip. This is used to avoid recursion during template extending.
Call self.get_template_sources() and return a Template object for the first template matching template_name. If skip is provided, ignore template origins in skip. This is used to avoid recursion during template extending.
def get_template(self, template_name, skip=None):
    """
    Call self.get_template_sources() and return a Template object for
    the first template matching template_name. If skip is provided, ignore
    template origins in skip. This is used to avoid recursion during
    template extending.
    """
    tried = []

    for origin in self.get_template_sources(template_name):
        if skip is not None and origin in skip:
            tried.append((origin, 'Skipped to avoid recursion'))
            continue

        try:
            contents = self.get_contents(origin)
        except TemplateDoesNotExist:
            tried.append((origin, 'Source does not exist'))
            continue
        else:
            return Template(
                contents, origin, origin.template_name, self.engine,
            )

    raise TemplateDoesNotExist(template_name, tried=tried)
[ "def", "get_template", "(", "self", ",", "template_name", ",", "skip", "=", "None", ")", ":", "tried", "=", "[", "]", "for", "origin", "in", "self", ".", "get_template_sources", "(", "template_name", ")", ":", "if", "skip", "is", "not", "None", "and", "origin", "in", "skip", ":", "tried", ".", "append", "(", "(", "origin", ",", "'Skipped to avoid recursion'", ")", ")", "continue", "try", ":", "contents", "=", "self", ".", "get_contents", "(", "origin", ")", "except", "TemplateDoesNotExist", ":", "tried", ".", "append", "(", "(", "origin", ",", "'Source does not exist'", ")", ")", "continue", "else", ":", "return", "Template", "(", "contents", ",", "origin", ",", "origin", ".", "template_name", ",", "self", ".", "engine", ",", ")", "raise", "TemplateDoesNotExist", "(", "template_name", ",", "tried", "=", "tried", ")" ]
[ 8, 4 ]
[ 32, 62 ]
python
en
['en', 'error', 'th']
False
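The interesting part is the control flow: the first loadable origin wins, while skipped origins and missing sources are recorded in tried so the final TemplateDoesNotExist can report every candidate. The sketch below is a stripped-down, framework-free illustration of that pattern; the names (first_available, load) are made up and it is not Django's API.

def first_available(candidates, load, skip=None):
    # Try each candidate in order; skip ones already seen (to avoid
    # extend-recursion) and record why each rejected candidate failed.
    tried = []
    for origin in candidates:
        if skip is not None and origin in skip:
            tried.append((origin, 'Skipped to avoid recursion'))
            continue
        try:
            return load(origin)
        except FileNotFoundError:
            tried.append((origin, 'Source does not exist'))
    raise LookupError(tried)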