column              type           min   max
nwo                 stringlengths  5     91
sha                 stringlengths  40    40
path                stringlengths  5     174
language            stringclasses  1 value
identifier          stringlengths  1     120
parameters          stringlengths  0     3.15k
argument_list       stringclasses  1 value
return_statement    stringlengths  0     24.1k
docstring           stringlengths  0     27.3k
docstring_summary   stringlengths  0     13.8k
docstring_tokens    sequence
function            stringlengths  22    139k
function_tokens     sequence
url                 stringlengths  87    283
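Each row below is one record with the fields listed above (repository, commit sha, file path, function identifier, docstring, source, token sequences, and a GitHub permalink). As a hedged sketch only: if this dataset is published on the Hugging Face Hub, a record could be inspected as follows; the dataset path is a placeholder, not the real name.

from datasets import load_dataset

# Hypothetical path: "org/code-functions" stands in for the real dataset
# name, which is not given in this extract.
ds = load_dataset("org/code-functions", split="train")
row = ds[0]
# Each row pairs one Python function with its metadata and docstring fields.
print(row["nwo"], row["path"], row["identifier"])
print(row["docstring_summary"])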
gramps-project/gramps
04d4651a43eb210192f40a9f8c2bad8ee8fa3753
gramps/gen/user.py
python
UserBase.warn
(self, title, warning="")
Warn the user.

:param title: the title of the warning
:type title: str
:param warning: the warning
:type warning: str
:returns: none
Warn the user.
[ "Warn", "the", "user", "." ]
def warn(self, title, warning=""):
    """
    Warn the user.

    :param title: the title of the warning
    :type title: str
    :param warning: the warning
    :type warning: str
    :returns: none
    """
[ "def", "warn", "(", "self", ",", "title", ",", "warning", "=", "\"\"", ")", ":" ]
https://github.com/gramps-project/gramps/blob/04d4651a43eb210192f40a9f8c2bad8ee8fa3753/gramps/gen/user.py#L142-L151
holzschu/Carnets
44effb10ddfc6aa5c8b0687582a724ba82c6b547
Library/lib/python3.7/site-packages/sympy/calculus/singularities.py
python
singularities
(expression, symbol)
Find singularities of a given function.

Parameters
==========

expression : Expr
    The target function in which singularities need to be found.
symbol : Symbol
    The symbol over the values of which the singularity in
    expression in being searched for.

Returns
=======

Set
    A set of values for ``symbol`` for which ``expression`` has a
    singularity. An ``EmptySet`` is returned if ``expression`` has no
    singularities for any given value of ``Symbol``.

Raises
======

NotImplementedError
    The algorithm to find singularities for irrational functions
    has not been implemented yet.

Notes
=====

This function does not find non-isolated singularities
nor does it find branch points of the expression.

Currently supported functions are:

- univariate rational (real or complex) functions

References
==========

.. [1] https://en.wikipedia.org/wiki/Mathematical_singularity

Examples
========

>>> from sympy.calculus.singularities import singularities
>>> from sympy import Symbol
>>> x = Symbol('x', real=True)
>>> y = Symbol('y', real=False)
>>> singularities(x**2 + x + 1, x)
EmptySet
>>> singularities(1/(x + 1), x)
FiniteSet(-1)
>>> singularities(1/(y**2 + 1), y)
FiniteSet(I, -I)
>>> singularities(1/(y**3 + 1), y)
FiniteSet(-1, 1/2 - sqrt(3)*I/2, 1/2 + sqrt(3)*I/2)
Find singularities of a given function.
[ "Find", "singularities", "of", "a", "given", "function", "." ]
def singularities(expression, symbol):
    """
    Find singularities of a given function.

    Parameters
    ==========

    expression : Expr
        The target function in which singularities need to be found.
    symbol : Symbol
        The symbol over the values of which the singularity in
        expression in being searched for.

    Returns
    =======

    Set
        A set of values for ``symbol`` for which ``expression`` has a
        singularity. An ``EmptySet`` is returned if ``expression`` has no
        singularities for any given value of ``Symbol``.

    Raises
    ======

    NotImplementedError
        The algorithm to find singularities for irrational functions
        has not been implemented yet.

    Notes
    =====

    This function does not find non-isolated singularities
    nor does it find branch points of the expression.

    Currently supported functions are:

    - univariate rational (real or complex) functions

    References
    ==========

    .. [1] https://en.wikipedia.org/wiki/Mathematical_singularity

    Examples
    ========

    >>> from sympy.calculus.singularities import singularities
    >>> from sympy import Symbol
    >>> x = Symbol('x', real=True)
    >>> y = Symbol('y', real=False)
    >>> singularities(x**2 + x + 1, x)
    EmptySet
    >>> singularities(1/(x + 1), x)
    FiniteSet(-1)
    >>> singularities(1/(y**2 + 1), y)
    FiniteSet(I, -I)
    >>> singularities(1/(y**3 + 1), y)
    FiniteSet(-1, 1/2 - sqrt(3)*I/2, 1/2 + sqrt(3)*I/2)
    """
    if not expression.is_rational_function(symbol):
        raise NotImplementedError(
            "Algorithms finding singularities for non-rational"
            " functions are not yet implemented."
        )
    else:
        domain = S.Reals if symbol.is_real else S.Complexes
        return solveset(simplify(1 / expression), symbol, domain)
[ "def", "singularities", "(", "expression", ",", "symbol", ")", ":", "if", "not", "expression", ".", "is_rational_function", "(", "symbol", ")", ":", "raise", "NotImplementedError", "(", "\"Algorithms finding singularities for non-rational\"", "\" functions are not yet implemented.\"", ")", "else", ":", "domain", "=", "S", ".", "Reals", "if", "symbol", ".", "is_real", "else", "S", ".", "Complexes", "return", "solveset", "(", "simplify", "(", "1", "/", "expression", ")", ",", "symbol", ",", "domain", ")" ]
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/sympy/calculus/singularities.py#L24-L90
jython/frozen-mirror
b8d7aa4cee50c0c0fe2f4b235dd62922dd0f3f99
lib-python/2.7/xml/dom/pulldom.py
python
PullDOM.clear
(self)
clear(): Explicitly release parsing structures
clear(): Explicitly release parsing structures
[ "clear", "()", ":", "Explicitly", "release", "parsing", "structures" ]
def clear(self):
    "clear(): Explicitly release parsing structures"
    self.document = None
[ "def", "clear", "(", "self", ")", ":", "self", ".", "document", "=", "None" ]
https://github.com/jython/frozen-mirror/blob/b8d7aa4cee50c0c0fe2f4b235dd62922dd0f3f99/lib-python/2.7/xml/dom/pulldom.py#L198-L200
kensho-technologies/graphql-compiler
4318443b7b2512a059f3616112bfc40bbf8eec06
graphql_compiler/compiler/emit_sql.py
python
CompilationState.start_global_operations
(self)
Execute a GlobalOperationsStart block.
Execute a GlobalOperationsStart block.
[ "Execute", "a", "GlobalOperationsStart", "block", "." ]
def start_global_operations(self) -> None:
    """Execute a GlobalOperationsStart block."""
    if self._current_location is None:
        raise AssertionError("CompilationState is already in global scope.")
    self._current_location = None
[ "def", "start_global_operations", "(", "self", ")", "->", "None", ":", "if", "self", ".", "_current_location", "is", "None", ":", "raise", "AssertionError", "(", "\"CompilationState is already in global scope.\"", ")", "self", ".", "_current_location", "=", "None" ]
https://github.com/kensho-technologies/graphql-compiler/blob/4318443b7b2512a059f3616112bfc40bbf8eec06/graphql_compiler/compiler/emit_sql.py#L1146-L1150
espnet/espnet
ea411f3f627b8f101c211e107d0ff7053344ac80
espnet/lm/lm_utils.py
python
count_tokens
(data, unk_id=None)
return n_tokens, n_oovs
Count tokens and oovs in token ID sequences.

Args:
    data (list[np.ndarray]): list of token ID sequences
    unk_id (int): ID of unknown token

Returns:
    tuple: tuple of number of token occurrences and number of oov tokens
Count tokens and oovs in token ID sequences.
[ "Count", "tokens", "and", "oovs", "in", "token", "ID", "sequences", "." ]
def count_tokens(data, unk_id=None):
    """Count tokens and oovs in token ID sequences.

    Args:
        data (list[np.ndarray]): list of token ID sequences
        unk_id (int): ID of unknown token

    Returns:
        tuple: tuple of number of token occurrences and number of oov tokens
    """
    n_tokens = 0
    n_oovs = 0
    for sentence in data:
        n_tokens += len(sentence)
        if unk_id is not None:
            n_oovs += np.count_nonzero(sentence == unk_id)
    return n_tokens, n_oovs
[ "def", "count_tokens", "(", "data", ",", "unk_id", "=", "None", ")", ":", "n_tokens", "=", "0", "n_oovs", "=", "0", "for", "sentence", "in", "data", ":", "n_tokens", "+=", "len", "(", "sentence", ")", "if", "unk_id", "is", "not", "None", ":", "n_oovs", "+=", "np", ".", "count_nonzero", "(", "sentence", "==", "unk_id", ")", "return", "n_tokens", ",", "n_oovs" ]
https://github.com/espnet/espnet/blob/ea411f3f627b8f101c211e107d0ff7053344ac80/espnet/lm/lm_utils.py#L81-L99
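A short usage sketch for count_tokens above; the arrays are made-up token ID sequences in which ID 0 stands in for the unknown token:

import numpy as np

data = [np.array([1, 2, 0]), np.array([0, 3])]
n_tokens, n_oovs = count_tokens(data, unk_id=0)
print(n_tokens, n_oovs)  # 5 2: five tokens total, two equal to unk_id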
CLUEbenchmark/CLUEPretrainedModels
b384fd41665a8261f9c689c940cf750b3bc21fce
baselines/models/bert/run_classifier.py
python
ColaProcessor.get_test_examples
(self, data_dir)
return self._create_examples( self._read_tsv(os.path.join(data_dir, "test.tsv")), "test")
See base class.
See base class.
[ "See", "base", "class", "." ]
def get_test_examples(self, data_dir):
    """See base class."""
    return self._create_examples(
        self._read_tsv(os.path.join(data_dir, "test.tsv")), "test")
[ "def", "get_test_examples", "(", "self", ",", "data_dir", ")", ":", "return", "self", ".", "_create_examples", "(", "self", ".", "_read_tsv", "(", "os", ".", "path", ".", "join", "(", "data_dir", ",", "\"test.tsv\"", ")", ")", ",", "\"test\"", ")" ]
https://github.com/CLUEbenchmark/CLUEPretrainedModels/blob/b384fd41665a8261f9c689c940cf750b3bc21fce/baselines/models/bert/run_classifier.py#L357-L360
cloudera/hue
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
desktop/core/ext-py/Django-1.11.29/django/db/utils.py
python
ConnectionHandler.ensure_defaults
(self, alias)
Puts the defaults into the settings dictionary for a given connection where no settings is provided.
Puts the defaults into the settings dictionary for a given connection where no settings is provided.
[ "Puts", "the", "defaults", "into", "the", "settings", "dictionary", "for", "a", "given", "connection", "where", "no", "settings", "is", "provided", "." ]
def ensure_defaults(self, alias):
    """
    Puts the defaults into the settings dictionary for a given connection
    where no settings is provided.
    """
    try:
        conn = self.databases[alias]
    except KeyError:
        raise ConnectionDoesNotExist("The connection %s doesn't exist" % alias)

    conn.setdefault('ATOMIC_REQUESTS', False)
    conn.setdefault('AUTOCOMMIT', True)
    conn.setdefault('ENGINE', 'django.db.backends.dummy')
    if conn['ENGINE'] == 'django.db.backends.' or not conn['ENGINE']:
        conn['ENGINE'] = 'django.db.backends.dummy'
    conn.setdefault('CONN_MAX_AGE', 0)
    conn.setdefault('OPTIONS', {})
    conn.setdefault('TIME_ZONE', None)
    for setting in ['NAME', 'USER', 'PASSWORD', 'HOST', 'PORT']:
        conn.setdefault(setting, '')
[ "def", "ensure_defaults", "(", "self", ",", "alias", ")", ":", "try", ":", "conn", "=", "self", ".", "databases", "[", "alias", "]", "except", "KeyError", ":", "raise", "ConnectionDoesNotExist", "(", "\"The connection %s doesn't exist\"", "%", "alias", ")", "conn", ".", "setdefault", "(", "'ATOMIC_REQUESTS'", ",", "False", ")", "conn", ".", "setdefault", "(", "'AUTOCOMMIT'", ",", "True", ")", "conn", ".", "setdefault", "(", "'ENGINE'", ",", "'django.db.backends.dummy'", ")", "if", "conn", "[", "'ENGINE'", "]", "==", "'django.db.backends.'", "or", "not", "conn", "[", "'ENGINE'", "]", ":", "conn", "[", "'ENGINE'", "]", "=", "'django.db.backends.dummy'", "conn", ".", "setdefault", "(", "'CONN_MAX_AGE'", ",", "0", ")", "conn", ".", "setdefault", "(", "'OPTIONS'", ",", "{", "}", ")", "conn", ".", "setdefault", "(", "'TIME_ZONE'", ",", "None", ")", "for", "setting", "in", "[", "'NAME'", ",", "'USER'", ",", "'PASSWORD'", ",", "'HOST'", ",", "'PORT'", "]", ":", "conn", ".", "setdefault", "(", "setting", ",", "''", ")" ]
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/Django-1.11.29/django/db/utils.py#L170-L189
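A hedged sketch of the method's effect, assuming Django 1.11 is importable; an empty alias is filled in with the defaults shown in the code above:

from django.db.utils import ConnectionHandler

handler = ConnectionHandler({'default': {}})
handler.ensure_defaults('default')
conn = handler.databases['default']
print(conn['ENGINE'])      # django.db.backends.dummy (no engine was given)
print(conn['AUTOCOMMIT'])  # True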
dimagi/commcare-hq
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
corehq/apps/hqwebapp/widgets.py
python
DateRangePickerWidget.render
(self, name, value, attrs=None, renderer=None)
return format_html(
    '<div class="input-group hqwebapp-datespan">'
    ' <span class="input-group-addon"><i class="fa fa-calendar"></i></span>'
    ' {}'
    '</div>',
    output
)
[]
def render(self, name, value, attrs=None, renderer=None):
    startdate = ''
    enddate = ''
    if isinstance(self.default_datespan, DateSpan):
        if self.default_datespan.startdate is not None:
            startdate = self.default_datespan.startdate.strftime('%m/%d/%Y')
        if self.default_datespan.enddate is not None:
            enddate = self.default_datespan.enddate.strftime('%m/%d/%Y')

    attrs.update({
        'data-separator': self.separator,
        'data-labels': json.dumps(self.range_labels),
        'data-start-date': startdate,
        'data-end-date': enddate,
    })

    output = super(DateRangePickerWidget, self).render(name, value, attrs, renderer)
    return format_html(
        '<div class="input-group hqwebapp-datespan">'
        ' <span class="input-group-addon"><i class="fa fa-calendar"></i></span>'
        ' {}'
        '</div>',
        output
    )
[ "def", "render", "(", "self", ",", "name", ",", "value", ",", "attrs", "=", "None", ",", "renderer", "=", "None", ")", ":", "startdate", "=", "''", "enddate", "=", "''", "if", "isinstance", "(", "self", ".", "default_datespan", ",", "DateSpan", ")", ":", "if", "self", ".", "default_datespan", ".", "startdate", "is", "not", "None", ":", "startdate", "=", "self", ".", "default_datespan", ".", "startdate", ".", "strftime", "(", "'%m/%d/%Y'", ")", "if", "self", ".", "default_datespan", ".", "enddate", "is", "not", "None", ":", "enddate", "=", "self", ".", "default_datespan", ".", "enddate", ".", "strftime", "(", "'%m/%d/%Y'", ")", "attrs", ".", "update", "(", "{", "'data-separator'", ":", "self", ".", "separator", ",", "'data-labels'", ":", "json", ".", "dumps", "(", "self", ".", "range_labels", ")", ",", "'data-start-date'", ":", "startdate", ",", "'data-end-date'", ":", "enddate", ",", "}", ")", "output", "=", "super", "(", "DateRangePickerWidget", ",", "self", ")", ".", "render", "(", "name", ",", "value", ",", "attrs", ",", "renderer", ")", "return", "format_html", "(", "'<div class=\"input-group hqwebapp-datespan\">'", "' <span class=\"input-group-addon\"><i class=\"fa fa-calendar\"></i></span>'", "' {}'", "'</div>'", ",", "output", ")" ]
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/apps/hqwebapp/widgets.py#L114-L137
tgalal/yowsup
dd47d57a950964bab0c4715e9d56fd8450bc94e2
yowsup/layers/protocol_profiles/protocolentities/iq_statuses_get.py
python
GetStatusesIqProtocolEntity.__init__
(self, jids, _id = None)
Request the statuses of users. Should be sent once after login.

Args:
    - jids: A list of jids representing the users whose statuses you are trying to get.
Request the statuses of users. Should be sent once after login.
[ "Request", "the", "statuses", "of", "users", ".", "Should", "be", "sent", "once", "after", "login", "." ]
def __init__(self, jids, _id = None):
    """
    Request the statuses of users. Should be sent once after login.

    Args:
        - jids: A list of jids representing the users whose statuses you are trying to get.
    """
    super(GetStatusesIqProtocolEntity, self).__init__(self.__class__.XMLNS, _id, _type = "get", to = YowConstants.WHATSAPP_SERVER)
    self.setGetStatusesProps(jids)
[ "def", "__init__", "(", "self", ",", "jids", ",", "_id", "=", "None", ")", ":", "super", "(", "GetStatusesIqProtocolEntity", ",", "self", ")", ".", "__init__", "(", "self", ".", "__class__", ".", "XMLNS", ",", "_id", ",", "_type", "=", "\"get\"", ",", "to", "=", "YowConstants", ".", "WHATSAPP_SERVER", ")", "self", ".", "setGetStatusesProps", "(", "jids", ")" ]
https://github.com/tgalal/yowsup/blob/dd47d57a950964bab0c4715e9d56fd8450bc94e2/yowsup/layers/protocol_profiles/protocolentities/iq_statuses_get.py#L8-L17
chainer/chainer
e9da1423255c58c37be9733f51b158aa9b39dc93
chainer/training/extensions/evaluator.py
python
Evaluator.get_target
(self, name)
return self._targets[name]
Returns the target link of the given name.
Returns the target link of the given name.
[ "Returns", "the", "target", "link", "of", "the", "given", "name", "." ]
def get_target(self, name):
    """Returns the target link of the given name."""
    return self._targets[name]
[ "def", "get_target", "(", "self", ",", "name", ")", ":", "return", "self", ".", "_targets", "[", "name", "]" ]
https://github.com/chainer/chainer/blob/e9da1423255c58c37be9733f51b158aa9b39dc93/chainer/training/extensions/evaluator.py#L140-L142
emmetio/livestyle-sublime-old
c42833c046e9b2f53ebce3df3aa926528f5a33b5
tornado/web.py
python
Application.reverse_url
(self, name, *args)
Returns a URL path for handler named ``name`` The handler must be added to the application as a named `URLSpec`. Args will be substituted for capturing groups in the `URLSpec` regex. They will be converted to strings if necessary, encoded as utf8, and url-escaped.
Returns a URL path for handler named ``name``
[ "Returns", "a", "URL", "path", "for", "handler", "named", "name" ]
def reverse_url(self, name, *args):
    """Returns a URL path for handler named ``name``

    The handler must be added to the application as a named `URLSpec`.

    Args will be substituted for capturing groups in the `URLSpec` regex.
    They will be converted to strings if necessary, encoded as utf8,
    and url-escaped.
    """
    if name in self.named_handlers:
        return self.named_handlers[name].reverse(*args)
    raise KeyError("%s not found in named urls" % name)
[ "def", "reverse_url", "(", "self", ",", "name", ",", "*", "args", ")", ":", "if", "name", "in", "self", ".", "named_handlers", ":", "return", "self", ".", "named_handlers", "[", "name", "]", ".", "reverse", "(", "*", "args", ")", "raise", "KeyError", "(", "\"%s not found in named urls\"", "%", "name", ")" ]
https://github.com/emmetio/livestyle-sublime-old/blob/c42833c046e9b2f53ebce3df3aa926528f5a33b5/tornado/web.py#L1603-L1614
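A hedged usage sketch against Tornado's public API; the handler class and route name are illustrative only:

import tornado.web

class UserHandler(tornado.web.RequestHandler):
    def get(self, user_id):
        self.write('user %s' % user_id)

app = tornado.web.Application([
    tornado.web.url(r'/user/(\d+)', UserHandler, name='user'),
])
print(app.reverse_url('user', 42))  # /user/42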
larryhastings/gilectomy
4315ec3f1d6d4f813cc82ce27a24e7f784dbfc1a
Lib/_strptime.py
python
TimeRE.compile
(self, format)
return re_compile(self.pattern(format), IGNORECASE)
Return a compiled re object for the format string.
Return a compiled re object for the format string.
[ "Return", "a", "compiled", "re", "object", "for", "the", "format", "string", "." ]
def compile(self, format):
    """Return a compiled re object for the format string."""
    return re_compile(self.pattern(format), IGNORECASE)
[ "def", "compile", "(", "self", ",", "format", ")", ":", "return", "re_compile", "(", "self", ".", "pattern", "(", "format", ")", ",", "IGNORECASE", ")" ]
https://github.com/larryhastings/gilectomy/blob/4315ec3f1d6d4f813cc82ce27a24e7f784dbfc1a/Lib/_strptime.py#L273-L275
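TimeRE is a private helper of the stdlib _strptime module, so this sketch assumes CPython's internal layout; the compiled object is an ordinary regex whose named groups correspond to the format directives:

from _strptime import TimeRE

pattern = TimeRE().compile('%Y-%m-%d')
match = pattern.match('2024-01-15')
print(match.groupdict())  # expected: {'Y': '2024', 'm': '01', 'd': '15'}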
robotlearn/pyrobolearn
9cd7c060723fda7d2779fa255ac998c2c82b8436
pyrobolearn/worlds/world.py
python
World.get_body_color
(self, body_id)
return self.sim.get_visual_shape_data(body_id)[-1]
Return the RGBA color of the given body.

Args:
    body_id (int): body id

Returns:
    float[4]: RGBA color
Return the RGBA color of the given body.
[ "Return", "the", "RGBA", "color", "of", "the", "given", "body", "." ]
def get_body_color(self, body_id):
    """
    Return the RGBA color of the given body.

    Args:
        body_id (int): body id

    Returns:
        float[4]: RGBA color
    """
    return self.sim.get_visual_shape_data(body_id)[-1]
[ "def", "get_body_color", "(", "self", ",", "body_id", ")", ":", "return", "self", ".", "sim", ".", "get_visual_shape_data", "(", "body_id", ")", "[", "-", "1", "]" ]
https://github.com/robotlearn/pyrobolearn/blob/9cd7c060723fda7d2779fa255ac998c2c82b8436/pyrobolearn/worlds/world.py#L732-L742
qibinlou/SinaWeibo-Emotion-Classification
f336fc104abd68b0ec4180fe2ed80fafe49cb790
nltk/tree.py
python
ImmutableTree._get_node
(self)
return self._node
Get the node value
Get the node value
[ "Get", "the", "node", "value" ]
def _get_node(self):
    """Get the node value"""
    return self._node
[ "def", "_get_node", "(", "self", ")", ":", "return", "self", ".", "_node" ]
https://github.com/qibinlou/SinaWeibo-Emotion-Classification/blob/f336fc104abd68b0ec4180fe2ed80fafe49cb790/nltk/tree.py#L773-L775
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/geometry/polyhedral_complex.py
python
PolyhedralComplex._an_element_
(self)
Return a (maximal) cell of this complex.

EXAMPLES::

    sage: PolyhedralComplex()._an_element_()
    Traceback (most recent call last):
    ...
    EmptySetError: the complex is empty
    sage: pc = PolyhedralComplex([
    ....:     Polyhedron(vertices=[(1/3, 1/3), (0, 0), (1, 2)]),
    ....:     Polyhedron(vertices=[(1, 2), (0, 0), (0, 1/2)])])
    sage: pc._an_element_().vertices_list()
    [[0, 0], [0, 1/2], [1, 2]]
Return a (maximal) cell of this complex.
[ "Return", "a", "(", "maximal", ")", "cell", "of", "this", "complex", "." ]
def _an_element_(self):
    """
    Return a (maximal) cell of this complex.

    EXAMPLES::

        sage: PolyhedralComplex()._an_element_()
        Traceback (most recent call last):
        ...
        EmptySetError: the complex is empty
        sage: pc = PolyhedralComplex([
        ....:     Polyhedron(vertices=[(1/3, 1/3), (0, 0), (1, 2)]),
        ....:     Polyhedron(vertices=[(1, 2), (0, 0), (0, 1/2)])])
        sage: pc._an_element_().vertices_list()
        [[0, 0], [0, 1/2], [1, 2]]
    """
    try:
        return next(self.maximal_cell_iterator(increasing=False))
    except StopIteration:
        from sage.categories.sets_cat import EmptySetError
        raise EmptySetError("the complex is empty")
[ "def", "_an_element_", "(", "self", ")", ":", "try", ":", "return", "next", "(", "self", ".", "maximal_cell_iterator", "(", "increasing", "=", "False", ")", ")", "except", "StopIteration", ":", "from", "sage", ".", "categories", ".", "sets_cat", "import", "EmptySetError", "raise", "EmptySetError", "(", "\"the complex is empty\"", ")" ]
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/geometry/polyhedral_complex.py#L858-L878
fabioz/PyDev.Debugger
0f8c02a010fe5690405da1dd30ed72326191ce63
pydevd_attach_to_process/winappdbg/breakpoint.py
python
Breakpoint.run_action
(self, event)
return True
Executes the breakpoint action callback, if any was set.

@type  event: L{Event}
@param event: Debug event triggered by the breakpoint.
Executes the breakpoint action callback, if any was set.
[ "Executes", "the", "breakpoint", "action", "callback", "if", "any", "was", "set", "." ]
def run_action(self, event):
    """
    Executes the breakpoint action callback, if any was set.

    @type  event: L{Event}
    @param event: Debug event triggered by the breakpoint.
    """
    action = self.get_action()
    if action is not None:
        try:
            return bool( action(event) )
        except Exception:
            e = sys.exc_info()[1]
            msg = ("Breakpoint action callback %r"
                   " raised an exception: %s")
            msg = msg % (action, traceback.format_exc(e))
            warnings.warn(msg, BreakpointCallbackWarning)
            return False
    return True
[ "def", "run_action", "(", "self", ",", "event", ")", ":", "action", "=", "self", ".", "get_action", "(", ")", "if", "action", "is", "not", "None", ":", "try", ":", "return", "bool", "(", "action", "(", "event", ")", ")", "except", "Exception", ":", "e", "=", "sys", ".", "exc_info", "(", ")", "[", "1", "]", "msg", "=", "(", "\"Breakpoint action callback %r\"", "\" raised an exception: %s\"", ")", "msg", "=", "msg", "%", "(", "action", ",", "traceback", ".", "format_exc", "(", "e", ")", ")", "warnings", ".", "warn", "(", "msg", ",", "BreakpointCallbackWarning", ")", "return", "False", "return", "True" ]
https://github.com/fabioz/PyDev.Debugger/blob/0f8c02a010fe5690405da1dd30ed72326191ce63/pydevd_attach_to_process/winappdbg/breakpoint.py#L391-L409
InvestmentSystems/static-frame
0b19d6969bf6c17fb0599871aca79eb3b52cf2ed
static_frame/core/index_level.py
python
IndexLevel.leaf_loc_to_iloc
(self, key: tp.Union[tp.Iterable[tp.Hashable], ILoc, HLoc] )
Given an iterable of single-element level keys (a leaf loc), return the iloc value. Note that key components (level selectors) cannot be slices, lists, or np.ndarray.
Given an iterable of single-element level keys (a leaf loc), return the iloc value.
[ "Given", "an", "iterable", "of", "single", "-", "element", "level", "keys", "(", "a", "leaf", "loc", ")", "return", "the", "iloc", "value", "." ]
def leaf_loc_to_iloc(self,
        key: tp.Union[tp.Iterable[tp.Hashable], ILoc, HLoc]
        ) -> int:
    '''Given an iterable of single-element level keys (a leaf loc), return the iloc value.

    Note that key components (level selectors) cannot be slices, lists, or np.ndarray.
    '''
    if isinstance(key, ILoc):
        return key.key #type: ignore [return-value]

    node = self
    pos = 0
    key_depth_max = len(key) - 1 #type: ignore

    # NOTE: rather than a for/enumerate, this could use a while loop on an iter() and explicitly look at next() results to determine if the key matches
    for key_depth, k in enumerate(key):
        if isinstance(k, KEY_MULTIPLE_TYPES):
            raise RuntimeError(f'slices cannot be used in a leaf selection into an IndexHierarchy; try HLoc[{key}].')
        if node.targets is not None:
            node = node.targets[node.index._loc_to_iloc(k)]
            pos += node.offset
        else: # targets is None, meaning we are at max depth
            # k returns an integer
            offset = node.index._loc_to_iloc(k)
            assert isinstance(offset, INT_TYPES) # enforces leaf loc
            if key_depth == key_depth_max:
                return pos + offset
            break # return exception below if key_depth not max depth

    raise KeyError(f'Invalid key length {key_depth_max + 1}; must be length {self.depth}.')
[ "def", "leaf_loc_to_iloc", "(", "self", ",", "key", ":", "tp", ".", "Union", "[", "tp", ".", "Iterable", "[", "tp", ".", "Hashable", "]", ",", "ILoc", ",", "HLoc", "]", ")", "->", "int", ":", "if", "isinstance", "(", "key", ",", "ILoc", ")", ":", "return", "key", ".", "key", "#type: ignore [return-value]", "node", "=", "self", "pos", "=", "0", "key_depth_max", "=", "len", "(", "key", ")", "-", "1", "#type: ignore", "# NOTE: rather than a for/enumerate, this could use a while loop on an iter() and explicitly look at next() results to determine if the key matches", "for", "key_depth", ",", "k", "in", "enumerate", "(", "key", ")", ":", "if", "isinstance", "(", "k", ",", "KEY_MULTIPLE_TYPES", ")", ":", "raise", "RuntimeError", "(", "f'slices cannot be used in a leaf selection into an IndexHierarchy; try HLoc[{key}].'", ")", "if", "node", ".", "targets", "is", "not", "None", ":", "node", "=", "node", ".", "targets", "[", "node", ".", "index", ".", "_loc_to_iloc", "(", "k", ")", "]", "pos", "+=", "node", ".", "offset", "else", ":", "# targets is None, meaning we are at max depth", "# k returns an integer", "offset", "=", "node", ".", "index", ".", "_loc_to_iloc", "(", "k", ")", "assert", "isinstance", "(", "offset", ",", "INT_TYPES", ")", "# enforces leaf loc", "if", "key_depth", "==", "key_depth_max", ":", "return", "pos", "+", "offset", "break", "# return exception below if key_depth not max depth", "raise", "KeyError", "(", "f'Invalid key length {key_depth_max + 1}; must be length {self.depth}.'", ")" ]
https://github.com/InvestmentSystems/static-frame/blob/0b19d6969bf6c17fb0599871aca79eb3b52cf2ed/static_frame/core/index_level.py#L580-L609
pypa/pip
7f8a6844037fb7255cfd0d34ff8e8cf44f2598d4
src/pip/_vendor/pep517/dirtools.py
python
tempdir
()
Create a temporary directory in a context manager.
Create a temporary directory in a context manager.
[ "Create", "a", "temporary", "directory", "in", "a", "context", "manager", "." ]
def tempdir():
    """Create a temporary directory in a context manager."""
    td = tempfile.mkdtemp()
    try:
        yield td
    finally:
        shutil.rmtree(td)
[ "def", "tempdir", "(", ")", ":", "td", "=", "tempfile", ".", "mkdtemp", "(", ")", "try", ":", "yield", "td", "finally", ":", "shutil", ".", "rmtree", "(", "td", ")" ]
https://github.com/pypa/pip/blob/7f8a6844037fb7255cfd0d34ff8e8cf44f2598d4/src/pip/_vendor/pep517/dirtools.py#L11-L17
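In the pep517 source this generator is wrapped by contextlib.contextmanager; the decorator sits just above the extracted span, which is why the bare function above cannot be used in a with-statement as shown. A self-contained sketch of the same pattern:

import shutil
import tempfile
from contextlib import contextmanager

@contextmanager
def tempdir():
    """Create a temporary directory in a context manager."""
    td = tempfile.mkdtemp()
    try:
        yield td
    finally:
        shutil.rmtree(td)

with tempdir() as td:
    print('working in', td)  # the directory is removed on exit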
IronLanguages/main
a949455434b1fda8c783289e897e78a9a0caabb5
External.LCA_RESTRICTED/Languages/CPython/27/Lib/ast.py
python
dump
(node, annotate_fields=True, include_attributes=False)
return _format(node)
Return a formatted dump of the tree in *node*. This is mainly useful for debugging purposes. The returned string will show the names and the values for fields. This makes the code impossible to evaluate, so if evaluation is wanted *annotate_fields* must be set to False. Attributes such as line numbers and column offsets are not dumped by default. If this is wanted, *include_attributes* can be set to True.
Return a formatted dump of the tree in *node*. This is mainly useful for debugging purposes. The returned string will show the names and the values for fields. This makes the code impossible to evaluate, so if evaluation is wanted *annotate_fields* must be set to False. Attributes such as line numbers and column offsets are not dumped by default. If this is wanted, *include_attributes* can be set to True.
[ "Return", "a", "formatted", "dump", "of", "the", "tree", "in", "*", "node", "*", ".", "This", "is", "mainly", "useful", "for", "debugging", "purposes", ".", "The", "returned", "string", "will", "show", "the", "names", "and", "the", "values", "for", "fields", ".", "This", "makes", "the", "code", "impossible", "to", "evaluate", "so", "if", "evaluation", "is", "wanted", "*", "annotate_fields", "*", "must", "be", "set", "to", "False", ".", "Attributes", "such", "as", "line", "numbers", "and", "column", "offsets", "are", "not", "dumped", "by", "default", ".", "If", "this", "is", "wanted", "*", "include_attributes", "*", "can", "be", "set", "to", "True", "." ]
def dump(node, annotate_fields=True, include_attributes=False):
    """
    Return a formatted dump of the tree in *node*. This is mainly useful for
    debugging purposes. The returned string will show the names and the values
    for fields. This makes the code impossible to evaluate, so if evaluation
    is wanted *annotate_fields* must be set to False. Attributes such as line
    numbers and column offsets are not dumped by default. If this is wanted,
    *include_attributes* can be set to True.
    """
    def _format(node):
        if isinstance(node, AST):
            fields = [(a, _format(b)) for a, b in iter_fields(node)]
            rv = '%s(%s' % (node.__class__.__name__, ', '.join(
                ('%s=%s' % field for field in fields)
                if annotate_fields else
                (b for a, b in fields)
            ))
            if include_attributes and node._attributes:
                rv += fields and ', ' or ' '
                rv += ', '.join('%s=%s' % (a, _format(getattr(node, a)))
                                for a in node._attributes)
            return rv + ')'
        elif isinstance(node, list):
            return '[%s]' % ', '.join(_format(x) for x in node)
        return repr(node)
    if not isinstance(node, AST):
        raise TypeError('expected AST, got %r' % node.__class__.__name__)
    return _format(node)
[ "def", "dump", "(", "node", ",", "annotate_fields", "=", "True", ",", "include_attributes", "=", "False", ")", ":", "def", "_format", "(", "node", ")", ":", "if", "isinstance", "(", "node", ",", "AST", ")", ":", "fields", "=", "[", "(", "a", ",", "_format", "(", "b", ")", ")", "for", "a", ",", "b", "in", "iter_fields", "(", "node", ")", "]", "rv", "=", "'%s(%s'", "%", "(", "node", ".", "__class__", ".", "__name__", ",", "', '", ".", "join", "(", "(", "'%s=%s'", "%", "field", "for", "field", "in", "fields", ")", "if", "annotate_fields", "else", "(", "b", "for", "a", ",", "b", "in", "fields", ")", ")", ")", "if", "include_attributes", "and", "node", ".", "_attributes", ":", "rv", "+=", "fields", "and", "', '", "or", "' '", "rv", "+=", "', '", ".", "join", "(", "'%s=%s'", "%", "(", "a", ",", "_format", "(", "getattr", "(", "node", ",", "a", ")", ")", ")", "for", "a", "in", "node", ".", "_attributes", ")", "return", "rv", "+", "')'", "elif", "isinstance", "(", "node", ",", "list", ")", ":", "return", "'[%s]'", "%", "', '", ".", "join", "(", "_format", "(", "x", ")", "for", "x", "in", "node", ")", "return", "repr", "(", "node", ")", "if", "not", "isinstance", "(", "node", ",", "AST", ")", ":", "raise", "TypeError", "(", "'expected AST, got %r'", "%", "node", ".", "__class__", ".", "__name__", ")", "return", "_format", "(", "node", ")" ]
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/CPython/27/Lib/ast.py#L83-L110
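A quick usage sketch; the output shown is what older CPython releases print (this file is from the 2.7 stdlib), where numbers render as Num nodes rather than the Constant nodes of modern Python:

import ast

tree = ast.parse('x + 1', mode='eval')
print(ast.dump(tree))
# Expression(body=BinOp(left=Name(id='x', ctx=Load()), op=Add(), right=Num(n=1)))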
pycrypto/pycrypto
7acba5f3a6ff10f1424c309d0d34d2b713233019
lib/Crypto/PublicKey/RSA.py
python
_RSAobj.verify
(self, M, signature)
return pubkey.pubkey.verify(self, M, signature)
Verify the validity of an RSA signature.

:attention: this function performs the plain, primitive RSA encryption
 (*textbook*). In real applications, you always need to use proper
 cryptographic padding, and you should not directly verify data with
 this method. Failure to do so may lead to security vulnerabilities.
 It is recommended to use modules `Crypto.Signature.PKCS1_PSS` or
 `Crypto.Signature.PKCS1_v1_5` instead.

:Parameter M: The expected message.
:Type M: byte string or long

:Parameter signature: The RSA signature to verify. The first item of
 the tuple is the actual signature (a long not larger than the modulus
 **n**), whereas the second item is always ignored.
:Type signature: A 2-item tuple as return by `sign`

:Return: True if the signature is correct, False otherwise.
Verify the validity of an RSA signature.
[ "Verify", "the", "validity", "of", "an", "RSA", "signature", "." ]
def verify(self, M, signature):
    """Verify the validity of an RSA signature.

    :attention: this function performs the plain, primitive RSA encryption
     (*textbook*). In real applications, you always need to use proper
     cryptographic padding, and you should not directly verify data with
     this method. Failure to do so may lead to security vulnerabilities.
     It is recommended to use modules `Crypto.Signature.PKCS1_PSS` or
     `Crypto.Signature.PKCS1_v1_5` instead.

    :Parameter M: The expected message.
    :Type M: byte string or long

    :Parameter signature: The RSA signature to verify. The first item of
     the tuple is the actual signature (a long not larger than the modulus
     **n**), whereas the second item is always ignored.
    :Type signature: A 2-item tuple as return by `sign`

    :Return: True if the signature is correct, False otherwise.
    """
    return pubkey.pubkey.verify(self, M, signature)
[ "def", "verify", "(", "self", ",", "M", ",", "signature", ")", ":", "return", "pubkey", ".", "pubkey", ".", "verify", "(", "self", ",", "M", ",", "signature", ")" ]
https://github.com/pycrypto/pycrypto/blob/7acba5f3a6ff10f1424c309d0d34d2b713233019/lib/Crypto/PublicKey/RSA.py#L209-L229
eirannejad/pyRevit
49c0b7eb54eb343458ce1365425e6552d0c47d44
site-packages/sqlalchemy/engine/default.py
python
DefaultExecutionContext.get_lastrowid
(self)
return self.cursor.lastrowid
return self.cursor.lastrowid, or equivalent, after an INSERT.

This may involve calling special cursor functions, issuing a new SELECT
on the cursor (or a new one), or returning a stored value that was
calculated within post_exec().

This function will only be called for dialects which support "implicit"
primary key generation, keep preexecute_autoincrement_sequences set to
False, and when no explicit id value was bound to the statement.

The function is called once, directly after post_exec() and before the
transaction is committed or ResultProxy is generated. If the post_exec()
method assigns a value to `self._lastrowid`, the value is used in place
of calling get_lastrowid().

Note that this method is *not* equivalent to the ``lastrowid`` method
on ``ResultProxy``, which is a direct proxy to the DBAPI ``lastrowid``
accessor in all cases.
return self.cursor.lastrowid, or equivalent, after an INSERT.
[ "return", "self", ".", "cursor", ".", "lastrowid", "or", "equivalent", "after", "an", "INSERT", "." ]
def get_lastrowid(self):
    """return self.cursor.lastrowid, or equivalent, after an INSERT.

    This may involve calling special cursor functions, issuing a new SELECT
    on the cursor (or a new one), or returning a stored value that was
    calculated within post_exec().

    This function will only be called for dialects which support "implicit"
    primary key generation, keep preexecute_autoincrement_sequences set to
    False, and when no explicit id value was bound to the statement.

    The function is called once, directly after post_exec() and before the
    transaction is committed or ResultProxy is generated. If the post_exec()
    method assigns a value to `self._lastrowid`, the value is used in place
    of calling get_lastrowid().

    Note that this method is *not* equivalent to the ``lastrowid`` method
    on ``ResultProxy``, which is a direct proxy to the DBAPI ``lastrowid``
    accessor in all cases.
    """
    return self.cursor.lastrowid
[ "def", "get_lastrowid", "(", "self", ")", ":", "return", "self", ".", "cursor", ".", "lastrowid" ]
https://github.com/eirannejad/pyRevit/blob/49c0b7eb54eb343458ce1365425e6552d0c47d44/site-packages/sqlalchemy/engine/default.py#L842-L868
arizvisa/ida-minsc
8627a60f047b5e55d3efeecde332039cd1a16eea
base/structure.py
python
structure_t.__contains__
(self, member)
return member in self.members
Return whether the specified `member` is contained by this structure.
Return whether the specified `member` is contained by this structure.
[ "Return", "whether", "the", "specified", "member", "is", "contained", "by", "this", "structure", "." ]
def __contains__(self, member):
    '''Return whether the specified `member` is contained by this structure.'''
    if not isinstance(member, member_t):
        raise TypeError(member)
    return member in self.members
[ "def", "__contains__", "(", "self", ",", "member", ")", ":", "if", "not", "isinstance", "(", "member", ",", "member_t", ")", ":", "raise", "TypeError", "(", "member", ")", "return", "member", "in", "self", ".", "members" ]
https://github.com/arizvisa/ida-minsc/blob/8627a60f047b5e55d3efeecde332039cd1a16eea/base/structure.py#L696-L700
google/timesketch
1ce6b60e125d104e6644947c6f1dbe1b82ac76b6
api_client/python/timesketch_api_client/story.py
python
ViewBlock.__init__
(self, story, index)
[]
def __init__(self, story, index):
    super().__init__(story, index)
    self._view_id = 0
    self._view_name = ''
[ "def", "__init__", "(", "self", ",", "story", ",", "index", ")", ":", "super", "(", ")", ".", "__init__", "(", "story", ",", "index", ")", "self", ".", "_view_id", "=", "0", "self", ".", "_view_name", "=", "''" ]
https://github.com/google/timesketch/blob/1ce6b60e125d104e6644947c6f1dbe1b82ac76b6/api_client/python/timesketch_api_client/story.py#L115-L118
tensorflow/benchmarks
16af178ad312e8c1213efb27a5f227044228bfdf
scripts/tf_cnn_benchmarks/models/tf1_only/nasnet_model.py
python
_build_nasnet_base
(images, normal_cell, reduction_cell, num_classes, hparams, is_training, stem_type, final_endpoint=None)
return logits, end_points
Constructs a NASNet image model.
Constructs a NASNet image model.
[ "Constructs", "a", "NASNet", "image", "model", "." ]
def _build_nasnet_base(images,
                       normal_cell,
                       reduction_cell,
                       num_classes,
                       hparams,
                       is_training,
                       stem_type,
                       final_endpoint=None):
  """Constructs a NASNet image model."""
  end_points = {}

  def add_and_check_endpoint(endpoint_name, net):
    end_points[endpoint_name] = net
    return final_endpoint and (endpoint_name == final_endpoint)

  # Find where to place the reduction cells or stride normal cells
  reduction_indices = nasnet_utils.calc_reduction_layers(
      hparams.num_cells, hparams.num_reduction_layers)
  stem_cell = reduction_cell

  if stem_type == 'imagenet':
    stem = lambda: _imagenet_stem(images, hparams, stem_cell)
  elif stem_type == 'cifar':
    stem = lambda: _cifar_stem(images, hparams)
  else:
    raise ValueError('Unknown stem_type: ', stem_type)
  net, cell_outputs = stem()
  if add_and_check_endpoint('Stem', net):
    return net, end_points

  # Setup for building in the auxiliary head.
  aux_head_cell_idxes = []
  if len(reduction_indices) >= 2:
    aux_head_cell_idxes.append(reduction_indices[1] - 1)

  # Run the cells
  filter_scaling = 1.0
  # true_cell_num accounts for the stem cells
  true_cell_num = 2 if stem_type == 'imagenet' else 0
  for cell_num in range(hparams.num_cells):
    stride = 1
    if hparams.skip_reduction_layer_input:
      prev_layer = cell_outputs[-2]
    if cell_num in reduction_indices:
      filter_scaling *= hparams.filter_scaling_rate
      net = reduction_cell(
          net,
          scope='reduction_cell_{}'.format(reduction_indices.index(cell_num)),
          filter_scaling=filter_scaling,
          stride=2,
          prev_layer=cell_outputs[-2],
          cell_num=true_cell_num)
      if add_and_check_endpoint(
          'Reduction_Cell_{}'.format(reduction_indices.index(cell_num)), net):
        return net, end_points
      true_cell_num += 1
      cell_outputs.append(net)
    if not hparams.skip_reduction_layer_input:
      prev_layer = cell_outputs[-2]
    net = normal_cell(
        net,
        scope='cell_{}'.format(cell_num),
        filter_scaling=filter_scaling,
        stride=stride,
        prev_layer=prev_layer,
        cell_num=true_cell_num)
    if add_and_check_endpoint('Cell_{}'.format(cell_num), net):
      return net, end_points
    true_cell_num += 1
    if (hparams.use_aux_head and cell_num in aux_head_cell_idxes and
        num_classes and is_training):
      aux_net = tf.nn.relu(net)
      _build_aux_head(aux_net, end_points, num_classes, hparams,
                      scope='aux_{}'.format(cell_num))
    cell_outputs.append(net)

  # Final softmax layer
  with tf.variable_scope('final_layer'):
    net = tf.nn.relu(net)
    net = nasnet_utils.global_avg_pool(net)
    if add_and_check_endpoint('global_pool', net) or num_classes is None:
      return net, end_points
    net = slim.dropout(net, hparams.dense_dropout_keep_prob, scope='dropout')
    logits = slim.fully_connected(net, num_classes)
    if add_and_check_endpoint('Logits', logits):
      return net, end_points
    predictions = tf.nn.softmax(logits, name='predictions')
    if add_and_check_endpoint('Predictions', predictions):
      return net, end_points
  return logits, end_points
[ "def", "_build_nasnet_base", "(", "images", ",", "normal_cell", ",", "reduction_cell", ",", "num_classes", ",", "hparams", ",", "is_training", ",", "stem_type", ",", "final_endpoint", "=", "None", ")", ":", "end_points", "=", "{", "}", "def", "add_and_check_endpoint", "(", "endpoint_name", ",", "net", ")", ":", "end_points", "[", "endpoint_name", "]", "=", "net", "return", "final_endpoint", "and", "(", "endpoint_name", "==", "final_endpoint", ")", "# Find where to place the reduction cells or stride normal cells", "reduction_indices", "=", "nasnet_utils", ".", "calc_reduction_layers", "(", "hparams", ".", "num_cells", ",", "hparams", ".", "num_reduction_layers", ")", "stem_cell", "=", "reduction_cell", "if", "stem_type", "==", "'imagenet'", ":", "stem", "=", "lambda", ":", "_imagenet_stem", "(", "images", ",", "hparams", ",", "stem_cell", ")", "elif", "stem_type", "==", "'cifar'", ":", "stem", "=", "lambda", ":", "_cifar_stem", "(", "images", ",", "hparams", ")", "else", ":", "raise", "ValueError", "(", "'Unknown stem_type: '", ",", "stem_type", ")", "net", ",", "cell_outputs", "=", "stem", "(", ")", "if", "add_and_check_endpoint", "(", "'Stem'", ",", "net", ")", ":", "return", "net", ",", "end_points", "# Setup for building in the auxiliary head.", "aux_head_cell_idxes", "=", "[", "]", "if", "len", "(", "reduction_indices", ")", ">=", "2", ":", "aux_head_cell_idxes", ".", "append", "(", "reduction_indices", "[", "1", "]", "-", "1", ")", "# Run the cells", "filter_scaling", "=", "1.0", "# true_cell_num accounts for the stem cells", "true_cell_num", "=", "2", "if", "stem_type", "==", "'imagenet'", "else", "0", "for", "cell_num", "in", "range", "(", "hparams", ".", "num_cells", ")", ":", "stride", "=", "1", "if", "hparams", ".", "skip_reduction_layer_input", ":", "prev_layer", "=", "cell_outputs", "[", "-", "2", "]", "if", "cell_num", "in", "reduction_indices", ":", "filter_scaling", "*=", "hparams", ".", "filter_scaling_rate", "net", "=", "reduction_cell", "(", "net", ",", "scope", "=", "'reduction_cell_{}'", ".", "format", "(", "reduction_indices", ".", "index", "(", "cell_num", ")", ")", ",", "filter_scaling", "=", "filter_scaling", ",", "stride", "=", "2", ",", "prev_layer", "=", "cell_outputs", "[", "-", "2", "]", ",", "cell_num", "=", "true_cell_num", ")", "if", "add_and_check_endpoint", "(", "'Reduction_Cell_{}'", ".", "format", "(", "reduction_indices", ".", "index", "(", "cell_num", ")", ")", ",", "net", ")", ":", "return", "net", ",", "end_points", "true_cell_num", "+=", "1", "cell_outputs", ".", "append", "(", "net", ")", "if", "not", "hparams", ".", "skip_reduction_layer_input", ":", "prev_layer", "=", "cell_outputs", "[", "-", "2", "]", "net", "=", "normal_cell", "(", "net", ",", "scope", "=", "'cell_{}'", ".", "format", "(", "cell_num", ")", ",", "filter_scaling", "=", "filter_scaling", ",", "stride", "=", "stride", ",", "prev_layer", "=", "prev_layer", ",", "cell_num", "=", "true_cell_num", ")", "if", "add_and_check_endpoint", "(", "'Cell_{}'", ".", "format", "(", "cell_num", ")", ",", "net", ")", ":", "return", "net", ",", "end_points", "true_cell_num", "+=", "1", "if", "(", "hparams", ".", "use_aux_head", "and", "cell_num", "in", "aux_head_cell_idxes", "and", "num_classes", "and", "is_training", ")", ":", "aux_net", "=", "tf", ".", "nn", ".", "relu", "(", "net", ")", "_build_aux_head", "(", "aux_net", ",", "end_points", ",", "num_classes", ",", "hparams", ",", "scope", "=", "'aux_{}'", ".", "format", "(", "cell_num", ")", ")", "cell_outputs", ".", "append", "(", "net", 
")", "# Final softmax layer", "with", "tf", ".", "variable_scope", "(", "'final_layer'", ")", ":", "net", "=", "tf", ".", "nn", ".", "relu", "(", "net", ")", "net", "=", "nasnet_utils", ".", "global_avg_pool", "(", "net", ")", "if", "add_and_check_endpoint", "(", "'global_pool'", ",", "net", ")", "or", "num_classes", "is", "None", ":", "return", "net", ",", "end_points", "net", "=", "slim", ".", "dropout", "(", "net", ",", "hparams", ".", "dense_dropout_keep_prob", ",", "scope", "=", "'dropout'", ")", "logits", "=", "slim", ".", "fully_connected", "(", "net", ",", "num_classes", ")", "if", "add_and_check_endpoint", "(", "'Logits'", ",", "logits", ")", ":", "return", "net", ",", "end_points", "predictions", "=", "tf", ".", "nn", ".", "softmax", "(", "logits", ",", "name", "=", "'predictions'", ")", "if", "add_and_check_endpoint", "(", "'Predictions'", ",", "predictions", ")", ":", "return", "net", ",", "end_points", "return", "logits", ",", "end_points" ]
https://github.com/tensorflow/benchmarks/blob/16af178ad312e8c1213efb27a5f227044228bfdf/scripts/tf_cnn_benchmarks/models/tf1_only/nasnet_model.py#L438-L535
aws-quickstart/quickstart-redhat-openshift
2b87dd38b72e7e4c439a606c5a9ea458d72da612
functions/source/DeleteBucketContents/requests/api.py
python
delete
(url, **kwargs)
return request('delete', url, **kwargs)
r"""Sends a DELETE request. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response <Response>` object :rtype: requests.Response
r"""Sends a DELETE request.
[ "r", "Sends", "a", "DELETE", "request", "." ]
def delete(url, **kwargs):
    r"""Sends a DELETE request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request('delete', url, **kwargs)
[ "def", "delete", "(", "url", ",", "*", "*", "kwargs", ")", ":", "return", "request", "(", "'delete'", ",", "url", ",", "*", "*", "kwargs", ")" ]
https://github.com/aws-quickstart/quickstart-redhat-openshift/blob/2b87dd38b72e7e4c439a606c5a9ea458d72da612/functions/source/DeleteBucketContents/requests/api.py#L149-L158
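A minimal usage sketch for the module-level helper; httpbin.org is used only as a convenient echo service:

import requests

resp = requests.delete('https://httpbin.org/delete', params={'id': '42'})
print(resp.status_code)  # 200 when the service is reachable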
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/google/appengine/api/datastore_types.py
python
PackInteger
(name, value, pbvalue)
Packs an integer property into a entity_pb.PropertyValue.

Args:
    name: The name of the property as a string.
    value: An int or long instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
Packs an integer property into a entity_pb.PropertyValue.
[ "Packs", "an", "integer", "property", "into", "a", "entity_pb", ".", "PropertyValue", "." ]
def PackInteger(name, value, pbvalue):
    """Packs an integer property into a entity_pb.PropertyValue.

    Args:
        name: The name of the property as a string.
        value: An int or long instance.
        pbvalue: The entity_pb.PropertyValue to pack this value into.
    """
    pbvalue.set_int64value(value)
[ "def", "PackInteger", "(", "name", ",", "value", ",", "pbvalue", ")", ":", "pbvalue", ".", "set_int64value", "(", "value", ")" ]
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/google/appengine/api/datastore_types.py#L1667-L1675
jgagneastro/coffeegrindsize
22661ebd21831dba4cf32bfc6ba59fe3d49f879c
App/venv/lib/python3.7/site-packages/pip/_vendor/lockfile/pidlockfile.py
python
PIDLockFile.acquire
(self, timeout=None)
Acquire the lock. Creates the PID file for this lock, or raises an error if the lock could not be acquired.
Acquire the lock.
[ "Acquire", "the", "lock", "." ]
def acquire(self, timeout=None):
    """
    Acquire the lock.

    Creates the PID file for this lock, or raises an error if
    the lock could not be acquired.
    """
    timeout = timeout if timeout is not None else self.timeout
    end_time = time.time()
    if timeout is not None and timeout > 0:
        end_time += timeout

    while True:
        try:
            write_pid_to_pidfile(self.path)
        except OSError as exc:
            if exc.errno == errno.EEXIST:
                # The lock creation failed. Maybe sleep a bit.
                if time.time() > end_time:
                    if timeout is not None and timeout > 0:
                        raise LockTimeout("Timeout waiting to acquire"
                                          " lock for %s" % self.path)
                    else:
                        raise AlreadyLocked("%s is already locked" % self.path)
                time.sleep(timeout is not None and timeout / 10 or 0.1)
            else:
                raise LockFailed("failed to create %s" % self.path)
        else:
            return
[ "def", "acquire", "(", "self", ",", "timeout", "=", "None", ")", ":", "timeout", "=", "timeout", "if", "timeout", "is", "not", "None", "else", "self", ".", "timeout", "end_time", "=", "time", ".", "time", "(", ")", "if", "timeout", "is", "not", "None", "and", "timeout", ">", "0", ":", "end_time", "+=", "timeout", "while", "True", ":", "try", ":", "write_pid_to_pidfile", "(", "self", ".", "path", ")", "except", "OSError", "as", "exc", ":", "if", "exc", ".", "errno", "==", "errno", ".", "EEXIST", ":", "# The lock creation failed. Maybe sleep a bit.", "if", "time", ".", "time", "(", ")", ">", "end_time", ":", "if", "timeout", "is", "not", "None", "and", "timeout", ">", "0", ":", "raise", "LockTimeout", "(", "\"Timeout waiting to acquire\"", "\" lock for %s\"", "%", "self", ".", "path", ")", "else", ":", "raise", "AlreadyLocked", "(", "\"%s is already locked\"", "%", "self", ".", "path", ")", "time", ".", "sleep", "(", "timeout", "is", "not", "None", "and", "timeout", "/", "10", "or", "0.1", ")", "else", ":", "raise", "LockFailed", "(", "\"failed to create %s\"", "%", "self", ".", "path", ")", "else", ":", "return" ]
https://github.com/jgagneastro/coffeegrindsize/blob/22661ebd21831dba4cf32bfc6ba59fe3d49f879c/App/venv/lib/python3.7/site-packages/pip/_vendor/lockfile/pidlockfile.py#L63-L93
usnistgov/fipy
6809b180b41a11de988a48655575df7e142c93b9
fipy/tools/dimensions/physicalField.py
python
PhysicalField.arcsin
(self)
return PhysicalField(value = umath.arcsin(self.inDimensionless()), unit = "rad")
Return the inverse sine of the `PhysicalField` in radians

    >>> print(PhysicalField(1).arcsin().allclose("1.57079632679 rad"))
    1

The input `PhysicalField` must be dimensionless

    >>> print(numerix.round_(PhysicalField("1 m").arcsin(), 6))
    Traceback (most recent call last):
        ...
    TypeError: Incompatible units
Return the inverse sine of the `PhysicalField` in radians
[ "Return", "the", "inverse", "sine", "of", "the", "PhysicalField", "in", "radians" ]
def arcsin(self):
    """
    Return the inverse sine of the `PhysicalField` in radians

        >>> print(PhysicalField(1).arcsin().allclose("1.57079632679 rad"))
        1

    The input `PhysicalField` must be dimensionless

        >>> print(numerix.round_(PhysicalField("1 m").arcsin(), 6))
        Traceback (most recent call last):
            ...
        TypeError: Incompatible units
    """
    return PhysicalField(value = umath.arcsin(self.inDimensionless()), unit = "rad")
[ "def", "arcsin", "(", "self", ")", ":", "return", "PhysicalField", "(", "value", "=", "umath", ".", "arcsin", "(", "self", ".", "inDimensionless", "(", ")", ")", ",", "unit", "=", "\"rad\"", ")" ]
https://github.com/usnistgov/fipy/blob/6809b180b41a11de988a48655575df7e142c93b9/fipy/tools/dimensions/physicalField.py#L998-L1012
sqall01/alertR
e1d1a83e54f876cc4cd7bd87387e05cb75d4dc13
alertClientRaspberryPi/lib/client/util.py
python
MsgBuilder.build_ping_msg
()
return json.dumps(message)
Internal function that builds the ping message. :return:
Internal function that builds the ping message.
[ "Internal", "function", "that", "builds", "the", "ping", "message", "." ]
def build_ping_msg() -> str:
    """
    Internal function that builds the ping message.
    :return:
    """
    payload = {"type": "request"}
    utc_timestamp = int(time.time())
    message = {"msgTime": utc_timestamp,
               "message": "ping",
               "payload": payload}
    return json.dumps(message)
[ "def", "build_ping_msg", "(", ")", "->", "str", ":", "payload", "=", "{", "\"type\"", ":", "\"request\"", "}", "utc_timestamp", "=", "int", "(", "time", ".", "time", "(", ")", ")", "message", "=", "{", "\"msgTime\"", ":", "utc_timestamp", ",", "\"message\"", ":", "\"ping\"", ",", "\"payload\"", ":", "payload", "}", "return", "json", ".", "dumps", "(", "message", ")" ]
https://github.com/sqall01/alertR/blob/e1d1a83e54f876cc4cd7bd87387e05cb75d4dc13/alertClientRaspberryPi/lib/client/util.py#L1349-L1360
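A quick check of the message shape, assuming build_ping_msg is callable as the static method it appears to be on MsgBuilder:

import json

msg = json.loads(MsgBuilder.build_ping_msg())
print(sorted(msg))     # ['message', 'msgTime', 'payload']
print(msg['message'])  # ping
print(msg['payload'])  # {'type': 'request'}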
Rapptz/RoboDanny
1fb95d76d1b7685e2e2ff950e11cddfc96efbfec
cogs/utils/db.py
python
Table.migrate
(cls, *, directory='migrations', index=-1, downgrade=False, verbose=False, connection=None)
Actually run the latest migration pointed by the data file.

Parameters
-----------
directory: str
    The directory of where the migration data file resides.
index: int
    The index of the migration array to use.
downgrade: bool
    Whether to run an upgrade or a downgrade.
verbose: bool
    Whether to output some information to stdout.
connection: Optional[asyncpg.Connection]
    The connection to use, if not provided will acquire one from
    the internal pool.
Actually run the latest migration pointed by the data file.
[ "Actually", "run", "the", "latest", "migration", "pointed", "by", "the", "data", "file", "." ]
async def migrate(cls, *, directory='migrations', index=-1, downgrade=False, verbose=False, connection=None):
    """Actually run the latest migration pointed by the data file.

    Parameters
    -----------
    directory: str
        The directory of where the migration data file resides.
    index: int
        The index of the migration array to use.
    downgrade: bool
        Whether to run an upgrade or a downgrade.
    verbose: bool
        Whether to output some information to stdout.
    connection: Optional[asyncpg.Connection]
        The connection to use, if not provided will acquire one from
        the internal pool.
    """
    directory = Path(directory) / cls.__tablename__
    p = directory.with_suffix('.json')
    if not p.exists():
        raise RuntimeError('Could not find migration file.')

    with p.open('r', encoding='utf-8') as fp:
        data = json.load(fp)
        migrations = data['migrations']

    try:
        migration = migrations[index]
    except IndexError:
        return False

    diff = SchemaDiff(cls, migration['upgrade'], migration['downgrade'])
    if diff.is_empty():
        return False

    async with MaybeAcquire(connection, pool=cls._pool) as con:
        sql = diff.to_sql(downgrade=downgrade)
        if verbose:
            print(sql)
        await con.execute(sql)

    current = directory.with_name('current-' + p.name)
    with current.open('w', encoding='utf-8') as fp:
        json.dump(cls.to_dict(), fp, indent=4, ensure_ascii=True)
[ "async", "def", "migrate", "(", "cls", ",", "*", ",", "directory", "=", "'migrations'", ",", "index", "=", "-", "1", ",", "downgrade", "=", "False", ",", "verbose", "=", "False", ",", "connection", "=", "None", ")", ":", "directory", "=", "Path", "(", "directory", ")", "/", "cls", ".", "__tablename__", "p", "=", "directory", ".", "with_suffix", "(", "'.json'", ")", "if", "not", "p", ".", "exists", "(", ")", ":", "raise", "RuntimeError", "(", "'Could not find migration file.'", ")", "with", "p", ".", "open", "(", "'r'", ",", "encoding", "=", "'utf-8'", ")", "as", "fp", ":", "data", "=", "json", ".", "load", "(", "fp", ")", "migrations", "=", "data", "[", "'migrations'", "]", "try", ":", "migration", "=", "migrations", "[", "index", "]", "except", "IndexError", ":", "return", "False", "diff", "=", "SchemaDiff", "(", "cls", ",", "migration", "[", "'upgrade'", "]", ",", "migration", "[", "'downgrade'", "]", ")", "if", "diff", ".", "is_empty", "(", ")", ":", "return", "False", "async", "with", "MaybeAcquire", "(", "connection", ",", "pool", "=", "cls", ".", "_pool", ")", "as", "con", ":", "sql", "=", "diff", ".", "to_sql", "(", "downgrade", "=", "downgrade", ")", "if", "verbose", ":", "print", "(", "sql", ")", "await", "con", ".", "execute", "(", "sql", ")", "current", "=", "directory", ".", "with_name", "(", "'current-'", "+", "p", ".", "name", ")", "with", "current", ".", "open", "(", "'w'", ",", "encoding", "=", "'utf-8'", ")", "as", "fp", ":", "json", ".", "dump", "(", "cls", ".", "to_dict", "(", ")", ",", "fp", ",", "indent", "=", "4", ",", "ensure_ascii", "=", "True", ")" ]
https://github.com/Rapptz/RoboDanny/blob/1fb95d76d1b7685e2e2ff950e11cddfc96efbfec/cogs/utils/db.py#L577-L621
oilshell/oil
94388e7d44a9ad879b12615f6203b38596b5a2d3
Python-2.7.13/Lib/idlelib/configDialog.py
python
ConfigDialog.KeyBindingSelected
(self, event)
[]
def KeyBindingSelected(self, event):
    self.buttonNewKeys.config(state=NORMAL)
[ "def", "KeyBindingSelected", "(", "self", ",", "event", ")", ":", "self", ".", "buttonNewKeys", ".", "config", "(", "state", "=", "NORMAL", ")" ]
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/Python-2.7.13/Lib/idlelib/configDialog.py#L711-L712
cloudera/hue
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
desktop/core/ext-py/boto-2.46.1/boto/swf/layer2.py
python
ActivityWorker.poll
(self, **kwargs)
return task
PollForActivityTask.
PollForActivityTask.
[ "PollForActivityTask", "." ]
def poll(self, **kwargs):
    """PollForActivityTask."""
    task_list = self.task_list
    if 'task_list' in kwargs:
        task_list = kwargs.get('task_list')
        del kwargs['task_list']
    task = self._swf.poll_for_activity_task(self.domain, task_list, **kwargs)
    self.last_tasktoken = task.get('taskToken')
    return task
[ "def", "poll", "(", "self", ",", "*", "*", "kwargs", ")", ":", "task_list", "=", "self", ".", "task_list", "if", "'task_list'", "in", "kwargs", ":", "task_list", "=", "kwargs", ".", "get", "(", "'task_list'", ")", "del", "kwargs", "[", "'task_list'", "]", "task", "=", "self", ".", "_swf", ".", "poll_for_activity_task", "(", "self", ".", "domain", ",", "task_list", ",", "*", "*", "kwargs", ")", "self", ".", "last_tasktoken", "=", "task", ".", "get", "(", "'taskToken'", ")", "return", "task" ]
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/boto-2.46.1/boto/swf/layer2.py#L194-L203
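A minimal worker-loop sketch for poll(), assuming boto is installed and AWS credentials are configured; the domain and task list names are illustrative, not from the source.

from boto.swf.layer2 import ActivityWorker

worker = ActivityWorker(domain='example-domain', task_list='example-tasks')
task = worker.poll()                   # long-polls SWF for an activity task
if task.get('taskToken'):              # an empty token means the poll timed out
    print('got activity:', task.get('activityType'))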
fabioz/PyDev.Debugger
0f8c02a010fe5690405da1dd30ed72326191ce63
third_party/pep8/pycodestyle.py
python
Checker.init_checker_state
(self, name, argument_names)
Prepare custom state for the specific checker plugin.
Prepare custom state for the specific checker plugin.
[ "Prepare", "custom", "state", "for", "the", "specific", "checker", "plugin", "." ]
def init_checker_state(self, name, argument_names):
    """Prepare custom state for the specific checker plugin."""
    if 'checker_state' in argument_names:
        self.checker_state = self._checker_states.setdefault(name, {})
[ "def", "init_checker_state", "(", "self", ",", "name", ",", "argument_names", ")", ":", "if", "'checker_state'", "in", "argument_names", ":", "self", ".", "checker_state", "=", "self", ".", "_checker_states", ".", "setdefault", "(", "name", ",", "{", "}", ")" ]
https://github.com/fabioz/PyDev.Debugger/blob/0f8c02a010fe5690405da1dd30ed72326191ce63/third_party/pep8/pycodestyle.py#L1592-L1595
leo-editor/leo-editor
383d6776d135ef17d73d935a2f0ecb3ac0e99494
leo/core/leoAst.py
python
TokenOrderGenerator.do_arg
(self, node)
This is one argument of a list of ast.Function or ast.Lambda arguments.
This is one argument of a list of ast.Function or ast.Lambda arguments.
[ "This", "is", "one", "argument", "of", "a", "list", "of", "ast", ".", "Function", "or", "ast", ".", "Lambda", "arguments", "." ]
def do_arg(self, node):
    """This is one argument of a list of ast.Function or ast.Lambda arguments."""
    yield from self.gen_name(node.arg)
    annotation = getattr(node, 'annotation', None)
    if annotation is not None:
        yield from self.gen_op(':')
        yield from self.gen(node.annotation)
[ "def", "do_arg", "(", "self", ",", "node", ")", ":", "yield", "from", "self", ".", "gen_name", "(", "node", ".", "arg", ")", "annotation", "=", "getattr", "(", "node", ",", "'annotation'", ",", "None", ")", "if", "annotation", "is", "not", "None", ":", "yield", "from", "self", ".", "gen_op", "(", "':'", ")", "yield", "from", "self", ".", "gen", "(", "node", ".", "annotation", ")" ]
https://github.com/leo-editor/leo-editor/blob/383d6776d135ef17d73d935a2f0ecb3ac0e99494/leo/core/leoAst.py#L1527-L1533
lohriialo/photoshop-scripting-python
6b97da967a5d0a45e54f7c99631b29773b923f09
api_reference/photoshop_CC_2018.py
python
ArtLayers.__iter__
(self)
return win32com.client.util.Iterator(ob, '{16BE80A3-57B1-4871-83AC-7F844EEEB1CA}')
Return a Python iterator for this object
Return a Python iterator for this object
[ "Return", "a", "Python", "iterator", "for", "this", "object" ]
def __iter__(self):
    "Return a Python iterator for this object"
    try:
        ob = self._oleobj_.InvokeTypes(-4,LCID,2,(13, 10),())
    except pythoncom.error:
        raise TypeError("This object does not support enumeration")
    return win32com.client.util.Iterator(ob, '{16BE80A3-57B1-4871-83AC-7F844EEEB1CA}')
[ "def", "__iter__", "(", "self", ")", ":", "try", ":", "ob", "=", "self", ".", "_oleobj_", ".", "InvokeTypes", "(", "-", "4", ",", "LCID", ",", "2", ",", "(", "13", ",", "10", ")", ",", "(", ")", ")", "except", "pythoncom", ".", "error", ":", "raise", "TypeError", "(", "\"This object does not support enumeration\"", ")", "return", "win32com", ".", "client", ".", "util", ".", "Iterator", "(", "ob", ",", "'{16BE80A3-57B1-4871-83AC-7F844EEEB1CA}'", ")" ]
https://github.com/lohriialo/photoshop-scripting-python/blob/6b97da967a5d0a45e54f7c99631b29773b923f09/api_reference/photoshop_CC_2018.py#L1224-L1230
andresriancho/w3af
cd22e5252243a87aaa6d0ddea47cf58dacfe00a9
w3af/plugins/grep/clamav.py
python
clamav._is_properly_configured
(self)
:return: True if the plugin can connect to the ClamAV daemon.
:return: True if the plugin can connect to the ClamAV daemon.
[ ":", "return", ":", "True", "if", "the", "plugin", "can", "connect", "to", "the", "ClamAV", "daemon", "." ]
def _is_properly_configured(self):
    """
    :return: True if the plugin can connect to the ClamAV daemon.
    """
    with self._config_check_lock:
        if self._properly_configured is not None:
            # Return the cached response
            return self._properly_configured

        if self._connection_test():
            msg = 'Using %s for scanning HTTP response bodies.'
            om.out.information(msg % self._get_clamd_version())
            self._properly_configured = True
        else:
            msg = ('The ClamAV plugin failed to connect to clamd using'
                   ' the provided unix socket: "%s". Please verify your'
                   ' configuration and try again.')
            om.out.error(msg % self._clamd_socket)
            self._properly_configured = False

        return self._properly_configured
[ "def", "_is_properly_configured", "(", "self", ")", ":", "with", "self", ".", "_config_check_lock", ":", "if", "self", ".", "_properly_configured", "is", "not", "None", ":", "# Return the cached response", "return", "self", ".", "_properly_configured", "if", "self", ".", "_connection_test", "(", ")", ":", "msg", "=", "'Using %s for scanning HTTP response bodies.'", "om", ".", "out", ".", "information", "(", "msg", "%", "self", ".", "_get_clamd_version", "(", ")", ")", "self", ".", "_properly_configured", "=", "True", "else", ":", "msg", "=", "(", "'The ClamAV plugin failed to connect to clamd using'", "' the provided unix socket: \"%s\". Please verify your'", "' configuration and try again.'", ")", "om", ".", "out", ".", "error", "(", "msg", "%", "self", ".", "_clamd_socket", ")", "self", ".", "_properly_configured", "=", "False", "return", "self", ".", "_properly_configured" ]
https://github.com/andresriancho/w3af/blob/cd22e5252243a87aaa6d0ddea47cf58dacfe00a9/w3af/plugins/grep/clamav.py#L86-L107
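The method above combines a lock, a cached tri-state result, and a one-time probe. A minimal standalone sketch of that pattern (the names are mine, not w3af's):

import threading

class CachedProbe:
    def __init__(self, probe):
        self._probe = probe            # zero-argument callable returning bool
        self._lock = threading.Lock()
        self._result = None            # None = not checked yet

    def ok(self):
        with self._lock:
            if self._result is None:   # only the first caller pays for the probe
                self._result = self._probe()
            return self._result

check = CachedProbe(lambda: True)
assert check.ok() and check.ok()       # the second call returns the cached value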
python/cpython
e13cdca0f5224ec4e23bdd04bb3120506964bc8b
Lib/enum.py
python
Enum.name
(self)
return self._name_
The name of the Enum member.
The name of the Enum member.
[ "The", "name", "of", "the", "Enum", "member", "." ]
def name(self):
    """The name of the Enum member."""
    return self._name_
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name_" ]
https://github.com/python/cpython/blob/e13cdca0f5224ec4e23bdd04bb3120506964bc8b/Lib/enum.py#L1082-L1084
CvvT/dumpDex
92ab3b7e996194a06bf1dd5538a4954e8a5ee9c1
python/idautils.py
python
GetRegisterList
()
return idaapi.ph_get_regnames()
Returns the register list
Returns the register list
[ "Returns", "the", "register", "list" ]
def GetRegisterList():
    """Returns the register list"""
    return idaapi.ph_get_regnames()
[ "def", "GetRegisterList", "(", ")", ":", "return", "idaapi", ".", "ph_get_regnames", "(", ")" ]
https://github.com/CvvT/dumpDex/blob/92ab3b7e996194a06bf1dd5538a4954e8a5ee9c1/python/idautils.py#L589-L591
rucio/rucio
6d0d358e04f5431f0b9a98ae40f31af0ddff4833
lib/rucio/daemons/conveyor/preparer.py
python
stop
()
Graceful exit.
Graceful exit.
[ "Graceful", "exit", "." ]
def stop():
    """
    Graceful exit.
    """

    graceful_stop.set()
[ "def", "stop", "(", ")", ":", "graceful_stop", ".", "set", "(", ")" ]
https://github.com/rucio/rucio/blob/6d0d358e04f5431f0b9a98ae40f31af0ddff4833/lib/rucio/daemons/conveyor/preparer.py#L44-L49
asyml/texar
a23f021dae289a3d768dc099b220952111da04fd
texar/tf/modules/qnets/qnets.py
python
CategoricalQNet._build
(self, inputs, mode=None)
return outputs
Takes in states and outputs Q values. Args: inputs: Inputs to the Q net with the first dimension the batch dimension. mode (optional): A tensor taking value in :tf_main:`tf.estimator.ModeKeys <estimator/ModeKeys>`, including `TRAIN`, `EVAL`, and `PREDICT`. If `None`, :func:`texar.tf.global_mode` is used. Returns A `dict` including fields `"qvalues"`. where - **"qvalues"**: A Tensor of shape \ `[batch_size] + action_space size` containing Q values of all\ possible actions.
Takes in states and outputs Q values.
[ "Takes", "in", "states", "and", "outputs", "Q", "values", "." ]
def _build(self, inputs, mode=None):
    """Takes in states and outputs Q values.

    Args:
        inputs: Inputs to the Q net with the first dimension
            the batch dimension.
        mode (optional): A tensor taking value in
            :tf_main:`tf.estimator.ModeKeys <estimator/ModeKeys>`, including
            `TRAIN`, `EVAL`, and `PREDICT`. If `None`,
            :func:`texar.tf.global_mode` is used.

    Returns
        A `dict` including fields `"qvalues"`. where

        - **"qvalues"**: A Tensor of shape \
            `[batch_size] + action_space size` containing Q values of all\
            possible actions.
    """
    outputs = {
        "qvalues": self._network(inputs, mode=mode)
    }

    if not self._built:
        self._add_internal_trainable_variables()
        self._add_trainable_variable(self._network.trainable_variables)
        self._built = True

    return outputs
[ "def", "_build", "(", "self", ",", "inputs", ",", "mode", "=", "None", ")", ":", "outputs", "=", "{", "\"qvalues\"", ":", "self", ".", "_network", "(", "inputs", ",", "mode", "=", "mode", ")", "}", "if", "not", "self", ".", "_built", ":", "self", ".", "_add_internal_trainable_variables", "(", ")", "self", ".", "_add_trainable_variable", "(", "self", ".", "_network", ".", "trainable_variables", ")", "self", ".", "_built", "=", "True", "return", "outputs" ]
https://github.com/asyml/texar/blob/a23f021dae289a3d768dc099b220952111da04fd/texar/tf/modules/qnets/qnets.py#L244-L272
securesystemslab/zippy
ff0e84ac99442c2c55fe1d285332cfd4e185e089
zippy/lib-python/3/plat-sunos5/IN.py
python
NSEC_TO_TICK_ROUNDUP
(nsec)
return
[]
def NSEC_TO_TICK_ROUNDUP(nsec): return
[ "def", "NSEC_TO_TICK_ROUNDUP", "(", "nsec", ")", ":", "return" ]
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/lib-python/3/plat-sunos5/IN.py#L423-L423
ladybug-tools/butterfly
c8fc0bbe317bb41bfe5f28305782a82347b8c776
butterfly/sampleDict.py
python
SampleDict.points
(self)
return self._pts
Get and set probe locations from list of tuples.
Get and set probe locations from list of tuples.
[ "Get", "and", "set", "probe", "locations", "from", "list", "of", "tuples", "." ]
def points(self):
    """Get and set probe locations from list of tuples."""
    return self._pts
[ "def", "points", "(", "self", ")", ":", "return", "self", ".", "_pts" ]
https://github.com/ladybug-tools/butterfly/blob/c8fc0bbe317bb41bfe5f28305782a82347b8c776/butterfly/sampleDict.py#L62-L64
lambdaji/tf_repos
b531ff12cdab65acc9551025f73fade2b6f425a7
DeepMTL/Feature_pipeline/get_remap_mapper.py
python
load_fcnts
(if_str)
return feat_cnts_dict
[]
def load_fcnts(if_str):
    feat_cnts_dict = defaultdict(lambda: 0)
    new_id = 20
    with open(if_str) as f:
        for line in f:
            fid, cnts = line.strip().split('\t')
            if feat_cnts_dict.get(fid):
                continue
            if int(cnts) >= 20: #cutoff=20
                feat_cnts_dict[fid] = new_id
                new_id = new_id + 1
    return feat_cnts_dict
[ "def", "load_fcnts", "(", "if_str", ")", ":", "feat_cnts_dict", "=", "defaultdict", "(", "lambda", ":", "0", ")", "new_id", "=", "20", "with", "open", "(", "if_str", ")", "as", "f", ":", "for", "line", "in", "f", ":", "fid", ",", "cnts", "=", "line", ".", "strip", "(", ")", ".", "split", "(", "'\\t'", ")", "if", "feat_cnts_dict", ".", "get", "(", "fid", ")", ":", "continue", "if", "int", "(", "cnts", ")", ">=", "20", ":", "#cutoff=20", "feat_cnts_dict", "[", "fid", "]", "=", "new_id", "new_id", "=", "new_id", "+", "1", "return", "feat_cnts_dict" ]
https://github.com/lambdaji/tf_repos/blob/b531ff12cdab65acc9551025f73fade2b6f425a7/DeepMTL/Feature_pipeline/get_remap_mapper.py#L10-L21
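The same count-cutoff remapping in isolation, with the magic numbers lifted into parameters; this is a sketch, not the project's code:

from collections import defaultdict

def remap_ids(pairs, cutoff=20, first_id=20):
    mapping = defaultdict(int)         # unseen ids fall back to 0, as above
    next_id = first_id
    for fid, cnt in pairs:
        if mapping.get(fid):
            continue
        if cnt >= cutoff:              # only frequent features get a dense id
            mapping[fid] = next_id
            next_id += 1
    return mapping

print(dict(remap_ids([('a', 25), ('b', 3), ('c', 40)])))  # {'a': 20, 'c': 21}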
securesystemslab/zippy
ff0e84ac99442c2c55fe1d285332cfd4e185e089
zippy/benchmarks/src/benchmarks/sympy/sympy/combinatorics/perm_groups.py
python
PermutationGroup.derived_series
(self)
return res
r"""Return the derived series for the group. The derived series for a group ``G`` is defined as ``G = G_0 > G_1 > G_2 > \ldots`` where ``G_i = [G_{i-1}, G_{i-1}]``, i.e. ``G_i`` is the derived subgroup of ``G_{i-1}``, for ``i\in\mathbb{N}``. When we have ``G_k = G_{k-1}`` for some ``k\in\mathbb{N}``, the series terminates. Returns ======= A list of permutation groups containing the members of the derived series in the order ``G = G_0, G_1, G_2, \ldots``. Examples ======== >>> from sympy.combinatorics.named_groups import (SymmetricGroup, ... AlternatingGroup, DihedralGroup) >>> A = AlternatingGroup(5) >>> len(A.derived_series()) 1 >>> S = SymmetricGroup(4) >>> len(S.derived_series()) 4 >>> S.derived_series()[1].is_subgroup(AlternatingGroup(4)) True >>> S.derived_series()[2].is_subgroup(DihedralGroup(2)) True See Also ======== derived_subgroup
r"""Return the derived series for the group.
[ "r", "Return", "the", "derived", "series", "for", "the", "group", "." ]
def derived_series(self):
    r"""Return the derived series for the group.

    The derived series for a group ``G`` is defined as
    ``G = G_0 > G_1 > G_2 > \ldots`` where ``G_i = [G_{i-1}, G_{i-1}]``,
    i.e. ``G_i`` is the derived subgroup of ``G_{i-1}``, for
    ``i\in\mathbb{N}``. When we have ``G_k = G_{k-1}`` for some
    ``k\in\mathbb{N}``, the series terminates.

    Returns
    =======

    A list of permutation groups containing the members of the derived
    series in the order ``G = G_0, G_1, G_2, \ldots``.

    Examples
    ========

    >>> from sympy.combinatorics.named_groups import (SymmetricGroup,
    ... AlternatingGroup, DihedralGroup)
    >>> A = AlternatingGroup(5)
    >>> len(A.derived_series())
    1
    >>> S = SymmetricGroup(4)
    >>> len(S.derived_series())
    4
    >>> S.derived_series()[1].is_subgroup(AlternatingGroup(4))
    True
    >>> S.derived_series()[2].is_subgroup(DihedralGroup(2))
    True

    See Also
    ========

    derived_subgroup

    """
    res = [self]
    current = self
    next = self.derived_subgroup()
    while not current.is_subgroup(next):
        res.append(next)
        current = next
        next = next.derived_subgroup()
    return res
[ "def", "derived_series", "(", "self", ")", ":", "res", "=", "[", "self", "]", "current", "=", "self", "next", "=", "self", ".", "derived_subgroup", "(", ")", "while", "not", "current", ".", "is_subgroup", "(", "next", ")", ":", "res", ".", "append", "(", "next", ")", "current", "=", "next", "next", "=", "next", ".", "derived_subgroup", "(", ")", "return", "res" ]
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/combinatorics/perm_groups.py#L1029-L1073
weiaicunzai/pytorch-cifar100
2149cb57f517c6e5fa7262f958652227225d125b
models/stochasticdepth.py
python
stochastic_depth_resnet34
()
return StochasticDepthResNet(StochasticDepthBasicBlock, [3, 4, 6, 3])
return a ResNet 34 object
return a ResNet 34 object
[ "return", "a", "ResNet", "34", "object" ]
def stochastic_depth_resnet34():
    """ return a ResNet 34 object
    """
    return StochasticDepthResNet(StochasticDepthBasicBlock, [3, 4, 6, 3])
[ "def", "stochastic_depth_resnet34", "(", ")", ":", "return", "StochasticDepthResNet", "(", "StochasticDepthBasicBlock", ",", "[", "3", ",", "4", ",", "6", ",", "3", "]", ")" ]
https://github.com/weiaicunzai/pytorch-cifar100/blob/2149cb57f517c6e5fa7262f958652227225d125b/models/stochasticdepth.py#L186-L189
dpgaspar/Flask-AppBuilder
557249f33b66d02a48c1322ef21324b815abe18e
flask_appbuilder/models/base.py
python
BaseInterface.get_values_json
(self, lst, list_columns)
return result
Converts list of objects from query to JSON
Converts list of objects from query to JSON
[ "Converts", "list", "of", "objects", "from", "query", "to", "JSON" ]
def get_values_json(self, lst, list_columns):
    """
    Converts list of objects from query to JSON
    """
    result = []
    for item in self.get_values(lst, list_columns):
        for key, value in list(item.items()):
            if isinstance(value, datetime.datetime) or isinstance(
                value, datetime.date
            ):
                value = value.isoformat()
                item[key] = value
            if isinstance(value, list):
                item[key] = [str(v) for v in value]
        result.append(item)
    return result
[ "def", "get_values_json", "(", "self", ",", "lst", ",", "list_columns", ")", ":", "result", "=", "[", "]", "for", "item", "in", "self", ".", "get_values", "(", "lst", ",", "list_columns", ")", ":", "for", "key", ",", "value", "in", "list", "(", "item", ".", "items", "(", ")", ")", ":", "if", "isinstance", "(", "value", ",", "datetime", ".", "datetime", ")", "or", "isinstance", "(", "value", ",", "datetime", ".", "date", ")", ":", "value", "=", "value", ".", "isoformat", "(", ")", "item", "[", "key", "]", "=", "value", "if", "isinstance", "(", "value", ",", "list", ")", ":", "item", "[", "key", "]", "=", "[", "str", "(", "v", ")", "for", "v", "in", "value", "]", "result", ".", "append", "(", "item", ")", "return", "result" ]
https://github.com/dpgaspar/Flask-AppBuilder/blob/557249f33b66d02a48c1322ef21324b815abe18e/flask_appbuilder/models/base.py#L138-L153
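The conversion rules it applies (ISO-format dates, stringified list members) can be checked with a tiny standalone round trip:

import datetime, json

item = {'created': datetime.date(2024, 1, 2), 'tags': [1, 2]}
for key, value in list(item.items()):
    if isinstance(value, (datetime.datetime, datetime.date)):
        item[key] = value.isoformat()
    if isinstance(value, list):
        item[key] = [str(v) for v in value]
print(json.dumps(item))  # {"created": "2024-01-02", "tags": ["1", "2"]}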
Lausannen/NAS-FCOS
d586b28c0fae72a9f30119797732e614a2517de5
maskrcnn_benchmark/utils/comm.py
python
decode
(encoded_data)
return pickle.loads(bytearray(encoded_tensor.tolist()))
Decode data ,usually used in process or gpu which receive data coming from main process Arguments: encoded_data (list): encoded data coming from main process Returns: data (tensor): decoded data
Decode data ,usually used in process or gpu which receive data coming from main process Arguments: encoded_data (list): encoded data coming from main process Returns: data (tensor): decoded data
[ "Decode", "data", "usually", "used", "in", "process", "or", "gpu", "which", "receive", "data", "coming", "from", "main", "process", "Arguments", ":", "encoded_data", "(", "list", ")", ":", "encoded", "data", "coming", "from", "main", "process", "Returns", ":", "data", "(", "tensor", ")", ":", "decoded", "data" ]
def decode(encoded_data):
    """Decode data ,usually used in process or gpu which receive data coming from main process
    Arguments:
        encoded_data (list): encoded data coming from main process
    Returns:
        data (tensor): decoded data
    """
    size = encoded_data[0]
    encoded_tensor = encoded_data[1 : (size + 1)].to("cpu")
    return pickle.loads(bytearray(encoded_tensor.tolist()))
[ "def", "decode", "(", "encoded_data", ")", ":", "size", "=", "encoded_data", "[", "0", "]", "encoded_tensor", "=", "encoded_data", "[", "1", ":", "(", "size", "+", "1", ")", "]", ".", "to", "(", "\"cpu\"", ")", "return", "pickle", ".", "loads", "(", "bytearray", "(", "encoded_tensor", ".", "tolist", "(", ")", ")", ")" ]
https://github.com/Lausannen/NAS-FCOS/blob/d586b28c0fae72a9f30119797732e614a2517de5/maskrcnn_benchmark/utils/comm.py#L153-L165
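A plausible encode() counterpart implied by the layout decode() expects (the byte count in element 0, the pickled bytes after it). This is my reconstruction under that assumption, not code from the repository:

import pickle
import torch

def encode(data):
    payload = torch.tensor(list(pickle.dumps(data)), dtype=torch.long)
    size = torch.tensor([payload.numel()], dtype=torch.long)
    return torch.cat((size, payload))   # element 0 holds the byte count

# round trip: decode(encode(obj)) == obj for any picklable obj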
OneDrive/onedrive-sdk-python
e5642f8cad8eea37a4f653c1a23dfcfc06c37110
src/onedrivesdk/model/thumbnail_set.py
python
ThumbnailSet.source
(self)
return None
Gets and sets the source Returns: :class:`Thumbnail<onedrivesdk.model.thumbnail.Thumbnail>`: The source
Gets and sets the source Returns: :class:`Thumbnail<onedrivesdk.model.thumbnail.Thumbnail>`: The source
[ "Gets", "and", "sets", "the", "source", "Returns", ":", ":", "class", ":", "Thumbnail<onedrivesdk", ".", "model", ".", "thumbnail", ".", "Thumbnail", ">", ":", "The", "source" ]
def source(self):
    """
    Gets and sets the source

    Returns:
        :class:`Thumbnail<onedrivesdk.model.thumbnail.Thumbnail>`:
            The source
    """
    if "source" in self._prop_dict:
        if isinstance(self._prop_dict["source"], OneDriveObjectBase):
            return self._prop_dict["source"]
        else :
            self._prop_dict["source"] = Thumbnail(self._prop_dict["source"])
            return self._prop_dict["source"]

    return None
[ "def", "source", "(", "self", ")", ":", "if", "\"source\"", "in", "self", ".", "_prop_dict", ":", "if", "isinstance", "(", "self", ".", "_prop_dict", "[", "\"source\"", "]", ",", "OneDriveObjectBase", ")", ":", "return", "self", ".", "_prop_dict", "[", "\"source\"", "]", "else", ":", "self", ".", "_prop_dict", "[", "\"source\"", "]", "=", "Thumbnail", "(", "self", ".", "_prop_dict", "[", "\"source\"", "]", ")", "return", "self", ".", "_prop_dict", "[", "\"source\"", "]", "return", "None" ]
https://github.com/OneDrive/onedrive-sdk-python/blob/e5642f8cad8eea37a4f653c1a23dfcfc06c37110/src/onedrivesdk/model/thumbnail_set.py#L103-L118
golismero/golismero
7d605b937e241f51c1ca4f47b20f755eeefb9d76
thirdparty_libs/nltk/collocations.py
python
AbstractCollocationFinder.apply_word_filter
(self, fn)
Removes candidate ngrams (w1, w2, ...) where any of (fn(w1), fn(w2), ...) evaluates to True.
Removes candidate ngrams (w1, w2, ...) where any of (fn(w1), fn(w2), ...) evaluates to True.
[ "Removes", "candidate", "ngrams", "(", "w1", "w2", "...", ")", "where", "any", "of", "(", "fn", "(", "w1", ")", "fn", "(", "w2", ")", "...", ")", "evaluates", "to", "True", "." ]
def apply_word_filter(self, fn):
    """Removes candidate ngrams (w1, w2, ...) where any of (fn(w1), fn(w2),
    ...) evaluates to True.
    """
    self._apply_filter(lambda ng, f: any(fn(w) for w in ng))
[ "def", "apply_word_filter", "(", "self", ",", "fn", ")", ":", "self", ".", "_apply_filter", "(", "lambda", "ng", ",", "f", ":", "any", "(", "fn", "(", "w", ")", "for", "w", "in", "ng", ")", ")" ]
https://github.com/golismero/golismero/blob/7d605b937e241f51c1ca4f47b20f755eeefb9d76/thirdparty_libs/nltk/collocations.py#L88-L92
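Typical usage through NLTK's standard entry point, assuming the nltk package is installed:

from nltk.collocations import BigramCollocationFinder

words = 'the quick brown fox jumps over the lazy dog'.split()
finder = BigramCollocationFinder.from_words(words)
finder.apply_word_filter(lambda w: len(w) < 4)   # drop ngrams touching short words
print(sorted(finder.ngram_fd))                   # only all-long-word bigrams remain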
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/StdLib/Lib/pprint.py
python
isreadable
(object)
return _safe_repr(object, {}, None, 0)[1]
Determine if saferepr(object) is readable by eval().
Determine if saferepr(object) is readable by eval().
[ "Determine", "if", "saferepr", "(", "object", ")", "is", "readable", "by", "eval", "()", "." ]
def isreadable(object):
    """Determine if saferepr(object) is readable by eval()."""
    return _safe_repr(object, {}, None, 0)[1]
[ "def", "isreadable", "(", "object", ")", ":", "return", "_safe_repr", "(", "object", ",", "{", "}", ",", "None", ",", "0", ")", "[", "1", "]" ]
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/StdLib/Lib/pprint.py#L63-L65
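A quick illustration with the public pprint API:

import pprint

print(pprint.isreadable([1, {'a': (2, 3)}]))  # True: repr round-trips via eval()
print(pprint.isreadable(object()))            # False: <object ...> is not eval()-able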
hatRiot/zarp
2e772350a01c2aeed3f4da9685cd0cc5d6b3ecad
src/lib/scapy/utils6.py
python
in6_getnsma
(a)
return r
Return link-local solicited-node multicast address for given address. Passed address must be provided in network format. Returned value is also in network format.
Return link-local solicited-node multicast address for given address. Passed address must be provided in network format. Returned value is also in network format.
[ "Return", "link", "-", "local", "solicited", "-", "node", "multicast", "address", "for", "given", "address", ".", "Passed", "address", "must", "be", "provided", "in", "network", "format", ".", "Returned", "value", "is", "also", "in", "network", "format", "." ]
def in6_getnsma(a):
    """
    Return link-local solicited-node multicast address for given
    address. Passed address must be provided in network format.
    Returned value is also in network format.
    """
    r = in6_and(a, inet_pton(socket.AF_INET6, '::ff:ffff'))
    r = in6_or(inet_pton(socket.AF_INET6, 'ff02::1:ff00:0'), r)
    return r
[ "def", "in6_getnsma", "(", "a", ")", ":", "r", "=", "in6_and", "(", "a", ",", "inet_pton", "(", "socket", ".", "AF_INET6", ",", "'::ff:ffff'", ")", ")", "r", "=", "in6_or", "(", "inet_pton", "(", "socket", ".", "AF_INET6", ",", "'ff02::1:ff00:0'", ")", ",", "r", ")", "return", "r" ]
https://github.com/hatRiot/zarp/blob/2e772350a01c2aeed3f4da9685cd0cc5d6b3ecad/src/lib/scapy/utils6.py#L600-L609
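The two masks implement RFC 4291's solicited-node mapping: keep the low 24 bits of the address and OR them into the ff02::1:ff00:0/104 prefix. A stdlib-only re-derivation (my sketch, not scapy code):

import socket

def solicited_node(addr):
    a = socket.inet_pton(socket.AF_INET6, addr)
    prefix = socket.inet_pton(socket.AF_INET6, 'ff02::1:ff00:0')
    return socket.inet_ntop(socket.AF_INET6, prefix[:13] + a[-3:])

print(solicited_node('fe80::1234:5678:9abc'))  # ff02::1:ff78:9abc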
Zehaos/MobileNet
bb02b10fbd211d717f7a207245feac229f6bb23e
tools/quantize_graph.py
python
GraphRewriter.create_nodes_map
(self, graph)
return nodes_map
Builds a mapping of node names to their defs from the graph.
Builds a mapping of node names to their defs from the graph.
[ "Builds", "a", "mapping", "of", "node", "names", "to", "their", "defs", "from", "the", "graph", "." ]
def create_nodes_map(self, graph):
    """Builds a mapping of node names to their defs from the graph."""
    nodes_map = {}
    for node in graph.node:
        if node.name not in nodes_map.keys():
            nodes_map[node.name] = node
        else:
            raise ValueError("Duplicate node names detected.")
    return nodes_map
[ "def", "create_nodes_map", "(", "self", ",", "graph", ")", ":", "nodes_map", "=", "{", "}", "for", "node", "in", "graph", ".", "node", ":", "if", "node", ".", "name", "not", "in", "nodes_map", ".", "keys", "(", ")", ":", "nodes_map", "[", "node", ".", "name", "]", "=", "node", "else", ":", "raise", "ValueError", "(", "\"Duplicate node names detected.\"", ")", "return", "nodes_map" ]
https://github.com/Zehaos/MobileNet/blob/bb02b10fbd211d717f7a207245feac229f6bb23e/tools/quantize_graph.py#L376-L384
sahana/eden
1696fa50e90ce967df69f66b571af45356cc18da
modules/templates/IFRC/survey.py
python
survey_answerlist_dataTable_pre
()
The answer list has been removed for the moment. Currently it displays all answers for a summary it would be better to be able to display just a few select answers
The answer list has been removed for the moment. Currently it displays all answers for a summary it would be better to be able to display just a few select answers
[ "The", "answer", "list", "has", "been", "removed", "for", "the", "moment", ".", "Currently", "it", "displays", "all", "answers", "for", "a", "summary", "it", "would", "be", "better", "to", "be", "able", "to", "display", "just", "a", "few", "select", "answers" ]
def survey_answerlist_dataTable_pre():
    """
    The answer list has been removed for the moment. Currently
    it displays all answers for a summary it would be better to
    be able to display just a few select answers
    """
    list_fields = ["created_on", "series_id", "location", "modified_by"]
    current.s3db.configure("survey_complete", list_fields=list_fields)
[ "def", "survey_answerlist_dataTable_pre", "(", ")", ":", "list_fields", "=", "[", "\"created_on\"", ",", "\"series_id\"", ",", "\"location\"", ",", "\"modified_by\"", "]", "current", ".", "s3db", ".", "configure", "(", "\"survey_complete\"", ",", "list_fields", "=", "list_fields", ")" ]
https://github.com/sahana/eden/blob/1696fa50e90ce967df69f66b571af45356cc18da/modules/templates/IFRC/survey.py#L2781-L2789
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/lib_utils/filter_plugins/oo_filters.py
python
lib_utils_oo_l_of_d_to_csv
(input_list)
return ','.join(json.dumps(x) for x in input_list)
Map a list of dictionaries, input_list, into a csv string of json values. Example input: [{'var1': 'val1', 'var2': 'val2'}, {'var1': 'val3', 'var2': 'val4'}] Example output: u'{"var1": "val1", "var2": "val2"},{"var1": "val3", "var2": "val4"}'
Map a list of dictionaries, input_list, into a csv string of json values.
[ "Map", "a", "list", "of", "dictionaries", "input_list", "into", "a", "csv", "string", "of", "json", "values", "." ]
def lib_utils_oo_l_of_d_to_csv(input_list):
    """Map a list of dictionaries, input_list, into a csv string of
    json values.

    Example input:
    [{'var1': 'val1', 'var2': 'val2'}, {'var1': 'val3', 'var2': 'val4'}]
    Example output:
    u'{"var1": "val1", "var2": "val2"},{"var1": "val3", "var2": "val4"}'
    """
    return ','.join(json.dumps(x) for x in input_list)
[ "def", "lib_utils_oo_l_of_d_to_csv", "(", "input_list", ")", ":", "return", "','", ".", "join", "(", "json", ".", "dumps", "(", "x", ")", "for", "x", "in", "input_list", ")" ]
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/lib_utils/filter_plugins/oo_filters.py#L653-L662
uqfoundation/mystic
154e6302d1f2f94e8f13e88ecc5f24241cc28ac7
models/circle.py
python
gendata
(coeffs,npts=20)
return xy
Generate a 2D dataset of npts enclosed in circle of given coeffs, where coeffs = (x,y,r). NOTE: if npts == None, constrain all points to circle of given radius
Generate a 2D dataset of npts enclosed in circle of given coeffs, where coeffs = (x,y,r).
[ "Generate", "a", "2D", "dataset", "of", "npts", "enclosed", "in", "circle", "of", "given", "coeffs", "where", "coeffs", "=", "(", "x", "y", "r", ")", "." ]
def gendata(coeffs,npts=20):
    """Generate a 2D dataset of npts enclosed in circle of given coeffs,
    where coeffs = (x,y,r).

    NOTE: if npts == None, constrain all points to circle of given radius"""
    if not npts:
        return gencircle(coeffs)
    def points_circle(N):
        # generate N random points in a unit circle
        n = 0
        while n < N:
            x = random.random()*2.-1.
            y = random.random()*2.-1.
            if x*x + y*y <= 1:
                n = n+1
                yield [x,y]
    x0,y0,R0 = coeffs
    xy = array(list(points_circle(npts)))*R0
    xy[:,0] += x0
    xy[:,1] += y0
    return xy
[ "def", "gendata", "(", "coeffs", ",", "npts", "=", "20", ")", ":", "if", "not", "npts", ":", "return", "gencircle", "(", "coeffs", ")", "def", "points_circle", "(", "N", ")", ":", "# generate N random points in a unit circle", "n", "=", "0", "while", "n", "<", "N", ":", "x", "=", "random", ".", "random", "(", ")", "*", "2.", "-", "1.", "y", "=", "random", ".", "random", "(", ")", "*", "2.", "-", "1.", "if", "x", "*", "x", "+", "y", "*", "y", "<=", "1", ":", "n", "=", "n", "+", "1", "yield", "[", "x", ",", "y", "]", "x0", ",", "y0", ",", "R0", "=", "coeffs", "xy", "=", "array", "(", "list", "(", "points_circle", "(", "npts", ")", ")", ")", "*", "R0", "xy", "[", ":", ",", "0", "]", "+=", "x0", "xy", "[", ":", ",", "1", "]", "+=", "y0", "return", "xy" ]
https://github.com/uqfoundation/mystic/blob/154e6302d1f2f94e8f13e88ecc5f24241cc28ac7/models/circle.py#L117-L137
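points_circle is plain rejection sampling: draw from the square [-1, 1]^2 and keep only hits inside the unit disk, so the acceptance rate is pi/4, about 78.5%. The idea in isolation:

import random

def unit_disk_points(n):
    pts = []
    while len(pts) < n:
        x, y = random.uniform(-1, 1), random.uniform(-1, 1)
        if x * x + y * y <= 1:         # reject roughly 21.5% of draws
            pts.append((x, y))
    return pts

assert all(x * x + y * y <= 1 for x, y in unit_disk_points(100))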
autorope/donkeycar
a3df79a3573127dff31fc9b2953588056875b214
donkeycar/templates/cv_control.py
python
LineFollower.debug_display
(self, cam_img, mask, max_yellow, confidense)
composite mask on top the original image. show some values we are using for control
composite mask on top the original image. show some values we are using for control
[ "composite", "mask", "on", "top", "the", "original", "image", ".", "show", "some", "values", "we", "are", "using", "for", "control" ]
def debug_display(self, cam_img, mask, max_yellow, confidense):
    '''
    composite mask on top the original image.
    show some values we are using for control
    '''
    mask_exp = np.stack((mask,)*3, axis=-1)
    iSlice = self.scan_y
    img = np.copy(cam_img)
    img[iSlice : iSlice + self.scan_height, :, :] = mask_exp
    img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)

    display_str = []
    display_str.append("STEERING:{:.1f}".format(self.steering))
    display_str.append("THROTTLE:{:.2f}".format(self.throttle))
    display_str.append("I YELLOW:{:d}".format(max_yellow))
    display_str.append("CONF:{:.2f}".format(confidense))

    y = 10
    x = 10

    for s in display_str:
        cv2.putText(img, s, color=(0,255,255), org=(x,y),
                    fontFace=cv2.FONT_HERSHEY_SIMPLEX, fontScale=0.4)
        y += 10

    cv2.namedWindow('image', cv2.WINDOW_NORMAL)
    cv2.imshow("image", img)
    cv2.resizeWindow('image', 300,300)
    cv2.waitKey(1)
[ "def", "debug_display", "(", "self", ",", "cam_img", ",", "mask", ",", "max_yellow", ",", "confidense", ")", ":", "mask_exp", "=", "np", ".", "stack", "(", "(", "mask", ",", ")", "*", "3", ",", "axis", "=", "-", "1", ")", "iSlice", "=", "self", ".", "scan_y", "img", "=", "np", ".", "copy", "(", "cam_img", ")", "img", "[", "iSlice", ":", "iSlice", "+", "self", ".", "scan_height", ",", ":", ",", ":", "]", "=", "mask_exp", "img", "=", "cv2", ".", "cvtColor", "(", "img", ",", "cv2", ".", "COLOR_RGB2BGR", ")", "display_str", "=", "[", "]", "display_str", ".", "append", "(", "\"STEERING:{:.1f}\"", ".", "format", "(", "self", ".", "steering", ")", ")", "display_str", ".", "append", "(", "\"THROTTLE:{:.2f}\"", ".", "format", "(", "self", ".", "throttle", ")", ")", "display_str", ".", "append", "(", "\"I YELLOW:{:d}\"", ".", "format", "(", "max_yellow", ")", ")", "display_str", ".", "append", "(", "\"CONF:{:.2f}\"", ".", "format", "(", "confidense", ")", ")", "y", "=", "10", "x", "=", "10", "for", "s", "in", "display_str", ":", "cv2", ".", "putText", "(", "img", ",", "s", ",", "color", "=", "(", "0", ",", "255", ",", "255", ")", ",", "org", "=", "(", "x", ",", "y", ")", ",", "fontFace", "=", "cv2", ".", "FONT_HERSHEY_SIMPLEX", ",", "fontScale", "=", "0.4", ")", "y", "+=", "10", "cv2", ".", "namedWindow", "(", "'image'", ",", "cv2", ".", "WINDOW_NORMAL", ")", "cv2", ".", "imshow", "(", "\"image\"", ",", "img", ")", "cv2", ".", "resizeWindow", "(", "'image'", ",", "300", ",", "300", ")", "cv2", ".", "waitKey", "(", "1", ")" ]
https://github.com/autorope/donkeycar/blob/a3df79a3573127dff31fc9b2953588056875b214/donkeycar/templates/cv_control.py#L115-L143
facebookresearch/FixRes
c9be6acc7a6b32f896e62c28a97c20c2348327d3
imnet_finetune/Res.py
python
resnet101
(pretrained=False, progress=True, **kwargs)
return _resnet('resnet101', Bottleneck, [3, 4, 23, 3], pretrained, progress, **kwargs)
Constructs a ResNet-101 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr
Constructs a ResNet-101 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet progress (bool): If True, displays a progress bar of the download to stderr
[ "Constructs", "a", "ResNet", "-", "101", "model", ".", "Args", ":", "pretrained", "(", "bool", ")", ":", "If", "True", "returns", "a", "model", "pre", "-", "trained", "on", "ImageNet", "progress", "(", "bool", ")", ":", "If", "True", "displays", "a", "progress", "bar", "of", "the", "download", "to", "stderr" ]
def resnet101(pretrained=False, progress=True, **kwargs):
    """Constructs a ResNet-101 model.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        progress (bool): If True, displays a progress bar of the download to stderr
    """
    return _resnet('resnet101', Bottleneck, [3, 4, 23, 3], pretrained, progress,
                   **kwargs)
[ "def", "resnet101", "(", "pretrained", "=", "False", ",", "progress", "=", "True", ",", "*", "*", "kwargs", ")", ":", "return", "_resnet", "(", "'resnet101'", ",", "Bottleneck", ",", "[", "3", ",", "4", ",", "23", ",", "3", "]", ",", "pretrained", ",", "progress", ",", "*", "*", "kwargs", ")" ]
https://github.com/facebookresearch/FixRes/blob/c9be6acc7a6b32f896e62c28a97c20c2348327d3/imnet_finetune/Res.py#L258-L265
qilingframework/qiling
32cc674f2f6fa4b4c9d64a35a1a57853fe1e4142
qiling/os/memory.py
python
del_mapinfo
(self, mem_s: int, mem_e: int)
Subtract a memory range from map. Args: mem_s: memory range start mem_e: memory range end
Subtract a memory range from map.
[ "Subtract", "a", "memory", "range", "from", "map", "." ]
def del_mapinfo(self, mem_s: int, mem_e: int):
    """Subtract a memory range from map.

    Args:
        mem_s: memory range start
        mem_e: memory range end
    """

    tmp_map_info: MutableSequence[MapInfoEntry] = []

    for s, e, p, info, mmio in self.map_info:
        if e <= mem_s:
            tmp_map_info.append((s, e, p, info, mmio))
            continue

        if s >= mem_e:
            tmp_map_info.append((s, e, p, info, mmio))
            continue

        if s < mem_s:
            tmp_map_info.append((s, mem_s, p, info, mmio))

        if s == mem_s:
            pass

        if e > mem_e:
            tmp_map_info.append((mem_e, e, p, info, mmio))

        if e == mem_e:
            pass

    self.map_info = tmp_map_info
[ "def", "del_mapinfo", "(", "self", ",", "mem_s", ":", "int", ",", "mem_e", ":", "int", ")", ":", "tmp_map_info", ":", "MutableSequence", "[", "MapInfoEntry", "]", "=", "[", "]", "for", "s", ",", "e", ",", "p", ",", "info", ",", "mmio", "in", "self", ".", "map_info", ":", "if", "e", "<=", "mem_s", ":", "tmp_map_info", ".", "append", "(", "(", "s", ",", "e", ",", "p", ",", "info", ",", "mmio", ")", ")", "continue", "if", "s", ">=", "mem_e", ":", "tmp_map_info", ".", "append", "(", "(", "s", ",", "e", ",", "p", ",", "info", ",", "mmio", ")", ")", "continue", "if", "s", "<", "mem_s", ":", "tmp_map_info", ".", "append", "(", "(", "s", ",", "mem_s", ",", "p", ",", "info", ",", "mmio", ")", ")", "if", "s", "==", "mem_s", ":", "pass", "if", "e", ">", "mem_e", ":", "tmp_map_info", ".", "append", "(", "(", "mem_e", ",", "e", ",", "p", ",", "info", ",", "mmio", ")", ")", "if", "e", "==", "mem_e", ":", "pass", "self", ".", "map_info", "=", "tmp_map_info" ]
https://github.com/qilingframework/qiling/blob/32cc674f2f6fa4b4c9d64a35a1a57853fe1e4142/qiling/os/memory.py#L98-L129
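Stripped of the permission/label bookkeeping, this is interval subtraction: clip or split every (start, end) pair against the removed range. A minimal sketch of the same logic:

def subtract_range(ranges, lo, hi):
    out = []
    for s, e in ranges:
        if e <= lo or s >= hi:     # disjoint: keep as-is
            out.append((s, e))
            continue
        if s < lo:                 # left remainder survives
            out.append((s, lo))
        if e > hi:                 # right remainder survives
            out.append((hi, e))
    return out

print(subtract_range([(0, 10), (20, 30)], 5, 25))  # [(0, 5), (25, 30)]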
n1nj4sec/pupy
a5d766ea81fdfe3bc2c38c9bdaf10e9b75af3b39
pupy/network/lib/streams/PupySocketStream.py
python
PupyUDPSocketStream.close
(self)
[]
def close(self):
    self._send_packet(self.END)

    if self.close_callback:
        self.close_callback('{}:{}'.format(
            self.dst_addr[0], self.dst_addr[1]))

    self.closed = True
    self.kcp = None

    if self.client_side:
        self.sock.close()
[ "def", "close", "(", "self", ")", ":", "self", ".", "_send_packet", "(", "self", ".", "END", ")", "if", "self", ".", "close_callback", ":", "self", ".", "close_callback", "(", "'{}:{}'", ".", "format", "(", "self", ".", "dst_addr", "[", "0", "]", ",", "self", ".", "dst_addr", "[", "1", "]", ")", ")", "self", ".", "closed", "=", "True", "self", ".", "kcp", "=", "None", "if", "self", ".", "client_side", ":", "self", ".", "sock", ".", "close", "(", ")" ]
https://github.com/n1nj4sec/pupy/blob/a5d766ea81fdfe3bc2c38c9bdaf10e9b75af3b39/pupy/network/lib/streams/PupySocketStream.py#L458-L469
triaquae/triaquae
bbabf736b3ba56a0c6498e7f04e16c13b8b8f2b9
TriAquae/models/Centos_6.4/pyasn1/codec/ber/decoder.py
python
BitStringDecoder.indefLenValueDecoder
(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun)
return r, substrate
[]
def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
                         length, state, decodeFun, substrateFun):
    r = self._createComponent(asn1Spec, tagSet, '')
    if substrateFun:
        return substrateFun(r, substrate, length)
    while substrate:
        component, substrate = decodeFun(substrate)
        if eoo.endOfOctets.isSameTypeWith(component) and \
                component == eoo.endOfOctets:
            break
        r = r + component
    else:
        raise error.SubstrateUnderrunError(
            'No EOO seen before substrate ends'
        )
    return r, substrate
[ "def", "indefLenValueDecoder", "(", "self", ",", "fullSubstrate", ",", "substrate", ",", "asn1Spec", ",", "tagSet", ",", "length", ",", "state", ",", "decodeFun", ",", "substrateFun", ")", ":", "r", "=", "self", ".", "_createComponent", "(", "asn1Spec", ",", "tagSet", ",", "''", ")", "if", "substrateFun", ":", "return", "substrateFun", "(", "r", ",", "substrate", ",", "length", ")", "while", "substrate", ":", "component", ",", "substrate", "=", "decodeFun", "(", "substrate", ")", "if", "eoo", ".", "endOfOctets", ".", "isSameTypeWith", "(", "component", ")", "and", "component", "==", "eoo", ".", "endOfOctets", ":", "break", "r", "=", "r", "+", "component", "else", ":", "raise", "error", ".", "SubstrateUnderrunError", "(", "'No EOO seen before substrate ends'", ")", "return", "r", ",", "substrate" ]
https://github.com/triaquae/triaquae/blob/bbabf736b3ba56a0c6498e7f04e16c13b8b8f2b9/TriAquae/models/Centos_6.4/pyasn1/codec/ber/decoder.py#L151-L166
IronLanguages/main
a949455434b1fda8c783289e897e78a9a0caabb5
External.LCA_RESTRICTED/Languages/CPython/27/Lib/traceback.py
python
print_stack
(f=None, limit=None, file=None)
Print a stack trace from its invocation point. The optional 'f' argument can be used to specify an alternate stack frame at which to start. The optional 'limit' and 'file' arguments have the same meaning as for print_exception().
Print a stack trace from its invocation point.
[ "Print", "a", "stack", "trace", "from", "its", "invocation", "point", "." ]
def print_stack(f=None, limit=None, file=None):
    """Print a stack trace from its invocation point.

    The optional 'f' argument can be used to specify an alternate
    stack frame at which to start. The optional 'limit' and 'file'
    arguments have the same meaning as for print_exception().
    """
    if f is None:
        try:
            raise ZeroDivisionError
        except ZeroDivisionError:
            f = sys.exc_info()[2].tb_frame.f_back
    print_list(extract_stack(f, limit), file)
[ "def", "print_stack", "(", "f", "=", "None", ",", "limit", "=", "None", ",", "file", "=", "None", ")", ":", "if", "f", "is", "None", ":", "try", ":", "raise", "ZeroDivisionError", "except", "ZeroDivisionError", ":", "f", "=", "sys", ".", "exc_info", "(", ")", "[", "2", "]", ".", "tb_frame", ".", "f_back", "print_list", "(", "extract_stack", "(", "f", ",", "limit", ")", ",", "file", ")" ]
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/CPython/27/Lib/traceback.py#L257-L269
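The raise/except dance exists only to obtain the caller's frame without CPython-specific APIs. On CPython the same frame is reachable directly; a sketch using the implementation-specific sys._getframe and the module's semi-public print_list helper:

import sys
import traceback

def print_caller_stack():
    frame = sys._getframe(1)                        # CPython-specific shortcut
    traceback.print_list(traceback.extract_stack(frame))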
leo-editor/leo-editor
383d6776d135ef17d73d935a2f0ecb3ac0e99494
leo/commands/commanderFileCommands.py
python
editRecentFiles
(self, event=None)
Opens recent files list in a new node for editing.
Opens recent files list in a new node for editing.
[ "Opens", "recent", "files", "list", "in", "a", "new", "node", "for", "editing", "." ]
def editRecentFiles(self, event=None):
    """Opens recent files list in a new node for editing."""
    c = self
    g.app.recentFilesManager.editRecentFiles(c)
[ "def", "editRecentFiles", "(", "self", ",", "event", "=", "None", ")", ":", "c", "=", "self", "g", ".", "app", ".", "recentFilesManager", ".", "editRecentFiles", "(", "c", ")" ]
https://github.com/leo-editor/leo-editor/blob/383d6776d135ef17d73d935a2f0ecb3ac0e99494/leo/commands/commanderFileCommands.py#L951-L954
kylebebak/Requester
4a9f9f051fa5fc951a8f7ad098a328261ca2db97
deps/oauthlib/oauth1/rfc5849/request_validator.py
python
RequestValidator.get_realms
(self, token, request)
Get realms associated with a request token. :param token: The request token string. :param request: An oauthlib.common.Request object. :returns: The list of realms associated with the request token. This method is used by * AuthorizationEndpoint * AccessTokenEndpoint
Get realms associated with a request token.
[ "Get", "realms", "associated", "with", "a", "request", "token", "." ]
def get_realms(self, token, request):
    """Get realms associated with a request token.

    :param token: The request token string.
    :param request: An oauthlib.common.Request object.
    :returns: The list of realms associated with the request token.

    This method is used by

    * AuthorizationEndpoint
    * AccessTokenEndpoint
    """
    raise self._subclass_must_implement("get_realms")
[ "def", "get_realms", "(", "self", ",", "token", ",", "request", ")", ":", "raise", "self", ".", "_subclass_must_implement", "(", "\"get_realms\"", ")" ]
https://github.com/kylebebak/Requester/blob/4a9f9f051fa5fc951a8f7ad098a328261ca2db97/deps/oauthlib/oauth1/rfc5849/request_validator.py#L381-L393
sosreport/sos
900e8bea7f3cd36c1dd48f3cbb351ab92f766654
sos/report/plugins/__init__.py
python
Plugin.is_installed
(self, package_name)
return self.policy.pkg_by_name(package_name) is not None
Is the package $package_name installed? :param package_name: The name of the package to check :type package_name: ``str`` :returns: ``True`` id the package is installed, else ``False`` :rtype: ``bool``
Is the package $package_name installed?
[ "Is", "the", "package", "$package_name", "installed?" ]
def is_installed(self, package_name):
    """Is the package $package_name installed?

    :param package_name: The name of the package to check
    :type package_name: ``str``

    :returns: ``True`` id the package is installed, else ``False``
    :rtype: ``bool``
    """
    return self.policy.pkg_by_name(package_name) is not None
[ "def", "is_installed", "(", "self", ",", "package_name", ")", ":", "return", "self", ".", "policy", ".", "pkg_by_name", "(", "package_name", ")", "is", "not", "None" ]
https://github.com/sosreport/sos/blob/900e8bea7f3cd36c1dd48f3cbb351ab92f766654/sos/report/plugins/__init__.py#L751-L760
douban/pymesos
047c7bac8ca98772f63192aed063148fdf399b55
pymesos/interface.py
python
OperatorMasterDriver.getMaintenanceSchedule
(self)
This call retrieves the cluster's maintenance status.
This call retrieves the cluster's maintenance status.
[ "This", "call", "retrieves", "the", "cluster", "s", "maintenance", "status", "." ]
def getMaintenanceSchedule(self):
    """
    This call retrieves the cluster's maintenance status.
    """
[ "def", "getMaintenanceSchedule", "(", "self", ")", ":" ]
https://github.com/douban/pymesos/blob/047c7bac8ca98772f63192aed063148fdf399b55/pymesos/interface.py#L637-L640
bloomberg/phabricator-tools
09bd1587fe8945d93a891162fd4c89640c6fada7
py/phl/phlurl_watcher.py
python
Watcher.load
(self, f)
Load data from the supplied file pointer, overwriting existing data. :f: a text file pointer to load from :returns: None
Load data from the supplied file pointer, overwriting existing data.
[ "Load", "data", "from", "the", "supplied", "file", "pointer", "overwriting", "existing", "data", "." ]
def load(self, f):
    """Load data from the supplied file pointer, overwriting existing data.

    :f: a text file pointer to load from
    :returns: None

    """
    results = json.load(f)
    self._results = dict(
        (k, _HashHexdigestHasChanged(*v))
        for k, v in results.iteritems())
[ "def", "load", "(", "self", ",", "f", ")", ":", "results", "=", "json", ".", "load", "(", "f", ")", "self", ".", "_results", "=", "dict", "(", "(", "k", ",", "_HashHexdigestHasChanged", "(", "*", "v", ")", ")", "for", "k", ",", "v", "in", "results", ".", "iteritems", "(", ")", ")" ]
https://github.com/bloomberg/phabricator-tools/blob/09bd1587fe8945d93a891162fd4c89640c6fada7/py/phl/phlurl_watcher.py#L142-L151
wikimedia/pywikibot
81a01ffaec7271bf5b4b170f85a80388420a4e78
pywikibot/site/_datasite.py
python
DataSite.property_namespace
(self)
return self._property_namespace
Return namespace for properties. :return: property namespace :rtype: Namespace
Return namespace for properties.
[ "Return", "namespace", "for", "properties", "." ]
def property_namespace(self):
    """
    Return namespace for properties.

    :return: property namespace
    :rtype: Namespace
    """
    if self._property_namespace is None:
        self._property_namespace = self.get_namespace_for_entity_type(
            'property')
    return self._property_namespace
[ "def", "property_namespace", "(", "self", ")", ":", "if", "self", ".", "_property_namespace", "is", "None", ":", "self", ".", "_property_namespace", "=", "self", ".", "get_namespace_for_entity_type", "(", "'property'", ")", "return", "self", ".", "_property_namespace" ]
https://github.com/wikimedia/pywikibot/blob/81a01ffaec7271bf5b4b170f85a80388420a4e78/pywikibot/site/_datasite.py#L89-L99
mrlesmithjr/Ansible
d44f0dc0d942bdf3bf7334b307e6048f0ee16e36
roles/ansible-vsphere-management/scripts/pdns/lib/python2.7/site-packages/pip/_vendor/requests/models.py
python
Response.is_permanent_redirect
(self)
return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
True if this Response one of the permanent versions of redirect
True if this Response one of the permanent versions of redirect
[ "True", "if", "this", "Response", "one", "of", "the", "permanent", "versions", "of", "redirect" ]
def is_permanent_redirect(self):
    """True if this Response one of the permanent versions of redirect"""
    return ('location' in self.headers and
            self.status_code in (codes.moved_permanently, codes.permanent_redirect))
[ "def", "is_permanent_redirect", "(", "self", ")", ":", "return", "(", "'location'", "in", "self", ".", "headers", "and", "self", ".", "status_code", "in", "(", "codes", ".", "moved_permanently", ",", "codes", ".", "permanent_redirect", ")", ")" ]
https://github.com/mrlesmithjr/Ansible/blob/d44f0dc0d942bdf3bf7334b307e6048f0ee16e36/roles/ansible-vsphere-management/scripts/pdns/lib/python2.7/site-packages/pip/_vendor/requests/models.py#L646-L648
biolab/orange3
41685e1c7b1d1babe680113685a2d44bcc9fec0b
Orange/widgets/utils/textimport.py
python
number_parser
(groupsep, decimalsep)
[]
def number_parser(groupsep, decimalsep):
    # type: (str, str) -> Callable[[str], float]
    if groupsep == "" and decimalsep == ".":
        return float
    elif groupsep == "":
        return lambda value: float(value.replace(decimalsep, "."))
    elif decimalsep != groupsep and decimalsep != "" and groupsep != "":
        table = {ord(groupsep): None, ord(decimalsep): ord(".")}
        return lambda value: float(value.translate(table))
    else:
        return float
[ "def", "number_parser", "(", "groupsep", ",", "decimalsep", ")", ":", "# type: (str, str) -> Callable[[str], float]", "if", "groupsep", "==", "\"\"", "and", "decimalsep", "==", "\".\"", ":", "return", "float", "elif", "groupsep", "==", "\"\"", ":", "return", "lambda", "value", ":", "float", "(", "value", ".", "replace", "(", "decimalsep", ",", "\".\"", ")", ")", "elif", "decimalsep", "!=", "groupsep", "and", "decimalsep", "!=", "\"\"", "and", "groupsep", "!=", "\"\"", ":", "table", "=", "{", "ord", "(", "groupsep", ")", ":", "None", ",", "ord", "(", "decimalsep", ")", ":", "ord", "(", "\".\"", ")", "}", "return", "lambda", "value", ":", "float", "(", "value", ".", "translate", "(", "table", ")", ")", "else", ":", "return", "float" ]
https://github.com/biolab/orange3/blob/41685e1c7b1d1babe680113685a2d44bcc9fec0b/Orange/widgets/utils/textimport.py#L1498-L1508
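The third branch relies on str.translate with an ord-keyed table, deleting the group separator and normalizing the decimal mark. The trick on its own:

table = {ord('.'): None, ord(','): ord('.')}   # e.g. German-style "1.234,56"
print(float('1.234,56'.translate(table)))      # 1234.56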
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_hxb2/lib/python3.5/site-packages/pip/_vendor/pkg_resources/__init__.py
python
__getstate__
()
return state
[]
def __getstate__():
    state = {}
    g = globals()
    for k, v in _state_vars.items():
        state[k] = g['_sget_' + v](g[k])
    return state
[ "def", "__getstate__", "(", ")", ":", "state", "=", "{", "}", "g", "=", "globals", "(", ")", "for", "k", ",", "v", "in", "_state_vars", ".", "items", "(", ")", ":", "state", "[", "k", "]", "=", "g", "[", "'_sget_'", "+", "v", "]", "(", "g", "[", "k", "]", ")", "return", "state" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_hxb2/lib/python3.5/site-packages/pip/_vendor/pkg_resources/__init__.py#L220-L225
ValvePython/steam
7aef9d2df57c2195f35bd85013e1b5ccb04624a5
steam/client/__init__.py
python
SteamClient.send
(self, message, body_params=None)
Send a message to CM :param message: a message instance :type message: :class:`.Msg`, :class:`.MsgProto` :param body_params: a dict with params to the body (only :class:`.MsgProto`) :type body_params: dict
Send a message to CM
[ "Send", "a", "message", "to", "CM" ]
def send(self, message, body_params=None):
    """Send a message to CM

    :param message: a message instance
    :type message: :class:`.Msg`, :class:`.MsgProto`
    :param body_params: a dict with params to the body (only :class:`.MsgProto`)
    :type body_params: dict
    """
    if not self.connected:
        self._LOG.debug("Trying to send message when not connected. (discarded)")
    else:
        if body_params and isinstance(message, MsgProto):
            proto_fill_from_dict(message.body, body_params)

        CMClient.send(self, message)
[ "def", "send", "(", "self", ",", "message", ",", "body_params", "=", "None", ")", ":", "if", "not", "self", ".", "connected", ":", "self", ".", "_LOG", ".", "debug", "(", "\"Trying to send message when not connected. (discarded)\"", ")", "else", ":", "if", "body_params", "and", "isinstance", "(", "message", ",", "MsgProto", ")", ":", "proto_fill_from_dict", "(", "message", ".", "body", ",", "body_params", ")", "CMClient", ".", "send", "(", "self", ",", "message", ")" ]
https://github.com/ValvePython/steam/blob/7aef9d2df57c2195f35bd85013e1b5ccb04624a5/steam/client/__init__.py#L290-L304
IntelAI/nauta
bbedb114a755cf1f43b834a58fc15fb6e3a4b291
applications/cli/scripts/mypy_check.py
python
filter_mypy_results
(mypy_results: List[str])
return [error for error in mypy_results if not any(ignored_error in error for ignored_error in IGNORED_ERRORS)]
[]
def filter_mypy_results(mypy_results: List[str]) -> List[str]:
    return [error for error in mypy_results
            if not any(ignored_error in error for ignored_error in IGNORED_ERRORS)]
[ "def", "filter_mypy_results", "(", "mypy_results", ":", "List", "[", "str", "]", ")", "->", "List", "[", "str", "]", ":", "return", "[", "error", "for", "error", "in", "mypy_results", "if", "not", "any", "(", "ignored_error", "in", "error", "for", "ignored_error", "in", "IGNORED_ERRORS", ")", "]" ]
https://github.com/IntelAI/nauta/blob/bbedb114a755cf1f43b834a58fc15fb6e3a4b291/applications/cli/scripts/mypy_check.py#L39-L41
guildai/guildai
1665985a3d4d788efc1a3180ca51cc417f71ca78
guild/external/setuptools/command/egg_info.py
python
FileList.global_exclude
(self, pattern)
return self._remove_files(match.match)
Exclude all files anywhere that match the pattern.
Exclude all files anywhere that match the pattern.
[ "Exclude", "all", "files", "anywhere", "that", "match", "the", "pattern", "." ]
def global_exclude(self, pattern):
    """
    Exclude all files anywhere that match the pattern.
    """
    match = translate_pattern(os.path.join('**', pattern))
    return self._remove_files(match.match)
[ "def", "global_exclude", "(", "self", ",", "pattern", ")", ":", "match", "=", "translate_pattern", "(", "os", ".", "path", ".", "join", "(", "'**'", ",", "pattern", ")", ")", "return", "self", ".", "_remove_files", "(", "match", ".", "match", ")" ]
https://github.com/guildai/guildai/blob/1665985a3d4d788efc1a3180ca51cc417f71ca78/guild/external/setuptools/command/egg_info.py#L467-L472
dipu-bd/lightnovel-crawler
eca7a71f217ce7a6b0a54d2e2afb349571871880
lncrawl/bots/telegram/__init__.py
python
TelegramBot.error_handler
(self, bot, update, error)
Log Errors caused by Updates.
Log Errors caused by Updates.
[ "Log", "Errors", "caused", "by", "Updates", "." ]
def error_handler(self, bot, update, error):
    """Log Errors caused by Updates."""
    logger.warn('Error: %s\nCaused by: %s', error, update)
[ "def", "error_handler", "(", "self", ",", "bot", ",", "update", ",", "error", ")", ":", "logger", ".", "warn", "(", "'Error: %s\\nCaused by: %s'", ",", "error", ",", "update", ")" ]
https://github.com/dipu-bd/lightnovel-crawler/blob/eca7a71f217ce7a6b0a54d2e2afb349571871880/lncrawl/bots/telegram/__init__.py#L129-L131
heimlich1024/OD_CopyPasteExternal
943b993198e16d19f1fb4ba44049e498abf1e993
C4D/C4D_PasteFromExternal.py
python
vertDataToObj
(outputfile)
[]
def vertDataToObj(outputfile):
    output = ""
    inputfile = tempfile.gettempdir() + os.sep + "ODVertexData.txt"
    file = open(inputfile, "r")
    lines = file.readlines()
    file.close()
    #Parse File to see what Data we have
    vertline = []; polyline = []; vtxnormals = []; uvMaps = []; morphMaps = []; weightMaps = []
    count = 0
    for line in lines:
        if line.startswith("VERTICES:"):
            vertline.append([int(line.strip().split(":")[1].strip()), count])
        if line.startswith("POLYGONS:"):
            polyline.append([int(line.strip().split(":")[1].strip()), count])
        if line.startswith("VERTEXNORMALS:"):
            vtxnormals.append([int(line.strip().split(":")[1].strip()), count])
        if line.startswith("UV:"):
            if line.strip().split(":")[1:][1] != "0":
                uvMaps.append([line.strip().split(":")[1:], count]) # changed this to add the # of uv coordinates into the mix
        count += 1
    #write header
    output += "o ODVertexData.obj\n"
    output += "g default\n"
    #rewrite verts
    for verts in vertline:
        for i in xrange(verts[1] + 1, verts[1] + verts[0] + 1):
            x = map(float, lines[i].split())
            output += "v " + str(x[0]) + " " + str(x[1]) + " " + str(x[2]) + "\n"
    uvforobj = []
    values = []
    assignment = []
    for uvMap in uvMaps:
        count = 0
        for i in range(int(uvMap[0][1])):
            split = lines[uvMap[1]+1+count].split(":")
            if str(float(split[0].split(" ")[0])) + " " + str(float(split[0].split(" ")[1])) not in values:
                values.append(str(float(split[0].split(" ")[0])) + " " + str(float(split[0].split(" ")[1])))
            assignment.append(str(float(split[0].split(" ")[0])) + " " + str(float(split[0].split(" ")[1])))
            count +=1
    values.sort()
    for val in values:
        output += "vt " + val + "\n"
    for norm in vtxnormals:
        for i in xrange(norm[1] + 1, norm[1] + norm[0] + 1):
            x = map(float, lines[i].split())
            output += "vn " + str(x[0]) + " " + str(x[1]) + " " + str(x[2]) + "\n"
    #create Polygons
    for polygons in polyline:
        polys = []
        count = 0
        ncount = 0
        mat = ""
        testnorm = []
        for i in xrange(polygons[1] + 1, polygons[1] + polygons[0] + 1):
            pts = lines[i].split(";;")[0].split(",")
            newpts = [] #indices in an obj start at 1, so we gotta add one to each index
            testpts = []
            testidx = []
            for p in range(len(pts)):
                if len(uvMaps) < 1:
                    newpts.append(str(int(pts[p]) + 1))
                    if len(vtxnormals) > 0:
                        newpts[-1] = str(newpts[-1]) + "//" + str(count+1)
                else:
                    testpts.append(str(int(pts[p])+1))
                    testidx.append(str(values.index(assignment[count])+1))
                    if len(vtxnormals) > 0:
                        testnorm.append(count)
                count += 1
            string = ""
            for t in range(len(testpts)):
                string += " " + testpts[t] + "/" + testidx[len(testpts)-1-t]
                if len(testnorm) > 0:
                    string += "/" + str(testnorm[ncount]+1)
                    ncount += 1
            if lines[i].split(";;")[1].strip() != mat:
                output += "g " + lines[i].split(";;")[1].strip() + "\n"
                output += "usemtl " + lines[i].split(";;")[1].strip() + "\n"
                #output += "s 1\n"
                mat = lines[i].split(";;")[1].strip()
            if string != "":
                output += "f " + string.strip() + "\n"
            else:
                output += "f " + " ".join(newpts) + "\n"
    #writing output file
    f = open(outputfile, "w")
    f.write(output)
    f.close()
[ "def", "vertDataToObj", "(", "outputfile", ")", ":", "output", "=", "\"\"", "inputfile", "=", "tempfile", ".", "gettempdir", "(", ")", "+", "os", ".", "sep", "+", "\"ODVertexData.txt\"", "file", "=", "open", "(", "inputfile", ",", "\"r\"", ")", "lines", "=", "file", ".", "readlines", "(", ")", "file", ".", "close", "(", ")", "#Parse File to see what Data we have", "vertline", "=", "[", "]", "polyline", "=", "[", "]", "vtxnormals", "=", "[", "]", "uvMaps", "=", "[", "]", "morphMaps", "=", "[", "]", "weightMaps", "=", "[", "]", "count", "=", "0", "for", "line", "in", "lines", ":", "if", "line", ".", "startswith", "(", "\"VERTICES:\"", ")", ":", "vertline", ".", "append", "(", "[", "int", "(", "line", ".", "strip", "(", ")", ".", "split", "(", "\":\"", ")", "[", "1", "]", ".", "strip", "(", ")", ")", ",", "count", "]", ")", "if", "line", ".", "startswith", "(", "\"POLYGONS:\"", ")", ":", "polyline", ".", "append", "(", "[", "int", "(", "line", ".", "strip", "(", ")", ".", "split", "(", "\":\"", ")", "[", "1", "]", ".", "strip", "(", ")", ")", ",", "count", "]", ")", "if", "line", ".", "startswith", "(", "\"VERTEXNORMALS:\"", ")", ":", "vtxnormals", ".", "append", "(", "[", "int", "(", "line", ".", "strip", "(", ")", ".", "split", "(", "\":\"", ")", "[", "1", "]", ".", "strip", "(", ")", ")", ",", "count", "]", ")", "if", "line", ".", "startswith", "(", "\"UV:\"", ")", ":", "if", "line", ".", "strip", "(", ")", ".", "split", "(", "\":\"", ")", "[", "1", ":", "]", "[", "1", "]", "!=", "\"0\"", ":", "uvMaps", ".", "append", "(", "[", "line", ".", "strip", "(", ")", ".", "split", "(", "\":\"", ")", "[", "1", ":", "]", ",", "count", "]", ")", "# changed this to add the # of uv coordinates into the mix", "count", "+=", "1", "#write header", "output", "+=", "\"o ODVertexData.obj\\n\"", "output", "+=", "\"g default\\n\"", "#rewrite verts", "for", "verts", "in", "vertline", ":", "for", "i", "in", "xrange", "(", "verts", "[", "1", "]", "+", "1", ",", "verts", "[", "1", "]", "+", "verts", "[", "0", "]", "+", "1", ")", ":", "x", "=", "map", "(", "float", ",", "lines", "[", "i", "]", ".", "split", "(", ")", ")", "output", "+=", "\"v \"", "+", "str", "(", "x", "[", "0", "]", ")", "+", "\" \"", "+", "str", "(", "x", "[", "1", "]", ")", "+", "\" \"", "+", "str", "(", "x", "[", "2", "]", ")", "+", "\"\\n\"", "uvforobj", "=", "[", "]", "values", "=", "[", "]", "assignment", "=", "[", "]", "for", "uvMap", "in", "uvMaps", ":", "count", "=", "0", "for", "i", "in", "range", "(", "int", "(", "uvMap", "[", "0", "]", "[", "1", "]", ")", ")", ":", "split", "=", "lines", "[", "uvMap", "[", "1", "]", "+", "1", "+", "count", "]", ".", "split", "(", "\":\"", ")", "if", "str", "(", "float", "(", "split", "[", "0", "]", ".", "split", "(", "\" \"", ")", "[", "0", "]", ")", ")", "+", "\" \"", "+", "str", "(", "float", "(", "split", "[", "0", "]", ".", "split", "(", "\" \"", ")", "[", "1", "]", ")", ")", "not", "in", "values", ":", "values", ".", "append", "(", "str", "(", "float", "(", "split", "[", "0", "]", ".", "split", "(", "\" \"", ")", "[", "0", "]", ")", ")", "+", "\" \"", "+", "str", "(", "float", "(", "split", "[", "0", "]", ".", "split", "(", "\" \"", ")", "[", "1", "]", ")", ")", ")", "assignment", ".", "append", "(", "str", "(", "float", "(", "split", "[", "0", "]", ".", "split", "(", "\" \"", ")", "[", "0", "]", ")", ")", "+", "\" \"", "+", "str", "(", "float", "(", "split", "[", "0", "]", ".", "split", "(", "\" \"", ")", "[", "1", "]", ")", ")", ")", "count", "+=", "1", "values", ".", "sort", "(", ")", "for", 
"val", "in", "values", ":", "output", "+=", "\"vt \"", "+", "val", "+", "\"\\n\"", "for", "norm", "in", "vtxnormals", ":", "for", "i", "in", "xrange", "(", "norm", "[", "1", "]", "+", "1", ",", "norm", "[", "1", "]", "+", "norm", "[", "0", "]", "+", "1", ")", ":", "x", "=", "map", "(", "float", ",", "lines", "[", "i", "]", ".", "split", "(", ")", ")", "output", "+=", "\"vn \"", "+", "str", "(", "x", "[", "0", "]", ")", "+", "\" \"", "+", "str", "(", "x", "[", "1", "]", ")", "+", "\" \"", "+", "str", "(", "x", "[", "2", "]", ")", "+", "\"\\n\"", "#create Polygons", "for", "polygons", "in", "polyline", ":", "polys", "=", "[", "]", "count", "=", "0", "ncount", "=", "0", "mat", "=", "\"\"", "testnorm", "=", "[", "]", "for", "i", "in", "xrange", "(", "polygons", "[", "1", "]", "+", "1", ",", "polygons", "[", "1", "]", "+", "polygons", "[", "0", "]", "+", "1", ")", ":", "pts", "=", "lines", "[", "i", "]", ".", "split", "(", "\";;\"", ")", "[", "0", "]", ".", "split", "(", "\",\"", ")", "newpts", "=", "[", "]", "#indices in an obj start at 1, so we gotta add one to each index", "testpts", "=", "[", "]", "testidx", "=", "[", "]", "for", "p", "in", "range", "(", "len", "(", "pts", ")", ")", ":", "if", "len", "(", "uvMaps", ")", "<", "1", ":", "newpts", ".", "append", "(", "str", "(", "int", "(", "pts", "[", "p", "]", ")", "+", "1", ")", ")", "if", "len", "(", "vtxnormals", ")", ">", "0", ":", "newpts", "[", "-", "1", "]", "=", "str", "(", "newpts", "[", "-", "1", "]", ")", "+", "\"//\"", "+", "str", "(", "count", "+", "1", ")", "else", ":", "testpts", ".", "append", "(", "str", "(", "int", "(", "pts", "[", "p", "]", ")", "+", "1", ")", ")", "testidx", ".", "append", "(", "str", "(", "values", ".", "index", "(", "assignment", "[", "count", "]", ")", "+", "1", ")", ")", "if", "len", "(", "vtxnormals", ")", ">", "0", ":", "testnorm", ".", "append", "(", "count", ")", "count", "+=", "1", "string", "=", "\"\"", "for", "t", "in", "range", "(", "len", "(", "testpts", ")", ")", ":", "string", "+=", "\" \"", "+", "testpts", "[", "t", "]", "+", "\"/\"", "+", "testidx", "[", "len", "(", "testpts", ")", "-", "1", "-", "t", "]", "if", "len", "(", "testnorm", ")", ">", "0", ":", "string", "+=", "\"/\"", "+", "str", "(", "testnorm", "[", "ncount", "]", "+", "1", ")", "ncount", "+=", "1", "if", "lines", "[", "i", "]", ".", "split", "(", "\";;\"", ")", "[", "1", "]", ".", "strip", "(", ")", "!=", "mat", ":", "output", "+=", "\"g \"", "+", "lines", "[", "i", "]", ".", "split", "(", "\";;\"", ")", "[", "1", "]", ".", "strip", "(", ")", "+", "\"\\n\"", "output", "+=", "\"usemtl \"", "+", "lines", "[", "i", "]", ".", "split", "(", "\";;\"", ")", "[", "1", "]", ".", "strip", "(", ")", "+", "\"\\n\"", "#output += \"s 1\\n\"", "mat", "=", "lines", "[", "i", "]", ".", "split", "(", "\";;\"", ")", "[", "1", "]", ".", "strip", "(", ")", "if", "string", "!=", "\"\"", ":", "output", "+=", "\"f \"", "+", "string", ".", "strip", "(", ")", "+", "\"\\n\"", "else", ":", "output", "+=", "\"f \"", "+", "\" \"", ".", "join", "(", "newpts", ")", "+", "\"\\n\"", "#writing output file", "f", "=", "open", "(", "outputfile", ",", "\"w\"", ")", "f", ".", "write", "(", "output", ")", "f", ".", "close", "(", ")" ]
https://github.com/heimlich1024/OD_CopyPasteExternal/blob/943b993198e16d19f1fb4ba44049e498abf1e993/C4D/C4D_PasteFromExternal.py#L6-L103
fooying/3102
0faee38c30b2e24154f41e68457cfd8f7a61c040
thirdparty/dns/name.py
python
from_unicode
(text, origin = root)
return Name(labels)
Convert unicode text into a Name object. Labels are encoded in IDN ACE form. @rtype: dns.name.Name object
Convert unicode text into a Name object.
[ "Convert", "unicode", "text", "into", "a", "Name", "object", "." ]
def from_unicode(text, origin = root): """Convert unicode text into a Name object. Labels are encoded in IDN ACE form. @rtype: dns.name.Name object """ if not isinstance(text, unicode): raise ValueError("input to from_unicode() must be a unicode string") if not (origin is None or isinstance(origin, Name)): raise ValueError("origin must be a Name or None") labels = [] label = u'' escaping = False edigits = 0 total = 0 if text == u'@': text = u'' if text: if text == u'.': return Name(['']) # no Unicode "u" on this constant! for c in text: if escaping: if edigits == 0: if c.isdigit(): total = int(c) edigits += 1 else: label += c escaping = False else: if not c.isdigit(): raise BadEscape total *= 10 total += int(c) edigits += 1 if edigits == 3: escaping = False label += chr(total) elif c == u'.' or c == u'\u3002' or \ c == u'\uff0e' or c == u'\uff61': if len(label) == 0: raise EmptyLabel labels.append(encodings.idna.ToASCII(label)) label = u'' elif c == u'\\': escaping = True edigits = 0 total = 0 else: label += c if escaping: raise BadEscape if len(label) > 0: labels.append(encodings.idna.ToASCII(label)) else: labels.append('') if (len(labels) == 0 or labels[-1] != '') and not origin is None: labels.extend(list(origin.labels)) return Name(labels)
[ "def", "from_unicode", "(", "text", ",", "origin", "=", "root", ")", ":", "if", "not", "isinstance", "(", "text", ",", "unicode", ")", ":", "raise", "ValueError", "(", "\"input to from_unicode() must be a unicode string\"", ")", "if", "not", "(", "origin", "is", "None", "or", "isinstance", "(", "origin", ",", "Name", ")", ")", ":", "raise", "ValueError", "(", "\"origin must be a Name or None\"", ")", "labels", "=", "[", "]", "label", "=", "u''", "escaping", "=", "False", "edigits", "=", "0", "total", "=", "0", "if", "text", "==", "u'@'", ":", "text", "=", "u''", "if", "text", ":", "if", "text", "==", "u'.'", ":", "return", "Name", "(", "[", "''", "]", ")", "# no Unicode \"u\" on this constant!", "for", "c", "in", "text", ":", "if", "escaping", ":", "if", "edigits", "==", "0", ":", "if", "c", ".", "isdigit", "(", ")", ":", "total", "=", "int", "(", "c", ")", "edigits", "+=", "1", "else", ":", "label", "+=", "c", "escaping", "=", "False", "else", ":", "if", "not", "c", ".", "isdigit", "(", ")", ":", "raise", "BadEscape", "total", "*=", "10", "total", "+=", "int", "(", "c", ")", "edigits", "+=", "1", "if", "edigits", "==", "3", ":", "escaping", "=", "False", "label", "+=", "chr", "(", "total", ")", "elif", "c", "==", "u'.'", "or", "c", "==", "u'\\u3002'", "or", "c", "==", "u'\\uff0e'", "or", "c", "==", "u'\\uff61'", ":", "if", "len", "(", "label", ")", "==", "0", ":", "raise", "EmptyLabel", "labels", ".", "append", "(", "encodings", ".", "idna", ".", "ToASCII", "(", "label", ")", ")", "label", "=", "u''", "elif", "c", "==", "u'\\\\'", ":", "escaping", "=", "True", "edigits", "=", "0", "total", "=", "0", "else", ":", "label", "+=", "c", "if", "escaping", ":", "raise", "BadEscape", "if", "len", "(", "label", ")", ">", "0", ":", "labels", ".", "append", "(", "encodings", ".", "idna", ".", "ToASCII", "(", "label", ")", ")", "else", ":", "labels", ".", "append", "(", "''", ")", "if", "(", "len", "(", "labels", ")", "==", "0", "or", "labels", "[", "-", "1", "]", "!=", "''", ")", "and", "not", "origin", "is", "None", ":", "labels", ".", "extend", "(", "list", "(", "origin", ".", "labels", ")", ")", "return", "Name", "(", "labels", ")" ]
https://github.com/fooying/3102/blob/0faee38c30b2e24154f41e68457cfd8f7a61c040/thirdparty/dns/name.py#L546-L606
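A minimal usage sketch of the function above (hedged: assumes the module is importable as dns.name, as the path suggests; Python 2, matching the unicode checks in the code):

import dns.name
n = dns.name.from_unicode(u'www.example.com.')
print n.labels    # ('www', 'example', 'com', '') -- each label already in IDN ACE form
# A relative name is completed with the origin's labels, per the final branch:
rel = dns.name.from_unicode(u'www', origin=dns.name.from_unicode(u'example.com.'))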
KalleHallden/AutoTimer
2d954216700c4930baa154e28dbddc34609af7ce
env/lib/python2.7/site-packages/CoreMedia/_macros.py
python
CMTIME_IS_INDEFINITE
(time)
return CMTIME_IS_VALID(time) and (time.flags & CoreMedia.kCMTimeFlags_Indefinite) != 0
[]
def CMTIME_IS_INDEFINITE(time): return CMTIME_IS_VALID(time) and (time.flags & CoreMedia.kCMTimeFlags_Indefinite) != 0
[ "def", "CMTIME_IS_INDEFINITE", "(", "time", ")", ":", "return", "CMTIME_IS_VALID", "(", "time", ")", "and", "(", "time", ".", "flags", "&", "CoreMedia", ".", "kCMTimeFlags_Indefinite", ")", "!=", "0" ]
https://github.com/KalleHallden/AutoTimer/blob/2d954216700c4930baa154e28dbddc34609af7ce/env/lib/python2.7/site-packages/CoreMedia/_macros.py#L42-L43
CaptainEven/Vehicle-Car-detection-and-multilabel-classification
0b0ab3ad8478c5a0ac29819b4fce3ae110d44d82
Clipper.py
python
Car_DR.cls_draw_bbox
(self, output, orig_img)
orig_img is a numpy array read via OpenCV (channel order BGR). Predict vehicle attributes on top of each bbox and draw the bboxes onto the original image.
orig_img is a numpy array read via OpenCV (channel order BGR). Predict vehicle attributes on top of each bbox and draw the bboxes onto the original image.
[ "orig_img", "is", "a", "numpy", "array", "read", "via", "OpenCV", "(", "channel", "order", "BGR", ")", ".", "Predict", "vehicle", "attributes", "on", "top", "of", "each", "bbox", "and", "draw", "the", "bboxes", "onto", "the", "original", "image", "." ]
def cls_draw_bbox(self, output, orig_img): """ orig_img is a numpy array read via OpenCV (channel order BGR). Predict vehicle attributes on top of each bbox and draw the bboxes onto the original image. """ labels = [] pt_1s = [] pt_2s = [] # collect vehicle attribute labels for det in output: # rectangle points pt_1 = tuple(det[1:3].int()) # the left-up point pt_2 = tuple(det[3:5].int()) # the right down point pt_1s.append(pt_1) pt_2s.append(pt_2) # run the classifier to predict vehicle attributes: BGR => RGB ROI = Image.fromarray( orig_img[pt_1[1]: pt_2[1], pt_1[0]: pt_2[0]][:, :, ::-1]) # ROI.show() car_color, car_direction, car_type = self.manager.predict(ROI) label = str(car_color + ' ' + car_direction + ' ' + car_type) labels.append(label) print('=> predicted label: ', label) # draw the bboxes onto the original image color = (0, 215, 255) for i, det in enumerate(output): pt_1 = pt_1s[i] pt_2 = pt_2s[i] # draw the bounding box cv2.rectangle(orig_img, pt_1, pt_2, color, thickness=2) # measure the text size of this box's own label txt_size = cv2.getTextSize( labels[i], cv2.FONT_HERSHEY_PLAIN, 2, 2)[0] # text size # pt_2 = pt_1[0] + txt_size[0] + 3, pt_1[1] + txt_size[1] + 5 pt_2 = pt_1[0] + txt_size[0] + 3, pt_1[1] - txt_size[1] - 5 # draw the text background rectangle cv2.rectangle(orig_img, pt_1, pt_2, color, thickness=-1) # text # draw the text cv2.putText(orig_img, labels[i], (pt_1[0], pt_1[1]), # pt_1[1] + txt_size[1] + 4 cv2.FONT_HERSHEY_PLAIN, 2, [225, 255, 255], 2)
[ "def", "cls_draw_bbox", "(", "self", ",", "output", ",", "orig_img", ")", ":", "labels", "=", "[", "]", "pt_1s", "=", "[", "]", "pt_2s", "=", "[", "]", "# 获取车辆属性labels", "for", "det", "in", "output", ":", "# rectangle points", "pt_1", "=", "tuple", "(", "det", "[", "1", ":", "3", "]", ".", "int", "(", ")", ")", "# the left-up point", "pt_2", "=", "tuple", "(", "det", "[", "3", ":", "5", "]", ".", "int", "(", ")", ")", "# the right down point", "pt_1s", ".", "append", "(", "pt_1", ")", "pt_2s", ".", "append", "(", "pt_2", ")", "# 调用分类器预测车辆属性: BGR => RGB", "ROI", "=", "Image", ".", "fromarray", "(", "orig_img", "[", "pt_1", "[", "1", "]", ":", "pt_2", "[", "1", "]", ",", "pt_1", "[", "0", "]", ":", "pt_2", "[", "0", "]", "]", "[", ":", ",", ":", ",", ":", ":", "-", "1", "]", ")", "# ROI.show()", "car_color", ",", "car_direction", ",", "car_type", "=", "self", ".", "manager", ".", "predict", "(", "ROI", ")", "label", "=", "str", "(", "car_color", "+", "' '", "+", "car_direction", "+", "' '", "+", "car_type", ")", "labels", ".", "append", "(", "label", ")", "print", "(", "'=> predicted label: '", ",", "label", ")", "# 将bbox绘制到原图", "color", "=", "(", "0", ",", "215", ",", "255", ")", "for", "i", ",", "det", "in", "enumerate", "(", "output", ")", ":", "pt_1", "=", "pt_1s", "[", "i", "]", "pt_2", "=", "pt_2s", "[", "i", "]", "# 绘制bounding box", "cv2", ".", "rectangle", "(", "orig_img", ",", "pt_1", ",", "pt_2", ",", "color", ",", "thickness", "=", "2", ")", "# 获取文本大小", "txt_size", "=", "cv2", ".", "getTextSize", "(", "label", ",", "cv2", ".", "FONT_HERSHEY_PLAIN", ",", "2", ",", "2", ")", "[", "0", "]", "# 文字大小", "# pt_2 = pt_1[0] + txt_size[0] + 3, pt_1[1] + txt_size[1] + 5", "pt_2", "=", "pt_1", "[", "0", "]", "+", "txt_size", "[", "0", "]", "+", "3", ",", "pt_1", "[", "1", "]", "-", "txt_size", "[", "1", "]", "-", "5", "# 绘制文本底色矩形", "cv2", ".", "rectangle", "(", "orig_img", ",", "pt_1", ",", "pt_2", ",", "color", ",", "thickness", "=", "-", "1", ")", "# text", "# 绘制文本", "cv2", ".", "putText", "(", "orig_img", ",", "labels", "[", "i", "]", ",", "(", "pt_1", "[", "0", "]", ",", "pt_1", "[", "1", "]", ")", ",", "# pt_1[1] + txt_size[1] + 4", "cv2", ".", "FONT_HERSHEY_PLAIN", ",", "2", ",", "[", "225", ",", "255", ",", "255", "]", ",", "2", ")" ]
https://github.com/CaptainEven/Vehicle-Car-detection-and-multilabel-classification/blob/0b0ab3ad8478c5a0ac29819b4fce3ae110d44d82/Clipper.py#L600-L649
carbonblack/cbapi-python
24d677ffd99aee911c2c76ecb5528e4e9320c7cc
src/cbapi/auth.py
python
FileCredentialStore.get_credentials
(self, profile=None)
return Credentials(retval)
[]
def get_credentials(self, profile=None): credential_profile = profile or "default" if credential_profile not in self.get_profiles(): raise CredentialError("Cannot find credential profile '%s' after searching in these files: %s." % (credential_profile, ", ".join(self.credential_search_path))) retval = {} for k, v in six.iteritems(default_profile): retval[k] = self.credentials.get(credential_profile, k) if not retval["url"] or not retval["token"]: raise CredentialError("Token and/or URL not available for profile %s" % credential_profile) return Credentials(retval)
[ "def", "get_credentials", "(", "self", ",", "profile", "=", "None", ")", ":", "credential_profile", "=", "profile", "or", "\"default\"", "if", "credential_profile", "not", "in", "self", ".", "get_profiles", "(", ")", ":", "raise", "CredentialError", "(", "\"Cannot find credential profile '%s' after searching in these files: %s.\"", "%", "(", "credential_profile", ",", "\", \"", ".", "join", "(", "self", ".", "credential_search_path", ")", ")", ")", "retval", "=", "{", "}", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "default_profile", ")", ":", "retval", "[", "k", "]", "=", "self", ".", "credentials", ".", "get", "(", "credential_profile", ",", "k", ")", "if", "not", "retval", "[", "\"url\"", "]", "or", "not", "retval", "[", "\"token\"", "]", ":", "raise", "CredentialError", "(", "\"Token and/or URL not available for profile %s\"", "%", "credential_profile", ")", "return", "Credentials", "(", "retval", ")" ]
https://github.com/carbonblack/cbapi-python/blob/24d677ffd99aee911c2c76ecb5528e4e9320c7cc/src/cbapi/auth.py#L99-L112
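A hedged usage sketch (the product-name constructor argument and search-path behaviour are assumptions; in cbapi the store is usually reached indirectly through the API objects rather than directly):

store = FileCredentialStore("response")           # assumption: product name selects the credential files
creds = store.get_credentials(profile="default")  # raises CredentialError if the profile, url or token is missing
print(creds["url"], creds["token"])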
dansoutner/LSTM
1817b881d2f03e9d3746f39e734426c00134fda7
ArpaLM.py
python
ArpaLM.mgrams
(self, m)
Return an iterator over N-Grams of order M+1. @param m: Length of history (i.e. order-1) of desired N-Grams. @type m: int @return: Iterator over N-Grams @rtype: generator(NGram)
Return an iterator over N-Grams of order M+1.
[ "Return", "an", "iterator", "over", "N", "-", "Grams", "of", "order", "M", "+", "1", "." ]
def mgrams(self, m): """ Return an iterator over N-Grams of order M+1. @param m: Length of history (i.e. order-1) of desired N-Grams. @type m: int @return: Iterator over N-Grams @rtype: generator(NGram) """ for ng, ngid in self.ngmap[m].iteritems(): if isinstance(ng, str): ng = (ng,) yield self.NGram(ng, *self.ngrams[m][ngid,:])
[ "def", "mgrams", "(", "self", ",", "m", ")", ":", "for", "ng", ",", "ngid", "in", "self", ".", "ngmap", "[", "m", "]", ".", "iteritems", "(", ")", ":", "if", "isinstance", "(", "ng", ",", "str", ")", ":", "ng", "=", "(", "ng", ",", ")", "yield", "self", ".", "NGram", "(", "ng", ",", "*", "self", ".", "ngrams", "[", "m", "]", "[", "ngid", ",", ":", "]", ")" ]
https://github.com/dansoutner/LSTM/blob/1817b881d2f03e9d3746f39e734426c00134fda7/ArpaLM.py#L262-L274
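A short usage sketch (hedged: constructing ArpaLM from a file path is an assumption about this repo's API; Python 2, matching the iteritems() in the body):

lm = ArpaLM('model.arpa')     # hypothetical ARPA-format model file
for ng in lm.mgrams(1):       # history length 1, i.e. iterate the bigrams
    print ng                  # each item is an ArpaLM.NGram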
git-cola/git-cola
b48b8028e0c3baf47faf7b074b9773737358163d
cola/widgets/dag.py
python
ReaderThread.run
(self)
[]
def run(self): context = self.context repo = dag.RepoReader(context, self.params) repo.reset() self.begin.emit() commits = [] for c in repo.get(): self._mutex.lock() if self._stop: self._condition.wait(self._mutex) self._mutex.unlock() if self._abort: repo.reset() return commits.append(c) if len(commits) >= 512: self.add.emit(commits) commits = [] self.status.emit(repo.returncode == 0) if commits: self.add.emit(commits) self.end.emit()
[ "def", "run", "(", "self", ")", ":", "context", "=", "self", ".", "context", "repo", "=", "dag", ".", "RepoReader", "(", "context", ",", "self", ".", "params", ")", "repo", ".", "reset", "(", ")", "self", ".", "begin", ".", "emit", "(", ")", "commits", "=", "[", "]", "for", "c", "in", "repo", ".", "get", "(", ")", ":", "self", ".", "_mutex", ".", "lock", "(", ")", "if", "self", ".", "_stop", ":", "self", ".", "_condition", ".", "wait", "(", "self", ".", "_mutex", ")", "self", ".", "_mutex", ".", "unlock", "(", ")", "if", "self", ".", "_abort", ":", "repo", ".", "reset", "(", ")", "return", "commits", ".", "append", "(", "c", ")", "if", "len", "(", "commits", ")", ">=", "512", ":", "self", ".", "add", ".", "emit", "(", "commits", ")", "commits", "=", "[", "]", "self", ".", "status", ".", "emit", "(", "repo", ".", "returncode", "==", "0", ")", "if", "commits", ":", "self", ".", "add", ".", "emit", "(", "commits", ")", "self", ".", "end", ".", "emit", "(", ")" ]
https://github.com/git-cola/git-cola/blob/b48b8028e0c3baf47faf7b074b9773737358163d/cola/widgets/dag.py#L931-L953
iclavera/learning_to_adapt
bd7d99ba402521c96631e7d09714128f549db0f1
learning_to_adapt/mujoco_py/mjtypes.py
python
MjvCameraWrapper.pose
(self)
return self._wrapped.contents.pose
[]
def pose(self): return self._wrapped.contents.pose
[ "def", "pose", "(", "self", ")", ":", "return", "self", ".", "_wrapped", ".", "contents", ".", "pose" ]
https://github.com/iclavera/learning_to_adapt/blob/bd7d99ba402521c96631e7d09714128f549db0f1/learning_to_adapt/mujoco_py/mjtypes.py#L1395-L1396
pyqtgraph/pyqtgraph
ac3887abfca4e529aac44f022f8e40556a2587b0
pyqtgraph/opengl/GLGraphicsItem.py
python
GLGraphicsItem.visible
(self)
return self.__visible
Return True if the item is currently set to be visible. Note that this does not guarantee that the item actually appears in the view, as it may be obscured or outside of the current view area.
Return True if the item is currently set to be visible. Note that this does not guarantee that the item actually appears in the view, as it may be obscured or outside of the current view area.
[ "Return", "True", "if", "the", "item", "is", "currently", "set", "to", "be", "visible", ".", "Note", "that", "this", "does", "not", "guarantee", "that", "the", "item", "actually", "appears", "in", "the", "view", "as", "it", "may", "be", "obscured", "or", "outside", "of", "the", "current", "view", "area", "." ]
def visible(self): """Return True if the item is currently set to be visible. Note that this does not guarantee that the item actually appears in the view, as it may be obscured or outside of the current view area.""" return self.__visible
[ "def", "visible", "(", "self", ")", ":", "return", "self", ".", "__visible" ]
https://github.com/pyqtgraph/pyqtgraph/blob/ac3887abfca4e529aac44f022f8e40556a2587b0/pyqtgraph/opengl/GLGraphicsItem.py#L225-L229
anitab-org/vms
0189ce8260b705e0e75e0ab5418593f35066f106
vms/registration/views.py
python
load_states
(request)
return render( request, 'registration/state_dropdown_list_options.html', {'states': states} )
Renders the options of states dropdown list :return: states belonging to the selected country
Renders the options of states dropdown list
[ "Renders", "the", "options", "of", "states", "dropdown", "list" ]
def load_states(request): """ Renders the options of states dropdown list :return: states belonging to the selected country """ country_name = request.GET.get('country') states = Region.objects.filter(country__name=country_name).order_by('name') return render( request, 'registration/state_dropdown_list_options.html', {'states': states} )
[ "def", "load_states", "(", "request", ")", ":", "country_name", "=", "request", ".", "GET", ".", "get", "(", "'country'", ")", "states", "=", "Region", ".", "objects", ".", "filter", "(", "country__name", "=", "country_name", ")", ".", "order_by", "(", "'name'", ")", "return", "render", "(", "request", ",", "'registration/state_dropdown_list_options.html'", ",", "{", "'states'", ":", "states", "}", ")" ]
https://github.com/anitab-org/vms/blob/0189ce8260b705e0e75e0ab5418593f35066f106/vms/registration/views.py#L353-L365
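A wiring sketch for the view above (the route path and name are hypothetical; the template and queryset come straight from the code):

# urls.py (hypothetical):
#     path('ajax/load-states/', views.load_states, name='ajax_load_states')
# The view is then driven by a request such as
#     GET /ajax/load-states/?country=India
# and renders registration/state_dropdown_list_options.html with the matching
# Region rows, ordered by name.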
kdexd/virtex
2baba8a4f3a4d80d617b3bc59e4be25b1052db57
virtex/optim/lookahead.py
python
Lookahead.zero_grad
(self)
r"""Clear all grad buffers at the start of new forward pass.
r"""Clear all grad buffers at the start of new forward pass.
[ "r", "Clear", "all", "grad", "buffers", "at", "the", "start", "of", "new", "forward", "pass", "." ]
def zero_grad(self): r"""Clear all grad buffers at the start of new forward pass.""" self.optimizer.zero_grad()
[ "def", "zero_grad", "(", "self", ")", ":", "self", ".", "optimizer", ".", "zero_grad", "(", ")" ]
https://github.com/kdexd/virtex/blob/2baba8a4f3a4d80d617b3bc59e4be25b1052db57/virtex/optim/lookahead.py#L65-L67
Pymol-Scripts/Pymol-script-repo
bcd7bb7812dc6db1595953dfa4471fa15fb68c77
modules/pdb2pqr/contrib/numpy-1.1.0/numpy/distutils/misc_util.py
python
Configuration.__init__
(self, package_name=None, parent_name=None, top_path=None, package_path=None, caller_level=1, setup_name='setup.py', **attrs)
Construct configuration instance of a package. package_name -- name of the package Ex.: 'distutils' parent_name -- name of the parent package Ex.: 'numpy' top_path -- directory of the toplevel package Ex.: the directory where the numpy package source sits package_path -- directory of package. Will be computed by magic from the directory of the caller module if not specified Ex.: the directory where numpy.distutils is caller_level -- frame level to caller namespace, internal parameter.
Construct configuration instance of a package.
[ "Construct", "configuration", "instance", "of", "a", "package", "." ]
def __init__(self, package_name=None, parent_name=None, top_path=None, package_path=None, caller_level=1, setup_name='setup.py', **attrs): """Construct configuration instance of a package. package_name -- name of the package Ex.: 'distutils' parent_name -- name of the parent package Ex.: 'numpy' top_path -- directory of the toplevel package Ex.: the directory where the numpy package source sits package_path -- directory of package. Will be computed by magic from the directory of the caller module if not specified Ex.: the directory where numpy.distutils is caller_level -- frame level to caller namespace, internal parameter. """ self.name = dot_join(parent_name, package_name) self.version = None caller_frame = get_frame(caller_level) self.local_path = get_path_from_frame(caller_frame, top_path) # local_path -- directory of a file (usually setup.py) that # defines a configuration() function. if top_path is None: top_path = self.local_path self.local_path = '' if package_path is None: package_path = self.local_path elif os.path.isdir(njoin(self.local_path,package_path)): package_path = njoin(self.local_path,package_path) if not os.path.isdir(package_path or '.'): raise ValueError("%r is not a directory" % (package_path,)) self.top_path = top_path self.package_path = package_path # this is the relative path in the installed package self.path_in_package = os.path.join(*self.name.split('.')) self.list_keys = self._list_keys[:] self.dict_keys = self._dict_keys[:] for n in self.list_keys: v = copy.copy(attrs.get(n, [])) setattr(self, n, as_list(v)) for n in self.dict_keys: v = copy.copy(attrs.get(n, {})) setattr(self, n, v) known_keys = self.list_keys + self.dict_keys self.extra_keys = self._extra_keys[:] for n in attrs.keys(): if n in known_keys: continue a = attrs[n] setattr(self,n,a) if isinstance(a, list): self.list_keys.append(n) elif isinstance(a, dict): self.dict_keys.append(n) else: self.extra_keys.append(n) if os.path.exists(njoin(package_path,'__init__.py')): self.packages.append(self.name) self.package_dir[self.name] = package_path self.options = dict( ignore_setup_xxx_py = False, assume_default_configuration = False, delegate_options_to_subpackages = False, quiet = False, ) caller_instance = None for i in range(1,3): try: f = get_frame(i) except ValueError: break try: caller_instance = eval('self',f.f_globals,f.f_locals) break except NameError: pass if isinstance(caller_instance, self.__class__): if caller_instance.options['delegate_options_to_subpackages']: self.set_options(**caller_instance.options) self.setup_name = setup_name
[ "def", "__init__", "(", "self", ",", "package_name", "=", "None", ",", "parent_name", "=", "None", ",", "top_path", "=", "None", ",", "package_path", "=", "None", ",", "caller_level", "=", "1", ",", "setup_name", "=", "'setup.py'", ",", "*", "*", "attrs", ")", ":", "self", ".", "name", "=", "dot_join", "(", "parent_name", ",", "package_name", ")", "self", ".", "version", "=", "None", "caller_frame", "=", "get_frame", "(", "caller_level", ")", "self", ".", "local_path", "=", "get_path_from_frame", "(", "caller_frame", ",", "top_path", ")", "# local_path -- directory of a file (usually setup.py) that", "# defines a configuration() function.", "# local_path -- directory of a file (usually setup.py) that", "# defines a configuration() function.", "if", "top_path", "is", "None", ":", "top_path", "=", "self", ".", "local_path", "self", ".", "local_path", "=", "''", "if", "package_path", "is", "None", ":", "package_path", "=", "self", ".", "local_path", "elif", "os", ".", "path", ".", "isdir", "(", "njoin", "(", "self", ".", "local_path", ",", "package_path", ")", ")", ":", "package_path", "=", "njoin", "(", "self", ".", "local_path", ",", "package_path", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "package_path", "or", "'.'", ")", ":", "raise", "ValueError", "(", "\"%r is not a directory\"", "%", "(", "package_path", ",", ")", ")", "self", ".", "top_path", "=", "top_path", "self", ".", "package_path", "=", "package_path", "# this is the relative path in the installed package", "self", ".", "path_in_package", "=", "os", ".", "path", ".", "join", "(", "*", "self", ".", "name", ".", "split", "(", "'.'", ")", ")", "self", ".", "list_keys", "=", "self", ".", "_list_keys", "[", ":", "]", "self", ".", "dict_keys", "=", "self", ".", "_dict_keys", "[", ":", "]", "for", "n", "in", "self", ".", "list_keys", ":", "v", "=", "copy", ".", "copy", "(", "attrs", ".", "get", "(", "n", ",", "[", "]", ")", ")", "setattr", "(", "self", ",", "n", ",", "as_list", "(", "v", ")", ")", "for", "n", "in", "self", ".", "dict_keys", ":", "v", "=", "copy", ".", "copy", "(", "attrs", ".", "get", "(", "n", ",", "{", "}", ")", ")", "setattr", "(", "self", ",", "n", ",", "v", ")", "known_keys", "=", "self", ".", "list_keys", "+", "self", ".", "dict_keys", "self", ".", "extra_keys", "=", "self", ".", "_extra_keys", "[", ":", "]", "for", "n", "in", "attrs", ".", "keys", "(", ")", ":", "if", "n", "in", "known_keys", ":", "continue", "a", "=", "attrs", "[", "n", "]", "setattr", "(", "self", ",", "n", ",", "a", ")", "if", "isinstance", "(", "a", ",", "list", ")", ":", "self", ".", "list_keys", ".", "append", "(", "n", ")", "elif", "isinstance", "(", "a", ",", "dict", ")", ":", "self", ".", "dict_keys", ".", "append", "(", "n", ")", "else", ":", "self", ".", "extra_keys", ".", "append", "(", "n", ")", "if", "os", ".", "path", ".", "exists", "(", "njoin", "(", "package_path", ",", "'__init__.py'", ")", ")", ":", "self", ".", "packages", ".", "append", "(", "self", ".", "name", ")", "self", ".", "package_dir", "[", "self", ".", "name", "]", "=", "package_path", "self", ".", "options", "=", "dict", "(", "ignore_setup_xxx_py", "=", "False", ",", "assume_default_configuration", "=", "False", ",", "delegate_options_to_subpackages", "=", "False", ",", "quiet", "=", "False", ",", ")", "caller_instance", "=", "None", "for", "i", "in", "range", "(", "1", ",", "3", ")", ":", "try", ":", "f", "=", "get_frame", "(", "i", ")", "except", "ValueError", ":", "break", "try", ":", "caller_instance", "=", "eval", "(", "'self'", ",", "f", 
".", "f_globals", ",", "f", ".", "f_locals", ")", "break", "except", "NameError", ":", "pass", "if", "isinstance", "(", "caller_instance", ",", "self", ".", "__class__", ")", ":", "if", "caller_instance", ".", "options", "[", "'delegate_options_to_subpackages'", "]", ":", "self", ".", "set_options", "(", "*", "*", "caller_instance", ".", "options", ")", "self", ".", "setup_name", "=", "setup_name" ]
https://github.com/Pymol-Scripts/Pymol-script-repo/blob/bcd7bb7812dc6db1595953dfa4471fa15fb68c77/modules/pdb2pqr/contrib/numpy-1.1.0/numpy/distutils/misc_util.py#L585-L680
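This class is normally driven from a package's setup.py; a minimal sketch of that idiom (package names are hypothetical):

def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('mypackage', parent_package, top_path)
    config.add_subpackage('core')   # becomes mypackage.core
    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(configuration=configuration)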
edisonlz/fastor
342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3
base/site-packages/django/db/backends/__init__.py
python
BaseDatabaseWrapper.rollback
(self)
Rolls back a transaction and resets the dirty flag.
Rolls back a transaction and resets the dirty flag.
[ "Rolls", "back", "a", "transaction", "and", "resets", "the", "dirty", "flag", "." ]
def rollback(self): """ Rolls back a transaction and resets the dirty flag. """ self.validate_thread_sharing() self.validate_no_atomic_block() self._rollback() self.set_clean()
[ "def", "rollback", "(", "self", ")", ":", "self", ".", "validate_thread_sharing", "(", ")", "self", ".", "validate_no_atomic_block", "(", ")", "self", ".", "_rollback", "(", ")", "self", ".", "set_clean", "(", ")" ]
https://github.com/edisonlz/fastor/blob/342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3/base/site-packages/django/db/backends/__init__.py#L209-L216
materialsproject/pymatgen
8128f3062a334a2edd240e4062b5b9bdd1ae6f58
pymatgen/transformations/advanced_transformations.py
python
MultipleSubstitutionTransformation.__init__
( self, sp_to_replace, r_fraction, substitution_dict, charge_balance_species=None, order=True, )
Performs multiple fractional substitutions on a transmuter. Args: sp_to_replace: species to be replaced r_fraction: fraction of that specie to replace substitution_dict: dictionary of the format {2: ["Mg", "Ti", "V", "As", "Cr", "Ta", "N", "Nb"], 3: ["Ru", "Fe", "Co", "Ce", "As", "Cr", "Ta", "N", "Nb"], 4: ["Ru", "V", "Cr", "Ta", "N", "Nb"], 5: ["Ru", "W", "Mn"] } The number is the charge used for each of the list of elements (an element can be present in multiple lists) charge_balance_species: If specified, will balance the charge on the structure using that specie.
Performs multiple fractional substitutions on a transmuter.
[ "Performs", "multiple", "fractional", "substitutions", "on", "a", "transmuter", "." ]
def __init__( self, sp_to_replace, r_fraction, substitution_dict, charge_balance_species=None, order=True, ): """ Performs multiple fractional substitutions on a transmuter. Args: sp_to_replace: species to be replaced r_fraction: fraction of that specie to replace substitution_dict: dictionary of the format {2: ["Mg", "Ti", "V", "As", "Cr", "Ta", "N", "Nb"], 3: ["Ru", "Fe", "Co", "Ce", "As", "Cr", "Ta", "N", "Nb"], 4: ["Ru", "V", "Cr", "Ta", "N", "Nb"], 5: ["Ru", "W", "Mn"] } The number is the charge used for each of the list of elements (an element can be present in multiple lists) charge_balance_species: If specified, will balance the charge on the structure using that specie. """ self.sp_to_replace = sp_to_replace self.r_fraction = r_fraction self.substitution_dict = substitution_dict self.charge_balance_species = charge_balance_species self.order = order
[ "def", "__init__", "(", "self", ",", "sp_to_replace", ",", "r_fraction", ",", "substitution_dict", ",", "charge_balance_species", "=", "None", ",", "order", "=", "True", ",", ")", ":", "self", ".", "sp_to_replace", "=", "sp_to_replace", "self", ".", "r_fraction", "=", "r_fraction", "self", ".", "substitution_dict", "=", "substitution_dict", "self", ".", "charge_balance_species", "=", "charge_balance_species", "self", ".", "order", "=", "order" ]
https://github.com/materialsproject/pymatgen/blob/8128f3062a334a2edd240e4062b5b9bdd1ae6f58/pymatgen/transformations/advanced_transformations.py#L190-L219
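A hedged construction sketch (species, fraction and dopant lists are hypothetical; the branching behaviour follows the docstring above):

trans = MultipleSubstitutionTransformation(
    "S2-", 0.5,
    {2: ["Mg", "Zn"], 3: ["Al", "Ga"]},
    charge_balance_species="Li+",
)
# applying the transformation should produce one branch per charge key
# in substitution_dict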
omz/PythonistaAppTemplate
f560f93f8876d82a21d108977f90583df08d55af
PythonistaAppTemplate/PythonistaKit.framework/pylib_ext/sympy/physics/quantum/qexpr.py
python
split_qexpr_parts
(e)
return expr_part, qexpr_part
Split an expression into Expr and noncommutative QExpr parts.
Split an expression into Expr and noncommutative QExpr parts.
[ "Split", "an", "expression", "into", "Expr", "and", "noncommutative", "QExpr", "parts", "." ]
def split_qexpr_parts(e): """Split an expression into Expr and noncommutative QExpr parts.""" expr_part = [] qexpr_part = [] for arg in e.args: if not isinstance(arg, QExpr): expr_part.append(arg) else: qexpr_part.append(arg) return expr_part, qexpr_part
[ "def", "split_qexpr_parts", "(", "e", ")", ":", "expr_part", "=", "[", "]", "qexpr_part", "=", "[", "]", "for", "arg", "in", "e", ".", "args", ":", "if", "not", "isinstance", "(", "arg", ",", "QExpr", ")", ":", "expr_part", ".", "append", "(", "arg", ")", "else", ":", "qexpr_part", ".", "append", "(", "arg", ")", "return", "expr_part", ",", "qexpr_part" ]
https://github.com/omz/PythonistaAppTemplate/blob/f560f93f8876d82a21d108977f90583df08d55af/PythonistaAppTemplate/PythonistaKit.framework/pylib_ext/sympy/physics/quantum/qexpr.py#L413-L422
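A small sketch of the split; Ket is a QExpr subclass, so it lands in the noncommutative part:

from sympy import Symbol, Mul
from sympy.physics.quantum import Ket
from sympy.physics.quantum.qexpr import split_qexpr_parts

e = Mul(2, Symbol('a'), Ket('psi'))
expr_part, qexpr_part = split_qexpr_parts(e)
# expr_part  -> [2, a]
# qexpr_part -> [|psi>]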
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/lib/cherrypy/cherrypy/wsgiserver/wsgiserver3.py
python
ThreadPool.start
(self)
Start the pool of threads.
Start the pool of threads.
[ "Start", "the", "pool", "of", "threads", "." ]
def start(self): """Start the pool of threads.""" for i in range(self.min): self._threads.append(WorkerThread(self.server)) for worker in self._threads: worker.setName("CP Server " + worker.getName()) worker.start() for worker in self._threads: while not worker.ready: time.sleep(.1)
[ "def", "start", "(", "self", ")", ":", "for", "i", "in", "range", "(", "self", ".", "min", ")", ":", "self", ".", "_threads", ".", "append", "(", "WorkerThread", "(", "self", ".", "server", ")", ")", "for", "worker", "in", "self", ".", "_threads", ":", "worker", ".", "setName", "(", "\"CP Server \"", "+", "worker", ".", "getName", "(", ")", ")", "worker", ".", "start", "(", ")", "for", "worker", "in", "self", ".", "_threads", ":", "while", "not", "worker", ".", "ready", ":", "time", ".", "sleep", "(", ".1", ")" ]
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/cherrypy/cherrypy/wsgiserver/wsgiserver3.py#L1209-L1218
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/sqlalchemy/util/_collections.py
python
OrderedSet.intersection
(self, other)
return self.__class__(a for a in self if a in other)
[]
def intersection(self, other): other = set(other) return self.__class__(a for a in self if a in other)
[ "def", "intersection", "(", "self", ",", "other", ")", ":", "other", "=", "set", "(", "other", ")", "return", "self", ".", "__class__", "(", "a", "for", "a", "in", "self", "if", "a", "in", "other", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/sqlalchemy/util/_collections.py#L422-L424
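Because the generator iterates self, the result preserves the receiver's insertion order, not the argument's (a sketch):

s = OrderedSet([3, 1, 2])
list(s.intersection([2, 3]))   # [3, 2]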
xonsh/xonsh
b76d6f994f22a4078f602f8b386f4ec280c8461f
xonsh/events.py
python
Event.fire
(self, **kwargs)
return vals
Fires an event, calling registered handlers with the given arguments. A non-unique iterable of the results is returned. Each handler is called immediately. Exceptions are turned into warnings. Parameters ---------- **kwargs Keyword arguments to pass to each handler Returns ------- vals : iterable Return values of each handler. If multiple handlers return the same value, it will appear multiple times.
Fires an event, calling registered handlers with the given arguments. A non-unique iterable of the results is returned.
[ "Fires", "an", "event", "calling", "registered", "handlers", "with", "the", "given", "arguments", ".", "A", "non", "-", "unique", "iterable", "of", "the", "results", "is", "returned", "." ]
def fire(self, **kwargs): """ Fires an event, calling registered handlers with the given arguments. A non-unique iterable of the results is returned. Each handler is called immediately. Exceptions are turned into warnings. Parameters ---------- **kwargs Keyword arguments to pass to each handler Returns ------- vals : iterable Return values of each handler. If multiple handlers return the same value, it will appear multiple times. """ vals = [] self._firing = True for handler in self._filterhandlers(self._handlers, **kwargs): try: rv = handler(**kwargs) except Exception: print_exception("Exception raised in event handler; ignored.") else: vals.append(rv) # clean up self._firing = False if self._delayed_adds is not None: self._handlers.update(self._delayed_adds) self._delayed_adds = None if self._delayed_discards is not None: self._handlers.difference_update(self._delayed_discards) self._delayed_discards = None return vals
[ "def", "fire", "(", "self", ",", "*", "*", "kwargs", ")", ":", "vals", "=", "[", "]", "self", ".", "_firing", "=", "True", "for", "handler", "in", "self", ".", "_filterhandlers", "(", "self", ".", "_handlers", ",", "*", "*", "kwargs", ")", ":", "try", ":", "rv", "=", "handler", "(", "*", "*", "kwargs", ")", "except", "Exception", ":", "print_exception", "(", "\"Exception raised in event handler; ignored.\"", ")", "else", ":", "vals", ".", "append", "(", "rv", ")", "# clean up", "self", ".", "_firing", "=", "False", "if", "self", ".", "_delayed_adds", "is", "not", "None", ":", "self", ".", "_handlers", ".", "update", "(", "self", ".", "_delayed_adds", ")", "self", ".", "_delayed_adds", "=", "None", "if", "self", ".", "_delayed_discards", "is", "not", "None", ":", "self", ".", "_handlers", ".", "difference_update", "(", "self", ".", "_delayed_discards", ")", "self", ".", "_delayed_discards", "=", "None", "return", "vals" ]
https://github.com/xonsh/xonsh/blob/b76d6f994f22a4078f602f8b386f4ec280c8461f/xonsh/events.py#L160-L195
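How this is used in practice (a sketch based on xonsh's documented event API; on_chdir is one of its built-in events):

from xonsh.events import events

@events.on_chdir
def _remember(olddir, newdir, **kw):
    return newdir

# xonsh internals then call something like:
vals = events.on_chdir.fire(olddir='/tmp', newdir='/home')
# vals == ['/home']; a handler that raises is reported via print_exception
# and simply skipped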
quantumlib/OpenFermion
6187085f2a7707012b68370b625acaeed547e62b
src/openfermion/circuits/primitives/state_preparation.py
python
_generic_gaussian_circuit
( qubits: Sequence[cirq.Qid], quadratic_hamiltonian: 'openfermion.QuadraticHamiltonian', occupied_orbitals: Optional[Sequence[int]], initial_state: Union[int, Sequence[int]])
[]
def _generic_gaussian_circuit( qubits: Sequence[cirq.Qid], quadratic_hamiltonian: 'openfermion.QuadraticHamiltonian', occupied_orbitals: Optional[Sequence[int]], initial_state: Union[int, Sequence[int]]) -> cirq.OP_TREE: n_qubits = len(qubits) circuit_description, start_orbitals = (gaussian_state_preparation_circuit( quadratic_hamiltonian, occupied_orbitals)) if isinstance(initial_state, int): initially_occupied_orbitals = _occupied_orbitals( initial_state, n_qubits) else: initially_occupied_orbitals = initial_state # type: ignore # Flip bits so that the correct starting orbitals are occupied yield (cirq.X(qubits[j]) for j in range(n_qubits) if (j in initially_occupied_orbitals) != (j in start_orbitals)) yield _ops_from_givens_rotations_circuit_description( qubits, circuit_description)
[ "def", "_generic_gaussian_circuit", "(", "qubits", ":", "Sequence", "[", "cirq", ".", "Qid", "]", ",", "quadratic_hamiltonian", ":", "'openfermion.QuadraticHamiltonian'", ",", "occupied_orbitals", ":", "Optional", "[", "Sequence", "[", "int", "]", "]", ",", "initial_state", ":", "Union", "[", "int", ",", "Sequence", "[", "int", "]", "]", ")", "->", "cirq", ".", "OP_TREE", ":", "n_qubits", "=", "len", "(", "qubits", ")", "circuit_description", ",", "start_orbitals", "=", "(", "gaussian_state_preparation_circuit", "(", "quadratic_hamiltonian", ",", "occupied_orbitals", ")", ")", "if", "isinstance", "(", "initial_state", ",", "int", ")", ":", "initially_occupied_orbitals", "=", "_occupied_orbitals", "(", "initial_state", ",", "n_qubits", ")", "else", ":", "initially_occupied_orbitals", "=", "initial_state", "# type: ignore", "# Flip bits so that the correct starting orbitals are occupied", "yield", "(", "cirq", ".", "X", "(", "qubits", "[", "j", "]", ")", "for", "j", "in", "range", "(", "n_qubits", ")", "if", "(", "j", "in", "initially_occupied_orbitals", ")", "!=", "(", "j", "in", "start_orbitals", ")", ")", "yield", "_ops_from_givens_rotations_circuit_description", "(", "qubits", ",", "circuit_description", ")" ]
https://github.com/quantumlib/OpenFermion/blob/6187085f2a7707012b68370b625acaeed547e62b/src/openfermion/circuits/primitives/state_preparation.py#L82-L104
leancloud/satori
701caccbd4fe45765001ca60435c0cb499477c03
satori-rules/plugin/libs/pymongo/results.py
python
UpdateResult.__init__
(self, raw_result, acknowledged)
[]
def __init__(self, raw_result, acknowledged): self.__raw_result = raw_result super(UpdateResult, self).__init__(acknowledged)
[ "def", "__init__", "(", "self", ",", "raw_result", ",", "acknowledged", ")", ":", "self", ".", "__raw_result", "=", "raw_result", "super", "(", "UpdateResult", ",", "self", ")", ".", "__init__", "(", "acknowledged", ")" ]
https://github.com/leancloud/satori/blob/701caccbd4fe45765001ca60435c0cb499477c03/satori-rules/plugin/libs/pymongo/results.py#L100-L102
mjwestcott/Goodrich
dc2516591bd28488516c0337a62e64248debe47c
ch11/binary_search_tree.py
python
TreeMap._subtree_last_position
(self, p)
return walk
Return Position of last item in subtree rooted at p.
Return Position of last item in subtree rooted at p.
[ "Return", "Position", "of", "last", "item", "in", "subtree", "rooted", "at", "p", "." ]
def _subtree_last_position(self, p): """Return Position of last item in subtree rooted at p.""" walk = p while self.right(walk) is not None: # keep walking right walk = self.right(walk) return walk
[ "def", "_subtree_last_position", "(", "self", ",", "p", ")", ":", "walk", "=", "p", "while", "self", ".", "right", "(", "walk", ")", "is", "not", "None", ":", "# keep walking right", "walk", "=", "self", ".", "right", "(", "walk", ")", "return", "walk" ]
https://github.com/mjwestcott/Goodrich/blob/dc2516591bd28488516c0337a62e64248debe47c/ch11/binary_search_tree.py#L58-L63
pypa/pip
7f8a6844037fb7255cfd0d34ff8e8cf44f2598d4
src/pip/_vendor/distlib/_backport/sysconfig.py
python
_init_non_posix
(vars)
Initialize the module as appropriate for NT
Initialize the module as appropriate for NT
[ "Initialize", "the", "module", "as", "appropriate", "for", "NT" ]
def _init_non_posix(vars): """Initialize the module as appropriate for NT""" # set basic install directories vars['LIBDEST'] = get_path('stdlib') vars['BINLIBDEST'] = get_path('platstdlib') vars['INCLUDEPY'] = get_path('include') vars['SO'] = '.pyd' vars['EXE'] = '.exe' vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))
[ "def", "_init_non_posix", "(", "vars", ")", ":", "# set basic install directories", "vars", "[", "'LIBDEST'", "]", "=", "get_path", "(", "'stdlib'", ")", "vars", "[", "'BINLIBDEST'", "]", "=", "get_path", "(", "'platstdlib'", ")", "vars", "[", "'INCLUDEPY'", "]", "=", "get_path", "(", "'include'", ")", "vars", "[", "'SO'", "]", "=", "'.pyd'", "vars", "[", "'EXE'", "]", "=", "'.exe'", "vars", "[", "'VERSION'", "]", "=", "_PY_VERSION_SHORT_NO_DOT", "vars", "[", "'BINDIR'", "]", "=", "os", ".", "path", ".", "dirname", "(", "_safe_realpath", "(", "sys", ".", "executable", ")", ")" ]
https://github.com/pypa/pip/blob/7f8a6844037fb7255cfd0d34ff8e8cf44f2598d4/src/pip/_vendor/distlib/_backport/sysconfig.py#L370-L379
bruderstein/PythonScript
df9f7071ddf3a079e3a301b9b53a6dc78cf1208f
PythonLib/full/ast.py
python
_pad_whitespace
(source)
return result
r"""Replace all chars except '\f\t' in a line with spaces.
r"""Replace all chars except '\f\t' in a line with spaces.
[ "r", "Replace", "all", "chars", "except", "\\", "f", "\\", "t", "in", "a", "line", "with", "spaces", "." ]
def _pad_whitespace(source): r"""Replace all chars except '\f\t' in a line with spaces.""" result = '' for c in source: if c in '\f\t': result += c else: result += ' ' return result
[ "def", "_pad_whitespace", "(", "source", ")", ":", "result", "=", "''", "for", "c", "in", "source", ":", "if", "c", "in", "'\\f\\t'", ":", "result", "+=", "c", "else", ":", "result", "+=", "' '", "return", "result" ]
https://github.com/bruderstein/PythonScript/blob/df9f7071ddf3a079e3a301b9b53a6dc78cf1208f/PythonLib/full/ast.py#L324-L332
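A worked example: every character becomes a space except form feeds and tabs, so column positions computed against the padded line still match the original:

_pad_whitespace('if x:\tpass')   # -> '     \t    '  (5 spaces, tab, 4 spaces)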
readbeyond/aeneas
4d200a050690903b30b3d885b44714fecb23f18a
aeneas/task.py
python
TaskConfiguration.aba_parameters
(self)
return { "algorithm": (aba_algorithm, ABA_MAP[aba_algorithm]), "nonspeech": (ns_min, ns_string), "nozero": nozero }
Return a dictionary representing the :class:`~aeneas.adjustboundaryalgorithm.AdjustBoundaryAlgorithm` parameters stored in this task configuration. Available keys: * ``algorithm``, tuple: (string, list) * ``nonspeech``, tuple: (TimeValue or None, string) * ``nozero``, bool :rtype: dict
Return a dictionary representing the :class:`~aeneas.adjustboundaryalgorithm.AdjustBoundaryAlgorithm` parameters stored in this task configuration.
[ "Return", "a", "dictionary", "representing", "the", ":", "class", ":", "~aeneas", ".", "adjustboundaryalgorithm", ".", "AdjustBoundaryAlgorithm", "parameters", "stored", "in", "this", "task", "configuration", "." ]
def aba_parameters(self): """ Return a dictionary representing the :class:`~aeneas.adjustboundaryalgorithm.AdjustBoundaryAlgorithm` parameters stored in this task configuration. Available keys: * ``algorithm``, tuple: (string, list) * ``nonspeech``, tuple: (TimeValue or None, string) * ``nozero``, bool :rtype: dict """ ABA_MAP = { AdjustBoundaryAlgorithm.AFTERCURRENT: [self[gc.PPN_TASK_ADJUST_BOUNDARY_AFTERCURRENT_VALUE]], AdjustBoundaryAlgorithm.AUTO: [], AdjustBoundaryAlgorithm.BEFORENEXT: [self[gc.PPN_TASK_ADJUST_BOUNDARY_BEFORENEXT_VALUE]], AdjustBoundaryAlgorithm.OFFSET: [self[gc.PPN_TASK_ADJUST_BOUNDARY_OFFSET_VALUE]], AdjustBoundaryAlgorithm.PERCENT: [self[gc.PPN_TASK_ADJUST_BOUNDARY_PERCENT_VALUE]], AdjustBoundaryAlgorithm.RATE: [self[gc.PPN_TASK_ADJUST_BOUNDARY_RATE_VALUE]], AdjustBoundaryAlgorithm.RATEAGGRESSIVE: [self[gc.PPN_TASK_ADJUST_BOUNDARY_RATE_VALUE]] } aba_algorithm = self[gc.PPN_TASK_ADJUST_BOUNDARY_ALGORITHM] or AdjustBoundaryAlgorithm.AUTO ns_min = self[gc.PPN_TASK_ADJUST_BOUNDARY_NONSPEECH_MIN] ns_string = self[gc.PPN_TASK_ADJUST_BOUNDARY_NONSPEECH_STRING] nozero = self[gc.PPN_TASK_ADJUST_BOUNDARY_NO_ZERO] or False return { "algorithm": (aba_algorithm, ABA_MAP[aba_algorithm]), "nonspeech": (ns_min, ns_string), "nozero": nozero }
[ "def", "aba_parameters", "(", "self", ")", ":", "ABA_MAP", "=", "{", "AdjustBoundaryAlgorithm", ".", "AFTERCURRENT", ":", "[", "self", "[", "gc", ".", "PPN_TASK_ADJUST_BOUNDARY_AFTERCURRENT_VALUE", "]", "]", ",", "AdjustBoundaryAlgorithm", ".", "AUTO", ":", "[", "]", ",", "AdjustBoundaryAlgorithm", ".", "BEFORENEXT", ":", "[", "self", "[", "gc", ".", "PPN_TASK_ADJUST_BOUNDARY_BEFORENEXT_VALUE", "]", "]", ",", "AdjustBoundaryAlgorithm", ".", "OFFSET", ":", "[", "self", "[", "gc", ".", "PPN_TASK_ADJUST_BOUNDARY_OFFSET_VALUE", "]", "]", ",", "AdjustBoundaryAlgorithm", ".", "PERCENT", ":", "[", "self", "[", "gc", ".", "PPN_TASK_ADJUST_BOUNDARY_PERCENT_VALUE", "]", "]", ",", "AdjustBoundaryAlgorithm", ".", "RATE", ":", "[", "self", "[", "gc", ".", "PPN_TASK_ADJUST_BOUNDARY_RATE_VALUE", "]", "]", ",", "AdjustBoundaryAlgorithm", ".", "RATEAGGRESSIVE", ":", "[", "self", "[", "gc", ".", "PPN_TASK_ADJUST_BOUNDARY_RATE_VALUE", "]", "]", "}", "aba_algorithm", "=", "self", "[", "gc", ".", "PPN_TASK_ADJUST_BOUNDARY_ALGORITHM", "]", "or", "AdjustBoundaryAlgorithm", ".", "AUTO", "ns_min", "=", "self", "[", "gc", ".", "PPN_TASK_ADJUST_BOUNDARY_NONSPEECH_MIN", "]", "ns_string", "=", "self", "[", "gc", ".", "PPN_TASK_ADJUST_BOUNDARY_NONSPEECH_STRING", "]", "nozero", "=", "self", "[", "gc", ".", "PPN_TASK_ADJUST_BOUNDARY_NO_ZERO", "]", "or", "False", "return", "{", "\"algorithm\"", ":", "(", "aba_algorithm", ",", "ABA_MAP", "[", "aba_algorithm", "]", ")", ",", "\"nonspeech\"", ":", "(", "ns_min", ",", "ns_string", ")", ",", "\"nozero\"", ":", "nozero", "}" ]
https://github.com/readbeyond/aeneas/blob/4d200a050690903b30b3d885b44714fecb23f18a/aeneas/task.py#L377-L408
angr/angr
4b04d56ace135018083d36d9083805be8146688b
angr/analyses/bindiff.py
python
FunctionDiff.probably_identical
(self)
return True
:returns: Whether or not these two functions are identical.
:returns: Whether or not these two functions are identical.
[ ":", "returns", ":", "Whether", "or", "not", "these", "two", "functions", "are", "identical", "." ]
def probably_identical(self): """ :returns: Whether or not these two functions are identical. """ if len(self._unmatched_blocks_from_a | self._unmatched_blocks_from_b) > 0: return False for (a, b) in self._block_matches: if not self.blocks_probably_identical(a, b): return False return True
[ "def", "probably_identical", "(", "self", ")", ":", "if", "len", "(", "self", ".", "_unmatched_blocks_from_a", "|", "self", ".", "_unmatched_blocks_from_b", ")", ">", "0", ":", "return", "False", "for", "(", "a", ",", "b", ")", "in", "self", ".", "_block_matches", ":", "if", "not", "self", ".", "blocks_probably_identical", "(", "a", ",", "b", ")", ":", "return", "False", "return", "True" ]
https://github.com/angr/angr/blob/4b04d56ace135018083d36d9083805be8146688b/angr/analyses/bindiff.py#L373-L382
huggingface/transformers
623b4f7c63f60cce917677ee704d6c93ee960b4b
src/transformers/models/rembert/modeling_tf_rembert.py
python
TFRemBertSelfAttention.call
( self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, head_mask: tf.Tensor, encoder_hidden_states: tf.Tensor, encoder_attention_mask: tf.Tensor, past_key_value: Tuple[tf.Tensor], output_attentions: bool, training: bool = False, )
return outputs
[]
def call( self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, head_mask: tf.Tensor, encoder_hidden_states: tf.Tensor, encoder_attention_mask: tf.Tensor, past_key_value: Tuple[tf.Tensor], output_attentions: bool, training: bool = False, ) -> Tuple[tf.Tensor]: batch_size = shape_list(hidden_states)[0] mixed_query_layer = self.query(inputs=hidden_states) # If this is instantiated as a cross-attention module, the keys # and values come from an encoder; the attention mask needs to be # such that the encoder's padding tokens are not attended to. is_cross_attention = encoder_hidden_states is not None if is_cross_attention and past_key_value is not None: # reuse k,v, cross_attentions key_layer = past_key_value[0] value_layer = past_key_value[1] attention_mask = encoder_attention_mask elif is_cross_attention: key_layer = self.transpose_for_scores(self.key(inputs=encoder_hidden_states), batch_size) value_layer = self.transpose_for_scores(self.value(inputs=encoder_hidden_states), batch_size) attention_mask = encoder_attention_mask elif past_key_value is not None: key_layer = self.transpose_for_scores(self.key(inputs=hidden_states), batch_size) value_layer = self.transpose_for_scores(self.value(inputs=hidden_states), batch_size) key_layer = tf.concat([past_key_value[0], key_layer], axis=2) value_layer = tf.concat([past_key_value[1], value_layer], axis=2) else: key_layer = self.transpose_for_scores(self.key(inputs=hidden_states), batch_size) value_layer = self.transpose_for_scores(self.value(inputs=hidden_states), batch_size) query_layer = self.transpose_for_scores(mixed_query_layer, batch_size) if self.is_decoder: # if cross_attention save Tuple(tf.Tensor, tf.Tensor) of all cross attention key/value_states. # Further calls to cross_attention layer can then reuse all cross-attention # key/value_states (first "if" case) # if uni-directional self-attention (decoder) save Tuple(tf.Tensor, tf.Tensor) of # all previous decoder key/value_states. Further calls to uni-directional self-attention # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case) # if encoder bi-directional self-attention `past_key_value` is always `None` past_key_value = (key_layer, value_layer) # Take the dot product between "query" and "key" to get the raw attention scores. # (batch size, num_heads, seq_len_q, seq_len_k) attention_scores = tf.matmul(query_layer, key_layer, transpose_b=True) dk = tf.cast(self.sqrt_att_head_size, dtype=attention_scores.dtype) attention_scores = tf.divide(attention_scores, dk) if attention_mask is not None: # Apply the attention mask (precomputed for all layers in TFRemBertModel's call() function) attention_scores = tf.add(attention_scores, attention_mask) # Normalize the attention scores to probabilities. attention_probs = tf.nn.softmax(logits=attention_scores, axis=-1) # This is actually dropping out entire tokens to attend to, which might # seem a bit unusual, but is taken from the original Transformer paper. attention_probs = self.dropout(inputs=attention_probs, training=training) # Mask heads if we want to if head_mask is not None: attention_probs = tf.multiply(attention_probs, head_mask) attention_output = tf.matmul(attention_probs, value_layer) attention_output = tf.transpose(attention_output, perm=[0, 2, 1, 3]) # (batch_size, seq_len_q, all_head_size) attention_output = tf.reshape(tensor=attention_output, shape=(batch_size, -1, self.all_head_size)) outputs = (attention_output, attention_probs) if output_attentions else (attention_output,) if self.is_decoder: outputs = outputs + (past_key_value,) return outputs
[ "def", "call", "(", "self", ",", "hidden_states", ":", "tf", ".", "Tensor", ",", "attention_mask", ":", "tf", ".", "Tensor", ",", "head_mask", ":", "tf", ".", "Tensor", ",", "encoder_hidden_states", ":", "tf", ".", "Tensor", ",", "encoder_attention_mask", ":", "tf", ".", "Tensor", ",", "past_key_value", ":", "Tuple", "[", "tf", ".", "Tensor", "]", ",", "output_attentions", ":", "bool", ",", "training", ":", "bool", "=", "False", ",", ")", "->", "Tuple", "[", "tf", ".", "Tensor", "]", ":", "batch_size", "=", "shape_list", "(", "hidden_states", ")", "[", "0", "]", "mixed_query_layer", "=", "self", ".", "query", "(", "inputs", "=", "hidden_states", ")", "# If this is instantiated as a cross-attention module, the keys", "# and values come from an encoder; the attention mask needs to be", "# such that the encoder's padding tokens are not attended to.", "is_cross_attention", "=", "encoder_hidden_states", "is", "not", "None", "if", "is_cross_attention", "and", "past_key_value", "is", "not", "None", ":", "# reuse k,v, cross_attentions", "key_layer", "=", "past_key_value", "[", "0", "]", "value_layer", "=", "past_key_value", "[", "1", "]", "attention_mask", "=", "encoder_attention_mask", "elif", "is_cross_attention", ":", "key_layer", "=", "self", ".", "transpose_for_scores", "(", "self", ".", "key", "(", "inputs", "=", "encoder_hidden_states", ")", ",", "batch_size", ")", "value_layer", "=", "self", ".", "transpose_for_scores", "(", "self", ".", "value", "(", "inputs", "=", "encoder_hidden_states", ")", ",", "batch_size", ")", "attention_mask", "=", "encoder_attention_mask", "elif", "past_key_value", "is", "not", "None", ":", "key_layer", "=", "self", ".", "transpose_for_scores", "(", "self", ".", "key", "(", "inputs", "=", "hidden_states", ")", ",", "batch_size", ")", "value_layer", "=", "self", ".", "transpose_for_scores", "(", "self", ".", "value", "(", "inputs", "=", "hidden_states", ")", ",", "batch_size", ")", "key_layer", "=", "tf", ".", "concatenate", "(", "[", "past_key_value", "[", "0", "]", ",", "key_layer", "]", ",", "dim", "=", "2", ")", "value_layer", "=", "tf", ".", "concatenate", "(", "[", "past_key_value", "[", "1", "]", ",", "value_layer", "]", ",", "dim", "=", "2", ")", "else", ":", "key_layer", "=", "self", ".", "transpose_for_scores", "(", "self", ".", "key", "(", "inputs", "=", "hidden_states", ")", ",", "batch_size", ")", "value_layer", "=", "self", ".", "transpose_for_scores", "(", "self", ".", "value", "(", "inputs", "=", "hidden_states", ")", ",", "batch_size", ")", "query_layer", "=", "self", ".", "transpose_for_scores", "(", "mixed_query_layer", ",", "batch_size", ")", "if", "self", ".", "is_decoder", ":", "# if cross_attention save Tuple(tf.Tensor, tf.Tensor) of all cross attention key/value_states.", "# Further calls to cross_attention layer can then reuse all cross-attention", "# key/value_states (first \"if\" case)", "# if uni-directional self-attention (decoder) save Tuple(tf.Tensor, tf.Tensor) of", "# all previous decoder key/value_states. 
Further calls to uni-directional self-attention", "# can concat previous decoder key/value_states to current projected key/value_states (third \"elif\" case)", "# if encoder bi-directional self-attention `past_key_value` is always `None`", "past_key_value", "=", "(", "key_layer", ",", "value_layer", ")", "# Take the dot product between \"query\" and \"key\" to get the raw attention scores.", "# (batch size, num_heads, seq_len_q, seq_len_k)", "attention_scores", "=", "tf", ".", "matmul", "(", "query_layer", ",", "key_layer", ",", "transpose_b", "=", "True", ")", "dk", "=", "tf", ".", "cast", "(", "self", ".", "sqrt_att_head_size", ",", "dtype", "=", "attention_scores", ".", "dtype", ")", "attention_scores", "=", "tf", ".", "divide", "(", "attention_scores", ",", "dk", ")", "if", "attention_mask", "is", "not", "None", ":", "# Apply the attention mask is (precomputed for all layers in TFRemBertModel call() function)", "attention_scores", "=", "tf", ".", "add", "(", "attention_scores", ",", "attention_mask", ")", "# Normalize the attention scores to probabilities.", "attention_probs", "=", "tf", ".", "nn", ".", "softmax", "(", "logits", "=", "attention_scores", ",", "axis", "=", "-", "1", ")", "# This is actually dropping out entire tokens to attend to, which might", "# seem a bit unusual, but is taken from the original Transformer paper.", "attention_probs", "=", "self", ".", "dropout", "(", "inputs", "=", "attention_probs", ",", "training", "=", "training", ")", "# Mask heads if we want to", "if", "head_mask", "is", "not", "None", ":", "attention_probs", "=", "tf", ".", "multiply", "(", "attention_probs", ",", "head_mask", ")", "attention_output", "=", "tf", ".", "matmul", "(", "attention_probs", ",", "value_layer", ")", "attention_output", "=", "tf", ".", "transpose", "(", "attention_output", ",", "perm", "=", "[", "0", ",", "2", ",", "1", ",", "3", "]", ")", "# (batch_size, seq_len_q, all_head_size)", "attention_output", "=", "tf", ".", "reshape", "(", "tensor", "=", "attention_output", ",", "shape", "=", "(", "batch_size", ",", "-", "1", ",", "self", ".", "all_head_size", ")", ")", "outputs", "=", "(", "attention_output", ",", "attention_probs", ")", "if", "output_attentions", "else", "(", "attention_output", ",", ")", "if", "self", ".", "is_decoder", ":", "outputs", "=", "outputs", "+", "(", "past_key_value", ",", ")", "return", "outputs" ]
https://github.com/huggingface/transformers/blob/623b4f7c63f60cce917677ee704d6c93ee960b4b/src/transformers/models/rembert/modeling_tf_rembert.py#L184-L263
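A shape walk-through of the attention computed above (editor's note; B = batch, S = sequence length, H = num_heads * head_dim):

# hidden_states                                : (B, S, H)
# query/key/value after transpose_for_scores   : (B, num_heads, S, head_dim)
# attention_scores = Q @ K^T / sqrt(head_dim)  : (B, num_heads, S_q, S_k)
# + attention_mask (broadcast), softmax over the last axis, dropout
# attention_probs @ V                          : (B, num_heads, S_q, head_dim)
# transpose + reshape                          : (B, S_q, H) == attention_output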