column              type           min      max
------              ----           ---      ---
nwo                 stringlengths  5        91
sha                 stringlengths  40       40
path                stringlengths  5        174
language            stringclasses  1 value
identifier          stringlengths  1        120
parameters          stringlengths  0        3.15k
argument_list       stringclasses  1 value
return_statement    stringlengths  0        24.1k
docstring           stringlengths  0        27.3k
docstring_summary   stringlengths  0        13.8k
docstring_tokens    sequence       -        -
function            stringlengths  22       139k
function_tokens     sequence       -        -
url                 stringlengths  87       283
khanhnamle1994/natural-language-processing
01d450d5ac002b0156ef4cf93a07cb508c1bcdc5
assignment1/.env/lib/python2.7/site-packages/IPython/core/interactiveshell.py
python
InteractiveShell.clear_main_mod_cache
(self)
Clear the cache of main modules. Mainly for use by utilities like %reset. Examples -------- In [15]: import IPython In [16]: m = _ip.new_main_mod(IPython.__file__, 'IPython') In [17]: len(_ip._main_mod_cache) > 0 Out[17]: True In [18]: _ip.clear_main_mod_cache() In [19]: len(_ip._main_mod_cache) == 0 Out[19]: True
Clear the cache of main modules.
[ "Clear", "the", "cache", "of", "main", "modules", "." ]
def clear_main_mod_cache(self):
    """Clear the cache of main modules.

    Mainly for use by utilities like %reset.

    Examples
    --------
    In [15]: import IPython

    In [16]: m = _ip.new_main_mod(IPython.__file__, 'IPython')

    In [17]: len(_ip._main_mod_cache) > 0
    Out[17]: True

    In [18]: _ip.clear_main_mod_cache()

    In [19]: len(_ip._main_mod_cache) == 0
    Out[19]: True
    """
    self._main_mod_cache.clear()
[ "def", "clear_main_mod_cache", "(", "self", ")", ":", "self", ".", "_main_mod_cache", ".", "clear", "(", ")" ]
https://github.com/khanhnamle1994/natural-language-processing/blob/01d450d5ac002b0156ef4cf93a07cb508c1bcdc5/assignment1/.env/lib/python2.7/site-packages/IPython/core/interactiveshell.py#L896-L916
EasyIME/PIME
0f1eee10169c1cb2eaa0b59a77fa6f931ecb33b3
python/python3/tornado/web.py
python
RequestHandler.get_login_url
(self)
return self.application.settings["login_url"]
Override to customize the login URL based on the request. By default, we use the ``login_url`` application setting.
Override to customize the login URL based on the request.
[ "Override", "to", "customize", "the", "login", "URL", "based", "on", "the", "request", "." ]
def get_login_url(self) -> str:
    """Override to customize the login URL based on the request.

    By default, we use the ``login_url`` application setting.
    """
    self.require_setting("login_url", "@tornado.web.authenticated")
    return self.application.settings["login_url"]
[ "def", "get_login_url", "(", "self", ")", "->", "str", ":", "self", ".", "require_setting", "(", "\"login_url\"", ",", "\"@tornado.web.authenticated\"", ")", "return", "self", ".", "application", ".", "settings", "[", "\"login_url\"", "]" ]
https://github.com/EasyIME/PIME/blob/0f1eee10169c1cb2eaa0b59a77fa6f931ecb33b3/python/python3/tornado/web.py#L1354-L1360
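A minimal sketch of how this hook is typically overridden in a Tornado application (the handler class and login path here are illustrative, not from the source):

import urllib.parse
import tornado.web

class MyHandler(tornado.web.RequestHandler):
    def get_login_url(self) -> str:
        # Redirect to a hypothetical /auth/login, preserving the original URI.
        return "/auth/login?" + urllib.parse.urlencode({"next": self.request.uri})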
tendenci/tendenci
0f2c348cc0e7d41bc56f50b00ce05544b083bf1d
tendenci/libs/model_report/utils.py
python
obj_type_format
(value, instance=None)
return OBJECT_TYPE_DICT.get(value)
[]
def obj_type_format(value, instance=None):
    global OBJECT_TYPE_DICT
    if not OBJECT_TYPE_DICT:
        OBJECT_TYPE_DICT = dict((ct.id, '%s: %s' % (ct.app_label, ct.model))
                                for ct in ContentType.objects.all().order_by('app_label', 'model'))
    return OBJECT_TYPE_DICT.get(value)
[ "def", "obj_type_format", "(", "value", ",", "instance", "=", "None", ")", ":", "global", "OBJECT_TYPE_DICT", "if", "not", "OBJECT_TYPE_DICT", ":", "OBJECT_TYPE_DICT", "=", "dict", "(", "(", "ct", ".", "id", ",", "'%s: %s'", "%", "(", "ct", ".", "app_label", ",", "ct", ".", "model", ")", ")", "for", "ct", "in", "ContentType", ".", "objects", ".", "all", "(", ")", ".", "order_by", "(", "'app_label'", ",", "'model'", ")", ")", "return", "OBJECT_TYPE_DICT", ".", "get", "(", "value", ")" ]
https://github.com/tendenci/tendenci/blob/0f2c348cc0e7d41bc56f50b00ce05544b083bf1d/tendenci/libs/model_report/utils.py#L99-L104
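The function above lazily fills a module-level dictionary on first use so later calls avoid repeated database queries. A framework-free sketch of the same memoization pattern, with an illustrative stand-in for the Django ContentType query:

_LABEL_CACHE = {}

def label_for(value):
    global _LABEL_CACHE
    if not _LABEL_CACHE:
        # Populate once on the first call; a stand-in for the ContentType query.
        _LABEL_CACHE = {1: 'auth: user', 2: 'blog: post'}
    return _LABEL_CACHE.get(value)

print(label_for(1))   # -> 'auth: user'; subsequent calls reuse the cache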
IronLanguages/main
a949455434b1fda8c783289e897e78a9a0caabb5
External.LCA_RESTRICTED/Languages/CPython/27/Lib/DocXMLRPCServer.py
python
ServerHTMLDoc.markup
(self, text, escape=None, funcs={}, classes={}, methods={})
return ''.join(results)
Mark up some plain text, given a context of symbols to look for. Each context dictionary maps object names to anchor names.
Mark up some plain text, given a context of symbols to look for. Each context dictionary maps object names to anchor names.
[ "Mark", "up", "some", "plain", "text", "given", "a", "context", "of", "symbols", "to", "look", "for", ".", "Each", "context", "dictionary", "maps", "object", "names", "to", "anchor", "names", "." ]
def markup(self, text, escape=None, funcs={}, classes={}, methods={}):
    """Mark up some plain text, given a context of symbols to look for.
    Each context dictionary maps object names to anchor names."""
    escape = escape or self.escape
    results = []
    here = 0

    # XXX Note that this regular expression does not allow for the
    # hyperlinking of arbitrary strings being used as method
    # names. Only methods with names consisting of word characters
    # and '.'s are hyperlinked.
    pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
                         r'RFC[- ]?(\d+)|'
                         r'PEP[- ]?(\d+)|'
                         r'(self\.)?((?:\w|\.)+))\b')
    while 1:
        match = pattern.search(text, here)
        if not match: break
        start, end = match.span()
        results.append(escape(text[here:start]))

        all, scheme, rfc, pep, selfdot, name = match.groups()
        if scheme:
            url = escape(all).replace('"', '&quot;')
            results.append('<a href="%s">%s</a>' % (url, url))
        elif rfc:
            url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
            results.append('<a href="%s">%s</a>' % (url, escape(all)))
        elif pep:
            url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep)
            results.append('<a href="%s">%s</a>' % (url, escape(all)))
        elif text[end:end+1] == '(':
            results.append(self.namelink(name, methods, funcs, classes))
        elif selfdot:
            results.append('self.<strong>%s</strong>' % name)
        else:
            results.append(self.namelink(name, classes))
        here = end
    results.append(escape(text[here:]))
    return ''.join(results)
[ "def", "markup", "(", "self", ",", "text", ",", "escape", "=", "None", ",", "funcs", "=", "{", "}", ",", "classes", "=", "{", "}", ",", "methods", "=", "{", "}", ")", ":", "escape", "=", "escape", "or", "self", ".", "escape", "results", "=", "[", "]", "here", "=", "0", "# XXX Note that this regular expression does not allow for the", "# hyperlinking of arbitrary strings being used as method", "# names. Only methods with names consisting of word characters", "# and '.'s are hyperlinked.", "pattern", "=", "re", ".", "compile", "(", "r'\\b((http|ftp)://\\S+[\\w/]|'", "r'RFC[- ]?(\\d+)|'", "r'PEP[- ]?(\\d+)|'", "r'(self\\.)?((?:\\w|\\.)+))\\b'", ")", "while", "1", ":", "match", "=", "pattern", ".", "search", "(", "text", ",", "here", ")", "if", "not", "match", ":", "break", "start", ",", "end", "=", "match", ".", "span", "(", ")", "results", ".", "append", "(", "escape", "(", "text", "[", "here", ":", "start", "]", ")", ")", "all", ",", "scheme", ",", "rfc", ",", "pep", ",", "selfdot", ",", "name", "=", "match", ".", "groups", "(", ")", "if", "scheme", ":", "url", "=", "escape", "(", "all", ")", ".", "replace", "(", "'\"'", ",", "'&quot;'", ")", "results", ".", "append", "(", "'<a href=\"%s\">%s</a>'", "%", "(", "url", ",", "url", ")", ")", "elif", "rfc", ":", "url", "=", "'http://www.rfc-editor.org/rfc/rfc%d.txt'", "%", "int", "(", "rfc", ")", "results", ".", "append", "(", "'<a href=\"%s\">%s</a>'", "%", "(", "url", ",", "escape", "(", "all", ")", ")", ")", "elif", "pep", ":", "url", "=", "'http://www.python.org/dev/peps/pep-%04d/'", "%", "int", "(", "pep", ")", "results", ".", "append", "(", "'<a href=\"%s\">%s</a>'", "%", "(", "url", ",", "escape", "(", "all", ")", ")", ")", "elif", "text", "[", "end", ":", "end", "+", "1", "]", "==", "'('", ":", "results", ".", "append", "(", "self", ".", "namelink", "(", "name", ",", "methods", ",", "funcs", ",", "classes", ")", ")", "elif", "selfdot", ":", "results", ".", "append", "(", "'self.<strong>%s</strong>'", "%", "name", ")", "else", ":", "results", ".", "append", "(", "self", ".", "namelink", "(", "name", ",", "classes", ")", ")", "here", "=", "end", "results", ".", "append", "(", "escape", "(", "text", "[", "here", ":", "]", ")", ")", "return", "''", ".", "join", "(", "results", ")" ]
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/CPython/27/Lib/DocXMLRPCServer.py#L26-L65
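To see what the hyperlinking regex in markup matches, the same pattern (copied from the source) can be exercised on its own; the sample text is illustrative:

import re

pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
                     r'RFC[- ]?(\d+)|'
                     r'PEP[- ]?(\d+)|'
                     r'(self\.)?((?:\w|\.)+))\b')

for m in pattern.finditer("See RFC 2822 and PEP 8 for details."):
    all_, scheme, rfc, pep, selfdot, name = m.groups()
    if rfc or pep:
        print(m.group(0))   # -> 'RFC 2822', then 'PEP 8'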
LumaPictures/pymel
fa88a3f4fa18e09bb8aa9bdf4dab53d984bada72
pymel/tools/mel2py/melparse.py
python
p_primary_expression
(t)
primary_expression : boolean | numerical_constant
primary_expression : boolean | numerical_constant
[ "primary_expression", ":", "boolean", "|", "numerical_constant" ]
def p_primary_expression(t):
    '''primary_expression : boolean
                          | numerical_constant'''
    t[0] = assemble(t, 'p_primary_expression')
    if t.lexer.verbose >= 2:
        print("p_primary_expression", t[0])
[ "def", "p_primary_expression", "(", "t", ")", ":", "t", "[", "0", "]", "=", "assemble", "(", "t", ",", "'p_primary_expression'", ")", "if", "t", ".", "lexer", ".", "verbose", ">=", "2", ":", "print", "(", "\"p_primary_expression\"", ",", "t", "[", "0", "]", ")" ]
https://github.com/LumaPictures/pymel/blob/fa88a3f4fa18e09bb8aa9bdf4dab53d984bada72/pymel/tools/mel2py/melparse.py#L2630-L2635
meduza-corp/interstellar
40a801ccd7856491726f5a126621d9318cabe2e1
gsutil/third_party/boto/boto/mws/connection.py
python
MWSConnection.get_order_reference_details
(self, request, response, **kw)
return self._post_request(request, kw, response)
Returns details about the Order Reference object and its current state.
Returns details about the Order Reference object and its current state.
[ "Returns", "details", "about", "the", "Order", "Reference", "object", "and", "its", "current", "state", "." ]
def get_order_reference_details(self, request, response, **kw):
    """Returns details about the Order Reference object and its current
    state.
    """
    return self._post_request(request, kw, response)
[ "def", "get_order_reference_details", "(", "self", ",", "request", ",", "response", ",", "*", "*", "kw", ")", ":", "return", "self", ".", "_post_request", "(", "request", ",", "kw", ",", "response", ")" ]
https://github.com/meduza-corp/interstellar/blob/40a801ccd7856491726f5a126621d9318cabe2e1/gsutil/third_party/boto/boto/mws/connection.py#L1075-L1079
dpgaspar/Flask-AppBuilder
557249f33b66d02a48c1322ef21324b815abe18e
flask_appbuilder/baseviews.py
python
expose
(url="/", methods=("GET",))
return wrap
Use this decorator to expose views on your view classes. :param url: Relative URL for the view :param methods: Allowed HTTP methods. By default only GET is allowed.
Use this decorator to expose views on your view classes.
[ "Use", "this", "decorator", "to", "expose", "views", "on", "your", "view", "classes", "." ]
def expose(url="/", methods=("GET",)):
    """
    Use this decorator to expose views on your view classes.

    :param url:
        Relative URL for the view
    :param methods:
        Allowed HTTP methods. By default only GET is allowed.
    """

    def wrap(f):
        if not hasattr(f, "_urls"):
            f._urls = []
        f._urls.append((url, methods))
        return f

    return wrap
[ "def", "expose", "(", "url", "=", "\"/\"", ",", "methods", "=", "(", "\"GET\"", ",", ")", ")", ":", "def", "wrap", "(", "f", ")", ":", "if", "not", "hasattr", "(", "f", ",", "\"_urls\"", ")", ":", "f", ".", "_urls", "=", "[", "]", "f", ".", "_urls", ".", "append", "(", "(", "url", ",", "methods", ")", ")", "return", "f", "return", "wrap" ]
https://github.com/dpgaspar/Flask-AppBuilder/blob/557249f33b66d02a48c1322ef21324b815abe18e/flask_appbuilder/baseviews.py#L35-L51
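Since expose only records (url, methods) tuples on the wrapped function's _urls attribute, its effect can be inspected directly. A self-contained sketch (the view class is illustrative; the decorator body repeats the record above):

def expose(url="/", methods=("GET",)):   # same decorator as in the record
    def wrap(f):
        if not hasattr(f, "_urls"):
            f._urls = []
        f._urls.append((url, methods))
        return f
    return wrap

class MyView:
    @expose("/mypage", methods=("GET", "POST"))
    def my_page(self):
        return "hello"

# The decorator only tags the function; the framework later reads these tags.
print(MyView.my_page._urls)   # -> [('/mypage', ('GET', 'POST'))]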
gramps-project/gramps
04d4651a43eb210192f40a9f8c2bad8ee8fa3753
gramps/gen/lib/location.py
python
Location.serialize
(self)
return (LocationBase.serialize(self), self.parish)
Convert the object to a serialized tuple of data.
Convert the object to a serialized tuple of data.
[ "Convert", "the", "object", "to", "a", "serialized", "tuple", "of", "data", "." ]
def serialize(self):
    """
    Convert the object to a serialized tuple of data.
    """
    return (LocationBase.serialize(self), self.parish)
[ "def", "serialize", "(", "self", ")", ":", "return", "(", "LocationBase", ".", "serialize", "(", "self", ")", ",", "self", ".", "parish", ")" ]
https://github.com/gramps-project/gramps/blob/04d4651a43eb210192f40a9f8c2bad8ee8fa3753/gramps/gen/lib/location.py#L63-L67
enthought/traits
d22ce1f096e2a6f87c78d7f1bb5bf0abab1a18ff
traits/trait_list_object.py
python
TraitListObject.__delitem__
(self, key)
Delete self[key]. Parameters ---------- key : integer or slice Index of the element(s) to be deleted. Raises ------ IndexError If key is an integer index and is out of range.
Delete self[key].
[ "Delete", "self", "[", "key", "]", "." ]
def __delitem__(self, key):
    """ Delete self[key].

    Parameters
    ----------
    key : integer or slice
        Index of the element(s) to be deleted.

    Raises
    ------
    IndexError
        If key is an integer index and is out of range.
    """
    removed_count = len(self[key]) if isinstance(key, slice) else 1
    self._validate_length(max(len(self) - removed_count, 0))
    super().__delitem__(key)
[ "def", "__delitem__", "(", "self", ",", "key", ")", ":", "removed_count", "=", "len", "(", "self", "[", "key", "]", ")", "if", "isinstance", "(", "key", ",", "slice", ")", "else", "1", "self", ".", "_validate_length", "(", "max", "(", "len", "(", "self", ")", "-", "removed_count", ",", "0", ")", ")", "super", "(", ")", ".", "__delitem__", "(", "key", ")" ]
https://github.com/enthought/traits/blob/d22ce1f096e2a6f87c78d7f1bb5bf0abab1a18ff/traits/trait_list_object.py#L625-L641
plotly/plotly.py
cfad7862594b35965c0e000813bd7805e8494a5b
packages/python/plotly/plotly/graph_objs/heatmapgl/colorbar/_title.py
python
Title.text
(self)
return self["text"]
Sets the title of the color bar. Note that before the existence of `title.text`, the title's contents used to be defined as the `title` attribute itself. This behavior has been deprecated. The 'text' property is a string and must be specified as: - A string - A number that will be converted to a string Returns ------- str
Sets the title of the color bar. Note that before the existence of `title.text`, the title's contents used to be defined as the `title` attribute itself. This behavior has been deprecated. The 'text' property is a string and must be specified as: - A string - A number that will be converted to a string
[ "Sets", "the", "title", "of", "the", "color", "bar", ".", "Note", "that", "before", "the", "existence", "of", "title", ".", "text", "the", "title", "s", "contents", "used", "to", "be", "defined", "as", "the", "title", "attribute", "itself", ".", "This", "behavior", "has", "been", "deprecated", ".", "The", "text", "property", "is", "a", "string", "and", "must", "be", "specified", "as", ":", "-", "A", "string", "-", "A", "number", "that", "will", "be", "converted", "to", "a", "string" ]
def text(self):
    """
    Sets the title of the color bar. Note that before the existence
    of `title.text`, the title's contents used to be defined as the
    `title` attribute itself. This behavior has been deprecated.

    The 'text' property is a string and must be specified as:
      - A string
      - A number that will be converted to a string

    Returns
    -------
    str
    """
    return self["text"]
[ "def", "text", "(", "self", ")", ":", "return", "self", "[", "\"text\"", "]" ]
https://github.com/plotly/plotly.py/blob/cfad7862594b35965c0e000813bd7805e8494a5b/packages/python/plotly/plotly/graph_objs/heatmapgl/colorbar/_title.py#L88-L102
DataDog/integrations-core
934674b29d94b70ccc008f76ea172d0cdae05e1e
citrix_hypervisor/datadog_checks/citrix_hypervisor/config_models/defaults.py
python
instance_tls_use_host_header
(field, value)
return False
[]
def instance_tls_use_host_header(field, value):
    return False
[ "def", "instance_tls_use_host_header", "(", "field", ",", "value", ")", ":", "return", "False" ]
https://github.com/DataDog/integrations-core/blob/934674b29d94b70ccc008f76ea172d0cdae05e1e/citrix_hypervisor/datadog_checks/citrix_hypervisor/config_models/defaults.py#L165-L166
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.py
python
DefaultProvider._listdir
(self, path)
return os.listdir(path)
[]
def _listdir(self, path):
    return os.listdir(path)
[ "def", "_listdir", "(", "self", ",", "path", ")", ":", "return", "os", ".", "listdir", "(", "path", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.py#L1505-L1506
SickChill/SickChill
01020f3636d01535f60b83464d8127ea0efabfc7
sickchill/adba/aniDBresponses.py
python
BuddyAlreadyAcceptedResponse.__init__
(self, cmd, restag, rescode, resstr, datalines)
attributes: data:
attributes:
[ "attributes", ":" ]
def __init__(self, cmd, restag, rescode, resstr, datalines):
    """
    attributes:
    data:
    """
    super().__init__(cmd, restag, rescode, resstr, datalines)
    self.codestr = "BUDDY_ALREADY_ACCEPTED"
    self.codehead = ()
    self.codetail = ()
    self.coderep = ()
[ "def", "__init__", "(", "self", ",", "cmd", ",", "restag", ",", "rescode", ",", "resstr", ",", "datalines", ")", ":", "super", "(", ")", ".", "__init__", "(", "cmd", ",", "restag", ",", "rescode", ",", "resstr", ",", "datalines", ")", "self", ".", "codestr", "=", "\"BUDDY_ALREADY_ACCEPTED\"", "self", ".", "codehead", "=", "(", ")", "self", ".", "codetail", "=", "(", ")", "self", ".", "coderep", "=", "(", ")" ]
https://github.com/SickChill/SickChill/blob/01020f3636d01535f60b83464d8127ea0efabfc7/sickchill/adba/aniDBresponses.py#L1414-L1425
planetlabs/planet-client-python
6aa31e873ef40e73c2c49981f6065d05bfc7b56d
planet/api/client.py
python
ClientV1.create_order
(self, request)
return self.dispatcher.response(models.Request(url, self.auth, body_type=models.Order, data=body, method='POST') ).get_body()
Create an order. :param asset: :returns: :py:Class:`planet.api.models.Response` containing a :py:Class:`planet.api.models.Body` of the asset. :raises planet.api.exceptions.APIException: On API error.
Create an order.
[ "Create", "an", "order", "." ]
def create_order(self, request):
    '''Create an order.

    :param asset:
    :returns: :py:Class:`planet.api.models.Response` containing a
              :py:Class:`planet.api.models.Body` of the asset.
    :raises planet.api.exceptions.APIException: On API error.
    '''
    url = self._url('compute/ops/orders/v2')
    body = json.dumps(request)
    return self.dispatcher.response(models.Request(url, self.auth,
                                                   body_type=models.Order,
                                                   data=body,
                                                   method='POST')).get_body()
[ "def", "create_order", "(", "self", ",", "request", ")", ":", "url", "=", "self", ".", "_url", "(", "'compute/ops/orders/v2'", ")", "body", "=", "json", ".", "dumps", "(", "request", ")", "return", "self", ".", "dispatcher", ".", "response", "(", "models", ".", "Request", "(", "url", ",", "self", ".", "auth", ",", "body_type", "=", "models", ".", "Order", ",", "data", "=", "body", ",", "method", "=", "'POST'", ")", ")", ".", "get_body", "(", ")" ]
https://github.com/planetlabs/planet-client-python/blob/6aa31e873ef40e73c2c49981f6065d05bfc7b56d/planet/api/client.py#L542-L556
dimagi/commcare-hq
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
corehq/util/decorators.py
python
change_log_level.__enter__
(self)
[]
def __enter__(self):
    self.logger.setLevel(self.new_level)
[ "def", "__enter__", "(", "self", ")", ":", "self", ".", "logger", ".", "setLevel", "(", "self", ".", "new_level", ")" ]
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/util/decorators.py#L80-L81
chribsen/simple-machine-learning-examples
dc94e52a4cebdc8bb959ff88b81ff8cfeca25022
venv/lib/python2.7/site-packages/numpy/lib/npyio.py
python
savez_compressed
(file, *args, **kwds)
Save several arrays into a single file in compressed ``.npz`` format. If keyword arguments are given, then filenames are taken from the keywords. If arguments are passed in with no keywords, then stored file names are arr_0, arr_1, etc. Parameters ---------- file : str File name of ``.npz`` file. args : Arguments Function arguments. kwds : Keyword arguments Keywords. See Also -------- numpy.savez : Save several arrays into an uncompressed ``.npz`` file format numpy.load : Load the files created by savez_compressed.
Save several arrays into a single file in compressed ``.npz`` format.
[ "Save", "several", "arrays", "into", "a", "single", "file", "in", "compressed", ".", "npz", "format", "." ]
def savez_compressed(file, *args, **kwds):
    """
    Save several arrays into a single file in compressed ``.npz`` format.

    If keyword arguments are given, then filenames are taken from the keywords.
    If arguments are passed in with no keywords, then stored file names are
    arr_0, arr_1, etc.

    Parameters
    ----------
    file : str
        File name of ``.npz`` file.
    args : Arguments
        Function arguments.
    kwds : Keyword arguments
        Keywords.

    See Also
    --------
    numpy.savez : Save several arrays into an uncompressed ``.npz`` file format
    numpy.load : Load the files created by savez_compressed.

    """
    _savez(file, args, kwds, True)
[ "def", "savez_compressed", "(", "file", ",", "*", "args", ",", "*", "*", "kwds", ")", ":", "_savez", "(", "file", ",", "args", ",", "kwds", ",", "True", ")" ]
https://github.com/chribsen/simple-machine-learning-examples/blob/dc94e52a4cebdc8bb959ff88b81ff8cfeca25022/venv/lib/python2.7/site-packages/numpy/lib/npyio.py#L577-L600
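A short usage sketch for the record above (array contents and file name are illustrative): keyword names become the archive member names, and numpy.load reads them back.

import numpy as np

a = np.arange(10)
b = np.linspace(0.0, 1.0, 5)
np.savez_compressed('arrays.npz', first=a, second=b)

with np.load('arrays.npz') as data:
    print(data['first'].sum(), data['second'].max())   # -> 45 1.0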
googleapis/python-ndb
e780c81cde1016651afbfcad8180d9912722cf1b
google/cloud/ndb/model.py
python
DateTimeProperty._from_base_type
(self, value)
Convert a value from the "base" value type for this property. Args: value (Union[int, datetime.datetime]): The value to be converted. The value will be `int` for entities retrieved by a projection query and is a timestamp as the number of nanoseconds since the epoch. Returns: Optional[datetime.datetime]: If ``tzinfo`` is set on this property, the value converted to the timezone in ``tzinfo``. Otherwise returns the value without ``tzinfo`` or ``None`` if value did not have ``tzinfo`` set.
Convert a value from the "base" value type for this property.
[ "Convert", "a", "value", "from", "the", "base", "value", "type", "for", "this", "property", "." ]
def _from_base_type(self, value):
    """Convert a value from the "base" value type for this property.

    Args:
        value (Union[int, datetime.datetime]): The value to be converted.
            The value will be `int` for entities retrieved by a projection
            query and is a timestamp as the number of nanoseconds since the
            epoch.

    Returns:
        Optional[datetime.datetime]: If ``tzinfo`` is set on this property,
            the value converted to the timezone in ``tzinfo``. Otherwise
            returns the value without ``tzinfo`` or ``None`` if value did
            not have ``tzinfo`` set.
    """
    if isinstance(value, six.integer_types):
        # Projection query, value is integer nanoseconds
        seconds = value / 1e6
        value = datetime.datetime.fromtimestamp(seconds, pytz.utc)

    if self._tzinfo is not None:
        if value.tzinfo is None:
            value = value.replace(tzinfo=pytz.utc)
        return value.astimezone(self._tzinfo)
    elif value.tzinfo is not None:
        return value.replace(tzinfo=None)
[ "def", "_from_base_type", "(", "self", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "six", ".", "integer_types", ")", ":", "# Projection query, value is integer nanoseconds", "seconds", "=", "value", "/", "1e6", "value", "=", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "seconds", ",", "pytz", ".", "utc", ")", "if", "self", ".", "_tzinfo", "is", "not", "None", ":", "if", "value", ".", "tzinfo", "is", "None", ":", "value", "=", "value", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "return", "value", ".", "astimezone", "(", "self", ".", "_tzinfo", ")", "elif", "value", ".", "tzinfo", "is", "not", "None", ":", "return", "value", ".", "replace", "(", "tzinfo", "=", "None", ")" ]
https://github.com/googleapis/python-ndb/blob/e780c81cde1016651afbfcad8180d9912722cf1b/google/cloud/ndb/model.py#L3865-L3891
OpenNMT/OpenNMT-tf
59a4dfdb911d0570ba1096b7a0a7b9fc5c7844bf
opennmt/encoders/self_attention_encoder.py
python
SelfAttentionEncoder.__init__
( self, num_layers, num_units=512, num_heads=8, ffn_inner_dim=2048, dropout=0.1, attention_dropout=0.1, ffn_dropout=0.1, ffn_activation=tf.nn.relu, position_encoder_class=SinusoidalPositionEncoder, maximum_relative_position=None, pre_norm=True, **kwargs )
Initializes the parameters of the encoder. Args: num_layers: The number of layers. num_units: The number of hidden units. num_heads: The number of heads in the multi-head attention. ffn_inner_dim: The number of units of the inner linear transformation in the feed forward layer. dropout: The probability to drop units from the outputs. attention_dropout: The probability to drop units from the attention. ffn_dropout: The probability to drop units from the activation output in the feed forward layer. ffn_activation: The activation function to apply between the two linear transformations of the feed forward layer. position_encoder_class: The :class:`opennmt.layers.PositionEncoder` class to use for position encoding (or a callable that returns an instance). maximum_relative_position: Maximum relative position representation (from https://arxiv.org/abs/1803.02155). pre_norm: If ``True``, layer normalization is applied before each sub-layer. Otherwise it is applied after. **kwargs: Additional layer arguments.
Initializes the parameters of the encoder.
[ "Initializes", "the", "parameters", "of", "the", "encoder", "." ]
def __init__(
    self,
    num_layers,
    num_units=512,
    num_heads=8,
    ffn_inner_dim=2048,
    dropout=0.1,
    attention_dropout=0.1,
    ffn_dropout=0.1,
    ffn_activation=tf.nn.relu,
    position_encoder_class=SinusoidalPositionEncoder,
    maximum_relative_position=None,
    pre_norm=True,
    **kwargs
):
    """Initializes the parameters of the encoder.

    Args:
      num_layers: The number of layers.
      num_units: The number of hidden units.
      num_heads: The number of heads in the multi-head attention.
      ffn_inner_dim: The number of units of the inner linear transformation
        in the feed forward layer.
      dropout: The probability to drop units from the outputs.
      attention_dropout: The probability to drop units from the attention.
      ffn_dropout: The probability to drop units from the activation output
        in the feed forward layer.
      ffn_activation: The activation function to apply between the two linear
        transformations of the feed forward layer.
      position_encoder_class: The :class:`opennmt.layers.PositionEncoder`
        class to use for position encoding (or a callable that returns an
        instance).
      maximum_relative_position: Maximum relative position representation
        (from https://arxiv.org/abs/1803.02155).
      pre_norm: If ``True``, layer normalization is applied before each
        sub-layer. Otherwise it is applied after.
      **kwargs: Additional layer arguments.
    """
    super().__init__(**kwargs)
    self.num_units = num_units
    self.dropout = dropout
    self.position_encoder = None
    if position_encoder_class is not None:
        self.position_encoder = position_encoder_class()
    self.layer_norm = common.LayerNorm() if pre_norm else None
    self.layers = [
        transformer.SelfAttentionEncoderLayer(
            num_units,
            num_heads,
            ffn_inner_dim,
            dropout=dropout,
            attention_dropout=attention_dropout,
            ffn_dropout=ffn_dropout,
            ffn_activation=ffn_activation,
            maximum_relative_position=maximum_relative_position,
            pre_norm=pre_norm,
        )
        for i in range(num_layers)
    ]
[ "def", "__init__", "(", "self", ",", "num_layers", ",", "num_units", "=", "512", ",", "num_heads", "=", "8", ",", "ffn_inner_dim", "=", "2048", ",", "dropout", "=", "0.1", ",", "attention_dropout", "=", "0.1", ",", "ffn_dropout", "=", "0.1", ",", "ffn_activation", "=", "tf", ".", "nn", ".", "relu", ",", "position_encoder_class", "=", "SinusoidalPositionEncoder", ",", "maximum_relative_position", "=", "None", ",", "pre_norm", "=", "True", ",", "*", "*", "kwargs", ")", ":", "super", "(", ")", ".", "__init__", "(", "*", "*", "kwargs", ")", "self", ".", "num_units", "=", "num_units", "self", ".", "dropout", "=", "dropout", "self", ".", "position_encoder", "=", "None", "if", "position_encoder_class", "is", "not", "None", ":", "self", ".", "position_encoder", "=", "position_encoder_class", "(", ")", "self", ".", "layer_norm", "=", "common", ".", "LayerNorm", "(", ")", "if", "pre_norm", "else", "None", "self", ".", "layers", "=", "[", "transformer", ".", "SelfAttentionEncoderLayer", "(", "num_units", ",", "num_heads", ",", "ffn_inner_dim", ",", "dropout", "=", "dropout", ",", "attention_dropout", "=", "attention_dropout", ",", "ffn_dropout", "=", "ffn_dropout", ",", "ffn_activation", "=", "ffn_activation", ",", "maximum_relative_position", "=", "maximum_relative_position", ",", "pre_norm", "=", "pre_norm", ",", ")", "for", "i", "in", "range", "(", "num_layers", ")", "]" ]
https://github.com/OpenNMT/OpenNMT-tf/blob/59a4dfdb911d0570ba1096b7a0a7b9fc5c7844bf/opennmt/encoders/self_attention_encoder.py#L15-L73
Yelp/kafka-utils
74831206648512db1a29426c6ebb428b33820d04
kafka_utils/kafka_check/commands/command.py
python
KafkaCheckCmd.run_command
(self)
Implement the command logic. When run_command is called cluster_config, args, and zk are already initialized.
Implement the command logic. When run_command is called cluster_config, args, and zk are already initialized.
[ "Implement", "the", "command", "logic", ".", "When", "run_command", "is", "called", "cluster_config", "args", "and", "zk", "are", "already", "initialized", "." ]
def run_command(self):
    """Implement the command logic.

    When run_command is called cluster_config, args, and zk are already
    initialized.
    """
    raise NotImplementedError("Implement in subclass")
[ "def", "run_command", "(", "self", ")", ":", "raise", "NotImplementedError", "(", "\"Implement in subclass\"", ")" ]
https://github.com/Yelp/kafka-utils/blob/74831206648512db1a29426c6ebb428b33820d04/kafka_utils/kafka_check/commands/command.py#L41-L46
hyperspy/hyperspy
1ffb3fab33e607045a37f30c1463350b72617e10
hyperspy/io_plugins/bruker.py
python
SFSTreeItem.get_iter_and_properties
(self)
Generate and return the iterator of data chunks and properties of such chunks such as size and count. Method detects if data is compressed and uses iterator with decompression involved, else uses simple iterator of chunks. Returns: (iterator, chunk_size, number_of_chunks)
Generate and return the iterator of data chunks and properties of such chunks such as size and count.
[ "Generate", "and", "return", "the", "iterator", "of", "data", "chunks", "and", "properties", "of", "such", "chunks", "such", "as", "size", "and", "count", "." ]
def get_iter_and_properties(self):
    """Generate and return the iterator of data chunks and properties
    of such chunks such as size and count.

    Method detects if data is compressed and uses iterator with
    decompression involved, else uses simple iterator of chunks.

    Returns:
        (iterator, chunk_size, number_of_chunks)
    """
    if self.sfs.compression == 'None':
        return self._iter_read_chunks(), self.sfs.usable_chunk,\
            self.size_in_chunks
    elif self.sfs.compression == 'zlib':
        return self._iter_read_compr_chunks(), self.uncompressed_blk_size,\
            self.no_of_compr_blk
    else:
        raise RuntimeError('file', str(self.sfs.filename),
                           ' is compressed by not known and not',
                           'implemented algorithm.\n Aborting...')
[ "def", "get_iter_and_properties", "(", "self", ")", ":", "if", "self", ".", "sfs", ".", "compression", "==", "'None'", ":", "return", "self", ".", "_iter_read_chunks", "(", ")", ",", "self", ".", "sfs", ".", "usable_chunk", ",", "self", ".", "size_in_chunks", "elif", "self", ".", "sfs", ".", "compression", "==", "'zlib'", ":", "return", "self", ".", "_iter_read_compr_chunks", "(", ")", ",", "self", ".", "uncompressed_blk_size", ",", "self", ".", "no_of_compr_blk", "else", ":", "raise", "RuntimeError", "(", "'file'", ",", "str", "(", "self", ".", "sfs", ".", "filename", ")", ",", "' is compressed by not known and not'", ",", "'implemented algorithm.\\n Aborting...'", ")" ]
https://github.com/hyperspy/hyperspy/blob/1ffb3fab33e607045a37f30c1463350b72617e10/hyperspy/io_plugins/bruker.py#L265-L284
openshift/openshift-tools
1188778e728a6e4781acf728123e5b356380fe6f
openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/lib_openshift/library/oc_project.py
python
Utils._write
(filename, contents)
Actually write the file contents to disk. This helps with mocking.
Actually write the file contents to disk. This helps with mocking.
[ "Actually", "write", "the", "file", "contents", "to", "disk", ".", "This", "helps", "with", "mocking", "." ]
def _write(filename, contents):
    ''' Actually write the file contents to disk. This helps with mocking. '''

    with open(filename, 'w') as sfd:
        sfd.write(str(contents))
[ "def", "_write", "(", "filename", ",", "contents", ")", ":", "with", "open", "(", "filename", ",", "'w'", ")", "as", "sfd", ":", "sfd", ".", "write", "(", "str", "(", "contents", ")", ")" ]
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/lib_openshift/library/oc_project.py#L1172-L1176
IronLanguages/main
a949455434b1fda8c783289e897e78a9a0caabb5
External.LCA_RESTRICTED/Languages/CPython/27/Lib/idlelib/Debugger.py
python
StackViewer.popup_event
(self, event)
override base method
override base method
[ "override", "base", "method" ]
def popup_event(self, event):
    "override base method"
    if self.stack:
        return ScrolledList.popup_event(self, event)
[ "def", "popup_event", "(", "self", ",", "event", ")", ":", "if", "self", ".", "stack", ":", "return", "ScrolledList", ".", "popup_event", "(", "self", ",", "event", ")" ]
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/CPython/27/Lib/idlelib/Debugger.py#L364-L367
triaquae/triaquae
bbabf736b3ba56a0c6498e7f04e16c13b8b8f2b9
TriAquae/models/Centos_6.4/paramiko/logging22.py
python
logger.log
(self, level, text)
[]
def log(self, level, text):
    if level >= self.level:
        for h in self.handlers:
            h.f.write(text + '\n')
            h.f.flush()
[ "def", "log", "(", "self", ",", "level", ",", "text", ")", ":", "if", "level", ">=", "self", ".", "level", ":", "for", "h", "in", "self", ".", "handlers", ":", "h", ".", "f", ".", "write", "(", "text", "+", "'\\n'", ")", "h", ".", "f", ".", "flush", "(", ")" ]
https://github.com/triaquae/triaquae/blob/bbabf736b3ba56a0c6498e7f04e16c13b8b8f2b9/TriAquae/models/Centos_6.4/paramiko/logging22.py#L49-L53
MozillaSecurity/peach
e5129cb50ce899e3ad009518d8b7cdc535233bbc
Peach/Engine/common.py
python
StreamBuffer.seekFromStart
(self, pos)
Change current position in data. NOTE: If the position is past the end of the existing stream data the data will be expanded such that the position exists padded with '\0'
Change current position in data.
[ "Change", "current", "position", "in", "data", "." ]
def seekFromStart(self, pos):
    """
    Change current position in data.

    NOTE: If the position is past the end of the existing stream
    data the data will be expanded such that the position exists
    padded with '\0'
    """
    if pos < 0:
        raise Exception("StreamBuffer.seekFromStart(%d) results in negative position" % pos)

    # Should we expand buffer?
    if pos > len(self.data):
        self.data += '\0' * (pos - len(self.data))

    self.pos = pos
[ "def", "seekFromStart", "(", "self", ",", "pos", ")", ":", "if", "pos", "<", "0", ":", "raise", "Exception", "(", "\"StreamBuffer.seekFromStart(%d) results in negative position\"", "%", "pos", ")", "# Should we expand buffer?", "if", "pos", ">", "len", "(", "self", ".", "data", ")", ":", "self", ".", "data", "+=", "'\\0'", "*", "(", "pos", "-", "len", "(", "self", ".", "data", ")", ")", "self", ".", "pos", "=", "pos" ]
https://github.com/MozillaSecurity/peach/blob/e5129cb50ce899e3ad009518d8b7cdc535233bbc/Peach/Engine/common.py#L356-L372
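A stand-alone sketch reproducing the documented padding behavior (TinyStream is illustrative, not the Peach class): seeking past the end grows the buffer with NUL characters.

class TinyStream:
    def __init__(self):
        self.data = ''
        self.pos = 0

    def seekFromStart(self, pos):
        if pos < 0:
            raise Exception('negative position')
        if pos > len(self.data):
            self.data += '\0' * (pos - len(self.data))   # pad with NULs
        self.pos = pos

s = TinyStream()
s.seekFromStart(4)
print(len(s.data), s.data == '\0' * 4)   # -> 4 True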
unknown-horizons/unknown-horizons
7397fb333006d26c3d9fe796c7bd9cb8c3b43a49
horizons/world/units/unit.py
python
Unit.onInstanceActionFinished
(self, instance, action)
@param instance: fife.Instance @param action: string representing the action that is finished.
[]
def onInstanceActionFinished(self, instance, action):
    """
    @param instance: fife.Instance
    @param action: string representing the action that is finished.
    """
    location = fife.Location(self._instance.getLocation().getLayer())
    location.setExactLayerCoordinates(fife.ExactModelCoordinate(
        self.position.x + self.position.x - self.last_position.x,
        self.position.y + self.position.y - self.last_position.y, 0))
    facing_loc = self._instance.getFacingLocation()
    if action.getId().startswith('move_'):
        # Remember: this means we *ended* a "move" action just now!
        facing_loc = location
    self.act(self._action, facing_loc=facing_loc, repeating=True)
[ "def", "onInstanceActionFinished", "(", "self", ",", "instance", ",", "action", ")", ":", "location", "=", "fife", ".", "Location", "(", "self", ".", "_instance", ".", "getLocation", "(", ")", ".", "getLayer", "(", ")", ")", "location", ".", "setExactLayerCoordinates", "(", "fife", ".", "ExactModelCoordinate", "(", "self", ".", "position", ".", "x", "+", "self", ".", "position", ".", "x", "-", "self", ".", "last_position", ".", "x", ",", "self", ".", "position", ".", "y", "+", "self", ".", "position", ".", "y", "-", "self", ".", "last_position", ".", "y", ",", "0", ")", ")", "facing_loc", "=", "self", ".", "_instance", ".", "getFacingLocation", "(", ")", "if", "action", ".", "getId", "(", ")", ".", "startswith", "(", "'move_'", ")", ":", "# Remember: this means we *ended* a \"move\" action just now!", "facing_loc", "=", "location", "self", ".", "act", "(", "self", ".", "_action", ",", "facing_loc", "=", "facing_loc", ",", "repeating", "=", "True", ")" ]
https://github.com/unknown-horizons/unknown-horizons/blob/7397fb333006d26c3d9fe796c7bd9cb8c3b43a49/horizons/world/units/unit.py#L88-L103
cloudera/hue
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
desktop/core/ext-py/tablib-0.12.1/tablib/packages/dbfpy3/fields.py
python
DbfFieldDef.decodeValue
(self, value)
Return decoded value from string value. This method shouldn't be used publicly. It's called from the `decodeFromRecord` method. This is an abstract method and it must be overridden in child classes.
Return decoded value from string value.
[ "Return", "decoded", "value", "from", "string", "value", "." ]
def decodeValue(self, value):
    """Return decoded value from string value.

    This method shouldn't be used publicly. It's called from the
    `decodeFromRecord` method.

    This is an abstract method and it must be overridden in child classes.
    """
    raise NotImplementedError
[ "def", "decodeValue", "(", "self", ",", "value", ")", ":", "raise", "NotImplementedError" ]
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/tablib-0.12.1/tablib/packages/dbfpy3/fields.py#L181-L189
Podshot/MCEdit-Unified
90abfb170c65b877ac67193e717fa3a3ded635dd
utilities/misc.py
python
Singleton.Instance
(self)
Returns the singleton instance. Upon its first call, it creates a new instance of the decorated class and calls its `__init__` method. On all subsequent calls, the already created instance is returned.
Returns the singleton instance. Upon its first call, it creates a new instance of the decorated class and calls its `__init__` method. On all subsequent calls, the already created instance is returned.
[ "Returns", "the", "singleton", "instance", ".", "Upon", "its", "first", "call", "it", "creates", "a", "new", "instance", "of", "the", "decorated", "class", "and", "calls", "its", "__init__", "method", ".", "On", "all", "subsequent", "calls", "the", "already", "created", "instance", "is", "returned", "." ]
def Instance(self):
    """
    Returns the singleton instance. Upon its first call, it creates a
    new instance of the decorated class and calls its `__init__` method.
    On all subsequent calls, the already created instance is returned.
    """
    try:
        return self._instance
    except AttributeError:
        self._instance = self._decorated()
        return self._instance
[ "def", "Instance", "(", "self", ")", ":", "try", ":", "return", "self", ".", "_instance", "except", "AttributeError", ":", "self", ".", "_instance", "=", "self", ".", "_decorated", "(", ")", "return", "self", ".", "_instance" ]
https://github.com/Podshot/MCEdit-Unified/blob/90abfb170c65b877ac67193e717fa3a3ded635dd/utilities/misc.py#L23-L34
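A usage sketch, assuming the usual wrapper around this method: a Singleton class whose __init__ stores the decorated class as self._decorated (the Config class is illustrative):

class Singleton:
    def __init__(self, decorated):
        self._decorated = decorated

    def Instance(self):
        # Same logic as the record above: create on first call, then reuse.
        try:
            return self._instance
        except AttributeError:
            self._instance = self._decorated()
            return self._instance

@Singleton
class Config:
    def __init__(self):
        self.values = {}

print(Config.Instance() is Config.Instance())   # -> True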
trailbehind/DeepOSM
4361273723ba271105e2401ecc1707556816f20c
src/training_data.py
python
download_and_serialize
(number_of_naips, randomize_naips, naip_state, naip_year, extract_type, bands, tile_size, pixels_to_fatten_roads, label_data_files, tile_overlap)
return raster_data_paths
Download NAIP images, PBF files, and serialize training data.
Download NAIP images, PBF files, and serialize training data.
[ "Download", "NAIP", "images", "PBF", "files", "and", "serialize", "training", "data", "." ]
def download_and_serialize(number_of_naips,
                           randomize_naips,
                           naip_state,
                           naip_year,
                           extract_type,
                           bands,
                           tile_size,
                           pixels_to_fatten_roads,
                           label_data_files,
                           tile_overlap):
    """Download NAIP images, PBF files, and serialize training data."""
    raster_data_paths = NAIPDownloader(number_of_naips,
                                       randomize_naips,
                                       naip_state,
                                       naip_year).download_naips()

    create_tiled_training_data(raster_data_paths,
                               extract_type,
                               bands,
                               tile_size,
                               pixels_to_fatten_roads,
                               label_data_files,
                               tile_overlap,
                               naip_state)
    return raster_data_paths
[ "def", "download_and_serialize", "(", "number_of_naips", ",", "randomize_naips", ",", "naip_state", ",", "naip_year", ",", "extract_type", ",", "bands", ",", "tile_size", ",", "pixels_to_fatten_roads", ",", "label_data_files", ",", "tile_overlap", ")", ":", "raster_data_paths", "=", "NAIPDownloader", "(", "number_of_naips", ",", "randomize_naips", ",", "naip_state", ",", "naip_year", ")", ".", "download_naips", "(", ")", "create_tiled_training_data", "(", "raster_data_paths", ",", "extract_type", ",", "bands", ",", "tile_size", ",", "pixels_to_fatten_roads", ",", "label_data_files", ",", "tile_overlap", ",", "naip_state", ")", "return", "raster_data_paths" ]
https://github.com/trailbehind/DeepOSM/blob/4361273723ba271105e2401ecc1707556816f20c/src/training_data.py#L380-L404
oilshell/oil
94388e7d44a9ad879b12615f6203b38596b5a2d3
Python-2.7.13/Lib/stringprep.py
python
in_table_c7
(code)
return ord(code) in c7_set
[]
def in_table_c7(code):
    return ord(code) in c7_set
[ "def", "in_table_c7", "(", "code", ")", ":", "return", "ord", "(", "code", ")", "in", "c7_set" ]
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/Python-2.7.13/Lib/stringprep.py#L253-L254
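The same check is exposed through the standard-library stringprep module; table C.7 of RFC 3454 lists characters inappropriate for canonical representation, i.e. the ideographic description characters U+2FF0..U+2FFB:

import stringprep

print(stringprep.in_table_c7('\u2ff0'))   # -> True
print(stringprep.in_table_c7('A'))        # -> False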
gammapy/gammapy
735b25cd5bbed35e2004d633621896dcd5295e8b
gammapy/estimators/points/sed.py
python
FluxPointsEstimator.run
(self, datasets)
return FluxPoints.from_table( table=table, reference_model=model.copy(), gti=datasets.gti, format="gadf-sed", )
Run the flux point estimator for all energy groups. Parameters ---------- datasets : `~gammapy.datasets.Datasets` Datasets Returns ------- flux_points : `FluxPoints` Estimated flux points.
Run the flux point estimator for all energy groups.
[ "Run", "the", "flux", "point", "estimator", "for", "all", "energy", "groups", "." ]
def run(self, datasets):
    """Run the flux point estimator for all energy groups.

    Parameters
    ----------
    datasets : `~gammapy.datasets.Datasets`
        Datasets

    Returns
    -------
    flux_points : `FluxPoints`
        Estimated flux points.
    """
    datasets = Datasets(datasets=datasets)

    rows = []

    for energy_min, energy_max in progress_bar(
        zip(self.energy_edges[:-1], self.energy_edges[1:]), desc="Energy bins"
    ):
        row = self.estimate_flux_point(
            datasets,
            energy_min=energy_min,
            energy_max=energy_max,
        )
        rows.append(row)

    meta = {
        "n_sigma": self.n_sigma,
        "n_sigma_ul": self.n_sigma_ul,
        "sed_type_init": "likelihood",
    }

    table = table_from_row_data(rows=rows, meta=meta)
    model = datasets.models[self.source]
    return FluxPoints.from_table(
        table=table,
        reference_model=model.copy(),
        gti=datasets.gti,
        format="gadf-sed",
    )
[ "def", "run", "(", "self", ",", "datasets", ")", ":", "datasets", "=", "Datasets", "(", "datasets", "=", "datasets", ")", "rows", "=", "[", "]", "for", "energy_min", ",", "energy_max", "in", "progress_bar", "(", "zip", "(", "self", ".", "energy_edges", "[", ":", "-", "1", "]", ",", "self", ".", "energy_edges", "[", "1", ":", "]", ")", ",", "desc", "=", "\"Energy bins\"", ")", ":", "row", "=", "self", ".", "estimate_flux_point", "(", "datasets", ",", "energy_min", "=", "energy_min", ",", "energy_max", "=", "energy_max", ",", ")", "rows", ".", "append", "(", "row", ")", "meta", "=", "{", "\"n_sigma\"", ":", "self", ".", "n_sigma", ",", "\"n_sigma_ul\"", ":", "self", ".", "n_sigma_ul", ",", "\"sed_type_init\"", ":", "\"likelihood\"", ",", "}", "table", "=", "table_from_row_data", "(", "rows", "=", "rows", ",", "meta", "=", "meta", ")", "model", "=", "datasets", ".", "models", "[", "self", ".", "source", "]", "return", "FluxPoints", ".", "from_table", "(", "table", "=", "table", ",", "reference_model", "=", "model", ".", "copy", "(", ")", ",", "gti", "=", "datasets", ".", "gti", ",", "format", "=", "\"gadf-sed\"", ",", ")" ]
https://github.com/gammapy/gammapy/blob/735b25cd5bbed35e2004d633621896dcd5295e8b/gammapy/estimators/points/sed.py#L83-L122
ronf/asyncssh
ee1714c598d8c2ea6f5484e465443f38b68714aa
asyncssh/saslprep.py
python
_map_saslprep
(s: str)
return ''.join(r)
Map stringprep table B.1 to nothing and C.1.2 to ASCII space
Map stringprep table B.1 to nothing and C.1.2 to ASCII space
[ "Map", "stringprep", "table", "B", ".", "1", "to", "nothing", "and", "C", ".", "1", ".", "2", "to", "ASCII", "space" ]
def _map_saslprep(s: str) -> str:
    """Map stringprep table B.1 to nothing and C.1.2 to ASCII space"""

    r = []

    for c in s:
        if stringprep.in_table_c12(c):
            r.append(' ')
        elif not stringprep.in_table_b1(c):
            r.append(c)

    return ''.join(r)
[ "def", "_map_saslprep", "(", "s", ":", "str", ")", "->", "str", ":", "r", "=", "[", "]", "for", "c", "in", "s", ":", "if", "stringprep", ".", "in_table_c12", "(", "c", ")", ":", "r", ".", "append", "(", "' '", ")", "elif", "not", "stringprep", ".", "in_table_b1", "(", "c", ")", ":", "r", ".", "append", "(", "c", ")", "return", "''", ".", "join", "(", "r", ")" ]
https://github.com/ronf/asyncssh/blob/ee1714c598d8c2ea6f5484e465443f38b68714aa/asyncssh/saslprep.py#L91-L102
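A quick self-contained check of the mapping (the sample string is illustrative): U+00AD (soft hyphen) is in table B.1 and disappears, while U+00A0 (no-break space) is in table C.1.2 and becomes an ASCII space.

import stringprep

def _map_saslprep(s: str) -> str:   # same mapping as the record above
    r = []
    for c in s:
        if stringprep.in_table_c12(c):
            r.append(' ')
        elif not stringprep.in_table_b1(c):
            r.append(c)
    return ''.join(r)

print(_map_saslprep('pa\u00adss\u00a0word'))   # -> 'pass word'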
triaquae/triaquae
bbabf736b3ba56a0c6498e7f04e16c13b8b8f2b9
TriAquae/models/Centos_6.4/Crypto/Hash/hashalgo.py
python
HashAlgo.copy
(self)
return self._hash.copy()
Return a copy ("clone") of the hash object. The copy will have the same internal state as the original hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. :Return: A hash object of the same type
Return a copy ("clone") of the hash object.
[ "Return", "a", "copy", "(", "clone", ")", "of", "the", "hash", "object", "." ]
def copy(self):
    """Return a copy ("clone") of the hash object.

    The copy will have the same internal state as the original hash
    object.
    This can be used to efficiently compute the digests of strings that
    share a common initial substring.

    :Return: A hash object of the same type
    """
    return self._hash.copy()
[ "def", "copy", "(", "self", ")", ":", "return", "self", ".", "_hash", ".", "copy", "(", ")" ]
https://github.com/triaquae/triaquae/blob/bbabf736b3ba56a0c6498e7f04e16c13b8b8f2b9/TriAquae/models/Centos_6.4/Crypto/Hash/hashalgo.py#L92-L102
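The same common-prefix trick works with the standard hashlib objects that back this wrapper: hash the shared prefix once, then clone the state for each suffix.

import hashlib

prefix = hashlib.sha256(b'common header ')
h1 = prefix.copy()
h1.update(b'message one')
h2 = prefix.copy()
h2.update(b'message two')
# Cloning preserves state, so this equals hashing the full message at once.
print(h1.hexdigest() == hashlib.sha256(b'common header message one').hexdigest())   # -> True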
edisonlz/fastor
342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3
base/site-packages/tencentcloud/vpc/v20170312/models.py
python
DeleteCustomerGatewayResponse.__init__
(self)
:param RequestId: Unique request ID, returned with every request. Provide this RequestId when reporting an issue with the request. :type RequestId: str
:param RequestId: Unique request ID, returned with every request. Provide this RequestId when reporting an issue with the request. :type RequestId: str
[ ":", "param", "RequestId", ":", "Unique request ID, returned with every request. Provide this RequestId when reporting an issue with the request.", ":", "type", "RequestId", ":", "str" ]
def __init__(self):
    """
    :param RequestId: Unique request ID, returned with every request. Provide this RequestId when reporting an issue with the request.
    :type RequestId: str
    """
    self.RequestId = None
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "RequestId", "=", "None" ]
https://github.com/edisonlz/fastor/blob/342078a18363ac41d3c6b1ab29dbdd44fdb0b7b3/base/site-packages/tencentcloud/vpc/v20170312/models.py#L1283-L1288
adonno/Home-AssistantConfig
8038c0143c6a990e409951202bb1bda149fcbaf7
custom_components/hacs/sensor.py
python
HACSSensor.async_update
(self)
Update the sensor.
Update the sensor.
[ "Update", "the", "sensor", "." ]
async def async_update(self):
    """Update the sensor."""
    if hacs.system.status.background_task:
        return
    self.repositories = []
    for repository in hacs.repositories:
        if repository.pending_upgrade:
            self.repositories.append(repository)
    self._state = len(self.repositories)
[ "async", "def", "async_update", "(", "self", ")", ":", "if", "hacs", ".", "system", ".", "status", ".", "background_task", ":", "return", "self", ".", "repositories", "=", "[", "]", "for", "repository", "in", "hacs", ".", "repositories", ":", "if", "repository", ".", "pending_upgrade", ":", "self", ".", "repositories", ".", "append", "(", "repository", ")", "self", ".", "_state", "=", "len", "(", "self", ".", "repositories", ")" ]
https://github.com/adonno/Home-AssistantConfig/blob/8038c0143c6a990e409951202bb1bda149fcbaf7/custom_components/hacs/sensor.py#L29-L39
HumanCompatibleAI/adversarial-policies
bba910b89149f1274bb9652a6f378b22c3c9b6c5
src/aprl/configs/multi/train.py
python
_finetune_configs
(envs=None, dual_defense=False)
return configs
Generates configs for finetuning a Zoo model. Note in this setup, the adversary is the embedded agent, whereas usually the victim is. :param envs: A list of envs; if set to None, uses all BANSAL_GOOD_ENVS :param dual_defense: If True, fine-tune against both an adversary and Zoo agent (randomly selected per episode); if False, fine-tune against just the adversary.
Generates configs for finetuning a Zoo model.
[ "Generates", "configs", "for", "finetuning", "a", "Zoo", "model", "." ]
def _finetune_configs(envs=None, dual_defense=False):
    """Generates configs for finetuning a Zoo model.

    Note in this setup, the adversary is the embedded agent, whereas usually the victim is.

    :param envs: A list of envs; if set to None, uses all BANSAL_GOOD_ENVS
    :param dual_defense: If True, fine-tune against both an adversary and Zoo agent (randomly
        selected per episode); if False, fine-tune against just the adversary.
    """
    if envs is None:
        envs = BANSAL_GOOD_ENVS

    configs = []
    adversary_paths = get_adversary_paths()
    for env in envs:
        original_embed_index = VICTIM_INDEX[env]
        num_zoo = gym_compete.num_zoo_policies(env)

        for original_victim in range(1, num_zoo + 1):
            original_victim = str(original_victim)
            load_policy = {"type": "zoo", "path": original_victim}
            adversary = _get_policy_path(
                adversary_paths, env, str(original_embed_index), original_victim
            )

            if dual_defense:
                # If training both best adversary and Zoo, try each possible Zoo agent
                for finetuning_zoo in range(1, num_zoo + 1):
                    finetuning_zoo = str(finetuning_zoo)
                    embed_paths = [adversary, finetuning_zoo]
                    embed_types = ["ppo2", "zoo"]
                    configs.append(
                        (env, embed_paths, embed_types, 1 - original_embed_index, load_policy)
                    )
            else:
                configs.append((env, [adversary], ["ppo2"], 1 - original_embed_index, load_policy))
    return configs
[ "def", "_finetune_configs", "(", "envs", "=", "None", ",", "dual_defense", "=", "False", ")", ":", "if", "envs", "is", "None", ":", "envs", "=", "BANSAL_GOOD_ENVS", "configs", "=", "[", "]", "adversary_paths", "=", "get_adversary_paths", "(", ")", "for", "env", "in", "envs", ":", "original_embed_index", "=", "VICTIM_INDEX", "[", "env", "]", "num_zoo", "=", "gym_compete", ".", "num_zoo_policies", "(", "env", ")", "for", "original_victim", "in", "range", "(", "1", ",", "num_zoo", "+", "1", ")", ":", "original_victim", "=", "str", "(", "original_victim", ")", "load_policy", "=", "{", "\"type\"", ":", "\"zoo\"", ",", "\"path\"", ":", "original_victim", "}", "adversary", "=", "_get_policy_path", "(", "adversary_paths", ",", "env", ",", "str", "(", "original_embed_index", ")", ",", "original_victim", ")", "if", "dual_defense", ":", "# If training both best adversary and Zoo, try each possible Zoo agent", "for", "finetuning_zoo", "in", "range", "(", "1", ",", "num_zoo", "+", "1", ")", ":", "finetuning_zoo", "=", "str", "(", "finetuning_zoo", ")", "embed_paths", "=", "[", "adversary", ",", "finetuning_zoo", "]", "embed_types", "=", "[", "\"ppo2\"", ",", "\"zoo\"", "]", "configs", ".", "append", "(", "(", "env", ",", "embed_paths", ",", "embed_types", ",", "1", "-", "original_embed_index", ",", "load_policy", ")", ")", "else", ":", "configs", ".", "append", "(", "(", "env", ",", "[", "adversary", "]", ",", "[", "\"ppo2\"", "]", ",", "1", "-", "original_embed_index", ",", "load_policy", ")", ")", "return", "configs" ]
https://github.com/HumanCompatibleAI/adversarial-policies/blob/bba910b89149f1274bb9652a6f378b22c3c9b6c5/src/aprl/configs/multi/train.py#L117-L151
twisted/twisted
dee676b040dd38b847ea6fb112a712cb5e119490
src/twisted/protocols/amp.py
python
_ParserHelper.parseString
(cls, data)
return cls.parse(BytesIO(data))
Parse some amp data stored in a string. @param data: a str holding some amp-encoded data. @return: a list of AmpBoxes encoded in the given string.
Parse some amp data stored in a string.
[ "Parse", "some", "amp", "data", "stored", "in", "a", "string", "." ]
def parseString(cls, data):
    """
    Parse some amp data stored in a string.

    @param data: a str holding some amp-encoded data.

    @return: a list of AmpBoxes encoded in the given string.
    """
    return cls.parse(BytesIO(data))
[ "def", "parseString", "(", "cls", ",", "data", ")", ":", "return", "cls", ".", "parse", "(", "BytesIO", "(", "data", ")", ")" ]
https://github.com/twisted/twisted/blob/dee676b040dd38b847ea6fb112a712cb5e119490/src/twisted/protocols/amp.py#L2642-L2650
mtianyan/VueDjangoAntdProBookShop
fd8fa2151c81edde2f8b8e6df8e1ddd799f940c2
third_party/qiniu/auth.py
python
Auth.verify_callback
(self, origin_authorization, url, body, content_type='application/x-www-form-urlencoded')
return origin_authorization == authorization
Verify a callback request. Args: origin_authorization: the Authorization header of the callback request url: the URL of the callback request body: the body of the callback request content_type: the Content-Type of the callback request body Returns: True if verification succeeds, False otherwise
Verify a callback request.
[ "Verify a callback request." ]
def verify_callback(self, origin_authorization, url, body,
                    content_type='application/x-www-form-urlencoded'):
    """Verify a callback request.

    Args:
        origin_authorization: the Authorization header of the callback request
        url:                  the URL of the callback request
        body:                 the body of the callback request
        content_type:         the Content-Type of the callback request body

    Returns:
        True if verification succeeds, False otherwise
    """
    token = self.token_of_request(url, body, content_type)
    authorization = 'QBox {0}'.format(token)
    return origin_authorization == authorization
[ "def", "verify_callback", "(", "self", ",", "origin_authorization", ",", "url", ",", "body", ",", "content_type", "=", "'application/x-www-form-urlencoded'", ")", ":", "token", "=", "self", ".", "token_of_request", "(", "url", ",", "body", ",", "content_type", ")", "authorization", "=", "'QBox {0}'", ".", "format", "(", "token", ")", "return", "origin_authorization", "==", "authorization" ]
https://github.com/mtianyan/VueDjangoAntdProBookShop/blob/fd8fa2151c81edde2f8b8e6df8e1ddd799f940c2/third_party/qiniu/auth.py#L160-L174
dmnfarrell/pandastable
9c268b3e2bfe2e718eaee4a30bd02832a0ad1614
pandastable/headers.py
python
RowHeader.clearSelected
(self)
return
Clear selected rows
Clear selected rows
[ "Clear", "selected", "rows" ]
def clearSelected(self):
    """Clear selected rows"""
    self.delete('rect')
    return
[ "def", "clearSelected", "(", "self", ")", ":", "self", ".", "delete", "(", "'rect'", ")", "return" ]
https://github.com/dmnfarrell/pandastable/blob/9c268b3e2bfe2e718eaee4a30bd02832a0ad1614/pandastable/headers.py#L628-L631
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/jinja2/runtime.py
python
BlockReference.__init__
(self, name, context, stack, depth)
[]
def __init__(self, name, context, stack, depth):
    self.name = name
    self._context = context
    self._stack = stack
    self._depth = depth
[ "def", "__init__", "(", "self", ",", "name", ",", "context", ",", "stack", ",", "depth", ")", ":", "self", ".", "name", "=", "name", "self", ".", "_context", "=", "context", "self", ".", "_stack", "=", "stack", "self", ".", "_depth", "=", "depth" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/jinja2/runtime.py#L327-L331
mlrun/mlrun
4c120719d64327a34b7ee1ab08fb5e01b258b00a
mlrun/frameworks/pytorch/mlrun_interface.py
python
PyTorchMLRunInterface._tensor_to_cuda
( tensor: Union[Tensor, Dict, List, Tuple] )
return tensor
Send the given tensor to cuda if it is a tensor. If the given object is a dictionary, the dictionary values will be sent to the function again recursively. If the given object is a list or a tuple, all the values in it will be sent as well. If the given object is not of type torch.Tensor at the end, nothing will happen. :param tensor: The batch to send to cuda. :return: The copied tensor in cuda memory.
Send the given tensor to cuda if it is a tensor. If the given object is a dictionary, the dictionary values will be sent to the function again recursively. If the given object is a list or a tuple, all the values in it will be sent as well. If the given object is not of type torch.Tensor at the end, nothing will happen.
[ "Send", "the", "given", "tensor", "to", "cuda", "if", "it", "is", "a", "tensor", ".", "If", "the", "given", "object", "is", "a", "dictionary", "the", "dictionary", "values", "will", "be", "sent", "to", "the", "function", "again", "recursively", ".", "If", "the", "given", "object", "is", "a", "list", "or", "a", "tuple", "all", "the", "values", "in", "it", "will", "be", "sent", "as", "well", ".", "If", "the", "given", "object", "is", "not", "of", "type", "torch", ".", "Tensor", "at", "the", "end", "nothing", "will", "happen", "." ]
def _tensor_to_cuda( tensor: Union[Tensor, Dict, List, Tuple] ) -> Union[Tensor, Dict, List, Tuple]: """ Send to given tensor to cuda if it is a tensor. If the given object is a dictionary, the dictionary values will be sent to the function again recursively. If the given object is a list or a tuple, all the values in it will be sent as well. If the given object is not of type torch.Tensor at the end, nothing will happen. :param tensor: The batch to sent to cuda. :return: The copied tensor in cuda memory. """ if isinstance(tensor, Tensor) and not tensor.is_cuda: tensor = tensor.cuda() if tensor._grad is not None: tensor._grad.data = tensor._grad.data.cuda() elif isinstance(tensor, dict): for key in tensor: tensor[key] = PyTorchMLRunInterface._tensor_to_cuda(tensor=tensor[key]) elif isinstance(tensor, list): for index in range(len(tensor)): tensor[index] = PyTorchMLRunInterface._tensor_to_cuda( tensor=tensor[index] ) elif isinstance(tensor, tuple): cuda_tensor = () for value in tensor: cuda_tensor += (PyTorchMLRunInterface._tensor_to_cuda(tensor=value),) tensor = cuda_tensor return tensor
[ "def", "_tensor_to_cuda", "(", "tensor", ":", "Union", "[", "Tensor", ",", "Dict", ",", "List", ",", "Tuple", "]", ")", "->", "Union", "[", "Tensor", ",", "Dict", ",", "List", ",", "Tuple", "]", ":", "if", "isinstance", "(", "tensor", ",", "Tensor", ")", "and", "not", "tensor", ".", "is_cuda", ":", "tensor", "=", "tensor", ".", "cuda", "(", ")", "if", "tensor", ".", "_grad", "is", "not", "None", ":", "tensor", ".", "_grad", ".", "data", "=", "tensor", ".", "_grad", ".", "data", ".", "cuda", "(", ")", "elif", "isinstance", "(", "tensor", ",", "dict", ")", ":", "for", "key", "in", "tensor", ":", "tensor", "[", "key", "]", "=", "PyTorchMLRunInterface", ".", "_tensor_to_cuda", "(", "tensor", "=", "tensor", "[", "key", "]", ")", "elif", "isinstance", "(", "tensor", ",", "list", ")", ":", "for", "index", "in", "range", "(", "len", "(", "tensor", ")", ")", ":", "tensor", "[", "index", "]", "=", "PyTorchMLRunInterface", ".", "_tensor_to_cuda", "(", "tensor", "=", "tensor", "[", "index", "]", ")", "elif", "isinstance", "(", "tensor", ",", "tuple", ")", ":", "cuda_tensor", "=", "(", ")", "for", "value", "in", "tensor", ":", "cuda_tensor", "+=", "(", "PyTorchMLRunInterface", ".", "_tensor_to_cuda", "(", "tensor", "=", "value", ")", ",", ")", "tensor", "=", "cuda_tensor", "return", "tensor" ]
https://github.com/mlrun/mlrun/blob/4c120719d64327a34b7ee1ab08fb5e01b258b00a/mlrun/frameworks/pytorch/mlrun_interface.py#L936-L965
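The method above sends nested containers to CUDA recursively. As a hedged illustration of the same pattern, here is a simplified standalone function (not the mlrun method itself; it returns new containers instead of mutating in place, skips gradient handling, and needs a CUDA-capable machine to run):

```python
from typing import Dict, List, Tuple, Union

import torch
from torch import Tensor

def to_cuda(obj: Union[Tensor, Dict, List, Tuple]):
    # Move every Tensor leaf of a nested dict/list/tuple to CUDA memory.
    if isinstance(obj, Tensor) and not obj.is_cuda:
        return obj.cuda()
    if isinstance(obj, dict):
        return {key: to_cuda(value) for key, value in obj.items()}
    if isinstance(obj, list):
        return [to_cuda(value) for value in obj]
    if isinstance(obj, tuple):
        return tuple(to_cuda(value) for value in obj)
    return obj  # non-tensor leaves pass through unchanged

batch = {"x": torch.ones(2, 3), "meta": [torch.zeros(2), ("tag", torch.arange(4))]}
batch = to_cuda(batch)  # every Tensor leaf now lives on the GPU
```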
entropy1337/infernal-twin
10995cd03312e39a48ade0f114ebb0ae3a711bb8
Modules/build/reportlab/src/reportlab/pdfbase/ttfonts.py
python
TTFontMaker.__init__
(self)
Initializes the generator.
Initializes the generator.
[ "Initializes", "the", "generator", "." ]
def __init__(self): "Initializes the generator." self.tables = {}
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "tables", "=", "{", "}" ]
https://github.com/entropy1337/infernal-twin/blob/10995cd03312e39a48ade0f114ebb0ae3a711bb8/Modules/build/reportlab/src/reportlab/pdfbase/ttfonts.py#L339-L341
catalyst-cooperative/pudl
40d176313e60dfa9d2481f63842ed23f08f1ad5f
src/pudl/workspace/setup_cli.py
python
main
()
Set up a new default PUDL workspace.
Set up a new default PUDL workspace.
[ "Set", "up", "a", "new", "default", "PUDL", "workspace", "." ]
def main(): """Set up a new default PUDL workspace.""" # Display logged output from the PUDL package: pudl_logger = logging.getLogger("pudl") log_format = '%(asctime)s [%(levelname)8s] %(name)s:%(lineno)s %(message)s' coloredlogs.install(fmt=log_format, level='INFO', logger=pudl_logger) parser = initialize_parser() args = parser.parse_args(sys.argv[1:]) if not args.pudl_in: args.pudl_in = args.pudl_dir if not args.pudl_out: args.pudl_out = args.pudl_dir # Given pudl_in and pudl_out, create a user settings file. pudl_in = pathlib.Path(args.pudl_in).expanduser().resolve() if not pathlib.Path.is_dir(pudl_in): raise FileNotFoundError( f"Directory not found: {pudl_in}") pudl_out = pathlib.Path(args.pudl_out).expanduser().resolve() if not pathlib.Path.is_dir(pudl_out): raise FileNotFoundError( f"Directory not found: {pudl_out}") pudl_defaults_file = pathlib.Path.home() / ".pudl.yml" # Only print out this information and do the defaults setting if that has # been explicitly requested, or there are no defaults already: if not pudl_defaults_file.exists() or args.clobber is True: logger.info(f"Setting default pudl_in: {pudl_in}") logger.info(f"Setting default pudl_out: {pudl_out}") logger.info(f"You can update these default values by editing " f"{pudl_defaults_file}") pudl.workspace.setup.set_defaults(pudl_in, pudl_out, clobber=args.clobber) pudl.workspace.setup.init(pudl_in=pudl_in, pudl_out=pudl_out, clobber=args.clobber)
[ "def", "main", "(", ")", ":", "# Display logged output from the PUDL package:", "pudl_logger", "=", "logging", ".", "getLogger", "(", "\"pudl\"", ")", "log_format", "=", "'%(asctime)s [%(levelname)8s] %(name)s:%(lineno)s %(message)s'", "coloredlogs", ".", "install", "(", "fmt", "=", "log_format", ",", "level", "=", "'INFO'", ",", "logger", "=", "pudl_logger", ")", "parser", "=", "initialize_parser", "(", ")", "args", "=", "parser", ".", "parse_args", "(", "sys", ".", "argv", "[", "1", ":", "]", ")", "if", "not", "args", ".", "pudl_in", ":", "args", ".", "pudl_in", "=", "args", ".", "pudl_dir", "if", "not", "args", ".", "pudl_out", ":", "args", ".", "pudl_out", "=", "args", ".", "pudl_dir", "# Given pudl_in and pudl_out, create a user settings file.", "pudl_in", "=", "pathlib", ".", "Path", "(", "args", ".", "pudl_in", ")", ".", "expanduser", "(", ")", ".", "resolve", "(", ")", "if", "not", "pathlib", ".", "Path", ".", "is_dir", "(", "pudl_in", ")", ":", "raise", "FileNotFoundError", "(", "f\"Directory not found: {pudl_in}\"", ")", "pudl_out", "=", "pathlib", ".", "Path", "(", "args", ".", "pudl_out", ")", ".", "expanduser", "(", ")", ".", "resolve", "(", ")", "if", "not", "pathlib", ".", "Path", ".", "is_dir", "(", "pudl_out", ")", ":", "raise", "FileNotFoundError", "(", "f\"Directory not found: {pudl_out}\"", ")", "pudl_defaults_file", "=", "pathlib", ".", "Path", ".", "home", "(", ")", "/", "\".pudl.yml\"", "# Only print out this information and do the defaults setting if that has", "# been explicitly requested, or there are no defaults already:", "if", "not", "pudl_defaults_file", ".", "exists", "(", ")", "or", "args", ".", "clobber", "is", "True", ":", "logger", ".", "info", "(", "f\"Setting default pudl_in: {pudl_in}\"", ")", "logger", ".", "info", "(", "f\"Setting default pudl_out: {pudl_out}\"", ")", "logger", ".", "info", "(", "f\"You can update these default values by editing \"", "f\"{pudl_defaults_file}\"", ")", "pudl", ".", "workspace", ".", "setup", ".", "set_defaults", "(", "pudl_in", ",", "pudl_out", ",", "clobber", "=", "args", ".", "clobber", ")", "pudl", ".", "workspace", ".", "setup", ".", "init", "(", "pudl_in", "=", "pudl_in", ",", "pudl_out", "=", "pudl_out", ",", "clobber", "=", "args", ".", "clobber", ")" ]
https://github.com/catalyst-cooperative/pudl/blob/40d176313e60dfa9d2481f63842ed23f08f1ad5f/src/pudl/workspace/setup_cli.py#L104-L144
cherrypy/cheroot
75c10f483d7efc87905d955b37150f273ee5ffb2
cheroot/ssl/builtin.py
python
BuiltinSSLAdapter.context
(self, context)
Set the ssl ``context`` to use.
Set the ssl ``context`` to use.
[ "Set", "the", "ssl", "context", "to", "use", "." ]
def context(self, context): """Set the ssl ``context`` to use.""" self._context = context # Python 3.7+ # if a context is provided via `cherrypy.config.update` then # `self.context` will be set after `__init__` # use a property to intercept it to add an SNI callback # but don't override the user's callback # TODO: chain callbacks with suppress(AttributeError): if ssl.HAS_SNI and context.sni_callback is None: context.sni_callback = _sni_callback
[ "def", "context", "(", "self", ",", "context", ")", ":", "self", ".", "_context", "=", "context", "# Python 3.7+", "# if a context is provided via `cherrypy.config.update` then", "# `self.context` will be set after `__init__`", "# use a property to intercept it to add an SNI callback", "# but don't override the user's callback", "# TODO: chain callbacks", "with", "suppress", "(", "AttributeError", ")", ":", "if", "ssl", ".", "HAS_SNI", "and", "context", ".", "sni_callback", "is", "None", ":", "context", ".", "sni_callback", "=", "_sni_callback" ]
https://github.com/cherrypy/cheroot/blob/75c10f483d7efc87905d955b37150f273ee5ffb2/cheroot/ssl/builtin.py#L256-L267
aliyun/aliyun-oss-python-sdk
5f2afa0928a58c7c1cc6317ac147f3637481f6fd
oss2/auth.py
python
StsAuth.__init__
(self, access_key_id, access_key_secret, security_token, auth_version=AUTH_VERSION_1)
[]
def __init__(self, access_key_id, access_key_secret, security_token, auth_version=AUTH_VERSION_1): logger.debug("Init StsAuth: access_key_id: {0}, access_key_secret: ******, security_token: ******".format(access_key_id)) credentials_provider = StaticCredentialsProvider(access_key_id, access_key_secret, security_token) self.__auth = ProviderAuthV2(credentials_provider) if auth_version == AUTH_VERSION_2 else ProviderAuth(credentials_provider)
[ "def", "__init__", "(", "self", ",", "access_key_id", ",", "access_key_secret", ",", "security_token", ",", "auth_version", "=", "AUTH_VERSION_1", ")", ":", "logger", ".", "debug", "(", "\"Init StsAuth: access_key_id: {0}, access_key_secret: ******, security_token: ******\"", ".", "format", "(", "access_key_id", ")", ")", "credentials_provider", "=", "StaticCredentialsProvider", "(", "access_key_id", ",", "access_key_secret", ",", "security_token", ")", "self", ".", "__auth", "=", "ProviderAuthV2", "(", "credentials_provider", ")", "if", "auth_version", "==", "AUTH_VERSION_2", "else", "ProviderAuth", "(", "credentials_provider", ")" ]
https://github.com/aliyun/aliyun-oss-python-sdk/blob/5f2afa0928a58c7c1cc6317ac147f3637481f6fd/oss2/auth.py#L246-L249
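Typical use of the StsAuth record above with the rest of the oss2 API; the endpoint, bucket name, and STS triplet below are placeholders (real values come from the Aliyun STS service):

```python
import oss2

# Placeholder STS credentials obtained out of band.
auth = oss2.StsAuth('<access_key_id>', '<access_key_secret>', '<security_token>')
bucket = oss2.Bucket(auth, 'http://oss-cn-hangzhou.aliyuncs.com', 'my-bucket')
bucket.put_object('hello.txt', b'hello world')  # requests are signed with the STS token
```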
landlab/landlab
a5dd80b8ebfd03d1ba87ef6c4368c409485f222c
landlab/components/marine_sediment_transport/simple_submarine_diffuser.py
python
SimpleSubmarineDiffuser.calc_diffusion_coef
(self)
return k
Calculate and store diffusion coefficient values. Returns ------- k : float array Diffusion coefficient, m2/y
Calculate and store diffusion coefficient values.
[ "Calculate", "and", "store", "diffusion", "coefficient", "values", "." ]
def calc_diffusion_coef(self): """ Calculate and store diffusion coefficient values. Returns ------- k : float array Diffusion coefficient, m2/y """ sea_level = self.grid.at_grid["sea_level__elevation"] self._depth[:] = sea_level - self._grid.at_node["topographic__elevation"] deep_water = self._depth > self._wave_base land = self._depth < 0.0 k = self.grid.at_node["kd"] k[:] = self._shallow_water_diffusivity * self.depth_function(self._depth) k[deep_water] *= np.exp( -(self._depth[deep_water] - self._wave_base) / self._wave_base ) k[land] += _TINY_DIFFUSIVITY return k
[ "def", "calc_diffusion_coef", "(", "self", ")", ":", "sea_level", "=", "self", ".", "grid", ".", "at_grid", "[", "\"sea_level__elevation\"", "]", "self", ".", "_depth", "[", ":", "]", "=", "sea_level", "-", "self", ".", "_grid", ".", "at_node", "[", "\"topographic__elevation\"", "]", "deep_water", "=", "self", ".", "_depth", ">", "self", ".", "_wave_base", "land", "=", "self", ".", "_depth", "<", "0.0", "k", "=", "self", ".", "grid", ".", "at_node", "[", "\"kd\"", "]", "k", "[", ":", "]", "=", "self", ".", "_shallow_water_diffusivity", "*", "self", ".", "depth_function", "(", "self", ".", "_depth", ")", "k", "[", "deep_water", "]", "*=", "np", ".", "exp", "(", "-", "(", "self", ".", "_depth", "[", "deep_water", "]", "-", "self", ".", "_wave_base", ")", "/", "self", ".", "_wave_base", ")", "k", "[", "land", "]", "+=", "_TINY_DIFFUSIVITY", "return", "k" ]
https://github.com/landlab/landlab/blob/a5dd80b8ebfd03d1ba87ef6c4368c409485f222c/landlab/components/marine_sediment_transport/simple_submarine_diffuser.py#L204-L226
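For intuition about the record above, a small numpy sketch of just the exponential cut-off below the wave base, with made-up parameter values and the depth_function and land-node handling omitted:

```python
import numpy as np

shallow_k = 100.0   # assumed shallow-water diffusivity, m2/y
wave_base = 60.0    # assumed wave base, m
depth = np.array([10.0, 60.0, 120.0])  # water depth at three nodes, m

k = np.full_like(depth, shallow_k)   # constant at or above the wave base
deep = depth > wave_base
k[deep] *= np.exp(-(depth[deep] - wave_base) / wave_base)
# The 120 m node sits one wave-base length below the cut-off, so its
# coefficient drops by a factor of e: roughly 36.8 m2/y here.
```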
nightmaredimple/libmot
23b8e2ac00f8b45d5a0ecabd57af90585966f3ff
libmot/motion/epipolar_geometry.py
python
Epipolar.DrawMatches
(self, src, dst, keypoints1, keypoints2, matches, n = None)
return draw
Draw matches between the source image and the target image Parameters ---------- src : ndarray A HxW matrix of opencv image dst : ndarray A HxW matrix of opencv image keypoints1: ndarray A Nx2 matrix of keypoints in src image keypoints2: ndarray A Nx2 matrix of keypoints in dst image matches: List of matches n: int number of matches to be drawn Returns ------- draw: ndarray A Hx2W matrix of opencv image
Draw matches between the source image and the target image
[ "Draw", "matches", "between", "the", "source", "image", "and", "the", "target", "image" ]
def DrawMatches(self, src, dst, keypoints1, keypoints2, matches, n = None): """Draw matches between source image with target image Parameters ---------- src : ndarray A HxW matrix of opencv image dst : ndarray A HxW matrix of opencv image keypoints1: ndarray A Nx2 matrix of keypoints in src image keypoints2: ndarray A Nx2 matrix of keypoints in dst image matches: List of matches n: int numbers of matches to be drawn Returns ------- draw: ndarray A Hx2W matrix of opencv image """ assert src.shape == dst.shape, "source image must be the same format with target image" if n is None: n = self.n_points n = min(n, len(matches)) draw = cv2.drawMatches(src, keypoints1, dst, keypoints2, matches[: n], None, flags = 2) return draw
[ "def", "DrawMatches", "(", "self", ",", "src", ",", "dst", ",", "keypoints1", ",", "keypoints2", ",", "matches", ",", "n", "=", "None", ")", ":", "assert", "src", ".", "shape", "==", "dst", ".", "shape", ",", "\"source image must be the same format with target image\"", "if", "n", "is", "None", ":", "n", "=", "self", ".", "n_points", "n", "=", "min", "(", "n", ",", "len", "(", "matches", ")", ")", "draw", "=", "cv2", ".", "drawMatches", "(", "src", ",", "keypoints1", ",", "dst", ",", "keypoints2", ",", "matches", "[", ":", "n", "]", ",", "None", ",", "flags", "=", "2", ")", "return", "draw" ]
https://github.com/nightmaredimple/libmot/blob/23b8e2ac00f8b45d5a0ecabd57af90585966f3ff/libmot/motion/epipolar_geometry.py#L193-L219
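The wrapper above delegates to cv2.drawMatches; a minimal end-to-end sketch with ORB features and a brute-force matcher (file paths are placeholders):

```python
import cv2

# Placeholder paths; grayscale is the usual input for ORB.
src = cv2.imread('frame_0.png', cv2.IMREAD_GRAYSCALE)
dst = cv2.imread('frame_1.png', cv2.IMREAD_GRAYSCALE)

orb = cv2.ORB_create()
kp1, des1 = orb.detectAndCompute(src, None)
kp2, des2 = orb.detectAndCompute(dst, None)

matcher = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)
matches = sorted(matcher.match(des1, des2), key=lambda m: m.distance)

# The same call the wrapper makes: draw the 20 best matches side by side.
draw = cv2.drawMatches(src, kp1, dst, kp2, matches[:20], None, flags=2)
```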
TengXiaoDai/DistributedCrawling
f5c2439e6ce68dd9b49bde084d76473ff9ed4963
Lib/site-packages/pip/_vendor/requests/structures.py
python
CaseInsensitiveDict.lower_items
(self)
return ( (lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items() )
Like iteritems(), but with all lowercase keys.
Like iteritems(), but with all lowercase keys.
[ "Like", "iteritems", "()", "but", "with", "all", "lowercase", "keys", "." ]
def lower_items(self): """Like iteritems(), but with all lowercase keys.""" return ( (lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items() )
[ "def", "lower_items", "(", "self", ")", ":", "return", "(", "(", "lowerkey", ",", "keyval", "[", "1", "]", ")", "for", "(", "lowerkey", ",", "keyval", ")", "in", "self", ".", "_store", ".", "items", "(", ")", ")" ]
https://github.com/TengXiaoDai/DistributedCrawling/blob/f5c2439e6ce68dd9b49bde084d76473ff9ed4963/Lib/site-packages/pip/_vendor/requests/structures.py#L65-L71
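Behaviour of the lower_items record above in practice: CaseInsensitiveDict keeps the casing of the last key set, while lookups and lower_items() compare lowercase:

```python
from requests.structures import CaseInsensitiveDict

headers = CaseInsensitiveDict()
headers['Content-Type'] = 'application/json'

headers['content-type']      # 'application/json' -- lookup ignores case
list(headers.lower_items())  # [('content-type', 'application/json')]
list(headers.items())        # [('Content-Type', 'application/json')] -- original case kept
```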
pymeasure/pymeasure
b4d888e9ead85ef7f7af0031f2dbb44c9ce1825e
pymeasure/experiment/experiment.py
python
get_array_zero
(maxval, step)
return np.concatenate((np.arange(0, maxval, step), np.arange(maxval, -maxval, -step), np.arange(-maxval, 0, step)))
Returns a numpy array from 0 to maxval to -maxval to 0
Returns a numpy array from 0 to maxval to -maxval to 0
[ "Returns", "a", "numpy", "array", "from", "0", "to", "maxval", "to", "-", "maxval", "to", "0" ]
def get_array_zero(maxval, step): """Returns a numpy array from 0 to maxval to -maxval to 0""" return np.concatenate((np.arange(0, maxval, step), np.arange(maxval, -maxval, -step), np.arange(-maxval, 0, step)))
[ "def", "get_array_zero", "(", "maxval", ",", "step", ")", ":", "return", "np", ".", "concatenate", "(", "(", "np", ".", "arange", "(", "0", ",", "maxval", ",", "step", ")", ",", "np", ".", "arange", "(", "maxval", ",", "-", "maxval", ",", "-", "step", ")", ",", "np", ".", "arange", "(", "-", "maxval", ",", "0", ",", "step", ")", ")", ")" ]
https://github.com/pymeasure/pymeasure/blob/b4d888e9ead85ef7f7af0031f2dbb44c9ce1825e/pymeasure/experiment/experiment.py#L57-L60
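A worked example of the sweep the record above produces; the endpoints follow numpy's half-open arange convention, so maxval and -maxval each appear once and the final 0 is excluded:

```python
import numpy as np

def get_array_zero(maxval, step):
    return np.concatenate((np.arange(0, maxval, step),
                           np.arange(maxval, -maxval, -step),
                           np.arange(-maxval, 0, step)))

get_array_zero(2, 1)
# array([ 0,  1,  2,  1,  0, -1, -2, -1])  -- up to +2, down through 0 to -2, back toward 0
```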
Manjaro-WebDad/jde
91d84df112c00b2eceb5afdaeeca11be347cd99f
src/Jade/Utils.py
python
Session.get_pkg_manager_state
()
return os.path.isfile("/var/lib/pacman/db.lck")
[]
def get_pkg_manager_state(): return os.path.isfile("/var/lib/pacman/db.lck")
[ "def", "get_pkg_manager_state", "(", ")", ":", "return", "os", ".", "path", ".", "isfile", "(", "\"/var/lib/pacman/db.lck\"", ")" ]
https://github.com/Manjaro-WebDad/jde/blob/91d84df112c00b2eceb5afdaeeca11be347cd99f/src/Jade/Utils.py#L77-L78
virantha/pypdfocr
acc5e13763224267e897865fccafbf51e13725e9
fabfile.py
python
run_tests
()
[]
def run_tests(): test_dir = "test" with lcd(test_dir): # Regenerate the test script local("py.test --genscript=runtests.py") t = local("py.test --cov-config .coveragerc --cov=pypdfocr --cov-report=term --cov-report=html", capture=False) t = local("coveralls")
[ "def", "run_tests", "(", ")", ":", "test_dir", "=", "\"test\"", "with", "lcd", "(", "test_dir", ")", ":", "# Regenerate the test script", "local", "(", "\"py.test --genscript=runtests.py\"", ")", "t", "=", "local", "(", "\"py.test --cov-config .coveragerc --cov=pypdfocr --cov-report=term --cov-report=html\"", ",", "capture", "=", "False", ")", "t", "=", "local", "(", "\"coveralls\"", ")" ]
https://github.com/virantha/pypdfocr/blob/acc5e13763224267e897865fccafbf51e13725e9/fabfile.py#L11-L17
demisto/content
5c664a65b992ac8ca90ac3f11b1b2cdf11ee9b07
Packs/CheckpointFirewall/Integrations/CheckPointFirewallV2/CheckPointFirewallV2.py
python
Client.logout
(self)
return message
logout from current session, returning the response message
logout from current session, returning the response message
[ "logout", "from", "current", "session", "returning", "the", "response", "message" ]
def logout(self) -> str: """logout from current session, returning the response message""" response = self._http_request(method='POST', url_suffix='logout', headers=self.headers, json_data={}) self.sid = None demisto.setIntegrationContext({}) self.has_performed_login = False message = response.get('message') demisto.debug(f"logout: sid={self.sid}, message={message}") return message
[ "def", "logout", "(", "self", ")", "->", "str", ":", "response", "=", "self", ".", "_http_request", "(", "method", "=", "'POST'", ",", "url_suffix", "=", "'logout'", ",", "headers", "=", "self", ".", "headers", ",", "json_data", "=", "{", "}", ")", "self", ".", "sid", "=", "None", "demisto", ".", "setIntegrationContext", "(", "{", "}", ")", "self", ".", "has_performed_login", "=", "False", "message", "=", "response", ".", "get", "(", "'message'", ")", "demisto", ".", "debug", "(", "f\"logout: sid={self.sid}, message={message}\"", ")", "return", "message" ]
https://github.com/demisto/content/blob/5c664a65b992ac8ca90ac3f11b1b2cdf11ee9b07/Packs/CheckpointFirewall/Integrations/CheckPointFirewallV2/CheckPointFirewallV2.py#L87-L96
zetaops/ulakbus
bcc05abf17bbd6dbeec93809e4ad30885e94e83e
ulakbus/views/ogrenci/ogrenci.py
python
MazeretliDersKaydi.karar_no_gir
(self)
Excused course registration is performed by decision of the faculty executive board. In this step the user is expected to enter the relevant decision number. This method checks whether the selected student's status in the relevant program allows course registration. Students eligible for course registration must have active or incoming student status.
Excused course registration is performed by decision of the faculty executive board. In this step the user is expected to enter the relevant decision number. This method checks whether the selected student's status in the relevant program allows course registration. Students eligible for course registration must have active or incoming student status.
[ "Excused", "course", "registration", "is", "performed", "by", "decision", "of", "the", "faculty", "executive", "board", ".", "In", "this", "step", "the", "user", "is", "expected", "to", "enter", "the", "relevant", "decision", "number", ".", "This", "method", "checks", "whether", "the", "selected", "student's", "status", "in", "the", "relevant", "program", "allows", "course", "registration", ".", "Students", "eligible", "for", "course", "registration", "must", "have", "active", "or", "incoming", "student", "status", "." ]
def karar_no_gir(self): """Mazeretli öğrenci kaydı, fakülte yönetim kurulu kararıyla yapılmaktadır. Bu adımda kullanıcıdan ilgili karar numarasını girmesi beklenir. Bu method seçilen öğrencinin ilgili programdaki durumunun ders kaydı yapabilir olup olmadığını kontrol eder. Ders kaydı yapabilir durumdaki öğrenciler aktif veya gelen öğrenci statüsüne sahip olmalıdırlar. """ aktif_ogrenci_status_list = [1, 12, 14, 16, 18, 20] self.current.task_data['program'] = self.current.input['form']['program'] ogrenci_program = OgrenciProgram.objects.get(self.current.input['form']['program']) if ogrenci_program.ogrencilik_statusu in aktif_ogrenci_status_list: _form = forms.JsonForm(current=self.current, title=_(u"Fakülte Yönetim Kurulu Karar No Giriniz")) _form.karar_no = fields.String(title=_(u"Fakülte Yönetim Kurulu Karar No")) _form.sec = fields.Button(_(u"Kaydet")) self.form_out(_form) else: self.current.output['msgbox'] = { 'type': 'warning', "title": _(u'Öğrenci Ders Kaydı Yapamaz'), "msg": _(u'Öğrenci Durum Kodu Ders Kaydı İçin Uygun Değil') }
[ "def", "karar_no_gir", "(", "self", ")", ":", "aktif_ogrenci_status_list", "=", "[", "1", ",", "12", ",", "14", ",", "16", ",", "18", ",", "20", "]", "self", ".", "current", ".", "task_data", "[", "'program'", "]", "=", "self", ".", "current", ".", "input", "[", "'form'", "]", "[", "'program'", "]", "ogrenci_program", "=", "OgrenciProgram", ".", "objects", ".", "get", "(", "self", ".", "current", ".", "input", "[", "'form'", "]", "[", "'program'", "]", ")", "if", "ogrenci_program", ".", "ogrencilik_statusu", "in", "aktif_ogrenci_status_list", ":", "_form", "=", "forms", ".", "JsonForm", "(", "current", "=", "self", ".", "current", ",", "title", "=", "_", "(", "u\"Fakülte Yönetim Kurulu Karar No Giriniz\"))", "", "", "_form", ".", "karar_no", "=", "fields", ".", "String", "(", "title", "=", "_", "(", "u\"Fakülte Yönetim Kurulu Karar No\"))", "", "", "_form", ".", "sec", "=", "fields", ".", "Button", "(", "_", "(", "u\"Kaydet\"", ")", ")", "self", ".", "form_out", "(", "_form", ")", "else", ":", "self", ".", "current", ".", "output", "[", "'msgbox'", "]", "=", "{", "'type'", ":", "'warning'", ",", "\"title\"", ":", "_", "(", "u'Öğrenci Ders Kaydı Yapamaz'),", "", "", "\"msg\"", ":", "_", "(", "u'Öğrenci Durum Kodu Ders Kaydı İçin Uygun Değil')", "", "}" ]
https://github.com/zetaops/ulakbus/blob/bcc05abf17bbd6dbeec93809e4ad30885e94e83e/ulakbus/views/ogrenci/ogrenci.py#L905-L928
studioml/studio
ad8d8c7cff8b4ac6f791ceb881be24dafb2a8a55
studio/encrypted_payload_builder.py
python
EncryptedPayloadBuilder.__init__
(self, name: str, receiver_keypath: str, sender_keypath: str = None)
param: name - payload builder name param: receiver_keypath - file path to .pem file with recipient public key param: sender_keypath - file path to .pem file with sender private key
param: name - payload builder name param: receiver_keypath - file path to .pem file with recipient public key param: sender_keypath - file path to .pem file with sender private key
[ "param", ":", "name", "-", "payload", "builder", "name", "param", ":", "receiver_keypath", "-", "file", "path", "to", ".", "pem", "file", "with", "recipient", "public", "key", "param", ":", "sender_keypath", "-", "file", "path", "to", ".", "pem", "file", "with", "sender", "private", "key" ]
def __init__(self, name: str, receiver_keypath: str, sender_keypath: str = None): """ param: name - payload builder name param: receiver_keypath - file path to .pem file with recipient public key param: sender_keypath - file path to .pem file with sender private key """ super(EncryptedPayloadBuilder, self).__init__(name) # XXX Set logger verbosity level here self.logger = logs.get_logger(self.__class__.__name__) self.recipient_key_path = receiver_keypath self.recipient_key = None try: self.recipient_key =\ RSA.import_key(open(self.recipient_key_path).read()) except: check_for_kb_interrupt() msg = "FAILED to import recipient public key from: {0}"\ .format(self.recipient_key_path) self.logger.error(msg) raise ValueError(msg) self.sender_key_path = sender_keypath self.sender_key: SigningKey = None self.verify_key: VerifyKey = None self.sender_fingerprint = None if self.sender_key_path is None: self.logger.error("Signing key path must be specified for encrypted payloads. ABORTING.") raise ValueError() # We expect ed25519 signing key in "openssh private key" format try: public_key_data, private_key_data =\ Ed25519KeyUtil.parse_private_key_file( self.sender_key_path, self.logger) if public_key_data is None or private_key_data is None: self._raise_error( "Failed to import private signing key from {0}. ABORTING." .format(self.sender_key_path)) self.sender_key = SigningKey(private_key_data) self.verify_key = VerifyKey(public_key_data) except Exception: self._raise_error("FAILED to open/read private signing key file: {0}"\ .format(self.sender_key_path)) self.sender_fingerprint = \ self._get_fingerprint(public_key_data) self.simple_builder =\ UnencryptedPayloadBuilder("simple-builder-for-encryptor")
[ "def", "__init__", "(", "self", ",", "name", ":", "str", ",", "receiver_keypath", ":", "str", ",", "sender_keypath", ":", "str", "=", "None", ")", ":", "super", "(", "EncryptedPayloadBuilder", ",", "self", ")", ".", "__init__", "(", "name", ")", "# XXX Set logger verbosity level here", "self", ".", "logger", "=", "logs", ".", "get_logger", "(", "self", ".", "__class__", ".", "__name__", ")", "self", ".", "recipient_key_path", "=", "receiver_keypath", "self", ".", "recipient_key", "=", "None", "try", ":", "self", ".", "recipient_key", "=", "RSA", ".", "import_key", "(", "open", "(", "self", ".", "recipient_key_path", ")", ".", "read", "(", ")", ")", "except", ":", "check_for_kb_interrupt", "(", ")", "msg", "=", "\"FAILED to import recipient public key from: {0}\"", ".", "format", "(", "self", ".", "recipient_key_path", ")", "self", ".", "logger", ".", "error", "(", "msg", ")", "raise", "ValueError", "(", "msg", ")", "self", ".", "sender_key_path", "=", "sender_keypath", "self", ".", "sender_key", ":", "SigningKey", "=", "None", "self", ".", "verify_key", ":", "VerifyKey", "=", "None", "self", ".", "sender_fingerprint", "=", "None", "if", "self", ".", "sender_key_path", "is", "None", ":", "self", ".", "logger", ".", "error", "(", "\"Signing key path must be specified for encrypted payloads. ABORTING.\"", ")", "raise", "ValueError", "(", ")", "# We expect ed25519 signing key in \"openssh private key\" format", "try", ":", "public_key_data", ",", "private_key_data", "=", "Ed25519KeyUtil", ".", "parse_private_key_file", "(", "self", ".", "sender_key_path", ",", "self", ".", "logger", ")", "if", "public_key_data", "is", "None", "or", "private_key_data", "is", "None", ":", "self", ".", "_raise_error", "(", "\"Failed to import private signing key from {0}. ABORTING.\"", ".", "format", "(", "self", ".", "sender_key_path", ")", ")", "self", ".", "sender_key", "=", "SigningKey", "(", "private_key_data", ")", "self", ".", "verify_key", "=", "VerifyKey", "(", "public_key_data", ")", "except", "Exception", ":", "self", ".", "_raise_error", "(", "\"FAILED to open/read private signing key file: {0}\"", ".", "format", "(", "self", ".", "sender_key_path", ")", ")", "self", ".", "sender_fingerprint", "=", "self", ".", "_get_fingerprint", "(", "public_key_data", ")", "self", ".", "simple_builder", "=", "UnencryptedPayloadBuilder", "(", "\"simple-builder-for-encryptor\"", ")" ]
https://github.com/studioml/studio/blob/ad8d8c7cff8b4ac6f791ceb881be24dafb2a8a55/studio/encrypted_payload_builder.py#L22-L79
tensorflow/federated
5a60a032360087b8f4c7fcfd97ed1c0131c3eac3
tensorflow_federated/python/core/impl/compiler/tree_transformations.py
python
merge_chained_blocks
(comp)
return _apply_transforms(comp, MergeChainedBlocks(comp))
Merges chained blocks into one block.
Merges chained blocks into one block.
[ "Merges", "chained", "blocks", "into", "one", "block", "." ]
def merge_chained_blocks(comp): """Merges chained blocks into one block.""" return _apply_transforms(comp, MergeChainedBlocks(comp))
[ "def", "merge_chained_blocks", "(", "comp", ")", ":", "return", "_apply_transforms", "(", "comp", ",", "MergeChainedBlocks", "(", "comp", ")", ")" ]
https://github.com/tensorflow/federated/blob/5a60a032360087b8f4c7fcfd97ed1c0131c3eac3/tensorflow_federated/python/core/impl/compiler/tree_transformations.py#L553-L555
django/channels
6af1bc3ab45f55e3f47d0d1d059d5db0a18a9581
channels/generic/http.py
python
AsyncHttpConsumer.http_request
(self, message)
Async entrypoint - concatenates body fragments and hands off control to ``self.handle`` when the body has been completely received.
Async entrypoint - concatenates body fragments and hands off control to ``self.handle`` when the body has been completely received.
[ "Async", "entrypoint", "-", "concatenates", "body", "fragments", "and", "hands", "off", "control", "to", "self", ".", "handle", "when", "the", "body", "has", "been", "completely", "received", "." ]
async def http_request(self, message): """ Async entrypoint - concatenates body fragments and hands off control to ``self.handle`` when the body has been completely received. """ if "body" in message: self.body.append(message["body"]) if not message.get("more_body"): try: await self.handle(b"".join(self.body)) finally: await self.disconnect() raise StopConsumer()
[ "async", "def", "http_request", "(", "self", ",", "message", ")", ":", "if", "\"body\"", "in", "message", ":", "self", ".", "body", ".", "append", "(", "message", "[", "\"body\"", "]", ")", "if", "not", "message", ".", "get", "(", "\"more_body\"", ")", ":", "try", ":", "await", "self", ".", "handle", "(", "b\"\"", ".", "join", "(", "self", ".", "body", ")", ")", "finally", ":", "await", "self", ".", "disconnect", "(", ")", "raise", "StopConsumer", "(", ")" ]
https://github.com/django/channels/blob/6af1bc3ab45f55e3f47d0d1d059d5db0a18a9581/channels/generic/http.py#L72-L84
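A minimal subclass sketch for the record above: http_request buffers body fragments and calls handle once, so a consumer only implements handle. This assumes the send_response convenience method on the same generic consumer:

```python
from channels.generic.http import AsyncHttpConsumer

class EchoConsumer(AsyncHttpConsumer):
    # handle() receives the fully concatenated request body.
    async def handle(self, body):
        await self.send_response(
            200,
            body,  # echo the request body back
            headers=[(b"Content-Type", b"application/octet-stream")],
        )
```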
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/google/appengine/api/datastore_types.py
python
PropertyValueFromString
(type_, value_string, _auth_domain=None)
return type_(value_string)
Returns an instance of a property value given a type and string value. The reverse of this method is just str() and type() of the python value. Note that this does *not* support non-UTC offsets in ISO 8601-formatted datetime strings, e.g. the -08:00 suffix in '2002-12-25 00:00:00-08:00'. It only supports -00:00 and +00:00 suffixes, which are UTC. Args: type_: A python class. value_string: A string representation of the value of the property. Returns: An instance of 'type'. Raises: ValueError if type_ is datetime and value_string has a timezone offset.
Returns an instance of a property value given a type and string value.
[ "Returns", "an", "instance", "of", "a", "property", "value", "given", "a", "type", "and", "string", "value", "." ]
def PropertyValueFromString(type_, value_string, _auth_domain=None): """Returns an instance of a property value given a type and string value. The reverse of this method is just str() and type() of the python value. Note that this does *not* support non-UTC offsets in ISO 8601-formatted datetime strings, e.g. the -08:00 suffix in '2002-12-25 00:00:00-08:00'. It only supports -00:00 and +00:00 suffixes, which are UTC. Args: type_: A python class. value_string: A string representation of the value of the property. Returns: An instance of 'type'. Raises: ValueError if type_ is datetime and value_string has a timezone offset. """ if type_ == datetime.datetime: value_string = value_string.strip() if value_string[-6] in ('+', '-'): if value_string[-5:] == '00:00': value_string = value_string[:-6] else: raise ValueError('Non-UTC offsets in datetimes are not supported.') split = value_string.split('.') iso_date = split[0] microseconds = 0 if len(split) > 1: microseconds = int(split[1]) time_struct = time.strptime(iso_date, '%Y-%m-%d %H:%M:%S')[0:6] value = datetime.datetime(*(time_struct + (microseconds,))) return value elif type_ == Rating: return Rating(int(value_string)) elif type_ == bool: return value_string == 'True' elif type_ == users.User: return users.User(value_string, _auth_domain) elif type_ == type(None): return None return type_(value_string)
[ "def", "PropertyValueFromString", "(", "type_", ",", "value_string", ",", "_auth_domain", "=", "None", ")", ":", "if", "type_", "==", "datetime", ".", "datetime", ":", "value_string", "=", "value_string", ".", "strip", "(", ")", "if", "value_string", "[", "-", "6", "]", "in", "(", "'+'", ",", "'-'", ")", ":", "if", "value_string", "[", "-", "5", ":", "]", "==", "'00:00'", ":", "value_string", "=", "value_string", "[", ":", "-", "6", "]", "else", ":", "raise", "ValueError", "(", "'Non-UTC offsets in datetimes are not supported.'", ")", "split", "=", "value_string", ".", "split", "(", "'.'", ")", "iso_date", "=", "split", "[", "0", "]", "microseconds", "=", "0", "if", "len", "(", "split", ")", ">", "1", ":", "microseconds", "=", "int", "(", "split", "[", "1", "]", ")", "time_struct", "=", "time", ".", "strptime", "(", "iso_date", ",", "'%Y-%m-%d %H:%M:%S'", ")", "[", "0", ":", "6", "]", "value", "=", "datetime", ".", "datetime", "(", "*", "(", "time_struct", "+", "(", "microseconds", ",", ")", ")", ")", "return", "value", "elif", "type_", "==", "Rating", ":", "return", "Rating", "(", "int", "(", "value_string", ")", ")", "elif", "type_", "==", "bool", ":", "return", "value_string", "==", "'True'", "elif", "type_", "==", "users", ".", "User", ":", "return", "users", ".", "User", "(", "value_string", ",", "_auth_domain", ")", "elif", "type_", "==", "type", "(", "None", ")", ":", "return", "None", "return", "type_", "(", "value_string", ")" ]
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/google/appengine/api/datastore_types.py#L2015-L2067
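With the function from the record above in scope, the docstring's round-trip claim (str() and type() invert the parse) looks like this on the datetime and bool branches:

```python
import datetime

PropertyValueFromString(datetime.datetime, '2002-12-25 00:00:00')
# -> datetime.datetime(2002, 12, 25, 0, 0)

PropertyValueFromString(datetime.datetime, '2002-12-25 00:00:00+00:00')
# The +00:00/-00:00 UTC suffix is stripped; any other offset raises ValueError.

PropertyValueFromString(bool, 'True')   # -> True (anything except 'True' -> False)
```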
ydkhatri/mac_apt
729630c8bbe7a73cce3ca330305d3301a919cb07
plugins/helpers/apfs_reader.py
python
ApfsContainer.fletcher64_verify_block_num
(self, block_num)
return self.fletcher64_verify_block_data(data, self.block_size)
Fletchers checksum verification for block, given block number
Fletchers checksum verification for block, given block number
[ "Fletchers", "checksum", "verification", "for", "block", "given", "block", "number" ]
def fletcher64_verify_block_num(self, block_num): """Fletchers checksum verification for block, given block number""" data = self.get_block(block_num) if not data: return None return self.fletcher64_verify_block_data(data, self.block_size)
[ "def", "fletcher64_verify_block_num", "(", "self", ",", "block_num", ")", ":", "data", "=", "self", ".", "get_block", "(", "block_num", ")", "if", "not", "data", ":", "return", "None", "return", "self", ".", "fletcher64_verify_block_data", "(", "data", ",", "self", ".", "block_size", ")" ]
https://github.com/ydkhatri/mac_apt/blob/729630c8bbe7a73cce3ca330305d3301a919cb07/plugins/helpers/apfs_reader.py#L1666-L1671
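For background on the record above, a hedged standalone sketch of one common Fletcher-64 formulation (the APFS-style variant: two running sums over little-endian 32-bit words, each taken modulo 2**32 - 1); this is illustrative, not the mac_apt implementation itself:

```python
import struct

def fletcher64(data: bytes) -> int:
    # Assumes len(data) is a multiple of 4, which holds for APFS block sizes.
    sum1, sum2 = 0, 0
    for (word,) in struct.iter_unpack('<I', data):
        sum1 = (sum1 + word) % 0xFFFFFFFF
        sum2 = (sum2 + sum1) % 0xFFFFFFFF
    return (sum2 << 32) | sum1
```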
nasa/CrisisMappingToolkit
0296487974d74cec6aa8be42eafbb5cd24dc6a51
app_engine/libs/bs4/element.py
python
ContentMetaAttributeValue.encode
(self, encoding)
return self.CHARSET_RE.sub(rewrite, self.original_value)
[]
def encode(self, encoding): def rewrite(match): return match.group(1) + encoding return self.CHARSET_RE.sub(rewrite, self.original_value)
[ "def", "encode", "(", "self", ",", "encoding", ")", ":", "def", "rewrite", "(", "match", ")", ":", "return", "match", ".", "group", "(", "1", ")", "+", "encoding", "return", "self", ".", "CHARSET_RE", ".", "sub", "(", "rewrite", ",", "self", ".", "original_value", ")" ]
https://github.com/nasa/CrisisMappingToolkit/blob/0296487974d74cec6aa8be42eafbb5cd24dc6a51/app_engine/libs/bs4/element.py#L82-L85
riptideio/pymodbus
c5772b35ae3f29d1947f3ab453d8d00df846459f
pymodbus/server/asynchronous.py
python
ModbusTcpProtocol._send
(self, message)
Send a request (string) to the network :param message: The unencoded modbus response
Send a request (string) to the network
[ "Send", "a", "request", "(", "string", ")", "to", "the", "network" ]
def _send(self, message): """ Send a request (string) to the network :param message: The unencoded modbus response """ if message.should_respond: self.factory.control.Counter.BusMessage += 1 pdu = self.framer.buildPacket(message) if _logger.isEnabledFor(logging.DEBUG): _logger.debug('send: %s' % b2a_hex(pdu)) return self.transport.write(pdu)
[ "def", "_send", "(", "self", ",", "message", ")", ":", "if", "message", ".", "should_respond", ":", "self", ".", "factory", ".", "control", ".", "Counter", ".", "BusMessage", "+=", "1", "pdu", "=", "self", ".", "framer", ".", "buildPacket", "(", "message", ")", "if", "_logger", ".", "isEnabledFor", "(", "logging", ".", "DEBUG", ")", ":", "_logger", ".", "debug", "(", "'send: %s'", "%", "b2a_hex", "(", "pdu", ")", ")", "return", "self", ".", "transport", ".", "write", "(", "pdu", ")" ]
https://github.com/riptideio/pymodbus/blob/c5772b35ae3f29d1947f3ab453d8d00df846459f/pymodbus/server/asynchronous.py#L93-L103
openstack/cinder
23494a6d6c51451688191e1847a458f1d3cdcaa5
cinder/volume/volume_utils.py
python
copy_image_to_volume
(driver, context: context.RequestContext, volume: 'objects.Volume', image_meta: dict, image_location: str, image_service)
Downloads Glance image to the specified volume.
Downloads Glance image to the specified volume.
[ "Downloads", "Glance", "image", "to", "the", "specified", "volume", "." ]
def copy_image_to_volume(driver, context: context.RequestContext, volume: 'objects.Volume', image_meta: dict, image_location: str, image_service) -> None: """Downloads Glance image to the specified volume.""" image_id = image_meta['id'] LOG.debug("Attempting download of %(image_id)s (%(image_location)s)" " to volume %(volume_id)s.", {'image_id': image_id, 'volume_id': volume.id, 'image_location': image_location}) try: image_encryption_key = image_meta.get('cinder_encryption_key_id') if volume.encryption_key_id and image_encryption_key: # If the image provided an encryption key, we have # already cloned it to the volume's key in # _get_encryption_key_id, so we can do a direct copy. driver.copy_image_to_volume( context, volume, image_service, image_id) elif volume.encryption_key_id: # Creating an encrypted volume from a normal, unencrypted, # image. driver.copy_image_to_encrypted_volume( context, volume, image_service, image_id) else: driver.copy_image_to_volume( context, volume, image_service, image_id) except processutils.ProcessExecutionError as ex: LOG.exception("Failed to copy image %(image_id)s to volume: " "%(volume_id)s", {'volume_id': volume.id, 'image_id': image_id}) raise exception.ImageCopyFailure(reason=ex.stderr) except (exception.ImageUnacceptable, exception.ImageTooBig): with excutils.save_and_reraise_exception(): LOG.exception("Failed to copy image %(image_id)s to volume: " "%(volume_id)s", {'volume_id': volume.id, 'image_id': image_id}) except Exception as ex: LOG.exception("Failed to copy image %(image_id)s to " "volume: %(volume_id)s", {'volume_id': volume.id, 'image_id': image_id}) if not isinstance(ex, exception.ImageCopyFailure): raise exception.ImageCopyFailure(reason=ex) else: raise LOG.debug("Downloaded image %(image_id)s (%(image_location)s)" " to volume %(volume_id)s successfully.", {'image_id': image_id, 'volume_id': volume.id, 'image_location': image_location})
[ "def", "copy_image_to_volume", "(", "driver", ",", "context", ":", "context", ".", "RequestContext", ",", "volume", ":", "'objects.Volume'", ",", "image_meta", ":", "dict", ",", "image_location", ":", "str", ",", "image_service", ")", "->", "None", ":", "image_id", "=", "image_meta", "[", "'id'", "]", "LOG", ".", "debug", "(", "\"Attempting download of %(image_id)s (%(image_location)s)\"", "\" to volume %(volume_id)s.\"", ",", "{", "'image_id'", ":", "image_id", ",", "'volume_id'", ":", "volume", ".", "id", ",", "'image_location'", ":", "image_location", "}", ")", "try", ":", "image_encryption_key", "=", "image_meta", ".", "get", "(", "'cinder_encryption_key_id'", ")", "if", "volume", ".", "encryption_key_id", "and", "image_encryption_key", ":", "# If the image provided an encryption key, we have", "# already cloned it to the volume's key in", "# _get_encryption_key_id, so we can do a direct copy.", "driver", ".", "copy_image_to_volume", "(", "context", ",", "volume", ",", "image_service", ",", "image_id", ")", "elif", "volume", ".", "encryption_key_id", ":", "# Creating an encrypted volume from a normal, unencrypted,", "# image.", "driver", ".", "copy_image_to_encrypted_volume", "(", "context", ",", "volume", ",", "image_service", ",", "image_id", ")", "else", ":", "driver", ".", "copy_image_to_volume", "(", "context", ",", "volume", ",", "image_service", ",", "image_id", ")", "except", "processutils", ".", "ProcessExecutionError", "as", "ex", ":", "LOG", ".", "exception", "(", "\"Failed to copy image %(image_id)s to volume: \"", "\"%(volume_id)s\"", ",", "{", "'volume_id'", ":", "volume", ".", "id", ",", "'image_id'", ":", "image_id", "}", ")", "raise", "exception", ".", "ImageCopyFailure", "(", "reason", "=", "ex", ".", "stderr", ")", "except", "(", "exception", ".", "ImageUnacceptable", ",", "exception", ".", "ImageTooBig", ")", ":", "with", "excutils", ".", "save_and_reraise_exception", "(", ")", ":", "LOG", ".", "exception", "(", "\"Failed to copy image %(image_id)s to volume: \"", "\"%(volume_id)s\"", ",", "{", "'volume_id'", ":", "volume", ".", "id", ",", "'image_id'", ":", "image_id", "}", ")", "except", "Exception", "as", "ex", ":", "LOG", ".", "exception", "(", "\"Failed to copy image %(image_id)s to \"", "\"volume: %(volume_id)s\"", ",", "{", "'volume_id'", ":", "volume", ".", "id", ",", "'image_id'", ":", "image_id", "}", ")", "if", "not", "isinstance", "(", "ex", ",", "exception", ".", "ImageCopyFailure", ")", ":", "raise", "exception", ".", "ImageCopyFailure", "(", "reason", "=", "ex", ")", "else", ":", "raise", "LOG", ".", "debug", "(", "\"Downloaded image %(image_id)s (%(image_location)s)\"", "\" to volume %(volume_id)s successfully.\"", ",", "{", "'image_id'", ":", "image_id", ",", "'volume_id'", ":", "volume", ".", "id", ",", "'image_location'", ":", "image_location", "}", ")" ]
https://github.com/openstack/cinder/blob/23494a6d6c51451688191e1847a458f1d3cdcaa5/cinder/volume/volume_utils.py#L1220-L1271
lebedov/scikit-cuda
5d3c74f926fe7ce67ecfc85e9623aab7bc0b344f
skcuda/cusparse.py
python
cusparseGetVersion
(handle)
return version.value
Return CUSPARSE library version. Returns the version number of the CUSPARSE library. Parameters ---------- handle : int CUSPARSE library context. Returns ------- version : int CUSPARSE library version number.
Return CUSPARSE library version.
[ "Return", "CUSPARSE", "library", "version", "." ]
def cusparseGetVersion(handle): """ Return CUSPARSE library version. Returns the version number of the CUSPARSE library. Parameters ---------- handle : int CUSPARSE library context. Returns ------- version : int CUSPARSE library version number. """ version = ctypes.c_int() status = _libcusparse.cusparseGetVersion(handle, ctypes.byref(version)) cusparseCheckStatus(status) return version.value
[ "def", "cusparseGetVersion", "(", "handle", ")", ":", "version", "=", "ctypes", ".", "c_int", "(", ")", "status", "=", "_libcusparse", ".", "cusparseGetVersion", "(", "handle", ",", "ctypes", ".", "byref", "(", "version", ")", ")", "cusparseCheckStatus", "(", "status", ")", "return", "version", ".", "value" ]
https://github.com/lebedov/scikit-cuda/blob/5d3c74f926fe7ce67ecfc85e9623aab7bc0b344f/skcuda/cusparse.py#L201-L223
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_hxb2/lib/python3.5/site-packages/PIL/ImageDraw.py
python
ImageDraw.__init__
(self, im, mode=None)
Create a drawing instance. :param im: The image to draw in. :param mode: Optional mode to use for color values. For RGB images, this argument can be RGB or RGBA (to blend the drawing into the image). For all other modes, this argument must be the same as the image mode. If omitted, the mode defaults to the mode of the image.
Create a drawing instance.
[ "Create", "a", "drawing", "instance", "." ]
def __init__(self, im, mode=None): """ Create a drawing instance. :param im: The image to draw in. :param mode: Optional mode to use for color values. For RGB images, this argument can be RGB or RGBA (to blend the drawing into the image). For all other modes, this argument must be the same as the image mode. If omitted, the mode defaults to the mode of the image. """ im.load() if im.readonly: im._copy() # make it writeable blend = 0 if mode is None: mode = im.mode if mode != im.mode: if mode == "RGBA" and im.mode == "RGB": blend = 1 else: raise ValueError("mode mismatch") if mode == "P": self.palette = im.palette else: self.palette = None self.im = im.im self.draw = Image.core.draw(self.im, blend) self.mode = mode if mode in ("I", "F"): self.ink = self.draw.draw_ink(1, mode) else: self.ink = self.draw.draw_ink(-1, mode) if mode in ("1", "P", "I", "F"): # FIXME: fix Fill2 to properly support matte for I+F images self.fontmode = "1" else: self.fontmode = "L" # aliasing is okay for other modes self.fill = 0 self.font = None
[ "def", "__init__", "(", "self", ",", "im", ",", "mode", "=", "None", ")", ":", "im", ".", "load", "(", ")", "if", "im", ".", "readonly", ":", "im", ".", "_copy", "(", ")", "# make it writeable", "blend", "=", "0", "if", "mode", "is", "None", ":", "mode", "=", "im", ".", "mode", "if", "mode", "!=", "im", ".", "mode", ":", "if", "mode", "==", "\"RGBA\"", "and", "im", ".", "mode", "==", "\"RGB\"", ":", "blend", "=", "1", "else", ":", "raise", "ValueError", "(", "\"mode mismatch\"", ")", "if", "mode", "==", "\"P\"", ":", "self", ".", "palette", "=", "im", ".", "palette", "else", ":", "self", ".", "palette", "=", "None", "self", ".", "im", "=", "im", ".", "im", "self", ".", "draw", "=", "Image", ".", "core", ".", "draw", "(", "self", ".", "im", ",", "blend", ")", "self", ".", "mode", "=", "mode", "if", "mode", "in", "(", "\"I\"", ",", "\"F\"", ")", ":", "self", ".", "ink", "=", "self", ".", "draw", ".", "draw_ink", "(", "1", ",", "mode", ")", "else", ":", "self", ".", "ink", "=", "self", ".", "draw", ".", "draw_ink", "(", "-", "1", ",", "mode", ")", "if", "mode", "in", "(", "\"1\"", ",", "\"P\"", ",", "\"I\"", ",", "\"F\"", ")", ":", "# FIXME: fix Fill2 to properly support matte for I+F images", "self", ".", "fontmode", "=", "\"1\"", "else", ":", "self", ".", "fontmode", "=", "\"L\"", "# aliasing is okay for other modes", "self", ".", "fill", "=", "0", "self", ".", "font", "=", "None" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_hxb2/lib/python3.5/site-packages/PIL/ImageDraw.py#L48-L87
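The usual entry point for the record above: PIL users call ImageDraw.Draw(im), which invokes this constructor, rather than instantiating ImageDraw directly:

```python
from PIL import Image, ImageDraw

im = Image.new("RGB", (120, 80), "white")
draw = ImageDraw.Draw(im)  # wraps ImageDraw.__init__(im)
draw.line((0, 0, 119, 79), fill="black")
draw.rectangle((10, 10, 50, 40), outline="red")
im.save("out.png")
```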
OmegaK2/PyPoE
97479f3aea2385b196b1a900e06caa253bb5c72d
PyPoE/cli/handler.py
python
ConfigHandler.get
(self, args)
return 0
Prints the config setting for the specified var. Parameters ---------- args : argparse.Namespace namespace object as passed from argument parser Returns ------- int success code
Prints the config setting for the specified var.
[ "Prints", "the", "config", "setting", "for", "the", "specified", "var", "." ]
def get(self, args): """ Prints the config setting for the specified var. Parameters ---------- args : argparse.Namespace namespace object as passed from argument parser Returns ------- int success code """ console('Config setting "%s" is currently set to:\n%s' % (args.variable, self.config.option[args.variable])) return 0
[ "def", "get", "(", "self", ",", "args", ")", ":", "console", "(", "'Config setting \"%s\" is currently set to:\\n%s'", "%", "(", "args", ".", "variable", ",", "self", ".", "config", ".", "option", "[", "args", ".", "variable", "]", ")", ")", "return", "0" ]
https://github.com/OmegaK2/PyPoE/blob/97479f3aea2385b196b1a900e06caa253bb5c72d/PyPoE/cli/handler.py#L210-L225
Breakthrough/PySceneDetect
f1724075498f66712a9082abe8b3c7446b19790a
scenedetect/scene_detector.py
python
SparseSceneDetector.process_frame
(self, frame_num, frame_img)
return []
Process Frame: Computes/stores metrics and detects any scene changes. Prototype method, no actual detection. Returns: List[Tuple[int,int]]: List of frame pairs representing individual scenes to be added to the output scene list directly.
Process Frame: Computes/stores metrics and detects any scene changes.
[ "Process", "Frame", ":", "Computes", "/", "stores", "metrics", "and", "detects", "any", "scene", "changes", "." ]
def process_frame(self, frame_num, frame_img): # type: (int, numpy.ndarray) -> List[Tuple[int, int]] """ Process Frame: Computes/stores metrics and detects any scene changes. Prototype method, no actual detection. Returns: List[Tuple[int,int]]: List of frame pairs representing individual scenes to be added to the output scene list directly. """ return []
[ "def", "process_frame", "(", "self", ",", "frame_num", ",", "frame_img", ")", ":", "# type: (int, numpy.ndarray) -> List[Tuple[int, int]]", "return", "[", "]" ]
https://github.com/Breakthrough/PySceneDetect/blob/f1724075498f66712a9082abe8b3c7446b19790a/scenedetect/scene_detector.py#L126-L136
saltstack/salt
fae5bc757ad0f1716483ce7ae180b451545c2058
salt/cloud/clouds/xen.py
python
destroy_vm_vdis
(name=None, session=None, call=None)
return ret
Get virtual block devices on VM .. code-block:: bash salt-cloud -a destroy_vm_vdis xenvm01
Get virtual block devices on VM
[ "Get", "virtual", "block", "devices", "on", "VM" ]
def destroy_vm_vdis(name=None, session=None, call=None): """ Get virtual block devices on VM .. code-block:: bash salt-cloud -a destroy_vm_vdis xenvm01 """ if session is None: session = _get_session() ret = {} # get vm object vms = session.xenapi.VM.get_by_name_label(name) if len(vms) == 1: # read virtual block device (vdb) vbds = session.xenapi.VM.get_VBDs(vms[0]) if vbds is not None: x = 0 for vbd in vbds: vbd_record = session.xenapi.VBD.get_record(vbd) if vbd_record["VDI"] != "OpaqueRef:NULL": # read vdi on vdb vdi_record = session.xenapi.VDI.get_record(vbd_record["VDI"]) if "iso" not in vdi_record["name_label"]: session.xenapi.VDI.destroy(vbd_record["VDI"]) ret["vdi-{}".format(x)] = vdi_record["name_label"] x += 1 return ret
[ "def", "destroy_vm_vdis", "(", "name", "=", "None", ",", "session", "=", "None", ",", "call", "=", "None", ")", ":", "if", "session", "is", "None", ":", "session", "=", "_get_session", "(", ")", "ret", "=", "{", "}", "# get vm object", "vms", "=", "session", ".", "xenapi", ".", "VM", ".", "get_by_name_label", "(", "name", ")", "if", "len", "(", "vms", ")", "==", "1", ":", "# read virtual block device (vdb)", "vbds", "=", "session", ".", "xenapi", ".", "VM", ".", "get_VBDs", "(", "vms", "[", "0", "]", ")", "if", "vbds", "is", "not", "None", ":", "x", "=", "0", "for", "vbd", "in", "vbds", ":", "vbd_record", "=", "session", ".", "xenapi", ".", "VBD", ".", "get_record", "(", "vbd", ")", "if", "vbd_record", "[", "\"VDI\"", "]", "!=", "\"OpaqueRef:NULL\"", ":", "# read vdi on vdb", "vdi_record", "=", "session", ".", "xenapi", ".", "VDI", ".", "get_record", "(", "vbd_record", "[", "\"VDI\"", "]", ")", "if", "\"iso\"", "not", "in", "vdi_record", "[", "\"name_label\"", "]", ":", "session", ".", "xenapi", ".", "VDI", ".", "destroy", "(", "vbd_record", "[", "\"VDI\"", "]", ")", "ret", "[", "\"vdi-{}\"", ".", "format", "(", "x", ")", "]", "=", "vdi_record", "[", "\"name_label\"", "]", "x", "+=", "1", "return", "ret" ]
https://github.com/saltstack/salt/blob/fae5bc757ad0f1716483ce7ae180b451545c2058/salt/cloud/clouds/xen.py#L1196-L1224
krintoxi/NoobSec-Toolkit
38738541cbc03cedb9a3b3ed13b629f781ad64f6
NoobSecToolkit - MAC OSX/scripts/sshbackdoors/target.py
python
Target.conn
(self)
[]
def conn(self): #print("Opening SSH connection to target...") self.ssh = paramiko.SSHClient()#use ssh.exec_command("") to perform an action. self.ssh.load_system_host_keys() self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self.ssh.connect(self.hostname, port=self.port, username=self.uname, password=self.pword) self.scp = SCPClient(self.ssh.get_transport())#don't call this, but use the above function instead. self.is_open = True
[ "def", "conn", "(", "self", ")", ":", "#print(\"Opening SSH connection to target...\")", "self", ".", "ssh", "=", "paramiko", ".", "SSHClient", "(", ")", "#use ssh.exec_command(\"\") to perform an action.", "self", ".", "ssh", ".", "load_system_host_keys", "(", ")", "self", ".", "ssh", ".", "set_missing_host_key_policy", "(", "paramiko", ".", "AutoAddPolicy", "(", ")", ")", "self", ".", "ssh", ".", "connect", "(", "self", ".", "hostname", ",", "port", "=", "self", ".", "port", ",", "username", "=", "self", ".", "uname", ",", "password", "=", "self", ".", "pword", ")", "self", ".", "scp", "=", "SCPClient", "(", "self", ".", "ssh", ".", "get_transport", "(", ")", ")", "#don't call this, but use the above function instead.", "self", ".", "is_open", "=", "True" ]
https://github.com/krintoxi/NoobSec-Toolkit/blob/38738541cbc03cedb9a3b3ed13b629f781ad64f6/NoobSecToolkit - MAC OSX/scripts/sshbackdoors/target.py#L19-L26
ARM-DOE/pyart
72affe5b669f1996cd3cc39ec7d8dd29b838bd48
pyart/core/radar.py
python
Radar.get_gate_x_y_z
(self, sweep, edges=False, filter_transitions=False)
return antenna_vectors_to_cartesian( self.range['data'], azimuths, elevations, edges=edges)
Return the x, y and z gate locations in meters for a given sweep. With the default parameters this method returns the same data as contained in the gate_x, gate_y and gate_z attributes but this method performs the gate location calculations only for the specified sweep and therefore is more efficient than accessing this data through these attributes. When used with :py:func:`get_field` this method can be used to obtain the data needed for plotting a radar field with the correct spatial context. Parameters ---------- sweep : int Sweep number to retrieve gate locations from, 0 based. edges : bool, optional True to return the locations of the gate edges calculated by interpolating between the range, azimuths and elevations. False (the default) will return the locations of the gate centers with no interpolation. filter_transitions : bool, optional True to remove rays where the antenna was in transition between sweeps. False will include these rays. No rays will be removed if the antenna_transition attribute is not available (set to None). Returns ------- x, y, z : 2D array Array containing the x, y and z distances from the radar in meters for the center (or edges) for all gates in the sweep.
Return the x, y and z gate locations in meters for a given sweep.
[ "Return", "the", "x", "y", "and", "z", "gate", "locations", "in", "meters", "for", "a", "given", "sweep", "." ]
def get_gate_x_y_z(self, sweep, edges=False, filter_transitions=False): """ Return the x, y and z gate locations in meters for a given sweep. With the default parameter this method returns the same data as contained in the gate_x, gate_y and gate_z attributes but this method performs the gate location calculations only for the specified sweep and therefore is more efficient than accessing this data through these attribute. When used with :py:func:`get_field` this method can be used to obtain the data needed for plotting a radar field with the correct spatial context. Parameters ---------- sweep : int Sweep number to retrieve gate locations from, 0 based. edges : bool, optional True to return the locations of the gate edges calculated by interpolating between the range, azimuths and elevations. False (the default) will return the locations of the gate centers with no interpolation. filter_transitions : bool, optional True to remove rays where the antenna was in transition between sweeps. False will include these rays. No rays will be removed if the antenna_transition attribute is not available (set to None). Returns ------- x, y, z : 2D array Array containing the x, y and z, distances from the radar in meters for the center (or edges) for all gates in the sweep. """ azimuths = self.get_azimuth(sweep) elevations = self.get_elevation(sweep) if filter_transitions and self.antenna_transition is not None: sweep_slice = self.get_slice(sweep) valid = self.antenna_transition['data'][sweep_slice] == 0 azimuths = azimuths[valid] elevations = elevations[valid] return antenna_vectors_to_cartesian( self.range['data'], azimuths, elevations, edges=edges)
[ "def", "get_gate_x_y_z", "(", "self", ",", "sweep", ",", "edges", "=", "False", ",", "filter_transitions", "=", "False", ")", ":", "azimuths", "=", "self", ".", "get_azimuth", "(", "sweep", ")", "elevations", "=", "self", ".", "get_elevation", "(", "sweep", ")", "if", "filter_transitions", "and", "self", ".", "antenna_transition", "is", "not", "None", ":", "sweep_slice", "=", "self", ".", "get_slice", "(", "sweep", ")", "valid", "=", "self", ".", "antenna_transition", "[", "'data'", "]", "[", "sweep_slice", "]", "==", "0", "azimuths", "=", "azimuths", "[", "valid", "]", "elevations", "=", "elevations", "[", "valid", "]", "return", "antenna_vectors_to_cartesian", "(", "self", ".", "range", "[", "'data'", "]", ",", "azimuths", ",", "elevations", ",", "edges", "=", "edges", ")" ]
https://github.com/ARM-DOE/pyart/blob/72affe5b669f1996cd3cc39ec7d8dd29b838bd48/pyart/core/radar.py#L452-L497
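A minimal usage sketch for the Py-ART record above, assuming `radar` is any pyart.core.Radar instance (e.g. from pyart.io.read) that carries a "reflectivity" field; the field name is a placeholder:

import matplotlib.pyplot as plt

def plot_sweep(radar, field="reflectivity", sweep=0):
    # Gate centers for this sweep only -- cheaper than the full gate_x/gate_y.
    x, y, z = radar.get_gate_x_y_z(sweep)
    data = radar.get_field(sweep, field)  # same shape as x, y, z
    plt.pcolormesh(x / 1000.0, y / 1000.0, data, shading="auto")
    plt.xlabel("distance east of radar (km)")
    plt.ylabel("distance north of radar (km)")
    plt.show()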
florath/rmtoo
6ffe08703451358dca24b232ee4380b1da23bcad
rmtoo/lib/vcs/Git.py
python
Git.__get_tree_direct
(self, base_tree, directory)
Return the tree of the given directory. This does not walk down the directory structure. It just checks the current hierarchy.
Return the tree of the given directory. This does not walk down the directory structure. It just checks the current hierarchy.
[ "Return", "the", "tree", "of", "the", "given", "directory", ".", "This", "does", "not", "walk", "down", "the", "directory", "structure", ".", "It", "just", "checks", "the", "current", "hierarchy", "." ]
def __get_tree_direct(self, base_tree, directory): '''Return the tree of the given directory. This does not walk down the directory structure. It just checks the current hierarchy.''' for tree in base_tree.trees: if tree.name == directory: return tree raise RMTException(108, "directory entry [%s] not found in tree " "[%s]." % (directory, base_tree.name))
[ "def", "__get_tree_direct", "(", "self", ",", "base_tree", ",", "directory", ")", ":", "for", "tree", "in", "base_tree", ".", "trees", ":", "if", "tree", ".", "name", "==", "directory", ":", "return", "tree", "raise", "RMTException", "(", "108", ",", "\"directory entry [%s] not found in tree \"", "\"[%s].\"", "%", "(", "directory", ",", "base_tree", ".", "name", ")", ")" ]
https://github.com/florath/rmtoo/blob/6ffe08703451358dca24b232ee4380b1da23bcad/rmtoo/lib/vcs/Git.py#L154-L162
ktbyers/netmiko
4c3732346eea1a4a608abd9e09d65eeb2f577810
netmiko/dell/dell_os10_ssh.py
python
DellOS10FileTransfer.check_file_exists
(self, remote_cmd: str = "dir home")
Check if the dest_file already exists on the file system (return boolean).
Check if the dest_file already exists on the file system (return boolean).
[ "Check", "if", "the", "dest_file", "already", "exists", "on", "the", "file", "system", "(", "return", "boolean", ")", "." ]
def check_file_exists(self, remote_cmd: str = "dir home") -> bool: """Check if the dest_file already exists on the file system (return boolean).""" if self.direction == "put": remote_out = self.ssh_ctl_chan._send_command_str(remote_cmd) search_string = r"Directory contents .*{}".format(self.dest_file) return bool(re.search(search_string, remote_out, flags=re.DOTALL)) elif self.direction == "get": return os.path.exists(self.dest_file) else: raise ValueError("self.direction is set to an invalid value")
[ "def", "check_file_exists", "(", "self", ",", "remote_cmd", ":", "str", "=", "\"dir home\"", ")", "->", "bool", ":", "if", "self", ".", "direction", "==", "\"put\"", ":", "remote_out", "=", "self", ".", "ssh_ctl_chan", ".", "_send_command_str", "(", "remote_cmd", ")", "search_string", "=", "r\"Directory contents .*{}\"", ".", "format", "(", "self", ".", "dest_file", ")", "return", "bool", "(", "re", ".", "search", "(", "search_string", ",", "remote_out", ",", "flags", "=", "re", ".", "DOTALL", ")", ")", "elif", "self", ".", "direction", "==", "\"get\"", ":", "return", "os", ".", "path", ".", "exists", "(", "self", ".", "dest_file", ")", "else", ":", "raise", "ValueError", "(", "\"self.direction is set to an invalid value\"", ")" ]
https://github.com/ktbyers/netmiko/blob/4c3732346eea1a4a608abd9e09d65eeb2f577810/netmiko/dell/dell_os10_ssh.py#L101-L110
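A hedged usage sketch for the Netmiko record above; the device parameters are placeholders, and the constructor arguments are assumed to follow Netmiko's generic BaseFileTransfer interface:

from netmiko import ConnectHandler
from netmiko.dell.dell_os10_ssh import DellOS10FileTransfer

conn = ConnectHandler(device_type="dell_os10", host="192.0.2.10",
                      username="admin", password="admin")  # placeholders
transfer = DellOS10FileTransfer(conn, source_file="startup.xml",
                                dest_file="startup.xml", direction="put")
if transfer.check_file_exists():
    print("already on the device, skipping copy")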
foxmask/django-th
29aa84f8d4aa945dbef6cf580593b435cc708e31
th_rss/lib/conditionchecker/conditionchecker.py
python
Condition.check
(self, datas, *filers)
this method allows reducing the quantity of information to read by applying some filtering; here '*filers' can receive a list of properties to be filtered
this method allows reducing the quantity of information to read by applying some filtering; here '*filers' can receive a list of properties to be filtered
[ "this", "method", "allows", "reducing", "the", "quantity", "of", "information", "to", "read", "by", "applying", "some", "filtering", "here", "*", "filers", "can", "receive", "a", "list", "of", "properties", "to", "be", "filtered" ]
def check(self, datas, *filers): ''' this method allows reducing the quantity of information to read by applying some filtering; here '*filers' can receive a list of properties to be filtered ''' # special case : no filter : want to read all the feed if self.match == "" and self.does_not_match == '': yield datas # let's filtering : else: condition1 = False condition2 = False # arg contain the property from which we want to check the 'data' for prop in filers: # check if my datas contains my property if prop in datas: # filter to find only this data if self.match != '' and condition1 is False: condition1 = self.filter_that(self.match, datas[prop]) # filter to exclude this data, # when found, continue to the next entry if self.does_not_match != '' and condition2 is False: condition2 = self.filter_that(self.does_not_match, datas[prop]) if condition2: continue if condition1 and condition2 is False: yield datas
[ "def", "check", "(", "self", ",", "datas", ",", "*", "filers", ")", ":", "# special case : no filter : want to read all the feed", "if", "self", ".", "match", "==", "\"\"", "and", "self", ".", "does_not_match", "==", "''", ":", "yield", "datas", "# let's filtering :", "else", ":", "condition1", "=", "False", "condition2", "=", "False", "# arg contain the property from which we want to check the 'data'", "for", "prop", "in", "filers", ":", "# check if my datas contains my property", "if", "prop", "in", "datas", ":", "# filter to find only this data", "if", "self", ".", "match", "!=", "''", "and", "condition1", "is", "False", ":", "condition1", "=", "self", ".", "filter_that", "(", "self", ".", "match", ",", "datas", "[", "prop", "]", ")", "# filter to exclude this data,", "# when found, continue to the next entry", "if", "self", ".", "does_not_match", "!=", "''", "and", "condition2", "is", "False", ":", "condition2", "=", "self", ".", "filter_that", "(", "self", ".", "does_not_match", ",", "datas", "[", "prop", "]", ")", "if", "condition2", ":", "continue", "if", "condition1", "and", "condition2", "is", "False", ":", "yield", "datas" ]
https://github.com/foxmask/django-th/blob/29aa84f8d4aa945dbef6cf580593b435cc708e31/th_rss/lib/conditionchecker/conditionchecker.py#L22-L51
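A hedged sketch of how Condition.check filters one feed entry at a time; the constructor is not shown in the record, so the match attributes are set directly here as an assumption:

entries = [
    {"title": "Django 4.2 released", "summary": "framework news"},
    {"title": "Holiday photos", "summary": "personal post"},
]
condition = Condition()        # constructor signature is an assumption
condition.match = "Django"     # keep entries mentioning Django
condition.does_not_match = ""  # exclude nothing
for entry in entries:
    for matched in condition.check(entry, "title"):
        print(matched["title"])  # only the Django entry is yielded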
llSourcell/AI_Artist
3038c06c2e389b9c919c881c9a169efe2fd7810e
lib/python2.7/site-packages/pip/_vendor/requests/cookies.py
python
RequestsCookieJar.itervalues
(self)
Dict-like itervalues() that returns an iterator of values of cookies from the jar. See iterkeys() and iteritems().
Dict-like itervalues() that returns an iterator of values of cookies from the jar. See iterkeys() and iteritems().
[ "Dict", "-", "like", "itervalues", "()", "that", "returns", "an", "iterator", "of", "values", "of", "cookies", "from", "the", "jar", ".", "See", "iterkeys", "()", "and", "iteritems", "()", "." ]
def itervalues(self): """Dict-like itervalues() that returns an iterator of values of cookies from the jar. See iterkeys() and iteritems().""" for cookie in iter(self): yield cookie.value
[ "def", "itervalues", "(", "self", ")", ":", "for", "cookie", "in", "iter", "(", "self", ")", ":", "yield", "cookie", ".", "value" ]
https://github.com/llSourcell/AI_Artist/blob/3038c06c2e389b9c919c881c9a169efe2fd7810e/lib/python2.7/site-packages/pip/_vendor/requests/cookies.py#L219-L223
josauder/procedural_city_generation
e53d9a48440c914f9aad65455b3aebc13d90bc98
procedural_city_generation/building_generation/roofs.py
python
houseroof
(walls, currentheight, roofheight, texture)
return [Polygon3D(verts, faces, texture)]
Creates a "classic" roof with two triangles and two rectangles. Used only for houses and assumes that the house has 4 sides. Parameters ----------- walls : procedural_city_generation.building_generation.Walls object currentheight : float Current height, Z coordinate of the base of the roof roofheight : float Height of the roof itself texture : procedural_city_generation.building_generation.Texture object Returns ------- list<procedural_city_generation.building_generation.Polygon3D object>
Creates a "classic" roof with two triangles and two rectangles. Used only for houses and assumes that the house has 4 sides.
[ "Creates", "a", "classic", "roof", "with", "two", "triangles", "and", "two", "rectangles", ".", "Used", "only", "for", "houses", "and", "assumes", "that", "the", "house", "has", "4", "sides", "." ]
def houseroof(walls, currentheight, roofheight, texture): """Creates a "classic" roof with two triangles and two rectangles. Used only for houses and assumes that the house has 4 sides. Parameters ----------- walls : procedural_city_generation.building_generation.Walls object currentheight : float Current height, Z coordinate of the base of the roof roofheight : float Height of the roof itself texture : procedural_city_generation.building_generation.Texture object Returns ------- list<procedural_city_generation.building_generation.Polygon3D object> """ #Differentiation: the shorter of the first two walls is to be cut in half if not np.linalg.norm(np.diff(walls.getWalls()[0], axis=0))<np.linalg.norm(np.diff(walls.getWalls()[1], axis=0)): walls=Walls(np.roll(walls.vertices, 1, axis=0), walls.l) h_low=np.array([0, 0, currentheight]) h_high=h_low+np.array([0, 0, roofheight]) #The gable coordinates c1, c2=sum(walls.getWalls()[0]/2), sum(walls.getWalls()[2]/2) #Verts are the vertices of the wall and the vertices of the gable verts=[x+h_low for x in walls.vertices]+[c1+h_high, c2+h_high] #Faces are two rectangles and two triangles faces=[(0, 1, 5, 4), (3, 2, 5, 4), (0, 3, 4), (1, 2, 5)] return [Polygon3D(verts, faces, texture)]
[ "def", "houseroof", "(", "walls", ",", "currentheight", ",", "roofheight", ",", "texture", ")", ":", "#Differentiation: the shorter of the first two walls is to be cut in half", "if", "not", "np", ".", "linalg", ".", "norm", "(", "np", ".", "diff", "(", "walls", ".", "getWalls", "(", ")", "[", "0", "]", ",", "axis", "=", "0", ")", ")", "<", "np", ".", "linalg", ".", "norm", "(", "np", ".", "diff", "(", "walls", ".", "getWalls", "(", ")", "[", "1", "]", ",", "axis", "=", "0", ")", ")", ":", "walls", "=", "Walls", "(", "np", ".", "roll", "(", "walls", ".", "vertices", ",", "1", ",", "axis", "=", "0", ")", ",", "walls", ".", "l", ")", "h_low", "=", "np", ".", "array", "(", "[", "0", ",", "0", ",", "currentheight", "]", ")", "h_high", "=", "h_low", "+", "np", ".", "array", "(", "[", "0", ",", "0", ",", "roofheight", "]", ")", "#The gable coordinates", "c1", ",", "c2", "=", "sum", "(", "walls", ".", "getWalls", "(", ")", "[", "0", "]", "/", "2", ")", ",", "sum", "(", "walls", ".", "getWalls", "(", ")", "[", "2", "]", "/", "2", ")", "#Verts are the vertices of the wall and the vertices of the gable", "verts", "=", "[", "x", "+", "h_low", "for", "x", "in", "walls", ".", "vertices", "]", "+", "[", "c1", "+", "h_high", ",", "c2", "+", "h_high", "]", "#Faces are two rectangles and two triangles", "faces", "=", "[", "(", "0", ",", "1", ",", "5", ",", "4", ")", ",", "(", "3", ",", "2", ",", "5", ",", "4", ")", ",", "(", "0", ",", "3", ",", "4", ")", ",", "(", "1", ",", "2", ",", "5", ")", "]", "return", "[", "Polygon3D", "(", "verts", ",", "faces", ",", "texture", ")", "]" ]
https://github.com/josauder/procedural_city_generation/blob/e53d9a48440c914f9aad65455b3aebc13d90bc98/procedural_city_generation/building_generation/roofs.py#L45-L77
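The gable apexes c1 and c2 computed above are simply the midpoints of two opposite walls; a standalone numpy sketch of that step, independent of the project's Walls/Texture classes (the wall layout mirrors what walls.getWalls() is assumed to return):

import numpy as np

# Four house corners, and the walls as consecutive vertex pairs.
vertices = np.array([[0.0, 0.0], [4.0, 0.0], [4.0, 6.0], [0.0, 6.0]])
walls = [np.array([vertices[i], vertices[(i + 1) % 4]]) for i in range(4)]
c1 = sum(walls[0] / 2)  # midpoint of wall 0 -> first ridge endpoint
c2 = sum(walls[2] / 2)  # midpoint of wall 2 -> second ridge endpoint
print(c1, c2)           # [2. 0.] [2. 6.] -- the roof ridge runs between them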
tensorflow/federated
5a60a032360087b8f4c7fcfd97ed1c0131c3eac3
tensorflow_federated/python/core/impl/types/computation_types.py
python
Type.__eq__
(self, other)
Determines whether two type definitions are identical. Note that this notion of equality is stronger than equivalence. Two types with equivalent definitions may not be identical, e.g., if they represent templates with differently named type variables in their definitions. Args: other: The other type to compare against. Returns: `True` iff type definitions are syntactically identical (as defined above), or `False` otherwise. Raises: NotImplementedError: If not implemented in the derived class.
Determines whether two type definitions are identical.
[ "Determines", "whether", "two", "type", "definitions", "are", "identical", "." ]
def __eq__(self, other): """Determines whether two type definitions are identical. Note that this notion of equality is stronger than equivalence. Two types with equivalent definitions may not be identical, e.g., if they represent templates with differently named type variables in their definitions. Args: other: The other type to compare against. Returns: `True` iff type definitions are syntactically identical (as defined above), or `False` otherwise. Raises: NotImplementedError: If not implemented in the derived class. """ raise NotImplementedError
[ "def", "__eq__", "(", "self", ",", "other", ")", ":", "raise", "NotImplementedError" ]
https://github.com/tensorflow/federated/blob/5a60a032360087b8f4c7fcfd97ed1c0131c3eac3/tensorflow_federated/python/core/impl/types/computation_types.py#L236-L253
andresriancho/w3af
cd22e5252243a87aaa6d0ddea47cf58dacfe00a9
w3af/core/controllers/profiling/memory_usage.py
python
stop_memory_profiling
()
We cancel the save thread and dump objects for the last time.
We cancel the save thread and dump objects for the last time.
[ "We", "cancel", "the", "save", "thread", "and", "dump", "objects", "for", "the", "last", "time", "." ]
def stop_memory_profiling(): """ We cancel the save thread and dump objects for the last time. """ cancel_thread(SAVE_THREAD_PTR) dump_objects()
[ "def", "stop_memory_profiling", "(", ")", ":", "cancel_thread", "(", "SAVE_THREAD_PTR", ")", "dump_objects", "(", ")" ]
https://github.com/andresriancho/w3af/blob/cd22e5252243a87aaa6d0ddea47cf58dacfe00a9/w3af/core/controllers/profiling/memory_usage.py#L71-L76
toandaominh1997/EfficientDet.Pytorch
fbe56e58c9a2749520303d2d380427e5f01305ba
models/efficientnet.py
python
EfficientNet.extract_features
(self, inputs)
return P
Returns output of the final convolution layer
Returns output of the final convolution layer
[ "Returns", "output", "of", "the", "final", "convolution", "layer" ]
def extract_features(self, inputs): """ Returns output of the final convolution layer """ # Stem x = self._swish(self._bn0(self._conv_stem(inputs))) P = [] index = 0 num_repeat = 0 # Blocks for idx, block in enumerate(self._blocks): drop_connect_rate = self._global_params.drop_connect_rate if drop_connect_rate: drop_connect_rate *= float(idx) / len(self._blocks) x = block(x, drop_connect_rate=drop_connect_rate) num_repeat = num_repeat + 1 if(num_repeat == self._blocks_args[index].num_repeat): num_repeat = 0 index = index + 1 P.append(x) return P
[ "def", "extract_features", "(", "self", ",", "inputs", ")", ":", "# Stem", "x", "=", "self", ".", "_swish", "(", "self", ".", "_bn0", "(", "self", ".", "_conv_stem", "(", "inputs", ")", ")", ")", "P", "=", "[", "]", "index", "=", "0", "num_repeat", "=", "0", "# Blocks", "for", "idx", ",", "block", "in", "enumerate", "(", "self", ".", "_blocks", ")", ":", "drop_connect_rate", "=", "self", ".", "_global_params", ".", "drop_connect_rate", "if", "drop_connect_rate", ":", "drop_connect_rate", "*=", "float", "(", "idx", ")", "/", "len", "(", "self", ".", "_blocks", ")", "x", "=", "block", "(", "x", ",", "drop_connect_rate", "=", "drop_connect_rate", ")", "num_repeat", "=", "num_repeat", "+", "1", "if", "(", "num_repeat", "==", "self", ".", "_blocks_args", "[", "index", "]", ".", "num_repeat", ")", ":", "num_repeat", "=", "0", "index", "=", "index", "+", "1", "P", ".", "append", "(", "x", ")", "return", "P" ]
https://github.com/toandaominh1997/EfficientDet.Pytorch/blob/fbe56e58c9a2749520303d2d380427e5f01305ba/models/efficientnet.py#L190-L209
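A hedged sketch for the EfficientDet backbone above; `from_pretrained` follows the common lukemelas-style factory this port appears to be based on, which is an assumption:

import torch

model = EfficientNet.from_pretrained("efficientnet-b0")  # factory name is an assumption
model.eval()
with torch.no_grad():
    features = model.extract_features(torch.randn(1, 3, 512, 512))
for i, p in enumerate(features):
    print(i, tuple(p.shape))  # one feature map per block group, fed to the BiFPN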
ungarj/label_centerlines
ec1ac69414ae7b4aa8e353b233b782583df97caf
label_centerlines/_src.py
python
_yield_ridge_vertices
(vor, geometry, dist=False)
Yield Voronoi ridge vertices within geometry.
Yield Voronoi ridge vertices within geometry.
[ "Yield", "Voronoi", "ridge", "vertices", "within", "geometry", "." ]
def _yield_ridge_vertices(vor, geometry, dist=False): """Yield Voronoi ridge vertices within geometry.""" for x, y in vor.ridge_vertices: if x < 0 or y < 0: continue point1 = Point(vor.vertices[[x, y]][0]) point2 = Point(vor.vertices[[x, y]][1]) # Eliminate all points outside our geometry. if point1.within(geometry) and point2.within(geometry): if dist: yield x, y, point1.distance(point2) else: yield x, y
[ "def", "_yield_ridge_vertices", "(", "vor", ",", "geometry", ",", "dist", "=", "False", ")", ":", "for", "x", ",", "y", "in", "vor", ".", "ridge_vertices", ":", "if", "x", "<", "0", "or", "y", "<", "0", ":", "continue", "point1", "=", "Point", "(", "vor", ".", "vertices", "[", "[", "x", ",", "y", "]", "]", "[", "0", "]", ")", "point2", "=", "Point", "(", "vor", ".", "vertices", "[", "[", "x", ",", "y", "]", "]", "[", "1", "]", ")", "# Eliminate all points outside our geometry.", "if", "point1", ".", "within", "(", "geometry", ")", "and", "point2", ".", "within", "(", "geometry", ")", ":", "if", "dist", ":", "yield", "x", ",", "y", ",", "point1", ".", "distance", "(", "point2", ")", "else", ":", "yield", "x", ",", "y" ]
https://github.com/ungarj/label_centerlines/blob/ec1ac69414ae7b4aa8e353b233b782583df97caf/label_centerlines/_src.py#L223-L235
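A hedged, self-contained example of the inputs this private helper expects -- a scipy Voronoi diagram plus a shapely geometry to clip against -- assuming the helper is importable from label_centerlines._src:

import numpy as np
from scipy.spatial import Voronoi
from shapely.geometry import Polygon

geometry = Polygon([(0, 0), (10, 0), (10, 10), (0, 10)])
points = np.random.RandomState(0).uniform(0, 10, (30, 2))
vor = Voronoi(points)
for x, y, dist in _yield_ridge_vertices(vor, geometry, dist=True):
    print(x, y, round(dist, 2))  # only ridges fully inside the square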
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AppServer/lib/webapp2-2.3/webapp2_extras/sessions.py
python
SessionDict.get_flashes
(self, key='_flash')
return self.pop(key, [])
Returns a flash message. Flash messages are deleted when first read. :param key: Name of the flash key stored in the session. Default is '_flash'. :returns: The data stored in the flash, or an empty list.
Returns a flash message. Flash messages are deleted when first read.
[ "Returns", "a", "flash", "message", ".", "Flash", "messages", "are", "deleted", "when", "first", "read", "." ]
def get_flashes(self, key='_flash'): """Returns a flash message. Flash messages are deleted when first read. :param key: Name of the flash key stored in the session. Default is '_flash'. :returns: The data stored in the flash, or an empty list. """ return self.pop(key, [])
[ "def", "get_flashes", "(", "self", ",", "key", "=", "'_flash'", ")", ":", "return", "self", ".", "pop", "(", "key", ",", "[", "]", ")" ]
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/lib/webapp2-2.3/webapp2_extras/sessions.py#L129-L137
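A hedged sketch of the flash round trip with webapp2_extras sessions; the session-store wiring behind self.session is omitted, and add_flash stores (value, level) pairs, which is why the read loop unpacks two items:

import webapp2

class ProfileHandler(webapp2.RequestHandler):
    # self.session is assumed to come from a base handler that calls
    # sessions.get_store(request=self.request); that wiring is omitted here.
    def post(self):
        self.session.add_flash("Profile saved.")  # consumed on the next read
        self.redirect("/profile")

    def get(self):
        for value, level in self.session.get_flashes():  # read and clear
            self.response.write(value)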
sametmax/Django--an-app-at-a-time
99eddf12ead76e6dfbeb09ce0bae61e282e22f8a
ignore_this_directory/django/template/defaulttags.py
python
firstof
(parser, token)
return FirstOfNode([parser.compile_filter(bit) for bit in bits], asvar)
Output the first variable passed that is not False. Output nothing if all the passed variables are False. Sample usage:: {% firstof var1 var2 var3 as myvar %} This is equivalent to:: {% if var1 %} {{ var1 }} {% elif var2 %} {{ var2 }} {% elif var3 %} {{ var3 }} {% endif %} but obviously much cleaner! You can also use a literal string as a fallback value in case all passed variables are False:: {% firstof var1 var2 var3 "fallback value" %} If you want to disable auto-escaping of variables you can use:: {% autoescape off %} {% firstof var1 var2 var3 "<strong>fallback value</strong>" %} {% autoescape %} Or if only some variables should be escaped, you can use:: {% firstof var1 var2|safe var3 "<strong>fallback value</strong>"|safe %}
Output the first variable passed that is not False.
[ "Output", "the", "first", "variable", "passed", "that", "is", "not", "False", "." ]
def firstof(parser, token): """ Output the first variable passed that is not False. Output nothing if all the passed variables are False. Sample usage:: {% firstof var1 var2 var3 as myvar %} This is equivalent to:: {% if var1 %} {{ var1 }} {% elif var2 %} {{ var2 }} {% elif var3 %} {{ var3 }} {% endif %} but obviously much cleaner! You can also use a literal string as a fallback value in case all passed variables are False:: {% firstof var1 var2 var3 "fallback value" %} If you want to disable auto-escaping of variables you can use:: {% autoescape off %} {% firstof var1 var2 var3 "<strong>fallback value</strong>" %} {% autoescape %} Or if only some variables should be escaped, you can use:: {% firstof var1 var2|safe var3 "<strong>fallback value</strong>"|safe %} """ bits = token.split_contents()[1:] asvar = None if not bits: raise TemplateSyntaxError("'firstof' statement requires at least one argument") if len(bits) >= 2 and bits[-2] == 'as': asvar = bits[-1] bits = bits[:-2] return FirstOfNode([parser.compile_filter(bit) for bit in bits], asvar)
[ "def", "firstof", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "[", "1", ":", "]", "asvar", "=", "None", "if", "not", "bits", ":", "raise", "TemplateSyntaxError", "(", "\"'firstof' statement requires at least one argument\"", ")", "if", "len", "(", "bits", ")", ">=", "2", "and", "bits", "[", "-", "2", "]", "==", "'as'", ":", "asvar", "=", "bits", "[", "-", "1", "]", "bits", "=", "bits", "[", ":", "-", "2", "]", "return", "FirstOfNode", "(", "[", "parser", ".", "compile_filter", "(", "bit", ")", "for", "bit", "in", "bits", "]", ",", "asvar", ")" ]
https://github.com/sametmax/Django--an-app-at-a-time/blob/99eddf12ead76e6dfbeb09ce0bae61e282e22f8a/ignore_this_directory/django/template/defaulttags.py#L681-L726
sympy/sympy
d822fcba181155b85ff2b29fe525adbafb22b448
sympy/integrals/intpoly.py
python
integration_reduction
(facets, index, a, b, expr, dims, degree)
return value/(len(dims) + degree - 1)
Helper method for main_integrate. Returns the value of the input expression evaluated over the polytope facet referenced by a given index. Parameters =========== facets : List of facets of the polytope. index : Index referencing the facet to integrate the expression over. a : Hyperplane parameter denoting direction. b : Hyperplane parameter denoting distance. expr : The expression to integrate over the facet. dims : List of symbols denoting axes. degree : Degree of the homogeneous polynomial. Examples ======== >>> from sympy.abc import x, y >>> from sympy.integrals.intpoly import integration_reduction,\ hyperplane_parameters >>> from sympy.geometry.point import Point >>> from sympy.geometry.polygon import Polygon >>> triangle = Polygon(Point(0, 3), Point(5, 3), Point(1, 1)) >>> facets = triangle.sides >>> a, b = hyperplane_parameters(triangle)[0] >>> integration_reduction(facets, 0, a, b, 1, (x, y), 0) 5
Helper method for main_integrate. Returns the value of the input expression evaluated over the polytope facet referenced by a given index.
[ "Helper", "method", "for", "main_integrate", ".", "Returns", "the", "value", "of", "the", "input", "expression", "evaluated", "over", "the", "polytope", "facet", "referenced", "by", "a", "given", "index", "." ]
def integration_reduction(facets, index, a, b, expr, dims, degree): """Helper method for main_integrate. Returns the value of the input expression evaluated over the polytope facet referenced by a given index. Parameters =========== facets : List of facets of the polytope. index : Index referencing the facet to integrate the expression over. a : Hyperplane parameter denoting direction. b : Hyperplane parameter denoting distance. expr : The expression to integrate over the facet. dims : List of symbols denoting axes. degree : Degree of the homogeneous polynomial. Examples ======== >>> from sympy.abc import x, y >>> from sympy.integrals.intpoly import integration_reduction,\ hyperplane_parameters >>> from sympy.geometry.point import Point >>> from sympy.geometry.polygon import Polygon >>> triangle = Polygon(Point(0, 3), Point(5, 3), Point(1, 1)) >>> facets = triangle.sides >>> a, b = hyperplane_parameters(triangle)[0] >>> integration_reduction(facets, 0, a, b, 1, (x, y), 0) 5 """ expr = _sympify(expr) if expr.is_zero: return expr value = S.Zero x0 = facets[index].points[0] m = len(facets) gens = (x, y) inner_product = diff(expr, gens[0]) * x0[0] + diff(expr, gens[1]) * x0[1] if inner_product != 0: value += integration_reduction(facets, index, a, b, inner_product, dims, degree - 1) value += left_integral2D(m, index, facets, x0, expr, gens) return value/(len(dims) + degree - 1)
[ "def", "integration_reduction", "(", "facets", ",", "index", ",", "a", ",", "b", ",", "expr", ",", "dims", ",", "degree", ")", ":", "expr", "=", "_sympify", "(", "expr", ")", "if", "expr", ".", "is_zero", ":", "return", "expr", "value", "=", "S", ".", "Zero", "x0", "=", "facets", "[", "index", "]", ".", "points", "[", "0", "]", "m", "=", "len", "(", "facets", ")", "gens", "=", "(", "x", ",", "y", ")", "inner_product", "=", "diff", "(", "expr", ",", "gens", "[", "0", "]", ")", "*", "x0", "[", "0", "]", "+", "diff", "(", "expr", ",", "gens", "[", "1", "]", ")", "*", "x0", "[", "1", "]", "if", "inner_product", "!=", "0", ":", "value", "+=", "integration_reduction", "(", "facets", ",", "index", ",", "a", ",", "b", ",", "inner_product", ",", "dims", ",", "degree", "-", "1", ")", "value", "+=", "left_integral2D", "(", "m", ",", "index", ",", "facets", ",", "x0", ",", "expr", ",", "gens", ")", "return", "value", "/", "(", "len", "(", "dims", ")", "+", "degree", "-", "1", ")" ]
https://github.com/sympy/sympy/blob/d822fcba181155b85ff2b29fe525adbafb22b448/sympy/integrals/intpoly.py#L443-L496
fabioz/PyDev.Debugger
0f8c02a010fe5690405da1dd30ed72326191ce63
pydevd_attach_to_process/winappdbg/thread.py
python
Thread.kill
(self, dwExitCode = 0)
Terminates the thread execution. @note: If the C{lpInjectedMemory} member contains a valid pointer, the memory is freed. @type dwExitCode: int @param dwExitCode: (Optional) Thread exit code.
Terminates the thread execution.
[ "Terminates", "the", "thread", "execution", "." ]
def kill(self, dwExitCode = 0): """ Terminates the thread execution. @note: If the C{lpInjectedMemory} member contains a valid pointer, the memory is freed. @type dwExitCode: int @param dwExitCode: (Optional) Thread exit code. """ hThread = self.get_handle(win32.THREAD_TERMINATE) win32.TerminateThread(hThread, dwExitCode) # Ugliest hack ever, won't work if many pieces of code are injected. # Seriously, what was I thinking? :( if self.pInjectedMemory is not None: try: self.get_process().free(self.pInjectedMemory) self.pInjectedMemory = None except Exception: ## raise # XXX DEBUG pass
[ "def", "kill", "(", "self", ",", "dwExitCode", "=", "0", ")", ":", "hThread", "=", "self", ".", "get_handle", "(", "win32", ".", "THREAD_TERMINATE", ")", "win32", ".", "TerminateThread", "(", "hThread", ",", "dwExitCode", ")", "# Ugliest hack ever, won't work if many pieces of code are injected.", "# Seriously, what was I thinking? :(", "if", "self", ".", "pInjectedMemory", "is", "not", "None", ":", "try", ":", "self", ".", "get_process", "(", ")", ".", "free", "(", "self", ".", "pInjectedMemory", ")", "self", ".", "pInjectedMemory", "=", "None", "except", "Exception", ":", "## raise # XXX DEBUG", "pass" ]
https://github.com/fabioz/PyDev.Debugger/blob/0f8c02a010fe5690405da1dd30ed72326191ce63/pydevd_attach_to_process/winappdbg/thread.py#L364-L385
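A hedged WinAppDbg sketch that applies kill to every thread of a process; the PID is a placeholder and the snapshot calls follow WinAppDbg's usual Process API:

from winappdbg import Process

process = Process(1234)        # PID is a placeholder
process.scan_threads()         # snapshot the process's threads
for thread in process.iter_threads():
    thread.kill(dwExitCode=1)  # also frees thread.pInjectedMemory if set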
replit-archive/empythoned
977ec10ced29a3541a4973dc2b59910805695752
dist/lib/python2.7/multiprocessing/util.py
python
log_to_stderr
(level=None)
return _logger
Turn on logging and add a handler which prints to stderr
Turn on logging and add a handler which prints to stderr
[ "Turn", "on", "logging", "and", "add", "a", "handler", "which", "prints", "to", "stderr" ]
def log_to_stderr(level=None): ''' Turn on logging and add a handler which prints to stderr ''' global _log_to_stderr import logging logger = get_logger() formatter = logging.Formatter(DEFAULT_LOGGING_FORMAT) handler = logging.StreamHandler() handler.setFormatter(formatter) logger.addHandler(handler) if level: logger.setLevel(level) _log_to_stderr = True return _logger
[ "def", "log_to_stderr", "(", "level", "=", "None", ")", ":", "global", "_log_to_stderr", "import", "logging", "logger", "=", "get_logger", "(", ")", "formatter", "=", "logging", ".", "Formatter", "(", "DEFAULT_LOGGING_FORMAT", ")", "handler", "=", "logging", ".", "StreamHandler", "(", ")", "handler", ".", "setFormatter", "(", "formatter", ")", "logger", ".", "addHandler", "(", "handler", ")", "if", "level", ":", "logger", ".", "setLevel", "(", "level", ")", "_log_to_stderr", "=", "True", "return", "_logger" ]
https://github.com/replit-archive/empythoned/blob/977ec10ced29a3541a4973dc2b59910805695752/dist/lib/python2.7/multiprocessing/util.py#L111-L127
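This is the standard-library multiprocessing helper; a minimal usage sketch (the function returns the module-global _logger that get_logger() initialised, which is the same object as the local logger):

import logging
import multiprocessing

logger = multiprocessing.log_to_stderr(logging.INFO)
logger.info("pool starting")  # printed on stderr with DEFAULT_LOGGING_FORMAT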
pantsbuild/pants
2e126e78ffc40cb108408316b90e8beebee1df9e
src/python/pants/base/exception_sink.py
python
SignalHandler.signal_handler_mapping
(self)
return { signal.SIGINT: self._handle_sigint_if_enabled, signal.SIGQUIT: self.handle_sigquit, signal.SIGTERM: self.handle_sigterm, }
A dict mapping (signal number) -> (a method handling the signal).
A dict mapping (signal number) -> (a method handling the signal).
[ "A", "dict", "mapping", "(", "signal", "number", ")", "-", ">", "(", "a", "method", "handling", "the", "signal", ")", "." ]
def signal_handler_mapping(self) -> Dict[signal.Signals, Callable]: """A dict mapping (signal number) -> (a method handling the signal).""" # Could use an enum here, but we never end up doing any matching on the specific signal value, # instead just iterating over the registered signals to set handlers, so a dict is probably # better. return { signal.SIGINT: self._handle_sigint_if_enabled, signal.SIGQUIT: self.handle_sigquit, signal.SIGTERM: self.handle_sigterm, }
[ "def", "signal_handler_mapping", "(", "self", ")", "->", "Dict", "[", "signal", ".", "Signals", ",", "Callable", "]", ":", "# Could use an enum here, but we never end up doing any matching on the specific signal value,", "# instead just iterating over the registered signals to set handlers, so a dict is probably", "# better.", "return", "{", "signal", ".", "SIGINT", ":", "self", ".", "_handle_sigint_if_enabled", ",", "signal", ".", "SIGQUIT", ":", "self", ".", "handle_sigquit", ",", "signal", ".", "SIGTERM", ":", "self", ".", "handle_sigterm", ",", "}" ]
https://github.com/pantsbuild/pants/blob/2e126e78ffc40cb108408316b90e8beebee1df9e/src/python/pants/base/exception_sink.py#L36-L45
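A hedged sketch of how such a mapping is consumed: iterate it once and install each handler with the stdlib signal module (the SignalHandler constructor arguments are not shown in the record, so instantiation is left to the caller):

import signal

def install(handler):
    # handler.signal_handler_mapping is the property defined above.
    for signum, func in handler.signal_handler_mapping.items():
        signal.signal(signum, func)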
delira-dev/delira
cd3ad277d6fad5f837d6c5147e6eee2ada648596
delira/training/backends/sklearn/trainer.py
python
SklearnEstimatorTrainer._update_state
(self, new_state)
return super()._update_state(new_state)
Update the state from a given new state Parameters ---------- new_state : dict new state to update internal state from Returns ------- :class:`SkLearnEstimatorTrainer` the trainer with a modified state
Update the state from a given new state
[ "Update", "the", "state", "from", "a", "given", "new", "state" ]
def _update_state(self, new_state): """ Update the state from a given new state Parameters ---------- new_state : dict new state to update internal state from Returns ------- :class:`SkLearnEstimatorTrainer` the trainer with a modified state """ if "model" in new_state: self.module = new_state.pop("model") if "epoch" in new_state: self.start_epoch = new_state.pop("epoch") return super()._update_state(new_state)
[ "def", "_update_state", "(", "self", ",", "new_state", ")", ":", "if", "\"model\"", "in", "new_state", ":", "self", ".", "module", "=", "new_state", ".", "pop", "(", "\"model\"", ")", "if", "\"epoch\"", "in", "new_state", ":", "self", ".", "start_epoch", "=", "new_state", ".", "pop", "(", "\"epoch\"", ")", "return", "super", "(", ")", ".", "_update_state", "(", "new_state", ")" ]
https://github.com/delira-dev/delira/blob/cd3ad277d6fad5f837d6c5147e6eee2ada648596/delira/training/backends/sklearn/trainer.py#L445-L467
trakt/Plex-Trakt-Scrobbler
aeb0bfbe62fad4b06c164f1b95581da7f35dce0b
Trakttv.bundle/Contents/Libraries/Linux/aarch64/ucs2/cryptography/hazmat/backends/openssl/encode_asn1.py
python
_encode_sk_name_entry
(backend, attributes)
return stack
The sk_X509_NAME_ENTRY created will not be gc'd.
The sk_X509_NAME_ENTRY created will not be gc'd.
[ "The", "sk_X509_NAME_ENTRY", "created", "will", "not", "be", "gc", "d", "." ]
def _encode_sk_name_entry(backend, attributes): """ The sk_X509_NAME_ENTRY created will not be gc'd. """ stack = backend._lib.sk_X509_NAME_ENTRY_new_null() for attribute in attributes: name_entry = _encode_name_entry(backend, attribute) res = backend._lib.sk_X509_NAME_ENTRY_push(stack, name_entry) backend.openssl_assert(res == 1) return stack
[ "def", "_encode_sk_name_entry", "(", "backend", ",", "attributes", ")", ":", "stack", "=", "backend", ".", "_lib", ".", "sk_X509_NAME_ENTRY_new_null", "(", ")", "for", "attribute", "in", "attributes", ":", "name_entry", "=", "_encode_name_entry", "(", "backend", ",", "attribute", ")", "res", "=", "backend", ".", "_lib", ".", "sk_X509_NAME_ENTRY_push", "(", "stack", ",", "name_entry", ")", "backend", ".", "openssl_assert", "(", "res", "==", "1", ")", "return", "stack" ]
https://github.com/trakt/Plex-Trakt-Scrobbler/blob/aeb0bfbe62fad4b06c164f1b95581da7f35dce0b/Trakttv.bundle/Contents/Libraries/Linux/aarch64/ucs2/cryptography/hazmat/backends/openssl/encode_asn1.py#L103-L112
IronLanguages/ironpython3
7a7bb2a872eeab0d1009fc8a6e24dca43f65b693
Src/Scripts/generate_reflected_calls.py
python
get_type_names
(i)
return ['T' + str(x) for x in range(i)]
[]
def get_type_names(i): if i == 1: return ['T0'] return ['T' + str(x) for x in range(i)]
[ "def", "get_type_names", "(", "i", ")", ":", "if", "i", "==", "1", ":", "return", "[", "'T0'", "]", "return", "[", "'T'", "+", "str", "(", "x", ")", "for", "x", "in", "range", "(", "i", ")", "]" ]
https://github.com/IronLanguages/ironpython3/blob/7a7bb2a872eeab0d1009fc8a6e24dca43f65b693/Src/Scripts/generate_reflected_calls.py#L21-L23
andresriancho/w3af
cd22e5252243a87aaa6d0ddea47cf58dacfe00a9
w3af/plugins/attack/db/sqlmap/thirdparty/bottle/bottle.py
python
BaseRequest.__repr__
(self)
return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url)
[]
def __repr__(self): return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url)
[ "def", "__repr__", "(", "self", ")", ":", "return", "'<%s: %s %s>'", "%", "(", "self", ".", "__class__", ".", "__name__", ",", "self", ".", "method", ",", "self", ".", "url", ")" ]
https://github.com/andresriancho/w3af/blob/cd22e5252243a87aaa6d0ddea47cf58dacfe00a9/w3af/plugins/attack/db/sqlmap/thirdparty/bottle/bottle.py#L1503-L1504
GalSim-developers/GalSim
a05d4ec3b8d8574f99d3b0606ad882cbba53f345
galsim/config/value_random.py
python
_GenerateFromRandomPoisson
(config, base, value_type)
return val, False
Return a random value drawn from a Poisson distribution
Return a random value drawn from a Poisson distribution
[ "Return", "a", "random", "value", "drawn", "from", "a", "Poisson", "distribution" ]
def _GenerateFromRandomPoisson(config, base, value_type): """Return a random value drawn from a Poisson distribution """ rng = GetRNG(config, base) req = { 'mean' : float } kwargs, safe = GetAllParams(config, base, req=req) mean = kwargs['mean'] dev = PoissonDeviate(rng,mean=mean) val = dev() #print(base['obj_num'],'RandomPoisson: ',val) return val, False
[ "def", "_GenerateFromRandomPoisson", "(", "config", ",", "base", ",", "value_type", ")", ":", "rng", "=", "GetRNG", "(", "config", ",", "base", ")", "req", "=", "{", "'mean'", ":", "float", "}", "kwargs", ",", "safe", "=", "GetAllParams", "(", "config", ",", "base", ",", "req", "=", "req", ")", "mean", "=", "kwargs", "[", "'mean'", "]", "dev", "=", "PoissonDeviate", "(", "rng", ",", "mean", "=", "mean", ")", "val", "=", "dev", "(", ")", "#print(base['obj_num'],'RandomPoisson: ',val)", "return", "val", ",", "False" ]
https://github.com/GalSim-developers/GalSim/blob/a05d4ec3b8d8574f99d3b0606ad882cbba53f345/galsim/config/value_random.py#L134-L148
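For context, this generator backs the RandomPoisson value type in GalSim config files; a hedged sketch of the dict/YAML form it consumes (the surrounding gal fields are illustrative, only type and mean matter here):

# As a Python config dict, e.g. drawing a galaxy flux from a Poisson law:
config = {
    "gal": {
        "type": "Exponential",
        "half_light_radius": 0.5,
        "flux": {"type": "RandomPoisson", "mean": 100},
    }
}
# Equivalent YAML:
#   gal:
#     flux: {type: RandomPoisson, mean: 100}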
jmg/crawley
3953dcaac6d0828a03b353c6eced34acd7220c77
crawley/manager/utils.py
python
generate_template
(tm_name, project_name, output_dir, new_extension=None)
Generates a project's file from a template
Generates a project's file from a template
[ "Generates", "a", "project", "s", "file", "from", "a", "template" ]
def generate_template(tm_name, project_name, output_dir, new_extension=None): """ Generates a project's file from a template """ tm_name, ext = os.path.splitext(tm_name) if not ext: ext = ".tm" if new_extension is None: new_extension = '.py' with open(os.path.join(TEMPLATES_DIR, "%s%s" % (tm_name, ext)), 'r') as f: template = f.read() data = template % { 'project_name' : project_name } with open(os.path.join(output_dir, "%s%s" % (tm_name, new_extension)), 'w') as f: f.write(data)
[ "def", "generate_template", "(", "tm_name", ",", "project_name", ",", "output_dir", ",", "new_extension", "=", "None", ")", ":", "tm_name", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "tm_name", ")", "if", "not", "ext", ":", "ext", "=", "\".tm\"", "if", "new_extension", "is", "None", ":", "new_extension", "=", "'.py'", "with", "open", "(", "os", ".", "path", ".", "join", "(", "TEMPLATES_DIR", ",", "\"%s%s\"", "%", "(", "tm_name", ",", "ext", ")", ")", ",", "'r'", ")", "as", "f", ":", "template", "=", "f", ".", "read", "(", ")", "data", "=", "template", "%", "{", "'project_name'", ":", "project_name", "}", "with", "open", "(", "os", ".", "path", ".", "join", "(", "output_dir", ",", "\"%s%s\"", "%", "(", "tm_name", ",", "new_extension", ")", ")", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "data", ")" ]
https://github.com/jmg/crawley/blob/3953dcaac6d0828a03b353c6eced34acd7220c77/crawley/manager/utils.py#L51-L69
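A hedged usage sketch; "settings" is a placeholder template name that must exist as settings.tm inside crawley's TEMPLATES_DIR:

import os

output_dir = "/tmp/myproject"
if not os.path.isdir(output_dir):
    os.makedirs(output_dir)
generate_template("settings", "myproject", output_dir)
# -> writes /tmp/myproject/settings.py with %(project_name)s -> "myproject"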
misterch0c/shadowbroker
e3a069bea47a2c1009697941ac214adc6f90aa8d
windows/Resources/Python/Core/Lib/decimal.py
python
Decimal.from_float
(cls, f)
Converts a float to a decimal number, exactly. Note that Decimal.from_float(0.1) is not the same as Decimal('0.1'). Since 0.1 is not exactly representable in binary floating point, the value is stored as the nearest representable value which is 0x1.999999999999ap-4. The exact equivalent of the value in decimal is 0.1000000000000000055511151231257827021181583404541015625. >>> Decimal.from_float(0.1) Decimal('0.1000000000000000055511151231257827021181583404541015625') >>> Decimal.from_float(float('nan')) Decimal('NaN') >>> Decimal.from_float(float('inf')) Decimal('Infinity') >>> Decimal.from_float(-float('inf')) Decimal('-Infinity') >>> Decimal.from_float(-0.0) Decimal('-0')
Converts a float to a decimal number, exactly. Note that Decimal.from_float(0.1) is not the same as Decimal('0.1'). Since 0.1 is not exactly representable in binary floating point, the value is stored as the nearest representable value which is 0x1.999999999999ap-4. The exact equivalent of the value in decimal is 0.1000000000000000055511151231257827021181583404541015625. >>> Decimal.from_float(0.1) Decimal('0.1000000000000000055511151231257827021181583404541015625') >>> Decimal.from_float(float('nan')) Decimal('NaN') >>> Decimal.from_float(float('inf')) Decimal('Infinity') >>> Decimal.from_float(-float('inf')) Decimal('-Infinity') >>> Decimal.from_float(-0.0) Decimal('-0')
[ "Converts", "a", "float", "to", "a", "decimal", "number", "exactly", ".", "Note", "that", "Decimal", ".", "from_float", "(", "0", ".", "1", ")", "is", "not", "the", "same", "as", "Decimal", "(", "0", ".", "1", ")", ".", "Since", "0", ".", "1", "is", "not", "exactly", "representable", "in", "binary", "floating", "point", "the", "value", "is", "stored", "as", "the", "nearest", "representable", "value", "which", "is", "0x1", ".", "999999999999ap", "-", "4", ".", "The", "exact", "equivalent", "of", "the", "value", "in", "decimal", "is", "0", ".", "1000000000000000055511151231257827021181583404541015625", ".", ">>>", "Decimal", ".", "from_float", "(", "0", ".", "1", ")", "Decimal", "(", "0", ".", "1000000000000000055511151231257827021181583404541015625", ")", ">>>", "Decimal", ".", "from_float", "(", "float", "(", "nan", "))", "Decimal", "(", "NaN", ")", ">>>", "Decimal", ".", "from_float", "(", "float", "(", "inf", "))", "Decimal", "(", "Infinity", ")", ">>>", "Decimal", ".", "from_float", "(", "-", "float", "(", "inf", "))", "Decimal", "(", "-", "Infinity", ")", ">>>", "Decimal", ".", "from_float", "(", "-", "0", ".", "0", ")", "Decimal", "(", "-", "0", ")" ]
def from_float(cls, f): """Converts a float to a decimal number, exactly. Note that Decimal.from_float(0.1) is not the same as Decimal('0.1'). Since 0.1 is not exactly representable in binary floating point, the value is stored as the nearest representable value which is 0x1.999999999999ap-4. The exact equivalent of the value in decimal is 0.1000000000000000055511151231257827021181583404541015625. >>> Decimal.from_float(0.1) Decimal('0.1000000000000000055511151231257827021181583404541015625') >>> Decimal.from_float(float('nan')) Decimal('NaN') >>> Decimal.from_float(float('inf')) Decimal('Infinity') >>> Decimal.from_float(-float('inf')) Decimal('-Infinity') >>> Decimal.from_float(-0.0) Decimal('-0') """ if isinstance(f, (int, long)): return cls(f) else: if _math.isinf(f) or _math.isnan(f): return cls(repr(f)) if _math.copysign(1.0, f) == 1.0: sign = 0 else: sign = 1 n, d = abs(f).as_integer_ratio() k = d.bit_length() - 1 result = _dec_from_triple(sign, str(n * 5 ** k), -k) if cls is Decimal: return result return cls(result)
[ "def", "from_float", "(", "cls", ",", "f", ")", ":", "if", "isinstance", "(", "f", ",", "(", "int", ",", "long", ")", ")", ":", "return", "cls", "(", "f", ")", "else", ":", "if", "_math", ".", "isinf", "(", "f", ")", "or", "_math", ".", "isnan", "(", "f", ")", ":", "return", "cls", "(", "repr", "(", "f", ")", ")", "if", "_math", ".", "copysign", "(", "1.0", ",", "f", ")", "==", "1.0", ":", "sign", "=", "0", "else", ":", "sign", "=", "1", "n", ",", "d", "=", "abs", "(", "f", ")", ".", "as_integer_ratio", "(", ")", "k", "=", "d", ".", "bit_length", "(", ")", "-", "1", "result", "=", "_dec_from_triple", "(", "sign", ",", "str", "(", "n", "*", "5", "**", "k", ")", ",", "-", "k", ")", "if", "cls", "is", "Decimal", ":", "return", "result", "return", "cls", "(", "result", ")" ]
https://github.com/misterch0c/shadowbroker/blob/e3a069bea47a2c1009697941ac214adc6f90aa8d/windows/Resources/Python/Core/Lib/decimal.py#L595-L630
semirook/snaql
9e3987e19e677725f8d4882cb15882be6366db09
snaql/factory.py
python
JinjaSQLExtension.parse
(self, parser)
return nodes.CallBlock(call_node, [], [], body)
[]
def parse(self, parser): lineno = next(parser.stream).lineno expr = parser.parse_expression() args = [expr] kwargs = [nodes.Keyword('func', expr)] if parser.stream.skip_if('comma'): # Optional 'note' for function docstring if ( parser.stream.current.type == 'name' and parser.stream.current.value in ( 'note', 'cond_for', 'depends_on' ) ): stream_type = parser.stream.current.value next(parser.stream) parser.stream.expect('assign') # Depends meta is always a list if stream_type == 'depends_on': c_expr = parser.parse_list() else: c_expr = parser.parse_expression() args.append(c_expr) kwargs.append(nodes.Keyword(stream_type, c_expr)) body = parser.parse_statements( ['name:endsql', 'name:endquery'], drop_needle=True ) raw_template = self.environment.sql_params['raws'][parser.name] # Lines range of original raw template raw_lines = slice(lineno, parser.stream.current.lineno-1) self.environment.sql_params.setdefault('funcs', {}).update({ expr.value: {'raw_sql': '\n '.join(raw_template[raw_lines])} }) call_node = nodes.Call( self.attr('_sql_process', lineno=lineno), args, kwargs, None, None ) return nodes.CallBlock(call_node, [], [], body)
[ "def", "parse", "(", "self", ",", "parser", ")", ":", "lineno", "=", "next", "(", "parser", ".", "stream", ")", ".", "lineno", "expr", "=", "parser", ".", "parse_expression", "(", ")", "args", "=", "[", "expr", "]", "kwargs", "=", "[", "nodes", ".", "Keyword", "(", "'func'", ",", "expr", ")", "]", "if", "parser", ".", "stream", ".", "skip_if", "(", "'comma'", ")", ":", "# Optional 'note' for function docstring", "if", "(", "parser", ".", "stream", ".", "current", ".", "type", "==", "'name'", "and", "parser", ".", "stream", ".", "current", ".", "value", "in", "(", "'note'", ",", "'cond_for'", ",", "'depends_on'", ")", ")", ":", "stream_type", "=", "parser", ".", "stream", ".", "current", ".", "value", "next", "(", "parser", ".", "stream", ")", "parser", ".", "stream", ".", "expect", "(", "'assign'", ")", "# Depends meta is always a list", "if", "stream_type", "==", "'depends_on'", ":", "c_expr", "=", "parser", ".", "parse_list", "(", ")", "else", ":", "c_expr", "=", "parser", ".", "parse_expression", "(", ")", "args", ".", "append", "(", "c_expr", ")", "kwargs", ".", "append", "(", "nodes", ".", "Keyword", "(", "stream_type", ",", "c_expr", ")", ")", "body", "=", "parser", ".", "parse_statements", "(", "[", "'name:endsql'", ",", "'name:endquery'", "]", ",", "drop_needle", "=", "True", ")", "raw_template", "=", "self", ".", "environment", ".", "sql_params", "[", "'raws'", "]", "[", "parser", ".", "name", "]", "# Lines range of original raw template", "raw_lines", "=", "slice", "(", "lineno", ",", "parser", ".", "stream", ".", "current", ".", "lineno", "-", "1", ")", "self", ".", "environment", ".", "sql_params", ".", "setdefault", "(", "'funcs'", ",", "{", "}", ")", ".", "update", "(", "{", "expr", ".", "value", ":", "{", "'raw_sql'", ":", "'\\n '", ".", "join", "(", "raw_template", "[", "raw_lines", "]", ")", "}", "}", ")", "call_node", "=", "nodes", ".", "Call", "(", "self", ".", "attr", "(", "'_sql_process'", ",", "lineno", "=", "lineno", ")", ",", "args", ",", "kwargs", ",", "None", ",", "None", ")", "return", "nodes", ".", "CallBlock", "(", "call_node", ",", "[", "]", ",", "[", "]", ",", "body", ")" ]
https://github.com/semirook/snaql/blob/9e3987e19e677725f8d4882cb15882be6366db09/snaql/factory.py#L67-L105
bshao001/ChatLearner
4b7d8a617bb0cc5c2a792a3e87d7be7bf6364b43
webui/server/tornadows/soaphandler.py
python
SoapHandler._parseParams
(self,elements,types=None,args=None)
return values
Private method to parse a Body element of SOAP Envelope and extract the values of the request document like parameters for the soapmethod; this method returns a list of parameter values.
Private method to parse a Body element of SOAP Envelope and extract the values of the request document like parameters for the soapmethod; this method returns a list of parameter values.
[ "Private", "method", "to", "parse", "a", "Body", "element", "of", "SOAP", "Envelope", "and", "extract", "the", "values", "of", "the", "request", "document", "like", "parameters", "for", "the", "soapmethod", "this", "method", "returns", "a", "list", "of", "parameter", "values", "." ]
def _parseParams(self,elements,types=None,args=None): """ Private method to parse a Body element of SOAP Envelope and extract the values of the request document like parameters for the soapmethod; this method returns a list of parameter values. """ values = [] for tagname in args: type = types[tagname] values += self._findValues(tagname,type,elements) return values
[ "def", "_parseParams", "(", "self", ",", "elements", ",", "types", "=", "None", ",", "args", "=", "None", ")", ":", "values", "=", "[", "]", "for", "tagname", "in", "args", ":", "type", "=", "types", "[", "tagname", "]", "values", "+=", "self", ".", "_findValues", "(", "tagname", ",", "type", ",", "elements", ")", "return", "values" ]
https://github.com/bshao001/ChatLearner/blob/4b7d8a617bb0cc5c2a792a3e87d7be7bf6364b43/webui/server/tornadows/soaphandler.py#L283-L292
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/algebras/lie_algebras/onsager.py
python
OnsagerAlgebraACE._repr_generator
(self, m)
return 'B[{}]'.format(m[1])
Return a string representation of the generator indexed by ``m``. EXAMPLES:: sage: O = lie_algebras.OnsagerAlgebra(QQ).alternating_central_extension() sage: O._repr_generator((0,-2)) 'A[-2]' sage: O._repr_generator((1,4)) 'B[4]'
Return a string representation of the generator indexed by ``m``.
[ "Return", "a", "string", "representation", "of", "the", "generator", "indexed", "by", "m", "." ]
def _repr_generator(self, m): """ Return a string representation of the generator indexed by ``m``. EXAMPLES:: sage: O = lie_algebras.OnsagerAlgebra(QQ).alternating_central_extension() sage: O._repr_generator((0,-2)) 'A[-2]' sage: O._repr_generator((1,4)) 'B[4]' """ if m[0] == 0: return 'A[{}]'.format(m[1]) return 'B[{}]'.format(m[1])
[ "def", "_repr_generator", "(", "self", ",", "m", ")", ":", "if", "m", "[", "0", "]", "==", "0", ":", "return", "'A[{}]'", ".", "format", "(", "m", "[", "1", "]", ")", "return", "'B[{}]'", ".", "format", "(", "m", "[", "1", "]", ")" ]
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/algebras/lie_algebras/onsager.py#L1077-L1091
liyibo/text-classification-demos
2bc3f56e0eb2b028565881c91db26a589b050db8
cnn_model.py
python
main
()
[]
def main(): word_to_id, id_to_word = word_2_id(vocab_dir) cat_to_id, id_to_cat = cat_2_id() x_train, y_train = process_file(train_dir, word_to_id, cat_to_id, max_length) x_val, y_val = process_file(val_dir, word_to_id, cat_to_id, max_length) epochs = 5 best_acc_val = 0.0 # best validation-set accuracy train_steps = 0 val_loss = 0.0 val_acc = 0.0 with tf.Graph().as_default(): seq_length = 512 num_classes = 10 vocab_size = 5000 cnn_model = TextCNN(seq_length, num_classes, vocab_size) saver = tf.train.Saver() sess = tf.Session() with sess.as_default(): sess.run(tf.global_variables_initializer()) for epoch in range(epochs): print('Epoch:', epoch + 1) batch_train = batch_iter(x_train, y_train, 32) for x_batch, y_batch in batch_train: train_steps += 1 learn_rate = 0.001 # learning rate vary feed_dict = {cnn_model.input_x: x_batch, cnn_model.input_y: y_batch, cnn_model.drop_prob: 0.5, cnn_model.learning_rate: learn_rate} _, train_loss, train_acc = sess.run([cnn_model.optim, cnn_model.loss, cnn_model.acc], feed_dict=feed_dict) if train_steps % 1000 == 0: val_loss, val_acc = evaluate(sess, cnn_model, x_val, y_val) if val_acc > best_acc_val: # save the best result best_acc_val = val_acc last_improved = train_steps saver.save(sess, "./model/cnn/model", global_step=train_steps) # saver.save(sess=session, save_path=save_path) improved_str = '*' else: improved_str = '' now_time = datetime.now() msg = 'Iter: {0:>6}, Train Loss: {1:>6.2}, Train Acc: {2:>7.2%},' \ + ' Val Loss: {3:>6.2}, Val Acc: {4:>7.2%}, Time: {5} {6}' print(msg.format(train_steps, train_loss, train_acc, val_loss, val_acc, now_time, improved_str))
[ "def", "main", "(", ")", ":", "word_to_id", ",", "id_to_word", "=", "word_2_id", "(", "vocab_dir", ")", "cat_to_id", ",", "id_to_cat", "=", "cat_2_id", "(", ")", "x_train", ",", "y_train", "=", "process_file", "(", "train_dir", ",", "word_to_id", ",", "cat_to_id", ",", "max_length", ")", "x_val", ",", "y_val", "=", "process_file", "(", "val_dir", ",", "word_to_id", ",", "cat_to_id", ",", "max_length", ")", "epochs", "=", "5", "best_acc_val", "=", "0.0", "# 最佳验证集准确率", "train_steps", "=", "0", "val_loss", "=", "0.0", "val_acc", "=", "0.0", "with", "tf", ".", "Graph", "(", ")", ".", "as_default", "(", ")", ":", "seq_length", "=", "512", "num_classes", "=", "10", "vocab_size", "=", "5000", "cnn_model", "=", "TextCNN", "(", "seq_length", ",", "num_classes", ",", "vocab_size", ")", "saver", "=", "tf", ".", "train", ".", "Saver", "(", ")", "sess", "=", "tf", ".", "Session", "(", ")", "with", "sess", ".", "as_default", "(", ")", ":", "sess", ".", "run", "(", "tf", ".", "global_variables_initializer", "(", ")", ")", "for", "epoch", "in", "range", "(", "epochs", ")", ":", "print", "(", "'Epoch:'", ",", "epoch", "+", "1", ")", "batch_train", "=", "batch_iter", "(", "x_train", ",", "y_train", ",", "32", ")", "for", "x_batch", ",", "y_batch", "in", "batch_train", ":", "train_steps", "+=", "1", "learn_rate", "=", "0.001", "# learning rate vary", "feed_dict", "=", "{", "cnn_model", ".", "input_x", ":", "x_batch", ",", "cnn_model", ".", "input_y", ":", "y_batch", ",", "cnn_model", ".", "drop_prob", ":", "0.5", ",", "cnn_model", ".", "learning_rate", ":", "learn_rate", "}", "_", ",", "train_loss", ",", "train_acc", "=", "sess", ".", "run", "(", "[", "cnn_model", ".", "optim", ",", "cnn_model", ".", "loss", ",", "cnn_model", ".", "acc", "]", ",", "feed_dict", "=", "feed_dict", ")", "if", "train_steps", "%", "1000", "==", "0", ":", "val_loss", ",", "val_acc", "=", "evaluate", "(", "sess", ",", "cnn_model", ",", "x_val", ",", "y_val", ")", "if", "val_acc", ">", "best_acc_val", ":", "# 保存最好结果", "best_acc_val", "=", "val_acc", "last_improved", "=", "train_steps", "saver", ".", "save", "(", "sess", ",", "\"./model/cnn/model\"", ",", "global_step", "=", "train_steps", ")", "# saver.save(sess=session, save_path=save_path)", "improved_str", "=", "'*'", "else", ":", "improved_str", "=", "''", "now_time", "=", "datetime", ".", "now", "(", ")", "msg", "=", "'Iter: {0:>6}, Train Loss: {1:>6.2}, Train Acc: {2:>7.2%},'", "+", "' Val Loss: {3:>6.2}, Val Acc: {4:>7.2%}, Time: {5} {6}'", "print", "(", "msg", ".", "format", "(", "train_steps", ",", "train_loss", ",", "train_acc", ",", "val_loss", ",", "val_acc", ",", "now_time", ",", "improved_str", ")", ")" ]
https://github.com/liyibo/text-classification-demos/blob/2bc3f56e0eb2b028565881c91db26a589b050db8/cnn_model.py#L152-L202
almarklein/visvis
766ed97767b44a55a6ff72c742d7385e074d3d55
core/axes.py
python
Legend.SetStrings
(self, *stringList)
SetStrings(*stringList) Set the strings of the legend labels.
SetStrings(*stringList) Set the strings of the legend labels.
[ "SetStrings", "(", "*", "stringList", ")", "Set", "the", "strings", "of", "the", "legend", "labels", "." ]
def SetStrings(self, *stringList): """ SetStrings(*stringList) Set the strings of the legend labels. """ # Note that setting the .visible property will invoke a draw # test if len(stringList)==1 and isinstance(stringList[0],(tuple,list)): stringList = stringList[0] for value in stringList: if not isinstance(value, basestring): raise ValueError("Legend string list should only contain strings.") # store self._stringList = stringList # clean up labels and lines for line in [line for line in self._wobjects]: line.Destroy() for label in self.children: label.Destroy() # find axes and figure axes = self.parent while axes and not isinstance(axes, Axes): axes = axes.parent if not axes: return fig = axes.GetFigure() # collect line objects lines = [] twoPoints = False for ob in axes._wobjects: if len(self._wobjects) >= len(stringList): break if isinstance(ob, Line): # Add line props tmp = ob.ls, ob.lc, ob.lw, ob.ms, ob.mc, ob.mw, ob.mec, ob.mew lines.append(tmp) # Set whether to use two points twoPoints = twoPoints or bool(ob.ls and ob.lc and ob.lw) # create new lines and labels maxWidth = 0 nr = -1 for lineProps in lines: nr += 1 if nr >= len(stringList): break # get new line and label text = stringList[nr] yspacing = self._yspacing * fig._relativeFontSize line, label = self._AddLineAndLabel(text, yspacing, twoPoints) # apply line properties line.ls, line.lc, line.lw = lineProps[0:3] line.ms, line.mc, line.mw = lineProps[3:6] line.mec, line.mew = lineProps[6:8] # correct label size and store max deltax, deltay = label.GetVertexLimits() label.position.w = (deltax[1]-deltax[0])+2 maxWidth = max([maxWidth, label.position.w ]) # make own size ok if self._wobjects: pos = label.position self.position.w = maxWidth + pos.x + self._xoffset #self.position.h = pos.bottom + self._yoffset deltax, deltay = label.GetVertexLimits() labelHeight = deltay[1]# - deltay[0] self.position.h = pos.top + labelHeight + self._yoffset + 2 self.visible = True else: self.visible = False
[ "def", "SetStrings", "(", "self", ",", "*", "stringList", ")", ":", "# Note that setting the .visible property will invoke a draw", "# test", "if", "len", "(", "stringList", ")", "==", "1", "and", "isinstance", "(", "stringList", "[", "0", "]", ",", "(", "tuple", ",", "list", ")", ")", ":", "stringList", "=", "stringList", "[", "0", "]", "for", "value", "in", "stringList", ":", "if", "not", "isinstance", "(", "value", ",", "basestring", ")", ":", "raise", "ValueError", "(", "\"Legend string list should only contain strings.\"", ")", "# store", "self", ".", "_stringList", "=", "stringList", "# clean up labels and lines", "for", "line", "in", "[", "line", "for", "line", "in", "self", ".", "_wobjects", "]", ":", "line", ".", "Destroy", "(", ")", "for", "label", "in", "self", ".", "children", ":", "label", ".", "Destroy", "(", ")", "# find axes and figure", "axes", "=", "self", ".", "parent", "while", "axes", "and", "not", "isinstance", "(", "axes", ",", "Axes", ")", ":", "axes", "=", "axes", ".", "parent", "if", "not", "axes", ":", "return", "fig", "=", "axes", ".", "GetFigure", "(", ")", "# collect line objects", "lines", "=", "[", "]", "twoPoints", "=", "False", "for", "ob", "in", "axes", ".", "_wobjects", ":", "if", "len", "(", "self", ".", "_wobjects", ")", ">=", "len", "(", "stringList", ")", ":", "break", "if", "isinstance", "(", "ob", ",", "Line", ")", ":", "# Add line props", "tmp", "=", "ob", ".", "ls", ",", "ob", ".", "lc", ",", "ob", ".", "lw", ",", "ob", ".", "ms", ",", "ob", ".", "mc", ",", "ob", ".", "mw", ",", "ob", ".", "mec", ",", "ob", ".", "mew", "lines", ".", "append", "(", "tmp", ")", "# Set whether to use two points", "twoPoints", "=", "twoPoints", "or", "bool", "(", "ob", ".", "ls", "and", "ob", ".", "lc", "and", "ob", ".", "lw", ")", "# create new lines and labels", "maxWidth", "=", "0", "nr", "=", "-", "1", "for", "lineProps", "in", "lines", ":", "nr", "+=", "1", "if", "nr", ">=", "len", "(", "stringList", ")", ":", "break", "# get new line and label", "text", "=", "stringList", "[", "nr", "]", "yspacing", "=", "self", ".", "_yspacing", "*", "fig", ".", "_relativeFontSize", "line", ",", "label", "=", "self", ".", "_AddLineAndLabel", "(", "text", ",", "yspacing", ",", "twoPoints", ")", "# apply line properties", "line", ".", "ls", ",", "line", ".", "lc", ",", "line", ".", "lw", "=", "lineProps", "[", "0", ":", "3", "]", "line", ".", "ms", ",", "line", ".", "mc", ",", "line", ".", "mw", "=", "lineProps", "[", "3", ":", "6", "]", "line", ".", "mec", ",", "line", ".", "mew", "=", "lineProps", "[", "6", ":", "8", "]", "# correct label size and store max", "deltax", ",", "deltay", "=", "label", ".", "GetVertexLimits", "(", ")", "label", ".", "position", ".", "w", "=", "(", "deltax", "[", "1", "]", "-", "deltax", "[", "0", "]", ")", "+", "2", "maxWidth", "=", "max", "(", "[", "maxWidth", ",", "label", ".", "position", ".", "w", "]", ")", "# make own size ok", "if", "self", ".", "_wobjects", ":", "pos", "=", "label", ".", "position", "self", ".", "position", ".", "w", "=", "maxWidth", "+", "pos", ".", "x", "+", "self", ".", "_xoffset", "#self.position.h = pos.bottom + self._yoffset", "deltax", ",", "deltay", "=", "label", ".", "GetVertexLimits", "(", ")", "labelHeight", "=", "deltay", "[", "1", "]", "# - deltay[0]", "self", ".", "position", ".", "h", "=", "pos", ".", "top", "+", "labelHeight", "+", "self", ".", "_yoffset", "+", "2", "self", ".", "visible", "=", "True", "else", ":", "self", ".", "visible", "=", "False" ]
https://github.com/almarklein/visvis/blob/766ed97767b44a55a6ff72c742d7385e074d3d55/core/axes.py#L1161-L1236
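A minimal usage sketch for the legend machinery above, assuming visvis is installed with a working GUI backend; the `axes.legend` property setter is the usual entry point and forwards the strings to `SetStrings`.

```python
# Sketch: populating a legend; assumes visvis and a GUI backend are available.
import visvis as vv

app = vv.use()                      # pick whichever backend is available
vv.plot([1, 2, 3, 2, 4], lc='r')
vv.plot([4, 2, 3, 2, 1], lc='b')

# The legend setter forwards these strings to Legend.SetStrings, which
# builds one proxy line + label per string and resizes the legend box.
vv.gca().legend = 'rising', 'falling'
app.Run()
```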
istresearch/scrapy-cluster
01861c2dca1563aab740417d315cc4ebf9b73f72
utils/scutils/zookeeper_watcher.py
python
ZookeeperWatcher.watch_file
(self, event)
Fired when changes are made to the file
Fired when changes are made to the file
[ "Fired", "when", "changes", "are", "made", "to", "the", "file" ]
def watch_file(self, event):
        '''
        Fired when changes are made to the file
        '''
        if not self.update_file(self.my_file):
            self.threaded_start()
[ "def", "watch_file", "(", "self", ",", "event", ")", ":", "if", "not", "self", ".", "update_file", "(", "self", ".", "my_file", ")", ":", "self", ".", "threaded_start", "(", ")" ]
https://github.com/istresearch/scrapy-cluster/blob/01861c2dca1563aab740417d315cc4ebf9b73f72/utils/scutils/zookeeper_watcher.py#L190-L195
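A hedged sketch of how `watch_file` gets exercised; the constructor keywords (`hosts`, `filepath`, `config_handler`) follow the scutils documentation, and the host string and znode path are placeholders for your cluster.

```python
# Sketch: a watcher whose watch_file callback fires on znode edits.
from scutils.zookeeper_watcher import ZookeeperWatcher

def on_change(data):
    # Called with the new file contents after update_file succeeds.
    print("file contents now:", data)

zoo_watcher = ZookeeperWatcher(hosts="localhost:2181",
                               filepath="/test/config.yml",
                               config_handler=on_change)
# Internally the watcher registers watch_file as the Kazoo DataWatch
# callback; a failed update falls back to threaded_start() to reconnect.
```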
jerryli27/TwinGAN
4e5593445778dfb77af9f815b3f4fcafc35758dc
datasets/dataset_factory.py
python
get_dataset
(name, split_name, dataset_dir, file_pattern=None, reader=None)
return dataset
Given a dataset name and a split_name returns a Dataset. Args: name: String, the name of the dataset. split_name: A train/test split name. dataset_dir: The directory where the dataset files are stored. file_pattern: The file pattern to use for matching the dataset source files. reader: The subclass of tf.ReaderBase. If left as `None`, then the default reader defined by each dataset is used. Returns: A `Dataset` class. Raises: ValueError: If the dataset `name` is unknown.
Given a dataset name and a split_name returns a Dataset.
[ "Given", "a", "dataset", "name", "and", "a", "split_name", "returns", "a", "Dataset", "." ]
def get_dataset(name, split_name, dataset_dir, file_pattern=None, reader=None): """Given a dataset name and a split_name returns a Dataset. Args: name: String, the name of the dataset. split_name: A train/test split name. dataset_dir: The directory where the dataset files are stored. file_pattern: The file pattern to use for matching the dataset source files. reader: The subclass of tf.ReaderBase. If left as `None`, then the default reader defined by each dataset is used. Returns: A `Dataset` class. Raises: ValueError: If the dataset `name` is unknown. """ if name not in datasets_map: raise ValueError('Name of dataset unknown %s' % name) dataset = datasets_map[name].get_split( split_name, dataset_dir, file_pattern, reader) dataset.name = name if FLAGS.train_size and split_name == 'train': dataset.num_samples = FLAGS.train_size else: if FLAGS.validation_size: dataset.num_samples = FLAGS.validation_size return dataset
[ "def", "get_dataset", "(", "name", ",", "split_name", ",", "dataset_dir", ",", "file_pattern", "=", "None", ",", "reader", "=", "None", ")", ":", "if", "name", "not", "in", "datasets_map", ":", "raise", "ValueError", "(", "'Name of dataset unknown %s'", "%", "name", ")", "dataset", "=", "datasets_map", "[", "name", "]", ".", "get_split", "(", "split_name", ",", "dataset_dir", ",", "file_pattern", ",", "reader", ")", "dataset", ".", "name", "=", "name", "if", "FLAGS", ".", "train_size", "and", "split_name", "==", "'train'", ":", "dataset", ".", "num_samples", "=", "FLAGS", ".", "train_size", "else", ":", "if", "FLAGS", ".", "validation_size", ":", "dataset", ".", "num_samples", "=", "FLAGS", ".", "validation_size", "return", "dataset" ]
https://github.com/jerryli27/TwinGAN/blob/4e5593445778dfb77af9f815b3f4fcafc35758dc/datasets/dataset_factory.py#L61-L91
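A small usage sketch for `get_dataset`; the dataset name and directory are placeholders, and it assumes the split has been registered in `datasets_map`, converted to TFRecords, and that the project's TF FLAGS (`train_size`, `validation_size`) have been parsed.

```python
# Sketch: fetching a registered split (name and path are placeholders).
from datasets import dataset_factory

dataset = dataset_factory.get_dataset(
    name='celeba',                       # must be a key in datasets_map
    split_name='train',
    dataset_dir='/tmp/celeba_tfrecord')
print(dataset.name, dataset.num_samples)  # num_samples may be FLAGS.train_size
```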
googleapis/python-dialogflow
e48ea001b7c8a4a5c1fe4b162bad49ea397458e9
google/cloud/dialogflow_v2/services/documents/transports/grpc_asyncio.py
python
DocumentsGrpcAsyncIOTransport.operations_client
(self)
return self._operations_client
Create the client designed to process long-running operations. This property caches on the instance; repeated calls return the same client.
Create the client designed to process long-running operations.
[ "Create", "the", "client", "designed", "to", "process", "long", "-", "running", "operations", "." ]
def operations_client(self) -> operations_v1.OperationsAsyncClient: """Create the client designed to process long-running operations. This property caches on the instance; repeated calls return the same client. """ # Sanity check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( self.grpc_channel ) # Return the client from cache. return self._operations_client
[ "def", "operations_client", "(", "self", ")", "->", "operations_v1", ".", "OperationsAsyncClient", ":", "# Sanity check: Only create a new client if we do not already have one.", "if", "self", ".", "_operations_client", "is", "None", ":", "self", ".", "_operations_client", "=", "operations_v1", ".", "OperationsAsyncClient", "(", "self", ".", "grpc_channel", ")", "# Return the client from cache.", "return", "self", ".", "_operations_client" ]
https://github.com/googleapis/python-dialogflow/blob/e48ea001b7c8a4a5c1fe4b162bad49ea397458e9/google/cloud/dialogflow_v2/services/documents/transports/grpc_asyncio.py#L236-L249
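A brief sketch of the caching behaviour of `operations_client`, assuming application default credentials are available; the generated async client exposes the transport via its `transport` attribute.

```python
# Sketch: the operations client is built lazily, then served from cache.
import asyncio

from google.cloud import dialogflow_v2

async def demo():
    client = dialogflow_v2.DocumentsAsyncClient()
    ops = client.transport.operations_client          # first access creates it
    assert ops is client.transport.operations_client  # later accesses hit cache

asyncio.run(demo())
```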
pytorchbearer/torchbearer
9d97c60ec4deb37a0627311ddecb9c6f1429cd82
torchbearer/callbacks/printer.py
python
Tqdm.on_end_validation
(self, state)
Update the bar with the terminal validation metrics and then close. Args: state (dict): The :class:`.Trial` state
Update the bar with the terminal validation metrics and then close.
[ "Update", "the", "bar", "with", "the", "terminal", "validation", "metrics", "and", "then", "close", "." ]
def on_end_validation(self, state): """Update the bar with the terminal validation metrics and then close. Args: state (dict): The :class:`.Trial` state """ if not self._on_epoch: self._close(state)
[ "def", "on_end_validation", "(", "self", ",", "state", ")", ":", "if", "not", "self", ".", "_on_epoch", ":", "self", ".", "_close", "(", "state", ")" ]
https://github.com/pytorchbearer/torchbearer/blob/9d97c60ec4deb37a0627311ddecb9c6f1429cd82/torchbearer/callbacks/printer.py#L213-L220
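A short sketch showing where `on_end_validation` fires, with a toy model and random data standing in; `Tqdm(on_epoch=False)` matches the default mode, in which one bar is opened per phase and closed when that phase's metrics are final.

```python
# Sketch: a Trial with the Tqdm printer; on_end_validation closes the
# validation bar once its terminal metrics are in (on_epoch=False default).
import torch
from torchbearer import Trial
from torchbearer.callbacks import Tqdm

model = torch.nn.Linear(10, 2)
x, y = torch.rand(64, 10), torch.randint(2, (64,))
trial = Trial(model, torch.optim.SGD(model.parameters(), lr=0.1),
              torch.nn.CrossEntropyLoss(), metrics=['loss'],
              callbacks=[Tqdm(on_epoch=False)], verbose=0)
trial.with_train_data(x, y).with_val_data(x, y).run(epochs=1)
```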
clcarwin/sphereface_pytorch
e2f11f0b0f394cf75b9f65ce8d353e4390b0948c
matlab_cp2tform.py
python
findSimilarity
(uv, xy, options=None)
Function:
----------
    Find Reflective Similarity Transform Matrix 'trans':
        u = uv[:, 0]
        v = uv[:, 1]
        x = xy[:, 0]
        y = xy[:, 1]
        [x, y, 1] = [u, v, 1] * trans

Parameters:
----------
    @uv: Kx2 np.array
        source points, each row is a pair of coordinates (x, y)
    @xy: Kx2 np.array
        each row is a pair of transformed coordinates (x, y)
    @options: not used, keep it as None

Returns:
----------
    @trans: 3x3 np.array
        transform matrix from uv to xy
    @trans_inv: 3x3 np.array
        inverse of trans, transform matrix from xy to uv

Matlab:
----------
% The similarities are a superset of the nonreflective similarities as they may
% also include reflection.
%
% let sc = s*cos(theta)
% let ss = s*sin(theta)
%
%                   [ sc -ss
% [u v] = [x y 1] *   ss  sc
%                     tx  ty]
%
% OR
%
%                   [ sc  ss
% [u v] = [x y 1] *   ss -sc
%                     tx  ty]
%
% Algorithm:
% 1) Solve for trans1, a nonreflective similarity.
% 2) Reflect the xy data across the Y-axis,
%    and solve for trans2r, also a nonreflective similarity.
% 3) Transform trans2r to trans2, undoing the reflection done in step 2.
% 4) Use TFORMFWD to transform uv using both trans1 and trans2,
%    and compare the results, returning the transformation corresponding
%    to the smaller L2 norm.

% Need to reset options.K to prepare for calls to findNonreflectiveSimilarity.
% This is safe because we already checked that there are enough point pairs.
Function: ---------- Find Reflective Similarity Transform Matrix 'trans': u = uv[:, 0] v = uv[:, 1] x = xy[:, 0] y = xy[:, 1] [x, y, 1] = [u, v, 1] * trans
[ "Function", ":", "----------", "Find", "Reflective", "Similarity", "Transform", "Matrix", "trans", ":", "u", "=", "uv", "[", ":", "0", "]", "v", "=", "uv", "[", ":", "1", "]", "x", "=", "xy", "[", ":", "0", "]", "y", "=", "xy", "[", ":", "1", "]", "[", "x", "y", "1", "]", "=", "[", "u", "v", "1", "]", "*", "trans" ]
def findSimilarity(uv, xy, options=None):
    """
    Function:
    ----------
        Find Reflective Similarity Transform Matrix 'trans':
            u = uv[:, 0]
            v = uv[:, 1]
            x = xy[:, 0]
            y = xy[:, 1]
            [x, y, 1] = [u, v, 1] * trans

    Parameters:
    ----------
        @uv: Kx2 np.array
            source points, each row is a pair of coordinates (x, y)
        @xy: Kx2 np.array
            each row is a pair of transformed coordinates (x, y)
        @options: not used, keep it as None

    Returns:
    ----------
        @trans: 3x3 np.array
            transform matrix from uv to xy
        @trans_inv: 3x3 np.array
            inverse of trans, transform matrix from xy to uv

    Matlab:
    ----------
    % The similarities are a superset of the nonreflective similarities as they may
    % also include reflection.
    %
    % let sc = s*cos(theta)
    % let ss = s*sin(theta)
    %
    %                   [ sc -ss
    % [u v] = [x y 1] *   ss  sc
    %                     tx  ty]
    %
    % OR
    %
    %                   [ sc  ss
    % [u v] = [x y 1] *   ss -sc
    %                     tx  ty]
    %
    % Algorithm:
    % 1) Solve for trans1, a nonreflective similarity.
    % 2) Reflect the xy data across the Y-axis,
    %    and solve for trans2r, also a nonreflective similarity.
    % 3) Transform trans2r to trans2, undoing the reflection done in step 2.
    % 4) Use TFORMFWD to transform uv using both trans1 and trans2,
    %    and compare the results, returning the transformation corresponding
    %    to the smaller L2 norm.

    % Need to reset options.K to prepare for calls to findNonreflectiveSimilarity.
    % This is safe because we already checked that there are enough point pairs.
    """
    options = {'K': 2}

    # uv = np.array(uv)
    # xy = np.array(xy)

    # Solve for trans1
    trans1, trans1_inv = findNonreflectiveSimilarity(uv, xy, options)

    # Solve for trans2
    # manually reflect the xy data across the Y-axis
    # (copy first so the caller's array is not modified in place)
    xyR = xy.copy()
    xyR[:, 0] = -1 * xyR[:, 0]

    trans2r, trans2r_inv = findNonreflectiveSimilarity(uv, xyR, options)

    # manually reflect the tform to undo the reflection done on xyR
    TreflectY = np.array([
        [-1, 0, 0],
        [0, 1, 0],
        [0, 0, 1]
    ])
    trans2 = np.dot(trans2r, TreflectY)

    # Figure out if trans1 or trans2 is better
    xy1 = tformfwd(trans1, uv)
    norm1 = norm(xy1 - xy)

    xy2 = tformfwd(trans2, uv)
    norm2 = norm(xy2 - xy)

    if norm1 <= norm2:
        return trans1, trans1_inv
    else:
        trans2_inv = inv(trans2)
        return trans2, trans2_inv
[ "def", "findSimilarity", "(", "uv", ",", "xy", ",", "options", "=", "None", ")", ":", "options", "=", "{", "'K'", ":", "2", "}", "# uv = np.array(uv)", "# xy = np.array(xy)", "# Solve for trans1", "trans1", ",", "trans1_inv", "=", "findNonreflectiveSimilarity", "(", "uv", ",", "xy", ",", "options", ")", "# Solve for trans2", "# manually reflect the xy data across the Y-axis", "xyR", "=", "xy", "xyR", "[", ":", ",", "0", "]", "=", "-", "1", "*", "xyR", "[", ":", ",", "0", "]", "trans2r", ",", "trans2r_inv", "=", "findNonreflectiveSimilarity", "(", "uv", ",", "xyR", ",", "options", ")", "# manually reflect the tform to undo the reflection done on xyR", "TreflectY", "=", "np", ".", "array", "(", "[", "[", "-", "1", ",", "0", ",", "0", "]", ",", "[", "0", ",", "1", ",", "0", "]", ",", "[", "0", ",", "0", ",", "1", "]", "]", ")", "trans2", "=", "np", ".", "dot", "(", "trans2r", ",", "TreflectY", ")", "# Figure out if trans1 or trans2 is better", "xy1", "=", "tformfwd", "(", "trans1", ",", "uv", ")", "norm1", "=", "norm", "(", "xy1", "-", "xy", ")", "xy2", "=", "tformfwd", "(", "trans2", ",", "uv", ")", "norm2", "=", "norm", "(", "xy2", "-", "xy", ")", "if", "norm1", "<=", "norm2", ":", "return", "trans1", ",", "trans1_inv", "else", ":", "trans2_inv", "=", "inv", "(", "trans2", ")", "return", "trans2", ",", "trans2_inv" ]
https://github.com/clcarwin/sphereface_pytorch/blob/e2f11f0b0f394cf75b9f65ce8d353e4390b0948c/matlab_cp2tform.py#L340-L432
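A usage sketch with an exactly recoverable, nonreflective similarity; the coordinates are illustrative, and the round trip relies on `xy` no longer being mutated (the `.copy()` added above), since `norm1`/`norm2` are compared against `xy` after the reflection step.

```python
# Sketch: recovering a known similarity (scale 2, 90-degree rotation, shift).
import numpy as np
from matlab_cp2tform import findSimilarity, tformfwd

src = np.array([[10.0, 10.0], [30.0, 12.0], [20.0, 25.0]])
# Apply (x, y) -> (-2y + 60, 2x + 5) to build the destination points.
dst = np.array([[-2 * y + 60, 2 * x + 5] for x, y in src])

trans, trans_inv = findSimilarity(src, dst)
print(np.allclose(tformfwd(trans, src), dst))      # expect True
print(np.allclose(tformfwd(trans_inv, dst), src))  # expect True (round trip)
```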
nortikin/sverchok
7b460f01317c15f2681bfa3e337c5e7346f3711b
menu.py
python
SverchNodeItem.make_add_operator
(self)
Create an operator class which adds a specific type of node.
Tooltip (docstring) for that operator is copied from the node class docstring.
Create an operator class which adds a specific type of node. Tooltip (docstring) for that operator is copied from the node class docstring.
[ "Create", "an", "operator", "class", "which", "adds", "a", "specific", "type", "of", "node", ".", "Tooltip", "(", "docstring", ")", "for", "that", "operator", "is", "copied", "from", "the", "node", "class", "docstring", "." ]
def make_add_operator(self):
        """
        Create an operator class which adds a specific type of node.
        Tooltip (docstring) for that operator is copied from the node class docstring.
        """

        global node_add_operators

        class SverchNodeAddOperator(bl_operators.node.NodeAddOperator, bpy.types.Operator):
            """Wrapper for node.add_node operator to add specific node"""

            bl_idname = "node.sv_add_" + self.get_idname()
            bl_label = "Add {} node".format(self.label)
            bl_options = {'REGISTER', 'UNDO'}

            def execute(operator, context):
                # please not be confused: "operator" here references to
                # SverchNodeAddOperator instance, and "self" references to
                # SverchNodeItem instance.
                operator.use_transform = True
                operator.type = self.nodetype
                node = operator.create_node(context)
                apply_default_preset(node)
                return {'FINISHED'}

        node_class = self.get_node_class()
        SverchNodeAddOperator.__name__ = node_class.__name__

        if hasattr(node_class, "docstring"):
            SverchNodeAddOperator.__doc__ = node_class.docstring.get_tooltip()
        else:
            SverchNodeAddOperator.__doc__ = node_class.__doc__

        node_add_operators[self.get_idname()] = SverchNodeAddOperator
        bpy.utils.register_class(SverchNodeAddOperator)
[ "def", "make_add_operator", "(", "self", ")", ":", "global", "node_add_operators", "class", "SverchNodeAddOperator", "(", "bl_operators", ".", "node", ".", "NodeAddOperator", ",", "bpy", ".", "types", ".", "Operator", ")", ":", "\"\"\"Wrapper for node.add_node operator to add specific node\"\"\"", "bl_idname", "=", "\"node.sv_add_\"", "+", "self", ".", "get_idname", "(", ")", "bl_label", "=", "\"Add {} node\"", ".", "format", "(", "self", ".", "label", ")", "bl_options", "=", "{", "'REGISTER'", ",", "'UNDO'", "}", "def", "execute", "(", "operator", ",", "context", ")", ":", "# please not be confused: \"operator\" here references to", "# SverchNodeAddOperator instance, and \"self\" references to", "# SverchNodeItem instance.", "operator", ".", "use_transform", "=", "True", "operator", ".", "type", "=", "self", ".", "nodetype", "node", "=", "operator", ".", "create_node", "(", "context", ")", "apply_default_preset", "(", "node", ")", "return", "{", "'FINISHED'", "}", "node_class", "=", "self", ".", "get_node_class", "(", ")", "SverchNodeAddOperator", ".", "__name__", "=", "node_class", ".", "__name__", "if", "hasattr", "(", "node_class", ",", "\"docstring\"", ")", ":", "SverchNodeAddOperator", ".", "__doc__", "=", "node_class", ".", "docstring", ".", "get_tooltip", "(", ")", "else", ":", "SverchNodeAddOperator", ".", "__doc__", "=", "node_class", ".", "__doc__", "node_add_operators", "[", "self", ".", "get_idname", "(", ")", "]", "=", "SverchNodeAddOperator", "bpy", ".", "utils", ".", "register_class", "(", "SverchNodeAddOperator", ")" ]
https://github.com/nortikin/sverchok/blob/7b460f01317c15f2681bfa3e337c5e7346f3711b/menu.py#L229-L264
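A hedged, Blender-only sketch of consuming the generated operators; the `svlinenode` idname below is hypothetical, since the real ids are `"node.sv_add_" + get_idname()` for each registered node class.

```python
# Sketch (Blender-only): the generated operators live under bpy.ops.node,
# one per node class; "svlinenode" is a hypothetical example idname.
import bpy

class SvExampleMenu(bpy.types.Menu):
    bl_idname = "NODEVIEW_MT_sv_example"
    bl_label = "Example Sverchok adds"

    def draw(self, context):
        # Each button invokes the per-node add operator registered above.
        self.layout.operator("node.sv_add_svlinenode", text="Line")
```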