Schema of the records below (field, type, observed value lengths):

  nwo                 string    5 to 86 chars     (repository "name with owner")
  sha                 string    40 chars          (commit hash)
  path                string    4 to 189 chars    (file path within the repository)
  language            string    1 distinct value  ("python" in every record)
  identifier          string    1 to 94 chars     (function or method name)
  parameters          string    2 to 4.03k chars
  argument_list       string    1 distinct value
  return_statement    string    0 to 11.5k chars
  docstring           string    1 to 33.2k chars
  docstring_summary   string    0 to 5.15k chars
  docstring_tokens    sequence  (docstring split into tokens)
  function            string    34 to 151k chars
  function_tokens     sequence  (function source split into tokens)
  url                 string    90 to 278 chars   (GitHub permalink to the source lines)

Each record below is a structured entry: repository @ commit, file path and language, the function's qualified name and signature, the function source, and the permalink.
----------------------------------------------------------------------
Repository: catboost/catboost @ 167f64f237114a4d10b2b4ee42adb4569137debe
File:       contrib/tools/python3/src/Lib/smtplib.py (python)
Function:   SMTP.auth_cram_md5(self, challenge=None)

def auth_cram_md5(self, challenge=None):
    """Authobject to use with CRAM-MD5 authentication. Requires self.user
    and self.password to be set."""
    # CRAM-MD5 does not support initial-response.
    if challenge is None:
        return None
    return self.user + " " + hmac.HMAC(
        self.password.encode('ascii'), challenge, 'md5').hexdigest()

Source: https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/smtplib.py#L664-L671
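The method above builds the SMTP AUTH CRAM-MD5 response, "<user> <hex HMAC-MD5 of the server challenge>". A minimal standalone sketch of that arithmetic, assuming only the stdlib hmac module; the credentials and challenge are hypothetical, not from the source:

import hmac

user, password = "alice", "secret"          # hypothetical credentials
challenge = b"<12345.67890@mail.example>"   # hypothetical server challenge
response = user + " " + hmac.HMAC(
    password.encode("ascii"), challenge, "md5").hexdigest()
print(response)  # "alice" followed by 32 hex digits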
----------------------------------------------------------------------
Repository: albertz/openlierox @ d316c14a8eb57848ef56e9bfa7b23a56f694a51b
File:       tools/DedicatedServerVideo/gdata/tlslite/TLSRecordLayer.py (python)
Function:   TLSRecordLayer.getCipherImplementation(self)

def getCipherImplementation(self):
    """Get the name of the cipher implementation used with this
    connection.

    @rtype: str
    @return: The name of the cipher implementation used with this
        connection.  Either 'python', 'cryptlib', 'openssl', or
        'pycrypto'.
    """
    if not self._writeState.encContext:
        return None
    return self._writeState.encContext.implementation

Source: https://github.com/albertz/openlierox/blob/d316c14a8eb57848ef56e9bfa7b23a56f694a51b/tools/DedicatedServerVideo/gdata/tlslite/TLSRecordLayer.py#L354-L365
----------------------------------------------------------------------
Repository: francinexue/xuefu @ b6ff79747a42e020588c0c0a921048e08fe4680c
File:       ctpx/ctp2/ctptd.py (python)
Function:   CtpTd.onRspQryBrokerTradingAlgos(self, BrokerTradingAlgosField, RspInfoField, requestId, final)

def onRspQryBrokerTradingAlgos(self, BrokerTradingAlgosField, RspInfoField, requestId, final):
    """请求查询经纪公司交易算法响应"""
    # Translation: response to the "query broker trading algorithms" request.
    pass

Source: https://github.com/francinexue/xuefu/blob/b6ff79747a42e020588c0c0a921048e08fe4680c/ctpx/ctp2/ctptd.py#L451-L453
----------------------------------------------------------------------
Repository: may0324/DeepCompression-caffe @ 0aff6c1287bda4cfc7f378ed8a16524e1afabd8c
File:       python/caffe/draw.py (python)
Function:   choose_color_by_layertype(layertype)

def choose_color_by_layertype(layertype):
    """Define colors for nodes based on the layer type."""
    color = '#6495ED'  # Default
    if layertype == 'Convolution' or layertype == 'Deconvolution':
        color = '#FF5050'
    elif layertype == 'Pooling':
        color = '#FF9900'
    elif layertype == 'InnerProduct':
        color = '#CC33FF'
    return color

Source: https://github.com/may0324/DeepCompression-caffe/blob/0aff6c1287bda4cfc7f378ed8a16524e1afabd8c/python/caffe/draw.py#L117-L127
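choose_color_by_layertype is a pure mapping, so a quick standalone check of the dispatch is enough to see the behavior (given the function above):

for lt in ("Convolution", "Deconvolution", "Pooling", "InnerProduct", "ReLU"):
    print(lt, choose_color_by_layertype(lt))
# Convolution and Deconvolution -> '#FF5050', Pooling -> '#FF9900',
# InnerProduct -> '#CC33FF', anything else -> the '#6495ED' default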
----------------------------------------------------------------------
Repository: wlanjie/AndroidFFmpeg @ 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
File:       tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/codecs.py (python)
Function:   IncrementalDecoder.reset(self)

def reset(self):
    """
    Resets the decoder to the initial state.
    """

Source: https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/codecs.py#L251-L254
----------------------------------------------------------------------
Repository: aws/lumberyard @ f85344403c1c2e77ec8c75deb2c116e97b713217
File:       dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/fsspec/utils.py (python)
Function:   stringify_path(filepath)

def stringify_path(filepath):
    """Attempt to convert a path-like object to a string.

    Parameters
    ----------
    filepath: object to be converted

    Returns
    -------
    filepath_str: maybe a string version of the object

    Notes
    -----
    Objects supporting the fspath protocol (Python 3.6+) are coerced
    according to its __fspath__ method.

    For backwards compatibility with older Python version, pathlib.Path
    objects are specially coerced.

    Any other object is passed through unchanged, which includes bytes,
    strings, buffers, or anything else that's not even path-like.
    """
    if hasattr(filepath, "__fspath__"):
        return filepath.__fspath__()
    elif isinstance(filepath, pathlib.Path):
        return str(filepath)
    return filepath

Source: https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/fsspec/utils.py#L281-L307
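A short usage sketch of the coercion rules above (given the function and an import of pathlib): objects with __fspath__ and pathlib.Path become str, everything else passes through unchanged.

import pathlib

print(stringify_path(pathlib.Path("/tmp") / "data.csv"))  # '/tmp/data.csv'
print(stringify_path("already-a-string"))                 # unchanged
print(stringify_path(b"/tmp/data.csv"))                   # bytes pass through unchanged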
----------------------------------------------------------------------
Repository: psi4/psi4 @ be533f7f426b6ccc263904e55122899b16663395
File:       psi4/driver/procrouting/proc.py (python)
Function:   run_scf_gradient(name, **kwargs)

def run_scf_gradient(name, **kwargs):
    """Function encoding sequence of PSI module calls for
    a SCF gradient calculation.
    """
    dft_func = False
    if "dft_functional" in kwargs:
        dft_func = True

    optstash = proc_util.scf_set_reference_local(name, is_dft=dft_func)

    # Bypass the scf call if a reference wavefunction is given
    ref_wfn = kwargs.get('ref_wfn', None)
    if ref_wfn is None:
        ref_wfn = run_scf(name, **kwargs)

    if core.get_option('SCF', 'REFERENCE') in ['ROHF', 'CUHF']:
        ref_wfn.semicanonicalize()

    if hasattr(ref_wfn, "_disp_functor"):
        disp_grad = ref_wfn._disp_functor.compute_gradient(ref_wfn.molecule(), ref_wfn)
        ref_wfn.set_variable("-D Gradient", disp_grad)

    grad = core.scfgrad(ref_wfn)

    if ref_wfn.basisset().has_ECP():
        core.print_out("\n\n ==> Adding ECP gradient terms (computed numerically) <==\n")
        # Build a map of atom->ECP number
        old_print = ref_wfn.get_print()
        ref_wfn.set_print(0)
        delta = 0.0001
        natom = ref_wfn.molecule().natom()
        mints = core.MintsHelper(ref_wfn)
        ecpgradmat = core.Matrix("ECP Gradient", natom, 3)
        ecpgradmat.zero()
        ecpgrad = np.asarray(ecpgradmat)
        Dmat = ref_wfn.Da_subset("AO")
        Dmat.add(ref_wfn.Db_subset("AO"))

        def displaced_energy(atom, displacement):
            mints.basisset().move_atom(atom, displacement)
            E = Dmat.vector_dot(mints.ao_ecp())
            mints.basisset().move_atom(atom, -1 * displacement)
            return E

        for atom in range(natom):
            for xyz in range(3):
                transvec = core.Vector3(0.0)
                transvec[xyz] += delta
                # +1 displacement
                Ep1 = displaced_energy(atom, 1 * transvec)
                # -1 displacement
                Em1 = displaced_energy(atom, -1 * transvec)
                # +2 displacement
                Ep2 = displaced_energy(atom, 2 * transvec)
                # -2 displacement
                Em2 = displaced_energy(atom, -2 * transvec)
                # Evaluate
                ecpgrad[atom, xyz] = (Em2 + 8 * Ep1 - 8 * Em1 - Ep2) / (12 * delta)
        ecpgradmat.symmetrize_gradient(ref_wfn.molecule())
        ecpgradmat.print_atom_vector()
        grad.add(ecpgradmat)
        grad.print_atom_vector()
        ref_wfn.set_print(old_print)

    ref_wfn.set_gradient(grad)
    ref_wfn.set_variable("SCF TOTAL GRADIENT", grad)  # P::e SCF
    if ref_wfn.functional().needs_xc():
        ref_wfn.set_variable("DFT TOTAL GRADIENT", grad)  # overwritten later for DH -- TODO when DH gradients  # P::e SCF
    else:
        ref_wfn.set_variable("HF TOTAL GRADIENT", grad)  # P::e SCF

    # Shove variables into global space
    for k, v in ref_wfn.variables().items():
        core.set_variable(k, v)

    optstash.restore()
    return ref_wfn

Source: https://github.com/psi4/psi4/blob/be533f7f426b6ccc263904e55122899b16663395/psi4/driver/procrouting/proc.py#L2475-L2553
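The ECP block above differentiates the ECP energy numerically with the standard fourth-order central-difference stencil, f'(x) ~ (f(x-2h) + 8 f(x+h) - 8 f(x-h) - f(x+2h)) / (12 h), the same combination as the ecpgrad[atom, xyz] update. A self-contained sanity check of the stencil on sin/cos:

import math

def stencil(f, x, h=1e-4):
    # Same coefficients as (Em2 + 8*Ep1 - 8*Em1 - Ep2) / (12*delta) above
    return (f(x - 2*h) + 8*f(x + h) - 8*f(x - h) - f(x + 2*h)) / (12*h)

print(abs(stencil(math.sin, 1.0) - math.cos(1.0)))  # tiny: truncation error is O(h**4)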
----------------------------------------------------------------------
Repository: giuspen/cherrytree @ 84712f206478fcf9acf30174009ad28c648c6344
File:       pygtk2/modules/core.py (python)
Function:   CherryTree.anchor_edit_dialog(self, pixbuf, iter_insert, iter_bound=None)

def anchor_edit_dialog(self, pixbuf, iter_insert, iter_bound=None):
    """Anchor Edit Dialog"""
    if "anchor" in dir(pixbuf):
        anchor_curr_name = pixbuf.anchor
        dialog_title = _("Edit Anchor")
    else:
        anchor_curr_name = ""
        dialog_title = _("Insert Anchor")
    ret_anchor_name = support.dialog_img_n_entry(self.window, dialog_title, anchor_curr_name, "anchor")
    if not ret_anchor_name:
        return
    pixbuf.anchor = ret_anchor_name
    if iter_bound != None:  # only in case of modify
        image_justification = self.state_machine.get_iter_alignment(iter_insert)
        image_offset = iter_insert.get_offset()
        self.curr_buffer.delete(iter_insert, iter_bound)
        iter_insert = self.curr_buffer.get_iter_at_offset(image_offset)
    else:
        image_justification = None
    self.image_insert(iter_insert, pixbuf, image_justification)

Source: https://github.com/giuspen/cherrytree/blob/84712f206478fcf9acf30174009ad28c648c6344/pygtk2/modules/core.py#L3830-L3847
----------------------------------------------------------------------
Repository: wlanjie/AndroidFFmpeg @ 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
File:       tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/bsddb/dbtables.py (python)
Function:   bsdTableDB.ListTableColumns(self, table)

def ListTableColumns(self, table):
    """Return a list of columns in the given table.
    [] if the table doesn't exist.
    """
    assert isinstance(table, str)
    if contains_metastrings(table):
        raise ValueError, "bad table name: contains reserved metastrings"

    columnlist_key = _columns_key(table)
    if not getattr(self.db, "has_key")(columnlist_key):
        return []
    pickledcolumnlist = getattr(self.db, "get_bytes", self.db.get)(columnlist_key)
    if pickledcolumnlist:
        return pickle.loads(pickledcolumnlist)
    else:
        return []

Source: https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/bsddb/dbtables.py#L357-L373
----------------------------------------------------------------------
Repository: CRYTEK/CRYENGINE @ 232227c59a220cbbd311576f0fbeba7bb53b2a8c
File:       Code/Tools/waf-1.7.13/crywaflib/compile_settings_windows.py (python)
Function:   load_release_windows_settings(conf)

def load_release_windows_settings(conf):
    """
    Setup all compiler and linker settings shared over all windows
    configurations for the 'debug' configuration
    """
    v = conf.env
    conf.load_windows_common_settings()

Source: https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Code/Tools/waf-1.7.13/crywaflib/compile_settings_windows.py#L50-L56
----------------------------------------------------------------------
Repository: mantidproject/mantid @ 03deeb89254ec4289edb8771e0188c2090a02f32
File:       tools/Pylint/run_pylint.py (python)
Function:   create_dir_if_required(path)

def create_dir_if_required(path):
    """
    Create the given directory if it doesn't exist

    Arguments:
      path (str): Absolute path to a directory
    """
    if path and not os.path.exists(path):
        os.makedirs(path)

Source: https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/tools/Pylint/run_pylint.py#L230-L238
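On Python 3.2+ the same effect is a one-liner; an aside, not how the tool is written, and it also closes the race between the exists() check and the create:

import os

os.makedirs("some/output/dir", exist_ok=True)  # illustrative path; no error if it already exists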
----------------------------------------------------------------------
Repository: catboost/catboost @ 167f64f237114a4d10b2b4ee42adb4569137debe
File:       contrib/python/pandas/py2/pandas/core/indexes/base.py (python)
Function:   Index._reset_identity(self)

def _reset_identity(self):
    """
    Initializes or resets ``_id`` attribute with new object.
    """
    self._id = _Identity()
    return self

Source: https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/indexes/base.py#L633-L638
----------------------------------------------------------------------
Repository: mapsme/omim @ 1892903b63f2c85b16ed4966d21fe76aba06b9ba
File:       tools/python/stylesheet/webcolors/webcolors.py (python)
Function:   rgb_to_rgb_percent(rgb_triplet)

def rgb_to_rgb_percent(rgb_triplet):
    """
    Convert a 3-tuple of integers, suitable for use in an ``rgb()``
    color triplet, to a 3-tuple of percentages suitable for use in
    representing that color.

    This function makes some trade-offs in terms of the accuracy of the
    final representation; for some common integer values, special-case
    logic is used to ensure a precise result (e.g., integer 128 will
    always convert to '50%', integer 32 will always convert to '12.5%'),
    but for all other values a standard Python ``float`` is used and
    rounded to two decimal places, which may result in a loss of
    precision for some values.

    Examples:

    >>> rgb_to_rgb_percent((255, 255, 255))
    ('100%', '100%', '100%')
    >>> rgb_to_rgb_percent((0, 0, 128))
    ('0%', '0%', '50%')
    >>> rgb_to_rgb_percent((33, 56, 192))
    ('12.94%', '21.96%', '75.29%')
    >>> rgb_to_rgb_percent((64, 32, 16))
    ('25%', '12.5%', '6.25%')

    """
    # In order to maintain precision for common values,
    # 256 / 2**n is special-cased for values of n
    # from 0 through 4, as well as 0 itself.
    specials = {255: '100%', 128: '50%', 64: '25%',
                32: '12.5%', 16: '6.25%', 0: '0%'}
    return tuple(map(lambda d: specials.get(d, '%.02f%%' % ((d / 255.0) * 100)),
                     rgb_triplet))

Source: https://github.com/mapsme/omim/blob/1892903b63f2c85b16ed4966d21fe76aba06b9ba/tools/python/stylesheet/webcolors/webcolors.py#L722-L754
----------------------------------------------------------------------
Repository: BVLC/caffe @ 9b891540183ddc834a02b2bd81b31afae71b2153
File:       scripts/cpp_lint.py (python)
Function:   RemoveMultiLineCommentsFromRange(lines, begin, end)

def RemoveMultiLineCommentsFromRange(lines, begin, end):
    """Clears a range of lines for multi-line comments."""
    # Having // dummy comments makes the lines non-empty, so we will not get
    # unnecessary blank line warnings later in the code.
    for i in range(begin, end):
        lines[i] = '// dummy'

Source: https://github.com/BVLC/caffe/blob/9b891540183ddc834a02b2bd81b31afae71b2153/scripts/cpp_lint.py#L1147-L1152
----------------------------------------------------------------------
Repository: aws/lumberyard @ f85344403c1c2e77ec8c75deb2c116e97b713217
File:       dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/lib/nanfunctions.py (python)
Function:   nanprod(a, axis=None, dtype=None, out=None, keepdims=np._NoValue)

def nanprod(a, axis=None, dtype=None, out=None, keepdims=np._NoValue):
    """
    Return the product of array elements over a given axis treating Not a
    Numbers (NaNs) as ones.

    One is returned for slices that are all-NaN or empty.

    .. versionadded:: 1.10.0

    Parameters
    ----------
    a : array_like
        Array containing numbers whose product is desired. If `a` is not an
        array, a conversion is attempted.
    axis : {int, tuple of int, None}, optional
        Axis or axes along which the product is computed. The default is to
        compute the product of the flattened array.
    dtype : data-type, optional
        The type of the returned array and of the accumulator in which the
        elements are summed.  By default, the dtype of `a` is used.  An
        exception is when `a` has an integer type with less precision than
        the platform (u)intp. In that case, the default will be either
        (u)int32 or (u)int64 depending on whether the platform is 32 or 64
        bits. For inexact inputs, dtype must be inexact.
    out : ndarray, optional
        Alternate output array in which to place the result.  The default
        is ``None``. If provided, it must have the same shape as the
        expected output, but the type will be cast if necessary. See
        `ufuncs-output-type` for more details. The casting of NaN to integer
        can yield unexpected results.
    keepdims : bool, optional
        If True, the axes which are reduced are left in the result as
        dimensions with size one. With this option, the result will
        broadcast correctly against the original `arr`.

    Returns
    -------
    nanprod : ndarray
        A new array holding the result is returned unless `out` is
        specified, in which case it is returned.

    See Also
    --------
    numpy.prod : Product across array propagating NaNs.
    isnan : Show which elements are NaN.

    Examples
    --------
    >>> np.nanprod(1)
    1
    >>> np.nanprod([1])
    1
    >>> np.nanprod([1, np.nan])
    1.0
    >>> a = np.array([[1, 2], [3, np.nan]])
    >>> np.nanprod(a)
    6.0
    >>> np.nanprod(a, axis=0)
    array([3., 2.])

    """
    a, mask = _replace_nan(a, 1)
    return np.prod(a, axis=axis, dtype=dtype, out=out, keepdims=keepdims)

Source: https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/lib/nanfunctions.py#L658-L720
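The body shown delegates NaN handling to a _replace_nan(a, 1) helper. A minimal self-contained sketch of that idea (assumed behavior, not the actual numpy internals): replace each NaN with the multiplicative identity, then run the ordinary reduction.

import numpy as np

def nanprod_sketch(a, axis=None):
    a = np.asanyarray(a, dtype=float)
    filled = np.where(np.isnan(a), 1.0, a)  # 1 is the identity for a product
    return np.prod(filled, axis=axis)

print(nanprod_sketch([[1, 2], [3, np.nan]]))          # 6.0
print(nanprod_sketch([[1, 2], [3, np.nan]], axis=0))  # [3. 2.]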
----------------------------------------------------------------------
Repository: wxWidgets/wxPython-Classic @ 19571e1ae65f1ac445f5491474121998c97a1bf0
File:       src/msw/glcanvas.py (python)
Function:   GLCanvas.SetupPalette(*args, **kwargs)

def SetupPalette(*args, **kwargs):
    """SetupPalette(self, Palette palette)"""
    return _glcanvas.GLCanvas_SetupPalette(*args, **kwargs)

Source: https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/glcanvas.py#L142-L144
----------------------------------------------------------------------
Repository: ChromiumWebApps/chromium @ c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
File:       tools/telemetry/third_party/pyserial/serial/serialutil.py (python)
Function:   SerialBase.getBaudrate(self)

def getBaudrate(self):
    """Get the current baud rate setting."""
    return self._baudrate

Source: https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/telemetry/third_party/pyserial/serial/serialutil.py#L347-L349
----------------------------------------------------------------------
Repository: benoitsteiner/tensorflow-opencl @ cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
File:       tensorflow/python/training/saver.py (python)
Function:   Saver.set_last_checkpoints_with_time(self, last_checkpoints_with_time)

def set_last_checkpoints_with_time(self, last_checkpoints_with_time):
    """Sets the list of old checkpoint filenames and timestamps.

    Args:
      last_checkpoints_with_time: A list of tuples of checkpoint filenames
        and timestamps.

    Raises:
      AssertionError: If last_checkpoints_with_time is not a list.
    """
    assert isinstance(last_checkpoints_with_time, list)
    self._last_checkpoints = last_checkpoints_with_time

Source: https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/training/saver.py#L1464-L1475
----------------------------------------------------------------------
Repository: catboost/catboost @ 167f64f237114a4d10b2b4ee42adb4569137debe
File:       contrib/python/scipy/py2/scipy/io/mmio.py (python)
Function:   mmwrite(target, a, comment='', field=None, precision=None, symmetry=None)

def mmwrite(target, a, comment='', field=None, precision=None, symmetry=None):
    """
    Writes the sparse or dense array `a` to Matrix Market file-like `target`.

    Parameters
    ----------
    target : str or file-like
        Matrix Market filename (extension .mtx) or open file-like object.
    a : array like
        Sparse or dense 2D array.
    comment : str, optional
        Comments to be prepended to the Matrix Market file.
    field : None or str, optional
        Either 'real', 'complex', 'pattern', or 'integer'.
    precision : None or int, optional
        Number of digits to display for real or complex values.
    symmetry : None or str, optional
        Either 'general', 'symmetric', 'skew-symmetric', or 'hermitian'.
        If symmetry is None the symmetry type of 'a' is determined by its
        values.
    """
    MMFile().write(target, a, comment, field, precision, symmetry)

Source: https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/io/mmio.py#L80-L101
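A hedged usage sketch of the wrapper above; the file name and matrix are illustrative:

import numpy as np
from scipy.io import mmwrite

a = np.array([[1.0, 0.0],
              [0.0, 2.0]])
mmwrite("diag_example.mtx", a, comment="2x2 diagonal example", precision=3)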
----------------------------------------------------------------------
Repository: miyosuda/TensorFlowAndroidMNIST @ 7b5a4603d2780a8a2834575706e9001977524007
File:       jni-build/jni/include/tensorflow/python/summary/event_accumulator.py (python)
Function:   _Remap(x, x0, x1, y0, y1)

def _Remap(x, x0, x1, y0, y1):
    """Linearly map from [x0, x1] unto [y0, y1]."""
    return y0 + (x - x0) * float(y1 - y0) / (x1 - x0)

Source: https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/python/summary/event_accumulator.py#L661-L663
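_Remap is the usual linear interpolation, y = y0 + (x - x0) * (y1 - y0) / (x1 - x0); two quick checks given the function above:

assert _Remap(5, 0, 10, 0, 100) == 50.0  # midpoint maps to midpoint
assert _Remap(0, 0, 10, -1, 1) == -1.0   # endpoints map to endpoints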
----------------------------------------------------------------------
Repository: facebookincubator/BOLT @ 88c70afe9d388ad430cc150cc158641701397f70
File:       mlir/python/mlir/dialects/linalg/opdsl/ops/core_named_ops.py (python)
Function:   conv_1d_nwc_wcf (full signature in the source below)

def conv_1d_nwc_wcf(
    I=TensorDef(T1, S.N, S.OW * S.SW + S.KW * S.DW, S.C),
    K=TensorDef(T2, S.KW, S.C, S.F),
    O=TensorDef(U, S.N, S.OW, S.F, output=True),
    strides=IndexAttrDef(S.SW),
    dilations=IndexAttrDef(S.DW)):
    """Performs 1-D convolution.

    Numeric casting is performed on the operands to the inner multiply,
    promoting them to the same data type as the accumulator/output.
    """
    implements(ConvolutionOpInterface)
    domain(D.n, D.ow, D.f, D.kw, D.c)
    O[D.n, D.ow, D.f] += TypeFn.cast(
        U, I[D.n, D.ow * S.SW + D.kw * S.DW, D.c]) * TypeFn.cast(
            U, K[D.kw, D.c, D.f])

Source: https://github.com/facebookincubator/BOLT/blob/88c70afe9d388ad430cc150cc158641701397f70/mlir/python/mlir/dialects/linalg/opdsl/ops/core_named_ops.py#L223-L238
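A reference rendering of the op's index arithmetic in plain NumPy, a sketch of the semantics rather than the MLIR opdsl machinery: for stride SW and dilation DW, O[n, ow, f] += I[n, ow*SW + kw*DW, c] * K[kw, c, f].

import numpy as np

def conv_1d_nwc_wcf_ref(I, K, SW=1, DW=1):
    N, IW, C = I.shape
    KW, _, F = K.shape
    OW = (IW - (KW - 1) * DW - 1) // SW + 1
    O = np.zeros((N, OW, F), dtype=I.dtype)
    for ow in range(OW):
        for kw in range(KW):
            # (N, C) @ (C, F) accumulates an (N, F) slab for this output column
            O[:, ow, :] += I[:, ow * SW + kw * DW, :] @ K[kw, :, :]
    return O

I = np.arange(2 * 8 * 3, dtype=float).reshape(2, 8, 3)
K = np.ones((3, 3, 4))
print(conv_1d_nwc_wcf_ref(I, K, SW=2, DW=1).shape)  # (2, 3, 4)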
----------------------------------------------------------------------
Repository: apiaryio/snowcrash @ b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3
File:       tools/gyp/pylib/gyp/msvs_emulation.py (python)
Function:   MsvsSettings.HasExplicitAsmRules(self, spec)

def HasExplicitAsmRules(self, spec):
    """Determine if there's an explicit rule for asm files. When there isn't we
    need to generate implicit rules to assemble .asm files."""
    return self._HasExplicitRuleForExtension(spec, 'asm')

Source: https://github.com/apiaryio/snowcrash/blob/b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3/tools/gyp/pylib/gyp/msvs_emulation.py#L835-L838
----------------------------------------------------------------------
Repository: turi-code/SFrame @ 796b9bdfb2fa1b881d82080754643c7e68629cd2
File:       oss_src/unity/python/sframe/util/cloudpickle.py (python)
Function:   CloudPickler.save_function(self, obj, name=None)

def save_function(self, obj, name=None):
    """ Registered with the dispatch to handle all function types.

    Determines what kind of function obj is (e.g. lambda, defined at
    interactive prompt, etc) and handles the pickling appropriately.
    """
    write = self.write

    if name is None:
        name = obj.__name__
    modname = pickle.whichmodule(obj, name)
    # print('which gives %s %s %s' % (modname, obj, name))
    try:
        themodule = sys.modules[modname]
    except KeyError:
        # eval'd items such as namedtuple give invalid items for their function __module__
        modname = '__main__'

    if modname == '__main__':
        themodule = None

    if themodule:
        self.modules.add(themodule)
        if getattr(themodule, name, None) is obj:
            return self.save_global(obj, name)

    # if func is lambda, def'ed at prompt, is in main, or is nested, then
    # we'll pickle the actual function object rather than simply saving a
    # reference (as is done in default pickler), via save_function_tuple.
    if islambda(obj) or obj.__code__.co_filename == '<stdin>' or themodule is None:
        #print("save global", islambda(obj), obj.__code__.co_filename, modname, themodule)
        self.save_function_tuple(obj)
        return
    else:
        # func is nested
        klass = getattr(themodule, name, None)
        if klass is None or klass is not obj:
            self.save_function_tuple(obj)
            return

    if obj.__dict__:
        # essentially save_reduce, but workaround needed to avoid recursion
        self.save(_restore_attr)
        write(pickle.MARK + pickle.GLOBAL + modname + '\n' + name + '\n')
        self.memoize(obj)
        self.save(obj.__dict__)
        write(pickle.TUPLE + pickle.REDUCE)
    else:
        write(pickle.GLOBAL + modname + '\n' + name + '\n')
        self.memoize(obj)

Source: https://github.com/turi-code/SFrame/blob/796b9bdfb2fa1b881d82080754643c7e68629cd2/oss_src/unity/python/sframe/util/cloudpickle.py#L162-L211
----------------------------------------------------------------------
Repository: SFTtech/openage @ d6a08c53c48dc1e157807471df92197f6ca9e04d
File:       openage/convert/service/debug_info.py (python)
Function:   debug_game_version(debugdir, loglevel, args)

def debug_game_version(debugdir, loglevel, args):
    """
    Create debug output for the detected game version.

    :param debugdir: Output directory for the debug info.
    :type debugdir: Directory
    :param loglevel: Determines how detailed the output is.
    :type loglevel: int
    :param args: CLI arguments.
    :type args: Namespace
    """
    if loglevel < 2:
        return

    # Log game version
    logfile = debugdir.joinpath("init/")["game_version"]
    logtext = ""

    logtext += (
        f"game edition:\n"
        f" - {args.game_version[0]}\n"
    )

    if len(args.game_version[1]) > 0:
        logtext += "game expansions:\n"
        for expansion in args.game_version[1]:
            logtext += f" - {expansion}\n"
    else:
        logtext += "game expansions: none detected"

    with logfile.open("w") as log:
        log.write(logtext)

Source: https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/convert/service/debug_info.py#L54-L86
----------------------------------------------------------------------
Repository: hanpfei/chromium-net @ 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
File:       third_party/catapult/third_party/mapreduce/mapreduce/shuffler.py (python)
Function:   _MergingReader.from_json(cls, json)

def from_json(cls, json):
    """Restore reader from json state."""
    return cls(json["offsets"],
               json["max_values_count"],
               json["max_values_size"])

Source: https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/mapreduce/mapreduce/shuffler.py#L373-L377
----------------------------------------------------------------------
Repository: Xilinx/Vitis-AI @ fc74d404563d9951b57245443c73bef389f3657f
File:       tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/eager/wrap_function.py (python)
Function:   _lift_single_variable(old_variable, graph, variable_holder)

def _lift_single_variable(old_variable, graph, variable_holder):
    """Lifts `old_variable` out of the `FuncGraph` `graph`."""
    new_variable = resource_variable_ops.UninitializedVariable(
        shape=old_variable.shape,
        dtype=old_variable.dtype,
        name=old_variable.op.name,
        trainable=old_variable.trainable,
        extra_handle_data=old_variable.handle)
    new_variable._initializer_op = old_variable._initializer_op  # pylint: disable=protected-access
    graph.add_capture(new_variable.handle, old_variable.handle)
    # Now that we've added the new variable to graph.captures,
    # graph.capture will use that cached value and do some post-processing
    # on the capture like recording it on the tape.
    graph.capture(new_variable.handle)
    # pylint: disable=protected-access
    variable_name = new_variable.name.split(":")[0]
    variable_holder._variables_by_name[variable_name] = new_variable
    graph._weak_variables.append(weakref.ref(new_variable))
    # pylint: enable=protected-access
    graph.watch_variable(new_variable)
    return new_variable

Source: https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/eager/wrap_function.py#L122-L142
----------------------------------------------------------------------
Repository: vslavik/poedit @ f7a9daa0a10037e090aa0a86f5ce0f24ececdf6a
File:       deps/boost/tools/build/src/build/generators.py (python)
Function:   Generator.target_types(self)

def target_types(self):
    """ Returns the list of target types that this generator produces.
        It is assumed to be always the same -- i.e. it cannot change depending
        list of sources.
    """
    return self.target_types_

Source: https://github.com/vslavik/poedit/blob/f7a9daa0a10037e090aa0a86f5ce0f24ececdf6a/deps/boost/tools/build/src/build/generators.py#L286-L291
----------------------------------------------------------------------
Repository: windystrife/UnrealEngine_NVIDIAGameWorks @ b50e6338a7c5b26374d66306ebc7807541ff815e
File:       Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/distutils/command/config.py (python)
Function:   dump_file(filename, head=None)

def dump_file(filename, head=None):
    """Dumps a file content into log.info.

    If head is not None, will be dumped before the file content.
    """
    if head is None:
        log.info('%s' % filename)
    else:
        log.info(head)
    file = open(filename)
    try:
        log.info(file.read())
    finally:
        file.close()

Source: https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/distutils/command/config.py#L344-L357
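The try/finally pairing predates routine use of context managers; the same dump written with `with`, substituting the stdlib logging module for distutils' `log` so the sketch stands alone:

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)

def dump_file(filename, head=None):
    """Dump a file's content into log.info, preceded by `head` if given."""
    log.info(head if head is not None else filename)
    with open(filename) as f:  # replaces the explicit try/finally close()
        log.info(f.read())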
----------------------------------------------------------------------
Repository: NVIDIA/DALI @ bf16cc86ba8f091b145f91962f21fe1b6aff243d
File:       docs/examples/use_cases/tensorflow/efficientdet/pipeline/anchors_utils/argmax_matcher.py (python)
Function:   ArgMaxMatcher._set_values_using_indicator(self, x, indicator, val)

def _set_values_using_indicator(self, x, indicator, val):
    """Set the indicated fields of x to val.

    Args:
      x: tensor.
      indicator: boolean with same shape as x.
      val: scalar with value to set.

    Returns:
      modified tensor.
    """
    indicator = tf.cast(indicator, x.dtype)
    return x * (1 - indicator) + val * indicator

Source: https://github.com/NVIDIA/DALI/blob/bf16cc86ba8f091b145f91962f21fe1b6aff243d/docs/examples/use_cases/tensorflow/efficientdet/pipeline/anchors_utils/argmax_matcher.py#L206-L218
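The update is a branch-free blend, x * (1 - indicator) + val * indicator. The same arithmetic demonstrated with NumPy so the sketch runs without TensorFlow:

import numpy as np

x = np.array([1.0, 2.0, 3.0, 4.0])
indicator = np.array([False, True, False, True]).astype(x.dtype)  # cast, as in the method
val = -1.0
print(x * (1 - indicator) + val * indicator)  # [ 1. -1.  3. -1.]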
----------------------------------------------------------------------
Repository: alibaba/weex_js_engine @ 2bdf4b6f020c1fc99c63f649718f6faf7e27fdde
File:       jni/v8core/v8/build/gyp/pylib/gyp/generator/android.py (python)
Function:   AndroidMkWriter.NormalizeLdFlags(self, ld_flags)

def NormalizeLdFlags(self, ld_flags):
    """ Clean up ldflags from gyp file.
    Remove any ldflags that contain android_top_dir.

    Args:
      ld_flags: ldflags from gyp files.

    Returns:
      clean ldflags
    """
    clean_ldflags = []
    for flag in ld_flags:
        if self.android_top_dir in flag:
            continue
        clean_ldflags.append(flag)
    return clean_ldflags

Source: https://github.com/alibaba/weex_js_engine/blob/2bdf4b6f020c1fc99c63f649718f6faf7e27fdde/jni/v8core/v8/build/gyp/pylib/gyp/generator/android.py#L672-L687
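The loop is a plain filter; the same behavior as a standalone list comprehension, with android_top_dir standing in for self.android_top_dir:

def normalize_ld_flags(ld_flags, android_top_dir):
    # Keep only the flags that do not mention the Android top directory.
    return [flag for flag in ld_flags if android_top_dir not in flag]

print(normalize_ld_flags(["-Lfoo", "-L/android/top/lib"], "/android/top"))  # ['-Lfoo']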
nucleic/atom
9f0cb2a8101dd63c354a98ebc7489b2c616dc82a
atom/atom.py
python
Atom.__getstate__
(self)
return state
The base implementation of the pickle getstate protocol. This base class implementation handles the generic case where the object and all of its state are pickable. This includes state stored in Atom members, as well as any instance dict or slot attributes. Subclasses which require further customization should reimplement this method and modify the dict generated by this base class method.
The base implementation of the pickle getstate protocol.
[ "The", "base", "implementation", "of", "the", "pickle", "getstate", "protocol", "." ]
def __getstate__(self): """The base implementation of the pickle getstate protocol. This base class implementation handles the generic case where the object and all of its state are pickable. This includes state stored in Atom members, as well as any instance dict or slot attributes. Subclasses which require further customization should reimplement this method and modify the dict generated by this base class method. """ state = {} state.update(getattr(self, "__dict__", {})) slots = copyreg._slotnames(type(self)) if slots: for name in slots: state[name] = getattr(self, name) for key in self.members(): state[key] = getattr(self, key) return state
[ "def", "__getstate__", "(", "self", ")", ":", "state", "=", "{", "}", "state", ".", "update", "(", "getattr", "(", "self", ",", "\"__dict__\"", ",", "{", "}", ")", ")", "slots", "=", "copyreg", ".", "_slotnames", "(", "type", "(", "self", ")", ")", "if", "slots", ":", "for", "name", "in", "slots", ":", "state", "[", "name", "]", "=", "getattr", "(", "self", ",", "name", ")", "for", "key", "in", "self", ".", "members", "(", ")", ":", "state", "[", "key", "]", "=", "getattr", "(", "self", ",", "key", ")", "return", "state" ]
https://github.com/nucleic/atom/blob/9f0cb2a8101dd63c354a98ebc7489b2c616dc82a/atom/atom.py#L513-L532
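The gather order above (instance dict, then slots, then members) generalizes to plain classes. A hedged sketch of the dict-plus-slots part for an ordinary slotted class; the Point class is invented for illustration, and copyreg._slotnames is the same private helper pickle itself uses:

import copyreg
import pickle

class Point(object):
    __slots__ = ("x", "y")

    def __init__(self, x, y):
        self.x, self.y = x, y

    def __getstate__(self):
        state = dict(getattr(self, "__dict__", {}))
        # _slotnames walks the MRO and returns every slot name.
        for name in copyreg._slotnames(type(self)) or ():
            state[name] = getattr(self, name)
        return state

    def __setstate__(self, state):
        for name, value in state.items():
            setattr(self, name, value)

p = pickle.loads(pickle.dumps(Point(1, 2)))
print(p.x, p.y)  # 1 2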
ricardoquesada/Spidermonkey
4a75ea2543408bd1b2c515aa95901523eeef7858
python/configobj/validate.py
python
is_option
(value, *options)
return value
This check matches the value to any of a set of options. >>> vtor.check('option("yoda", "jedi")', 'yoda') 'yoda' >>> vtor.check('option("yoda", "jedi")', 'jed') Traceback (most recent call last): VdtValueError: the value "jed" is unacceptable. >>> vtor.check('option("yoda", "jedi")', 0) Traceback (most recent call last): VdtTypeError: the value "0" is of the wrong type.
This check matches the value to any of a set of options. >>> vtor.check('option("yoda", "jedi")', 'yoda') 'yoda' >>> vtor.check('option("yoda", "jedi")', 'jed') Traceback (most recent call last): VdtValueError: the value "jed" is unacceptable. >>> vtor.check('option("yoda", "jedi")', 0) Traceback (most recent call last): VdtTypeError: the value "0" is of the wrong type.
[ "This", "check", "matches", "the", "value", "to", "any", "of", "a", "set", "of", "options", ".", ">>>", "vtor", ".", "check", "(", "option", "(", "yoda", "jedi", ")", "yoda", ")", "yoda", ">>>", "vtor", ".", "check", "(", "option", "(", "yoda", "jedi", ")", "jed", ")", "Traceback", "(", "most", "recent", "call", "last", ")", ":", "VdtValueError", ":", "the", "value", "jed", "is", "unacceptable", ".", ">>>", "vtor", ".", "check", "(", "option", "(", "yoda", "jedi", ")", "0", ")", "Traceback", "(", "most", "recent", "call", "last", ")", ":", "VdtTypeError", ":", "the", "value", "0", "is", "of", "the", "wrong", "type", "." ]
def is_option(value, *options): """ This check matches the value to any of a set of options. >>> vtor.check('option("yoda", "jedi")', 'yoda') 'yoda' >>> vtor.check('option("yoda", "jedi")', 'jed') Traceback (most recent call last): VdtValueError: the value "jed" is unacceptable. >>> vtor.check('option("yoda", "jedi")', 0) Traceback (most recent call last): VdtTypeError: the value "0" is of the wrong type. """ if not isinstance(value, basestring): raise VdtTypeError(value) if not value in options: raise VdtValueError(value) return value
[ "def", "is_option", "(", "value", ",", "*", "options", ")", ":", "if", "not", "isinstance", "(", "value", ",", "basestring", ")", ":", "raise", "VdtTypeError", "(", "value", ")", "if", "not", "value", "in", "options", ":", "raise", "VdtValueError", "(", "value", ")", "return", "value" ]
https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/python/configobj/validate.py#L1299-L1316
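The basestring check marks this as Python 2 code. A minimal Python 3 port, with the standard TypeError/ValueError standing in for the Vdt* error classes and the vtor wiring from the doctest omitted:

def is_option(value, *options):
    if not isinstance(value, str):
        raise TypeError(value)
    if value not in options:
        raise ValueError(value)
    return value

print(is_option('yoda', 'yoda', 'jedi'))  # yoda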
miyosuda/TensorFlowAndroidMNIST
7b5a4603d2780a8a2834575706e9001977524007
jni-build/jni/include/tensorflow/models/image/cifar10/cifar10_input.py
python
inputs
(eval_data, data_dir, batch_size)
return _generate_image_and_label_batch(float_image, read_input.label, min_queue_examples, batch_size, shuffle=False)
Construct input for CIFAR evaluation using the Reader ops. Args: eval_data: bool, indicating if one should use the train or eval data set. data_dir: Path to the CIFAR-10 data directory. batch_size: Number of images per batch. Returns: images: Images. 4D tensor of [batch_size, IMAGE_SIZE, IMAGE_SIZE, 3] size. labels: Labels. 1D tensor of [batch_size] size.
Construct input for CIFAR evaluation using the Reader ops.
[ "Construct", "input", "for", "CIFAR", "evaluation", "using", "the", "Reader", "ops", "." ]
def inputs(eval_data, data_dir, batch_size): """Construct input for CIFAR evaluation using the Reader ops. Args: eval_data: bool, indicating if one should use the train or eval data set. data_dir: Path to the CIFAR-10 data directory. batch_size: Number of images per batch. Returns: images: Images. 4D tensor of [batch_size, IMAGE_SIZE, IMAGE_SIZE, 3] size. labels: Labels. 1D tensor of [batch_size] size. """ if not eval_data: filenames = [os.path.join(data_dir, 'data_batch_%d.bin' % i) for i in xrange(1, 6)] num_examples_per_epoch = NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN else: filenames = [os.path.join(data_dir, 'test_batch.bin')] num_examples_per_epoch = NUM_EXAMPLES_PER_EPOCH_FOR_EVAL for f in filenames: if not tf.gfile.Exists(f): raise ValueError('Failed to find file: ' + f) # Create a queue that produces the filenames to read. filename_queue = tf.train.string_input_producer(filenames) # Read examples from files in the filename queue. read_input = read_cifar10(filename_queue) reshaped_image = tf.cast(read_input.uint8image, tf.float32) height = IMAGE_SIZE width = IMAGE_SIZE # Image processing for evaluation. # Crop the central [height, width] of the image. resized_image = tf.image.resize_image_with_crop_or_pad(reshaped_image, width, height) # Subtract off the mean and divide by the variance of the pixels. float_image = tf.image.per_image_whitening(resized_image) # Ensure that the random shuffling has good mixing properties. min_fraction_of_examples_in_queue = 0.4 min_queue_examples = int(num_examples_per_epoch * min_fraction_of_examples_in_queue) # Generate a batch of images and labels by building up a queue of examples. return _generate_image_and_label_batch(float_image, read_input.label, min_queue_examples, batch_size, shuffle=False)
[ "def", "inputs", "(", "eval_data", ",", "data_dir", ",", "batch_size", ")", ":", "if", "not", "eval_data", ":", "filenames", "=", "[", "os", ".", "path", ".", "join", "(", "data_dir", ",", "'data_batch_%d.bin'", "%", "i", ")", "for", "i", "in", "xrange", "(", "1", ",", "6", ")", "]", "num_examples_per_epoch", "=", "NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN", "else", ":", "filenames", "=", "[", "os", ".", "path", ".", "join", "(", "data_dir", ",", "'test_batch.bin'", ")", "]", "num_examples_per_epoch", "=", "NUM_EXAMPLES_PER_EPOCH_FOR_EVAL", "for", "f", "in", "filenames", ":", "if", "not", "tf", ".", "gfile", ".", "Exists", "(", "f", ")", ":", "raise", "ValueError", "(", "'Failed to find file: '", "+", "f", ")", "# Create a queue that produces the filenames to read.", "filename_queue", "=", "tf", ".", "train", ".", "string_input_producer", "(", "filenames", ")", "# Read examples from files in the filename queue.", "read_input", "=", "read_cifar10", "(", "filename_queue", ")", "reshaped_image", "=", "tf", ".", "cast", "(", "read_input", ".", "uint8image", ",", "tf", ".", "float32", ")", "height", "=", "IMAGE_SIZE", "width", "=", "IMAGE_SIZE", "# Image processing for evaluation.", "# Crop the central [height, width] of the image.", "resized_image", "=", "tf", ".", "image", ".", "resize_image_with_crop_or_pad", "(", "reshaped_image", ",", "width", ",", "height", ")", "# Subtract off the mean and divide by the variance of the pixels.", "float_image", "=", "tf", ".", "image", ".", "per_image_whitening", "(", "resized_image", ")", "# Ensure that the random shuffling has good mixing properties.", "min_fraction_of_examples_in_queue", "=", "0.4", "min_queue_examples", "=", "int", "(", "num_examples_per_epoch", "*", "min_fraction_of_examples_in_queue", ")", "# Generate a batch of images and labels by building up a queue of examples.", "return", "_generate_image_and_label_batch", "(", "float_image", ",", "read_input", ".", "label", ",", "min_queue_examples", ",", "batch_size", ",", "shuffle", "=", "False", ")" ]
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/models/image/cifar10/cifar10_input.py#L197-L247
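The queue sizing near the end is plain arithmetic: 40% of one epoch must be resident before batching starts. Plugging in the usual CIFAR-10 eval-set size (10,000 examples, an assumption based on the constant's name):

num_examples_per_epoch = 10000  # assumed value of NUM_EXAMPLES_PER_EPOCH_FOR_EVAL
min_fraction_of_examples_in_queue = 0.4
min_queue_examples = int(num_examples_per_epoch * min_fraction_of_examples_in_queue)
print(min_queue_examples)  # 4000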
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/tf_asymmetry_fitting/tf_asymmetry_fitting_model.py
python
TFAsymmetryFittingModel._update_tf_asymmetry_single_fit_functions_after_sequential
(self, workspaces: list, functions: list)
Updates the TF single fit functions after a sequential fit has been run on the Sequential fitting tab.
Updates the TF single fit functions after a sequential fit has been run on the Sequential fitting tab.
[ "Updates", "the", "TF", "single", "fit", "functions", "after", "a", "sequential", "fit", "has", "been", "run", "on", "the", "Sequential", "fitting", "tab", "." ]
def _update_tf_asymmetry_single_fit_functions_after_sequential(self, workspaces: list, functions: list) -> None: """Updates the TF single fit functions after a sequential fit has been run on the Sequential fitting tab.""" dataset_names = self.fitting_context.dataset_names for workspace_index, workspace_name in enumerate(workspaces): if workspace_name in dataset_names: dataset_index = dataset_names.index(workspace_name) self.update_tf_asymmetry_single_fit_function(dataset_index, functions[workspace_index])
[ "def", "_update_tf_asymmetry_single_fit_functions_after_sequential", "(", "self", ",", "workspaces", ":", "list", ",", "functions", ":", "list", ")", "->", "None", ":", "dataset_names", "=", "self", ".", "fitting_context", ".", "dataset_names", "for", "workspace_index", ",", "workspace_name", "in", "enumerate", "(", "workspaces", ")", ":", "if", "workspace_name", "in", "dataset_names", ":", "dataset_index", "=", "dataset_names", ".", "index", "(", "workspace_name", ")", "self", ".", "update_tf_asymmetry_single_fit_function", "(", "dataset_index", ",", "functions", "[", "workspace_index", "]", ")" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/tf_asymmetry_fitting/tf_asymmetry_fitting_model.py#L891-L898
chromiumembedded/cef
80caf947f3fe2210e5344713c5281d8af9bdc295
tools/make_distrib.py
python
create_tar_archive
(input_dir, format)
Creates a tar archive of the specified input directory.
Creates a tar archive of the specified input directory.
[ "Creates", "a", "tar", "archive", "of", "the", "specified", "input", "directory", "." ]
def create_tar_archive(input_dir, format): """ Creates a tar archive of the specified input directory. """ # Supported formats include "gz" and "bz2". tar_file = input_dir + '.tar.' + format tf = tarfile.open(tar_file, "w:" + format) # The default tar format changed from GNU_FORMAT to PAX_FORMAT in Python 3.8. # However, PAX_FORMAT generates additional @PaxHeader entries and truncates file # names on Windows, so we'll stick with the previous default. tf.format = tarfile.GNU_FORMAT tf.add(input_dir, arcname=os.path.basename(input_dir)) tf.close()
[ "def", "create_tar_archive", "(", "input_dir", ",", "format", ")", ":", "# Supported formats include \"gz\" and \"bz2\".", "tar_file", "=", "input_dir", "+", "'.tar.'", "+", "format", "tf", "=", "tarfile", ".", "open", "(", "tar_file", ",", "\"w:\"", "+", "format", ")", "# The default tar format changed from GNU_FORMAT to PAX_FORMAT in Python 3.8.", "# However, PAX_FORMAT generates additional @PaxHeader entries and truncates file", "# names on Windows, so we'll stick with the previous default.", "tf", ".", "format", "=", "tarfile", ".", "GNU_FORMAT", "tf", ".", "add", "(", "input_dir", ",", "arcname", "=", "os", ".", "path", ".", "basename", "(", "input_dir", ")", ")", "tf", ".", "close", "(", ")" ]
https://github.com/chromiumembedded/cef/blob/80caf947f3fe2210e5344713c5281d8af9bdc295/tools/make_distrib.py#L42-L52
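A self-contained sketch of the same GNU_FORMAT pinning, passing the format directly to tarfile.open and using a context manager; the directory name is hypothetical:

import os
import tarfile

def create_tar_archive(input_dir, fmt="gz"):
    # GNU_FORMAT avoids the @PaxHeader entries that PAX_FORMAT emits.
    with tarfile.open(input_dir + ".tar." + fmt, "w:" + fmt,
                      format=tarfile.GNU_FORMAT) as tf:
        tf.add(input_dir, arcname=os.path.basename(input_dir))

create_tar_archive("some_dir")  # assumes ./some_dir exists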
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/distutils/file_util.py
python
move_file
(src, dst, verbose=1, dry_run=0)
return dst
Move a file 'src' to 'dst'. If 'dst' is a directory, the file will be moved into it with the same name; otherwise, 'src' is just renamed to 'dst'. Return the new full name of the file. Handles cross-device moves on Unix using 'copy_file()'. What about other systems???
Move a file 'src' to 'dst'.
[ "Move", "a", "file", "src", "to", "dst", "." ]
def move_file (src, dst, verbose=1, dry_run=0): """Move a file 'src' to 'dst'. If 'dst' is a directory, the file will be moved into it with the same name; otherwise, 'src' is just renamed to 'dst'. Return the new full name of the file. Handles cross-device moves on Unix using 'copy_file()'. What about other systems??? """ from os.path import exists, isfile, isdir, basename, dirname import errno if verbose >= 1: log.info("moving %s -> %s", src, dst) if dry_run: return dst if not isfile(src): raise DistutilsFileError("can't move '%s': not a regular file" % src) if isdir(dst): dst = os.path.join(dst, basename(src)) elif exists(dst): raise DistutilsFileError( "can't move '%s': destination '%s' already exists" % (src, dst)) if not isdir(dirname(dst)): raise DistutilsFileError( "can't move '%s': destination '%s' not a valid path" % \ (src, dst)) copy_it = 0 try: os.rename(src, dst) except os.error, (num, msg): if num == errno.EXDEV: copy_it = 1 else: raise DistutilsFileError( "couldn't move '%s' to '%s': %s" % (src, dst, msg)) if copy_it: copy_file(src, dst, verbose=verbose) try: os.unlink(src) except os.error, (num, msg): try: os.unlink(dst) except os.error: pass raise DistutilsFileError( ("couldn't move '%s' to '%s' by copy/delete: " + "delete '%s' failed: %s") % (src, dst, src, msg)) return dst
[ "def", "move_file", "(", "src", ",", "dst", ",", "verbose", "=", "1", ",", "dry_run", "=", "0", ")", ":", "from", "os", ".", "path", "import", "exists", ",", "isfile", ",", "isdir", ",", "basename", ",", "dirname", "import", "errno", "if", "verbose", ">=", "1", ":", "log", ".", "info", "(", "\"moving %s -> %s\"", ",", "src", ",", "dst", ")", "if", "dry_run", ":", "return", "dst", "if", "not", "isfile", "(", "src", ")", ":", "raise", "DistutilsFileError", "(", "\"can't move '%s': not a regular file\"", "%", "src", ")", "if", "isdir", "(", "dst", ")", ":", "dst", "=", "os", ".", "path", ".", "join", "(", "dst", ",", "basename", "(", "src", ")", ")", "elif", "exists", "(", "dst", ")", ":", "raise", "DistutilsFileError", "(", "\"can't move '%s': destination '%s' already exists\"", "%", "(", "src", ",", "dst", ")", ")", "if", "not", "isdir", "(", "dirname", "(", "dst", ")", ")", ":", "raise", "DistutilsFileError", "(", "\"can't move '%s': destination '%s' not a valid path\"", "%", "(", "src", ",", "dst", ")", ")", "copy_it", "=", "0", "try", ":", "os", ".", "rename", "(", "src", ",", "dst", ")", "except", "os", ".", "error", ",", "(", "num", ",", "msg", ")", ":", "if", "num", "==", "errno", ".", "EXDEV", ":", "copy_it", "=", "1", "else", ":", "raise", "DistutilsFileError", "(", "\"couldn't move '%s' to '%s': %s\"", "%", "(", "src", ",", "dst", ",", "msg", ")", ")", "if", "copy_it", ":", "copy_file", "(", "src", ",", "dst", ",", "verbose", "=", "verbose", ")", "try", ":", "os", ".", "unlink", "(", "src", ")", "except", "os", ".", "error", ",", "(", "num", ",", "msg", ")", ":", "try", ":", "os", ".", "unlink", "(", "dst", ")", "except", "os", ".", "error", ":", "pass", "raise", "DistutilsFileError", "(", "(", "\"couldn't move '%s' to '%s' by copy/delete: \"", "+", "\"delete '%s' failed: %s\"", ")", "%", "(", "src", ",", "dst", ",", "src", ",", "msg", ")", ")", "return", "dst" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/distutils/file_util.py#L162-L219
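On Python 3 the same cross-device fallback is written with OSError and errno.EXDEV; a hedged, minimal version in which shutil.copy2 replaces the distutils copy_file helper:

import errno
import os
import shutil

def move_file(src, dst):
    try:
        os.rename(src, dst)
    except OSError as exc:
        if exc.errno != errno.EXDEV:
            raise
        # Destination is on another filesystem: copy with metadata, then
        # delete the original, matching the copy/delete path above.
        shutil.copy2(src, dst)
        os.unlink(src)
    return dst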
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py3/scipy/stats/_multivariate.py
python
dirichlet_gen.var
(self, alpha)
return _squeeze_output(out)
Compute the variance of the dirichlet distribution. Parameters ---------- %(_dirichlet_doc_default_callparams)s Returns ------- v : ndarray or scalar Variance of the Dirichlet distribution.
Compute the variance of the dirichlet distribution.
[ "Compute", "the", "variance", "of", "the", "dirichlet", "distribution", "." ]
def var(self, alpha): """ Compute the variance of the dirichlet distribution. Parameters ---------- %(_dirichlet_doc_default_callparams)s Returns ------- v : ndarray or scalar Variance of the Dirichlet distribution. """ alpha = _dirichlet_check_parameters(alpha) alpha0 = np.sum(alpha) out = (alpha * (alpha0 - alpha)) / ((alpha0 * alpha0) * (alpha0 + 1)) return _squeeze_output(out)
[ "def", "var", "(", "self", ",", "alpha", ")", ":", "alpha", "=", "_dirichlet_check_parameters", "(", "alpha", ")", "alpha0", "=", "np", ".", "sum", "(", "alpha", ")", "out", "=", "(", "alpha", "*", "(", "alpha0", "-", "alpha", ")", ")", "/", "(", "(", "alpha0", "*", "alpha0", ")", "*", "(", "alpha0", "+", "1", ")", ")", "return", "_squeeze_output", "(", "out", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/stats/_multivariate.py#L1495-L1514
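The closed form being computed is Var(X_i) = alpha_i * (alpha_0 - alpha_i) / (alpha_0**2 * (alpha_0 + 1)) with alpha_0 = sum(alpha). A quick NumPy cross-check against scipy; the alpha vector is arbitrary:

import numpy as np
from scipy.stats import dirichlet

alpha = np.array([1.0, 2.0, 3.0])
a0 = alpha.sum()  # 6.0
by_hand = alpha * (a0 - alpha) / (a0 ** 2 * (a0 + 1))
print(np.allclose(by_hand, dirichlet.var(alpha)))  # True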
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/vision_opencv/image_geometry/src/image_geometry/cameramodels.py
python
PinholeCameraModel.getDeltaX
(self, deltaU, Z)
return Z * deltaU / fx
:param deltaU: delta u in pixels :type deltaU: float :param Z: Z, in cartesian space :type Z: float :rtype: float Compute delta X, given Z in cartesian space and delta u in pixels. For given Z, this is the inverse of :meth:`getDeltaU`.
:param deltaU: delta u in pixels :type deltaU: float :param Z: Z, in cartesian space :type Z: float :rtype: float
[ ":", "param", "deltaU", ":", "delta", "u", "in", "pixels", ":", "type", "deltaU", ":", "float", ":", "param", "Z", ":", "Z", "in", "cartesian", "space", ":", "type", "Z", ":", "float", ":", "rtype", ":", "float" ]
def getDeltaX(self, deltaU, Z): """ :param deltaU: delta u in pixels :type deltaU: float :param Z: Z, in cartesian space :type Z: float :rtype: float Compute delta X, given Z in cartesian space and delta u in pixels. For given Z, this is the inverse of :meth:`getDeltaU`. """ fx = self.P[0, 0] return Z * deltaU / fx
[ "def", "getDeltaX", "(", "self", ",", "deltaU", ",", "Z", ")", ":", "fx", "=", "self", ".", "P", "[", "0", ",", "0", "]", "return", "Z", "*", "deltaU", "/", "fx" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/vision_opencv/image_geometry/src/image_geometry/cameramodels.py#L179-L191
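This is the pinhole relation delta_X = Z * delta_u / fx, the inverse of projecting a lateral displacement into pixels. A one-line numeric check with invented intrinsics:

fx = 500.0      # focal length in pixels (hypothetical calibration)
Z = 2.0         # depth in metres
delta_u = 25.0  # horizontal displacement in pixels
print(Z * delta_u / fx)  # 0.1 metre of lateral motion at that depth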
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_misc.py
python
TimeSpan_Week
(*args)
return _misc_.TimeSpan_Week(*args)
TimeSpan_Week() -> TimeSpan
TimeSpan_Week() -> TimeSpan
[ "TimeSpan_Week", "()", "-", ">", "TimeSpan" ]
def TimeSpan_Week(*args): """TimeSpan_Week() -> TimeSpan""" return _misc_.TimeSpan_Week(*args)
[ "def", "TimeSpan_Week", "(", "*", "args", ")", ":", "return", "_misc_", ".", "TimeSpan_Week", "(", "*", "args", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_misc.py#L4600-L4602
liulei01/DRBox
b5c76e033c555c9009590ab384e1f7bd3c66c237
scripts/cpp_lint.py
python
CheckSpacingForFunctionCall
(filename, line, linenum, error)
Checks for the correctness of various spacing around function calls. Args: filename: The name of the current file. line: The text of the line to check. linenum: The number of the line to check. error: The function to call with any errors found.
Checks for the correctness of various spacing around function calls.
[ "Checks", "for", "the", "correctness", "of", "various", "spacing", "around", "function", "calls", "." ]
def CheckSpacingForFunctionCall(filename, line, linenum, error): """Checks for the correctness of various spacing around function calls. Args: filename: The name of the current file. line: The text of the line to check. linenum: The number of the line to check. error: The function to call with any errors found. """ # Since function calls often occur inside if/for/while/switch # expressions - which have their own, more liberal conventions - we # first see if we should be looking inside such an expression for a # function call, to which we can apply more strict standards. fncall = line # if there's no control flow construct, look at whole line for pattern in (r'\bif\s*\((.*)\)\s*{', r'\bfor\s*\((.*)\)\s*{', r'\bwhile\s*\((.*)\)\s*[{;]', r'\bswitch\s*\((.*)\)\s*{'): match = Search(pattern, line) if match: fncall = match.group(1) # look inside the parens for function calls break # Except in if/for/while/switch, there should never be space # immediately inside parens (eg "f( 3, 4 )"). We make an exception # for nested parens ( (a+b) + c ). Likewise, there should never be # a space before a ( when it's a function argument. I assume it's a # function argument when the char before the whitespace is legal in # a function name (alnum + _) and we're not starting a macro. Also ignore # pointers and references to arrays and functions coz they're too tricky: # we use a very simple way to recognize these: # " (something)(maybe-something)" or # " (something)(maybe-something," or # " (something)[something]" # Note that we assume the contents of [] to be short enough that # they'll never need to wrap. if ( # Ignore control structures. not Search(r'\b(if|for|while|switch|return|new|delete|catch|sizeof)\b', fncall) and # Ignore pointers/references to functions. not Search(r' \([^)]+\)\([^)]*(\)|,$)', fncall) and # Ignore pointers/references to arrays. not Search(r' \([^)]+\)\[[^\]]+\]', fncall)): if Search(r'\w\s*\(\s(?!\s*\\$)', fncall): # a ( used for a fn call error(filename, linenum, 'whitespace/parens', 4, 'Extra space after ( in function call') elif Search(r'\(\s+(?!(\s*\\)|\()', fncall): error(filename, linenum, 'whitespace/parens', 2, 'Extra space after (') if (Search(r'\w\s+\(', fncall) and not Search(r'#\s*define|typedef', fncall) and not Search(r'\w\s+\((\w+::)*\*\w+\)\(', fncall)): error(filename, linenum, 'whitespace/parens', 4, 'Extra space before ( in function call') # If the ) is followed only by a newline or a { + newline, assume it's # part of a control statement (if/while/etc), and don't complain if Search(r'[^)]\s+\)\s*[^{\s]', fncall): # If the closing parenthesis is preceded by only whitespaces, # try to give a more descriptive error message. if Search(r'^\s+\)', fncall): error(filename, linenum, 'whitespace/parens', 2, 'Closing ) should be moved to the previous line') else: error(filename, linenum, 'whitespace/parens', 2, 'Extra space before )')
[ "def", "CheckSpacingForFunctionCall", "(", "filename", ",", "line", ",", "linenum", ",", "error", ")", ":", "# Since function calls often occur inside if/for/while/switch", "# expressions - which have their own, more liberal conventions - we", "# first see if we should be looking inside such an expression for a", "# function call, to which we can apply more strict standards.", "fncall", "=", "line", "# if there's no control flow construct, look at whole line", "for", "pattern", "in", "(", "r'\\bif\\s*\\((.*)\\)\\s*{'", ",", "r'\\bfor\\s*\\((.*)\\)\\s*{'", ",", "r'\\bwhile\\s*\\((.*)\\)\\s*[{;]'", ",", "r'\\bswitch\\s*\\((.*)\\)\\s*{'", ")", ":", "match", "=", "Search", "(", "pattern", ",", "line", ")", "if", "match", ":", "fncall", "=", "match", ".", "group", "(", "1", ")", "# look inside the parens for function calls", "break", "# Except in if/for/while/switch, there should never be space", "# immediately inside parens (eg \"f( 3, 4 )\"). We make an exception", "# for nested parens ( (a+b) + c ). Likewise, there should never be", "# a space before a ( when it's a function argument. I assume it's a", "# function argument when the char before the whitespace is legal in", "# a function name (alnum + _) and we're not starting a macro. Also ignore", "# pointers and references to arrays and functions coz they're too tricky:", "# we use a very simple way to recognize these:", "# \" (something)(maybe-something)\" or", "# \" (something)(maybe-something,\" or", "# \" (something)[something]\"", "# Note that we assume the contents of [] to be short enough that", "# they'll never need to wrap.", "if", "(", "# Ignore control structures.", "not", "Search", "(", "r'\\b(if|for|while|switch|return|new|delete|catch|sizeof)\\b'", ",", "fncall", ")", "and", "# Ignore pointers/references to functions.", "not", "Search", "(", "r' \\([^)]+\\)\\([^)]*(\\)|,$)'", ",", "fncall", ")", "and", "# Ignore pointers/references to arrays.", "not", "Search", "(", "r' \\([^)]+\\)\\[[^\\]]+\\]'", ",", "fncall", ")", ")", ":", "if", "Search", "(", "r'\\w\\s*\\(\\s(?!\\s*\\\\$)'", ",", "fncall", ")", ":", "# a ( used for a fn call", "error", "(", "filename", ",", "linenum", ",", "'whitespace/parens'", ",", "4", ",", "'Extra space after ( in function call'", ")", "elif", "Search", "(", "r'\\(\\s+(?!(\\s*\\\\)|\\()'", ",", "fncall", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'whitespace/parens'", ",", "2", ",", "'Extra space after ('", ")", "if", "(", "Search", "(", "r'\\w\\s+\\('", ",", "fncall", ")", "and", "not", "Search", "(", "r'#\\s*define|typedef'", ",", "fncall", ")", "and", "not", "Search", "(", "r'\\w\\s+\\((\\w+::)*\\*\\w+\\)\\('", ",", "fncall", ")", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'whitespace/parens'", ",", "4", ",", "'Extra space before ( in function call'", ")", "# If the ) is followed only by a newline or a { + newline, assume it's", "# part of a control statement (if/while/etc), and don't complain", "if", "Search", "(", "r'[^)]\\s+\\)\\s*[^{\\s]'", ",", "fncall", ")", ":", "# If the closing parenthesis is preceded by only whitespaces,", "# try to give a more descriptive error message.", "if", "Search", "(", "r'^\\s+\\)'", ",", "fncall", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'whitespace/parens'", ",", "2", ",", "'Closing ) should be moved to the previous line'", ")", "else", ":", "error", "(", "filename", ",", "linenum", ",", "'whitespace/parens'", ",", "2", ",", "'Extra space before )'", ")" ]
https://github.com/liulei01/DRBox/blob/b5c76e033c555c9009590ab384e1f7bd3c66c237/scripts/cpp_lint.py#L2305-L2370
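Each rule above is a single regular expression applied to the (possibly narrowed) call text. A stripped-down sketch of the first one, the "extra space after (" check, with the pattern copied from the function and invented sample lines:

import re

EXTRA_SPACE_AFTER_PAREN = re.compile(r'\w\s*\(\s(?!\s*\\$)')

for line in ['foo( 3, 4);', 'foo(3, 4);']:
    hit = EXTRA_SPACE_AFTER_PAREN.search(line)
    print(line, '->', 'extra space after ( in function call' if hit else 'ok')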
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/distribute/values.py
python
MirroredVariable._write_object_proto
(self, proto, options)
Update a SavedObject proto for the caller. If a DistributedVariable object supports this method, it will be called when saving with a pre-built `SavedObject` proto representing the object, plus an instance of `SaveOptions`. This method is then free to modify that proto instance. `DistributedVariable` with `AUTO` or `ON_WRITE` synchronization optionally write out information about their components to the `experimental_distributed_variable_components` field of a `SavedVariable` (depending on the `SaveOptions` variable policy). Args: proto: A pre-built `SavedObject` proto for this object. It is assumed this will be a `SavedVariable` instance. options: A `SaveOptions` instance.
Update a SavedObject proto for the caller.
[ "Update", "a", "SavedObject", "proto", "for", "the", "caller", "." ]
def _write_object_proto(self, proto, options): """Update a SavedObject proto for the caller. If a DistributedVariable object supports this method, it will be called when saving with a pre-built `SavedObject` proto representing the object, plus an instance of `SaveOptions`. This method is then free to modify that proto instance. `DistributedVariable` with `AUTO` or `ON_WRITE` synchronization optionally write out information about their components to the `experimental_distributed_variable_components` field of a `SavedVariable` (depending on the `SaveOptions` variable policy). Args: proto: A pre-built `SavedObject` proto for this object. It is assumed this will be a `SavedVariable` instance. options: A `SaveOptions` instance. """ super(MirroredVariable, self)._write_object_proto(proto, options) values_util.write_object_proto(self, proto, options)
[ "def", "_write_object_proto", "(", "self", ",", "proto", ",", "options", ")", ":", "super", "(", "MirroredVariable", ",", "self", ")", ".", "_write_object_proto", "(", "proto", ",", "options", ")", "values_util", ".", "write_object_proto", "(", "self", ",", "proto", ",", "options", ")" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/distribute/values.py#L1190-L1209
baidu/tera
dbcd28af792d879d961bf9fc7eb60de81b437646
src/sdk/python/TeraSdk.py
python
Client.Close
(self)
Destroy this client and release the underlying resources; this object must not be used again afterwards.
Destroy this client and release the underlying resources; this object must not be used again afterwards.
[ "Destroy", "this", "client", "and", "release", "the", "underlying", "resources", ";", "this", "object", "must", "not", "be", "used", "again", "afterwards", "." ]
def Close(self): """ Destroy this client and release the underlying resources; this object must not be used again afterwards. """ lib.tera_client_close(self.client)
[ "def", "Close", "(", "self", ")", ":", "lib", ".", "tera_client_close", "(", "self", ".", "client", ")" ]
https://github.com/baidu/tera/blob/dbcd28af792d879d961bf9fc7eb60de81b437646/src/sdk/python/TeraSdk.py#L267-L271
MegEngine/MegEngine
ce9ad07a27ec909fb8db4dd67943d24ba98fb93a
imperative/python/megengine/utils/network.py
python
NodeFilter.as_dict
(self)
return collections.OrderedDict((i.name, i) for i in self)
r"""construct an ordered dict to map from node names to objects in this iterator
r"""construct an ordered dict to map from node names to objects in this iterator
[ "r", "construct", "an", "ordered", "dict", "to", "map", "from", "node", "names", "to", "objects", "in", "this", "iterator" ]
def as_dict(self): r"""construct an ordered dict to map from node names to objects in this iterator """ return collections.OrderedDict((i.name, i) for i in self)
[ "def", "as_dict", "(", "self", ")", ":", "return", "collections", ".", "OrderedDict", "(", "(", "i", ".", "name", ",", "i", ")", "for", "i", "in", "self", ")" ]
https://github.com/MegEngine/MegEngine/blob/ce9ad07a27ec909fb8db4dd67943d24ba98fb93a/imperative/python/megengine/utils/network.py#L735-L739
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/boto/boto/route53/connection.py
python
Route53Connection.create_zone
(self, name, private_zone=False, vpc_id=None, vpc_region=None)
return Zone(self, zone['CreateHostedZoneResponse']['HostedZone'])
Create a new Hosted Zone. Returns a Zone object for the newly created Hosted Zone. :type name: str :param name: The name of the domain. This should be a fully-specified domain, and should end with a final period as the last label indication. If you omit the final period, Amazon Route 53 assumes the domain is relative to the root. This is the name you have registered with your DNS registrar. It is also the name you will delegate from your registrar to the Amazon Route 53 delegation servers returned in response to this request. :type private_zone: bool :param private_zone: Set True if creating a private hosted zone. :type vpc_id: str :param vpc_id: When creating a private hosted zone, the VPC Id to associate to is required. :type vpc_region: str :param vpc_id: When creating a private hosted zone, the region of the associated VPC is required.
Create a new Hosted Zone. Returns a Zone object for the newly created Hosted Zone.
[ "Create", "a", "new", "Hosted", "Zone", ".", "Returns", "a", "Zone", "object", "for", "the", "newly", "created", "Hosted", "Zone", "." ]
def create_zone(self, name, private_zone=False, vpc_id=None, vpc_region=None): """ Create a new Hosted Zone. Returns a Zone object for the newly created Hosted Zone. :type name: str :param name: The name of the domain. This should be a fully-specified domain, and should end with a final period as the last label indication. If you omit the final period, Amazon Route 53 assumes the domain is relative to the root. This is the name you have registered with your DNS registrar. It is also the name you will delegate from your registrar to the Amazon Route 53 delegation servers returned in response to this request. :type private_zone: bool :param private_zone: Set True if creating a private hosted zone. :type vpc_id: str :param vpc_id: When creating a private hosted zone, the VPC Id to associate to is required. :type vpc_region: str :param vpc_region: When creating a private hosted zone, the region of the associated VPC is required. """ zone = self.create_hosted_zone(name, private_zone=private_zone, vpc_id=vpc_id, vpc_region=vpc_region) return Zone(self, zone['CreateHostedZoneResponse']['HostedZone'])
[ "def", "create_zone", "(", "self", ",", "name", ",", "private_zone", "=", "False", ",", "vpc_id", "=", "None", ",", "vpc_region", "=", "None", ")", ":", "zone", "=", "self", ".", "create_hosted_zone", "(", "name", ",", "private_zone", "=", "private_zone", ",", "vpc_id", "=", "vpc_id", ",", "vpc_region", "=", "vpc_region", ")", "return", "Zone", "(", "self", ",", "zone", "[", "'CreateHostedZoneResponse'", "]", "[", "'HostedZone'", "]", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/route53/connection.py#L506-L535
y123456yz/reading-and-annotate-mongodb-3.6
93280293672ca7586dc24af18132aa61e4ed7fcf
mongo/buildscripts/idl/idl/errors.py
python
ParserError.__init__
(self, error_id, msg, file_name, line, column)
Construct a parser error with source location information.
Construct a parser error with source location information.
[ "Construct", "a", "parser", "error", "with", "source", "location", "information", "." ]
def __init__(self, error_id, msg, file_name, line, column): # type: (unicode, unicode, unicode, int, int) -> None """Construct a parser error with source location information.""" # pylint: disable=too-many-arguments self.error_id = error_id self.msg = msg super(ParserError, self).__init__(file_name, line, column)
[ "def", "__init__", "(", "self", ",", "error_id", ",", "msg", ",", "file_name", ",", "line", ",", "column", ")", ":", "# type: (unicode, unicode, unicode, int, int) -> None", "# pylint: disable=too-many-arguments", "self", ".", "error_id", "=", "error_id", "self", ".", "msg", "=", "msg", "super", "(", "ParserError", ",", "self", ")", ".", "__init__", "(", "file_name", ",", "line", ",", "column", ")" ]
https://github.com/y123456yz/reading-and-annotate-mongodb-3.6/blob/93280293672ca7586dc24af18132aa61e4ed7fcf/mongo/buildscripts/idl/idl/errors.py#L105-L111
MicBosi/VisualizationLibrary
d2a0e321288152008957e29a0bc270ad192f75be
src/external/freetype/src/tools/docmaker/tohtml.py
python
HtmlFormatter.make_html_word
( self, word )
return html_quote( word )
analyze a simple word to detect cross-references and styling
analyze a simple word to detect cross-references and styling
[ "analyze", "a", "simple", "word", "to", "detect", "cross", "-", "references", "and", "styling" ]
def make_html_word( self, word ): """analyze a simple word to detect cross-references and styling""" # look for cross-references m = re_crossref.match( word ) if m: try: name = m.group( 1 ) rest = m.group( 2 ) block = self.identifiers[name] url = self.make_block_url( block ) return '<a href="' + url + '">' + name + '</a>' + rest except: # we detected a cross-reference to an unknown item sys.stderr.write( \ "WARNING: undefined cross reference '" + name + "'.\n" ) return '?' + name + '?' + rest # look for italics and bolds m = re_italic.match( word ) if m: name = m.group( 1 ) rest = m.group( 3 ) return '<i>' + name + '</i>' + rest m = re_bold.match( word ) if m: name = m.group( 1 ) rest = m.group( 3 ) return '<b>' + name + '</b>' + rest return html_quote( word )
[ "def", "make_html_word", "(", "self", ",", "word", ")", ":", "# look for cross-references", "m", "=", "re_crossref", ".", "match", "(", "word", ")", "if", "m", ":", "try", ":", "name", "=", "m", ".", "group", "(", "1", ")", "rest", "=", "m", ".", "group", "(", "2", ")", "block", "=", "self", ".", "identifiers", "[", "name", "]", "url", "=", "self", ".", "make_block_url", "(", "block", ")", "return", "'<a href=\"'", "+", "url", "+", "'\">'", "+", "name", "+", "'</a>'", "+", "rest", "except", ":", "# we detected a cross-reference to an unknown item", "sys", ".", "stderr", ".", "write", "(", "\"WARNING: undefined cross reference '\"", "+", "name", "+", "\"'.\\n\"", ")", "return", "'?'", "+", "name", "+", "'?'", "+", "rest", "# look for italics and bolds", "m", "=", "re_italic", ".", "match", "(", "word", ")", "if", "m", ":", "name", "=", "m", ".", "group", "(", "1", ")", "rest", "=", "m", ".", "group", "(", "3", ")", "return", "'<i>'", "+", "name", "+", "'</i>'", "+", "rest", "m", "=", "re_bold", ".", "match", "(", "word", ")", "if", "m", ":", "name", "=", "m", ".", "group", "(", "1", ")", "rest", "=", "m", ".", "group", "(", "3", ")", "return", "'<b>'", "+", "name", "+", "'</b>'", "+", "rest", "return", "html_quote", "(", "word", ")" ]
https://github.com/MicBosi/VisualizationLibrary/blob/d2a0e321288152008957e29a0bc270ad192f75be/src/external/freetype/src/tools/docmaker/tohtml.py#L255-L285
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/devil/devil/android/ports.py
python
IsDevicePortUsed
(device, device_port, state='')
return False
Checks whether the specified device port is used or not. Args: device: A DeviceUtils instance. device_port: Port on device we want to check. state: String of the specified state. Default is empty string, which means any state. Returns: True if the port on device is already used, otherwise returns False.
Checks whether the specified device port is used or not.
[ "Checks", "whether", "the", "specified", "device", "port", "is", "used", "or", "not", "." ]
def IsDevicePortUsed(device, device_port, state=''): """Checks whether the specified device port is used or not. Args: device: A DeviceUtils instance. device_port: Port on device we want to check. state: String of the specified state. Default is empty string, which means any state. Returns: True if the port on device is already used, otherwise returns False. """ base_urls = ('127.0.0.1:%d' % device_port, 'localhost:%d' % device_port) netstat_results = device.RunShellCommand( ['netstat', '-a'], check_return=True, large_output=True) for single_connect in netstat_results: # Column 3 is the local address which we want to check with. connect_results = single_connect.split() if connect_results[0] != 'tcp': continue if len(connect_results) < 6: raise Exception('Unexpected format while parsing netstat line: ' + single_connect) is_state_match = connect_results[5] == state if state else True if connect_results[3] in base_urls and is_state_match: return True return False
[ "def", "IsDevicePortUsed", "(", "device", ",", "device_port", ",", "state", "=", "''", ")", ":", "base_urls", "=", "(", "'127.0.0.1:%d'", "%", "device_port", ",", "'localhost:%d'", "%", "device_port", ")", "netstat_results", "=", "device", ".", "RunShellCommand", "(", "[", "'netstat'", ",", "'-a'", "]", ",", "check_return", "=", "True", ",", "large_output", "=", "True", ")", "for", "single_connect", "in", "netstat_results", ":", "# Column 3 is the local address which we want to check with.", "connect_results", "=", "single_connect", ".", "split", "(", ")", "if", "connect_results", "[", "0", "]", "!=", "'tcp'", ":", "continue", "if", "len", "(", "connect_results", ")", "<", "6", ":", "raise", "Exception", "(", "'Unexpected format while parsing netstat line: '", "+", "single_connect", ")", "is_state_match", "=", "connect_results", "[", "5", "]", "==", "state", "if", "state", "else", "True", "if", "connect_results", "[", "3", "]", "in", "base_urls", "and", "is_state_match", ":", "return", "True", "return", "False" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/devil/devil/android/ports.py#L103-L129
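The function is essentially a parser for netstat -a output: column 0 must be 'tcp', column 3 is the local address, column 5 the state. A self-contained sketch with a canned sample line in the layout the original assumes:

def port_in_use(netstat_lines, port, state=''):
    targets = ('127.0.0.1:%d' % port, 'localhost:%d' % port)
    for line in netstat_lines:
        fields = line.split()
        if len(fields) < 6 or fields[0] != 'tcp':
            continue
        if fields[3] in targets and (not state or fields[5] == state):
            return True
    return False

sample = ['tcp 0 0 127.0.0.1:5037 0.0.0.0:* LISTEN']
print(port_in_use(sample, 5037))            # True
print(port_in_use(sample, 5037, 'CLOSED'))  # False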
isl-org/Open3D
79aec3ddde6a571ce2f28e4096477e52ec465244
util/check_style.py
python
_glob_files
(directories, extensions)
return file_paths
Find files with certain extensions in directories recursively. Args: directories: list of directories, relative to the root Open3D repo directory. extensions: list of extensions, e.g. ["cpp", "h"]. Return: List of file paths.
Find files with certain extensions in directories recursively.
[ "Find", "files", "with", "certain", "extensions", "in", "directories", "recursively", "." ]
def _glob_files(directories, extensions): """ Find files with certain extensions in directories recursively. Args: directories: list of directories, relative to the root Open3D repo directory. extensions: list of extensions, e.g. ["cpp", "h"]. Return: List of file paths. """ pwd = Path(os.path.dirname(os.path.abspath(__file__))) open3d_root_dir = pwd.parent file_paths = [] for directory in directories: directory = open3d_root_dir / directory for extension in extensions: extension_regex = "*." + extension file_paths.extend(directory.rglob(extension_regex)) file_paths = [str(file_path) for file_path in file_paths] file_paths = sorted(list(set(file_paths))) return file_paths
[ "def", "_glob_files", "(", "directories", ",", "extensions", ")", ":", "pwd", "=", "Path", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", ")", "open3d_root_dir", "=", "pwd", ".", "parent", "file_paths", "=", "[", "]", "for", "directory", "in", "directories", ":", "directory", "=", "open3d_root_dir", "/", "directory", "for", "extension", "in", "extensions", ":", "extension_regex", "=", "\"*.\"", "+", "extension", "file_paths", ".", "extend", "(", "directory", ".", "rglob", "(", "extension_regex", ")", ")", "file_paths", "=", "[", "str", "(", "file_path", ")", "for", "file_path", "in", "file_paths", "]", "file_paths", "=", "sorted", "(", "list", "(", "set", "(", "file_paths", ")", ")", ")", "return", "file_paths" ]
https://github.com/isl-org/Open3D/blob/79aec3ddde6a571ce2f28e4096477e52ec465244/util/check_style.py#L85-L107
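The heavy lifting is Path.rglob, which matches a pattern at any depth. A trimmed sketch without the repo-root resolution; the directories and extensions here are illustrative:

from pathlib import Path

def glob_files(directories, extensions):
    paths = []
    for directory in directories:
        for ext in extensions:
            # rglob('*.py') finds matching files recursively under the directory.
            paths.extend(Path(directory).rglob('*.' + ext))
    # Deduplicate and give a deterministic ordering, as the original does.
    return sorted({str(p) for p in paths})

print(glob_files(['.'], ['py'])[:3])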
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/botocore/retryhandler.py
python
delay_exponential
(base, growth_factor, attempts)
return time_to_sleep
Calculate time to sleep based on exponential function. The format is:: base * growth_factor ^ (attempts - 1) If ``base`` is set to 'rand' then a random number between 0 and 1 will be used as the base. Base must be greater than 0, otherwise a ValueError will be raised.
Calculate time to sleep based on exponential function.
[ "Calculate", "time", "to", "sleep", "based", "on", "exponential", "function", "." ]
def delay_exponential(base, growth_factor, attempts): """Calculate time to sleep based on exponential function. The format is:: base * growth_factor ^ (attempts - 1) If ``base`` is set to 'rand' then a random number between 0 and 1 will be used as the base. Base must be greater than 0, otherwise a ValueError will be raised. """ if base == 'rand': base = random.random() elif base <= 0: raise ValueError("The 'base' param must be greater than 0, " "got: %s" % base) time_to_sleep = base * (growth_factor ** (attempts - 1)) return time_to_sleep
[ "def", "delay_exponential", "(", "base", ",", "growth_factor", ",", "attempts", ")", ":", "if", "base", "==", "'rand'", ":", "base", "=", "random", ".", "random", "(", ")", "elif", "base", "<=", "0", ":", "raise", "ValueError", "(", "\"The 'base' param must be greater than 0, \"", "\"got: %s\"", "%", "base", ")", "time_to_sleep", "=", "base", "*", "(", "growth_factor", "**", "(", "attempts", "-", "1", ")", ")", "return", "time_to_sleep" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/botocore/retryhandler.py#L39-L58
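The schedule is base * growth_factor ** (attempts - 1), so base 0.1 with growth factor 2 doubles each retry. A quick table, with values chosen for illustration:

def delay_exponential(base, growth_factor, attempts):
    return base * (growth_factor ** (attempts - 1))

for attempt in range(1, 5):
    print(attempt, delay_exponential(0.1, 2, attempt))
# prints 0.1, 0.2, 0.4, 0.8 for attempts 1..4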
NVIDIA/MDL-SDK
aa9642b2546ad7b6236b5627385d882c2ed83c5d
src/mdl/jit/llvm/dist/bindings/python/llvm/object.py
python
Symbol.address
(self)
return lib.LLVMGetSymbolAddress(self)
The address of this symbol, in long bytes.
The address of this symbol, in long bytes.
[ "The", "address", "of", "this", "symbol", "in", "long", "bytes", "." ]
def address(self): """The address of this symbol, in long bytes.""" if self.expired: raise Exception('Symbol instance has expired.') return lib.LLVMGetSymbolAddress(self)
[ "def", "address", "(", "self", ")", ":", "if", "self", ".", "expired", ":", "raise", "Exception", "(", "'Symbol instance has expired.'", ")", "return", "lib", ".", "LLVMGetSymbolAddress", "(", "self", ")" ]
https://github.com/NVIDIA/MDL-SDK/blob/aa9642b2546ad7b6236b5627385d882c2ed83c5d/src/mdl/jit/llvm/dist/bindings/python/llvm/object.py#L314-L319
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/agw/supertooltip.py
python
SuperToolTip.SetBodyImage
(self, bmp)
Sets the main body bitmap for :class:`SuperToolTip`. :param `bmp`: the body bitmap, a valid :class:`Bitmap` object.
Sets the main body bitmap for :class:`SuperToolTip`.
[ "Sets", "the", "main", "body", "bitmap", "for", ":", "class", ":", "SuperToolTip", "." ]
def SetBodyImage(self, bmp): """ Sets the main body bitmap for :class:`SuperToolTip`. :param `bmp`: the body bitmap, a valid :class:`Bitmap` object. """ self._embeddedImage = bmp if self._superToolTip: self._superToolTip.Invalidate()
[ "def", "SetBodyImage", "(", "self", ",", "bmp", ")", ":", "self", ".", "_embeddedImage", "=", "bmp", "if", "self", ".", "_superToolTip", ":", "self", ".", "_superToolTip", ".", "Invalidate", "(", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/supertooltip.py#L1086-L1095
openvinotoolkit/openvino
dedcbeafa8b84cccdc55ca64b8da516682b381c7
thirdparty/itt_collector/runtool/sea_runtool.py
python
TaskCombiner.context_switch
(self, time, cpu, prev, next)
Called to process context switch events on CPU. :param cpu: CPU number (int) :param prev: previous task description (dict. Example: {'tid': 2935135, 'state': 'S', 'name': u'vtplay', 'prio': 31}) :param next: next task description (dict. see above)
Called to process context switch events on CPU. :param cpu: CPU number (int) :param prev: previous task description (dict. Example: {'tid': 2935135, 'state': 'S', 'name': u'vtplay', 'prio': 31}) :param next: next task description (dict. see above)
[ "Called", "to", "process", "context", "switch", "events", "on", "CPU", ".", ":", "param", "cpu", ":", "CPU", "number", "(", "int", ")", ":", "param", "prev", ":", "previous", "task", "description", "(", "dict", ".", "Example", ":", "{", "tid", ":", "2935135", "state", ":", "S", "name", ":", "u", "vtplay", "prio", ":", "31", "}", ")", ":", "param", "next", ":", "next", "task", "description", "(", "dict", ".", "see", "above", ")" ]
def context_switch(self, time, cpu, prev, next): """ Called to process context switch events on CPU. :param cpu: CPU number (int) :param prev: previous task description (dict. Example: {'tid': 2935135, 'state': 'S', 'name': u'vtplay', 'prio': 31}) :param next: next task description (dict. see above) """ pass
[ "def", "context_switch", "(", "self", ",", "time", ",", "cpu", ",", "prev", ",", "next", ")", ":", "pass" ]
https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/thirdparty/itt_collector/runtool/sea_runtool.py#L1887-L1894
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/eager/function.py
python
Function._create_graph_function
(self, args, kwargs, override_flat_arg_shapes=None)
return graph_function
Create a `ConcreteFunction` from `args` and `kwargs`.
Create a `ConcreteFunction` from `args` and `kwargs`.
[ "Create", "a", "ConcreteFunction", "from", "args", "and", "kwargs", "." ]
def _create_graph_function(self, args, kwargs, override_flat_arg_shapes=None): """Create a `ConcreteFunction` from `args` and `kwargs`.""" if self.input_signature is None: arglen = len(args) else: arglen = len(self.input_signature) base_arg_names = self._function_spec.arg_names[:arglen] num_missing_args = arglen - len(self._function_spec.arg_names) missing_arg_names = [self._function_spec.vararg_name] * num_missing_args # Produce a list of missing args of the form ["arg_0", "arg_1", ...], # where arg is based on the self._function_spec.vararg_name. missing_arg_names = [ "%s_%d" % (arg, i) for i, arg in enumerate(missing_arg_names) ] arg_names = base_arg_names + missing_arg_names graph_function = ConcreteFunction( func_graph_module.func_graph_from_py_func( self._name, self._python_function, args, kwargs, self.input_signature, autograph=self._autograph, autograph_options=self._autograph_options, arg_names=arg_names, override_flat_arg_shapes=override_flat_arg_shapes, capture_by_value=self._capture_by_value), self._function_attributes, # Tell the ConcreteFunction to clean up its graph once it goes out of # scope. This is not the default behavior since it gets used in some # places (like Keras) where the FuncGraph lives longer than the # ConcreteFunction. shared_func_graph=False) return graph_function
[ "def", "_create_graph_function", "(", "self", ",", "args", ",", "kwargs", ",", "override_flat_arg_shapes", "=", "None", ")", ":", "if", "self", ".", "input_signature", "is", "None", ":", "arglen", "=", "len", "(", "args", ")", "else", ":", "arglen", "=", "len", "(", "self", ".", "input_signature", ")", "base_arg_names", "=", "self", ".", "_function_spec", ".", "arg_names", "[", ":", "arglen", "]", "num_missing_args", "=", "arglen", "-", "len", "(", "self", ".", "_function_spec", ".", "arg_names", ")", "missing_arg_names", "=", "[", "self", ".", "_function_spec", ".", "vararg_name", "]", "*", "num_missing_args", "# Produce a list of missing args of the form [\"arg_0\", \"arg_1\", ...],", "# where arg is based on the self._function_spec.vararg_name.", "missing_arg_names", "=", "[", "\"%s_%d\"", "%", "(", "arg", ",", "i", ")", "for", "i", ",", "arg", "in", "enumerate", "(", "missing_arg_names", ")", "]", "arg_names", "=", "base_arg_names", "+", "missing_arg_names", "graph_function", "=", "ConcreteFunction", "(", "func_graph_module", ".", "func_graph_from_py_func", "(", "self", ".", "_name", ",", "self", ".", "_python_function", ",", "args", ",", "kwargs", ",", "self", ".", "input_signature", ",", "autograph", "=", "self", ".", "_autograph", ",", "autograph_options", "=", "self", ".", "_autograph_options", ",", "arg_names", "=", "arg_names", ",", "override_flat_arg_shapes", "=", "override_flat_arg_shapes", ",", "capture_by_value", "=", "self", ".", "_capture_by_value", ")", ",", "self", ".", "_function_attributes", ",", "# Tell the ConcreteFunction to clean up its graph once it goes out of", "# scope. This is not the default behavior since it gets used in some", "# places (like Keras) where the FuncGraph lives longer than the", "# ConcreteFunction.", "shared_func_graph", "=", "False", ")", "return", "graph_function" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/eager/function.py#L2012-L2045
sdhash/sdhash
b9eff63e4e5867e910f41fd69032bbb1c94a2a5e
sdhash-ui/cherrypy/_cpchecker.py
python
Checker._compat
(self, config)
Process config and warn on each obsolete or deprecated entry.
Process config and warn on each obsolete or deprecated entry.
[ "Process", "config", "and", "warn", "on", "each", "obsolete", "or", "deprecated", "entry", "." ]
def _compat(self, config): """Process config and warn on each obsolete or deprecated entry.""" for section, conf in config.items(): if isinstance(conf, dict): for k, v in conf.items(): if k in self.obsolete: warnings.warn("%r is obsolete. Use %r instead.\n" "section: [%s]" % (k, self.obsolete[k], section)) elif k in self.deprecated: warnings.warn("%r is deprecated. Use %r instead.\n" "section: [%s]" % (k, self.deprecated[k], section)) else: if section in self.obsolete: warnings.warn("%r is obsolete. Use %r instead." % (section, self.obsolete[section])) elif section in self.deprecated: warnings.warn("%r is deprecated. Use %r instead." % (section, self.deprecated[section]))
[ "def", "_compat", "(", "self", ",", "config", ")", ":", "for", "section", ",", "conf", "in", "config", ".", "items", "(", ")", ":", "if", "isinstance", "(", "conf", ",", "dict", ")", ":", "for", "k", ",", "v", "in", "conf", ".", "items", "(", ")", ":", "if", "k", "in", "self", ".", "obsolete", ":", "warnings", ".", "warn", "(", "\"%r is obsolete. Use %r instead.\\n\"", "\"section: [%s]\"", "%", "(", "k", ",", "self", ".", "obsolete", "[", "k", "]", ",", "section", ")", ")", "elif", "k", "in", "self", ".", "deprecated", ":", "warnings", ".", "warn", "(", "\"%r is deprecated. Use %r instead.\\n\"", "\"section: [%s]\"", "%", "(", "k", ",", "self", ".", "deprecated", "[", "k", "]", ",", "section", ")", ")", "else", ":", "if", "section", "in", "self", ".", "obsolete", ":", "warnings", ".", "warn", "(", "\"%r is obsolete. Use %r instead.\"", "%", "(", "section", ",", "self", ".", "obsolete", "[", "section", "]", ")", ")", "elif", "section", "in", "self", ".", "deprecated", ":", "warnings", ".", "warn", "(", "\"%r is deprecated. Use %r instead.\"", "%", "(", "section", ",", "self", ".", "deprecated", "[", "section", "]", ")", ")" ]
https://github.com/sdhash/sdhash/blob/b9eff63e4e5867e910f41fd69032bbb1c94a2a5e/sdhash-ui/cherrypy/_cpchecker.py#L187-L206
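The pattern is a rename table consulted on every key, with warnings.warn reporting the replacement. A minimal sketch with an invented option name standing in for the real CherryPy tables:

import warnings

DEPRECATED = {'log_file': 'log.error_file'}  # hypothetical rename table

def check_config(config):
    for key in config:
        if key in DEPRECATED:
            warnings.warn('%r is deprecated. Use %r instead.'
                          % (key, DEPRECATED[key]))

check_config({'log_file': '/tmp/app.log'})  # emits a UserWarning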
cinder/Cinder
e83f5bb9c01a63eec20168d02953a0879e5100f7
src/freetype/tools/glnames.py
python
dump_encoding
( file, encoding_name, encoding_list )
dump a given encoding
dump a given encoding
[ "dump", "a", "given", "encoding" ]
def dump_encoding( file, encoding_name, encoding_list ): """dump a given encoding""" write = file.write write( " /* the following are indices into the SID name table */\n" ) write( "#ifndef DEFINE_PS_TABLES_DATA\n" ) write( "#ifdef __cplusplus\n" ) write( ' extern "C"\n' ) write( "#else\n" ) write( " extern\n" ) write( "#endif\n" ) write( "#endif\n" ) write( " const unsigned short " + encoding_name + "[" + repr( len( encoding_list ) ) + "]\n" ) write( "#ifdef DEFINE_PS_TABLES_DATA\n" ) write( " =\n" ) write( " {\n" ) line = " " comma = "" col = 0 for value in encoding_list: line += comma line += "%3d" % value comma = "," col += 1 if col == 16: col = 0 comma = ",\n " write( line ) write( "\n" ) write( " }\n" ) write( "#endif /* DEFINE_PS_TABLES_DATA */\n" ) write( " ;\n\n\n" )
[ "def", "dump_encoding", "(", "file", ",", "encoding_name", ",", "encoding_list", ")", ":", "write", "=", "file", ".", "write", "write", "(", "\" /* the following are indices into the SID name table */\\n\"", ")", "write", "(", "\"#ifndef DEFINE_PS_TABLES_DATA\\n\"", ")", "write", "(", "\"#ifdef __cplusplus\\n\"", ")", "write", "(", "' extern \"C\"\\n'", ")", "write", "(", "\"#else\\n\"", ")", "write", "(", "\" extern\\n\"", ")", "write", "(", "\"#endif\\n\"", ")", "write", "(", "\"#endif\\n\"", ")", "write", "(", "\" const unsigned short \"", "+", "encoding_name", "+", "\"[\"", "+", "repr", "(", "len", "(", "encoding_list", ")", ")", "+", "\"]\\n\"", ")", "write", "(", "\"#ifdef DEFINE_PS_TABLES_DATA\\n\"", ")", "write", "(", "\" =\\n\"", ")", "write", "(", "\" {\\n\"", ")", "line", "=", "\" \"", "comma", "=", "\"\"", "col", "=", "0", "for", "value", "in", "encoding_list", ":", "line", "+=", "comma", "line", "+=", "\"%3d\"", "%", "value", "comma", "=", "\",\"", "col", "+=", "1", "if", "col", "==", "16", ":", "col", "=", "0", "comma", "=", "\",\\n \"", "write", "(", "line", ")", "write", "(", "\"\\n\"", ")", "write", "(", "\" }\\n\"", ")", "write", "(", "\"#endif /* DEFINE_PS_TABLES_DATA */\\n\"", ")", "write", "(", "\" ;\\n\\n\\n\"", ")" ]
https://github.com/cinder/Cinder/blob/e83f5bb9c01a63eec20168d02953a0879e5100f7/src/freetype/tools/glnames.py#L5211-L5245
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/concurrent/futures/process.py
python
_sendback_result
(result_queue, work_id, result=None, exception=None)
Safely send back the given result or exception
Safely send back the given result or exception
[ "Safely", "send", "back", "the", "given", "result", "or", "exception" ]
def _sendback_result(result_queue, work_id, result=None, exception=None): """Safely send back the given result or exception""" try: result_queue.put(_ResultItem(work_id, result=result, exception=exception)) except BaseException as e: exc = _ExceptionWithTraceback(e, e.__traceback__) result_queue.put(_ResultItem(work_id, exception=exc))
[ "def", "_sendback_result", "(", "result_queue", ",", "work_id", ",", "result", "=", "None", ",", "exception", "=", "None", ")", ":", "try", ":", "result_queue", ".", "put", "(", "_ResultItem", "(", "work_id", ",", "result", "=", "result", ",", "exception", "=", "exception", ")", ")", "except", "BaseException", "as", "e", ":", "exc", "=", "_ExceptionWithTraceback", "(", "e", ",", "e", ".", "__traceback__", ")", "result_queue", ".", "put", "(", "_ResultItem", "(", "work_id", ",", "exception", "=", "exc", ")", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/concurrent/futures/process.py#L201-L208
lfortran/lfortran
6c96b79f5d86746cb2ec5acc86d4b0272ea34631
grammar/asdl.py
python
parse
(filename)
Parse ASDL from the given file and return a Module node describing it.
Parse ASDL from the given file and return a Module node describing it.
[ "Parse", "ASDL", "from", "the", "given", "file", "and", "return", "a", "Module", "node", "describing", "it", "." ]
def parse(filename): """Parse ASDL from the given file and return a Module node describing it.""" with open(filename) as f: parser = ASDLParser() return parser.parse(f.read())
[ "def", "parse", "(", "filename", ")", ":", "with", "open", "(", "filename", ")", "as", "f", ":", "parser", "=", "ASDLParser", "(", ")", "return", "parser", ".", "parse", "(", "f", ".", "read", "(", ")", ")" ]
https://github.com/lfortran/lfortran/blob/6c96b79f5d86746cb2ec5acc86d4b0272ea34631/grammar/asdl.py#L195-L199
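A usage sketch, assuming the parse definition above is in scope and that the Module node keeps a name and a types mapping as in CPython's asdl.py (from which this file derives); the grammar path is hypothetical.

module = parse("ASR.asdl")      # hypothetical ASDL grammar file
print(module.name)              # the grammar's name
print(sorted(module.types))     # names of the defined node types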
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python/src/Lib/lib-tk/Tkinter.py
python
PanedWindow.__init__
(self, master=None, cnf={}, **kw)
Construct a panedwindow widget with the parent MASTER. STANDARD OPTIONS background, borderwidth, cursor, height, orient, relief, width WIDGET-SPECIFIC OPTIONS handlepad, handlesize, opaqueresize, sashcursor, sashpad, sashrelief, sashwidth, showhandle,
Construct a panedwindow widget with the parent MASTER.
[ "Construct", "a", "panedwindow", "widget", "with", "the", "parent", "MASTER", "." ]
def __init__(self, master=None, cnf={}, **kw): """Construct a panedwindow widget with the parent MASTER. STANDARD OPTIONS background, borderwidth, cursor, height, orient, relief, width WIDGET-SPECIFIC OPTIONS handlepad, handlesize, opaqueresize, sashcursor, sashpad, sashrelief, sashwidth, showhandle, """ Widget.__init__(self, master, 'panedwindow', cnf, kw)
[ "def", "__init__", "(", "self", ",", "master", "=", "None", ",", "cnf", "=", "{", "}", ",", "*", "*", "kw", ")", ":", "Widget", ".", "__init__", "(", "self", ",", "master", ",", "'panedwindow'", ",", "cnf", ",", "kw", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/lib-tk/Tkinter.py#L3631-L3645
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/keras/backend.py
python
sum
(x, axis=None, keepdims=False)
return math_ops.reduce_sum(x, axis, keepdims)
Sum of the values in a tensor, alongside the specified axis. Args: x: A tensor or variable. axis: An integer, the axis to sum over. keepdims: A boolean, whether to keep the dimensions or not. If `keepdims` is `False`, the rank of the tensor is reduced by 1. If `keepdims` is `True`, the reduced dimension is retained with length 1. Returns: A tensor with sum of `x`.
Sum of the values in a tensor, alongside the specified axis.
[ "Sum", "of", "the", "values", "in", "a", "tensor", "alongside", "the", "specified", "axis", "." ]
def sum(x, axis=None, keepdims=False): """Sum of the values in a tensor, alongside the specified axis. Args: x: A tensor or variable. axis: An integer, the axis to sum over. keepdims: A boolean, whether to keep the dimensions or not. If `keepdims` is `False`, the rank of the tensor is reduced by 1. If `keepdims` is `True`, the reduced dimension is retained with length 1. Returns: A tensor with sum of `x`. """ return math_ops.reduce_sum(x, axis, keepdims)
[ "def", "sum", "(", "x", ",", "axis", "=", "None", ",", "keepdims", "=", "False", ")", ":", "return", "math_ops", ".", "reduce_sum", "(", "x", ",", "axis", ",", "keepdims", ")" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/keras/backend.py#L2322-L2336
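A quick check of the keepdims contract described in the docstring, assuming a TensorFlow 2.x install where this backend function is exposed as tf.keras.backend.sum.

import tensorflow as tf

x = tf.constant([[1., 2.], [3., 4.]])
print(tf.keras.backend.sum(x, axis=1).shape)                 # (2,)   rank reduced by 1
print(tf.keras.backend.sum(x, axis=1, keepdims=True).shape)  # (2, 1) reduced dim kept as length 1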
RobotLocomotion/drake
0e18a34604c45ed65bc9018a54f7610f91cdad5b
tools/workspace/drake_visualizer/_drake_visualizer_builtin_scripts/show_hydroelastic_contact.py
python
_BodyContact.clean
(self, timestamp: int)
Removes all contacts that don't match the given timestamp.
Removes all contacts that don't match the given timestamp.
[ "Removes", "all", "contacts", "that", "don", "t", "match", "the", "given", "timestamp", "." ]
def clean(self, timestamp: int): """Removes all contacts that don't match the given timestamp.""" to_remove = [] for key, contact in self._contacts.items(): if contact.timestamp != timestamp: contact.clear() to_remove.append(key) for key in to_remove: del self._contacts[key] if len(self) == 0: self.clear()
[ "def", "clean", "(", "self", ",", "timestamp", ":", "int", ")", ":", "to_remove", "=", "[", "]", "for", "key", ",", "contact", "in", "self", ".", "_contacts", ".", "items", "(", ")", ":", "if", "contact", ".", "timestamp", "!=", "timestamp", ":", "contact", ".", "clear", "(", ")", "to_remove", ".", "append", "(", "key", ")", "for", "key", "in", "to_remove", ":", "del", "self", ".", "_contacts", "[", "key", "]", "if", "len", "(", "self", ")", "==", "0", ":", "self", ".", "clear", "(", ")" ]
https://github.com/RobotLocomotion/drake/blob/0e18a34604c45ed65bc9018a54f7610f91cdad5b/tools/workspace/drake_visualizer/_drake_visualizer_builtin_scripts/show_hydroelastic_contact.py#L681-L691
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/agw/hypertreelist.py
python
TreeListHeaderWindow.SetCustomRenderer
(self, renderer=None)
Associate a custom renderer with the header - all columns will use it :param `renderer`: a class able to correctly render header buttons :note: the renderer class **must** implement the method `DrawHeaderButton`
Associate a custom renderer with the header - all columns will use it
[ "Associate", "a", "custom", "renderer", "with", "the", "header", "-", "all", "columns", "will", "use", "it" ]
def SetCustomRenderer(self, renderer=None): """ Associate a custom renderer with the header - all columns will use it :param `renderer`: a class able to correctly render header buttons :note: the renderer class **must** implement the method `DrawHeaderButton` """ self._headerCustomRenderer = renderer
[ "def", "SetCustomRenderer", "(", "self", ",", "renderer", "=", "None", ")", ":", "self", ".", "_headerCustomRenderer", "=", "renderer" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/hypertreelist.py#L916-L925
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/keras/engine/keras_tensor.py
python
KerasTensor.dtype
(self)
return self._type_spec._dtype
Returns the `dtype` symbolically inferred for this Keras output.
Returns the `dtype` symbolically inferred for this Keras output.
[ "Returns", "the", "dtype", "symbolically", "inferred", "for", "this", "Keras", "output", "." ]
def dtype(self): """Returns the `dtype` symbolically inferred for this Keras output.""" # TODO(kaftan): This is only valid for normal/sparse/ragged tensors. # may need to raise an error when it's not valid for a type_spec, # but some keras code (e.g. build-related stuff) will likely fail when # it can't access shape or dtype return self._type_spec._dtype
[ "def", "dtype", "(", "self", ")", ":", "# TODO(kaftan): This is only valid for normal/sparse/ragged tensors.", "# may need to raise an error when it's not valid for a type_spec,", "# but some keras code (e.g. build-related stuff) will likely fail when", "# it can't access shape or dtype", "return", "self", ".", "_type_spec", ".", "_dtype" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/keras/engine/keras_tensor.py#L322-L328
OGRECave/ogre-next
287307980e6de8910f04f3cc0994451b075071fd
Tools/BlenderExport/ogrepkg/base.py
python
Log.__init__
(self)
return
Constructor.
Constructor.
[ "Constructor", "." ]
def __init__(self): """Constructor. """ Singleton.__init__(self) Model.__init__(self) self.clear() return
[ "def", "__init__", "(", "self", ")", ":", "Singleton", ".", "__init__", "(", "self", ")", "Model", ".", "__init__", "(", "self", ")", "self", ".", "clear", "(", ")", "return" ]
https://github.com/OGRECave/ogre-next/blob/287307980e6de8910f04f3cc0994451b075071fd/Tools/BlenderExport/ogrepkg/base.py#L117-L123
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/pandas/py2/pandas/io/sql.py
python
_wrap_result
(data, columns, index_col=None, coerce_float=True, parse_dates=None)
return frame
Wrap result set of query in a DataFrame.
Wrap result set of query in a DataFrame.
[ "Wrap", "result", "set", "of", "query", "in", "a", "DataFrame", "." ]
def _wrap_result(data, columns, index_col=None, coerce_float=True, parse_dates=None): """Wrap result set of query in a DataFrame.""" frame = DataFrame.from_records(data, columns=columns, coerce_float=coerce_float) frame = _parse_date_columns(frame, parse_dates) if index_col is not None: frame.set_index(index_col, inplace=True) return frame
[ "def", "_wrap_result", "(", "data", ",", "columns", ",", "index_col", "=", "None", ",", "coerce_float", "=", "True", ",", "parse_dates", "=", "None", ")", ":", "frame", "=", "DataFrame", ".", "from_records", "(", "data", ",", "columns", "=", "columns", ",", "coerce_float", "=", "coerce_float", ")", "frame", "=", "_parse_date_columns", "(", "frame", ",", "parse_dates", ")", "if", "index_col", "is", "not", "None", ":", "frame", ".", "set_index", "(", "index_col", ",", "inplace", "=", "True", ")", "return", "frame" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/io/sql.py#L136-L148
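_wrap_result is private to pandas.io.sql, but its behaviour is just from_records plus optional date parsing and index setting; a rough public-API equivalent with fabricated rows.

import pandas as pd

rows = [(1, "2019-01-02", 3.5), (2, "2019-01-03", 4.0)]
frame = pd.DataFrame.from_records(rows, columns=["id", "day", "value"], coerce_float=True)
frame["day"] = pd.to_datetime(frame["day"])  # what parse_dates triggers internally
frame.set_index("id", inplace=True)          # what index_col triggers internally
print(frame)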
root-project/root
fcd3583bb14852bf2e8cd2415717cbaac0e75896
main/python/cmdLineUtils.py
python
isTree
(rootFile,pathSplit)
Return True if the object, corresponding to (rootFile,pathSplit), inherits from TTree
Return True if the object, corresponding to (rootFile,pathSplit), inherits from TTree
[ "Return", "True", "if", "the", "object", "corresponding", "to", "(", "rootFile", "pathSplit", ")", "inherits", "from", "TTree" ]
def isTree(rootFile,pathSplit): """ Return True if the object, corresponding to (rootFile,pathSplit), inherits from TTree """ if pathSplit == []: return False # the object is the rootFile itself else: return isTreeKey(getKey(rootFile,pathSplit))
[ "def", "isTree", "(", "rootFile", ",", "pathSplit", ")", ":", "if", "pathSplit", "==", "[", "]", ":", "return", "False", "# the object is the rootFile itself", "else", ":", "return", "isTreeKey", "(", "getKey", "(", "rootFile", ",", "pathSplit", ")", ")" ]
https://github.com/root-project/root/blob/fcd3583bb14852bf2e8cd2415717cbaac0e75896/main/python/cmdLineUtils.py#L223-L228
SequoiaDB/SequoiaDB
2894ed7e5bd6fe57330afc900cf76d0ff0df9f64
driver/python/pysequoiadb/collectionspace.py
python
collectionspace.__getattr__
(self, name)
support client.cs to access to collection. eg. cc = client() cs = cc.test cl = cs.test_cl # access to collection named 'test_cl' and we should pass '__members__' and '__methods__', because dir(cc) will invoke __getattr__("__members__") and __getattr__("__methods__"). if success, a collection object will be returned. Exceptions: pysequoiadb.error.SDBBaseError
support client.cs to access to collection.
[ "support", "client", ".", "cs", "to", "access", "to", "collection", "." ]
def __getattr__(self, name): """support client.cs to access to collection. eg. cc = client() cs = cc.test cl = cs.test_cl # access to collection named 'test_cl' and we should pass '__members__' and '__methods__', because dir(cc) will invoke __getattr__("__members__") and __getattr__("__methods__"). if success, a collection object will be returned. Exceptions: pysequoiadb.error.SDBBaseError """ if '__members__' == name or '__methods__' == name: pass else: cl = collection() try: rc = sdb.cs_get_collection(self._cs, name, cl._cl) raise_if_error(rc, "Failed to get collection: %s" % name) except SDBBaseError: del cl raise return cl
[ "def", "__getattr__", "(", "self", ",", "name", ")", ":", "if", "'__members__'", "==", "name", "or", "'__methods__'", "==", "name", ":", "pass", "else", ":", "cl", "=", "collection", "(", ")", "try", ":", "rc", "=", "sdb", ".", "cs_get_collection", "(", "self", ".", "_cs", ",", "name", ",", "cl", ".", "_cl", ")", "raise_if_error", "(", "rc", ",", "\"Failed to get collection: %s\"", "%", "name", ")", "except", "SDBBaseError", ":", "del", "cl", "raise", "return", "cl" ]
https://github.com/SequoiaDB/SequoiaDB/blob/2894ed7e5bd6fe57330afc900cf76d0ff0df9f64/driver/python/pysequoiadb/collectionspace.py#L87-L115
natanielruiz/android-yolo
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
jni-build/jni/include/tensorflow/models/rnn/translate/translate.py
python
self_test
()
Test the translation model.
Test the translation model.
[ "Test", "the", "translation", "model", "." ]
def self_test(): """Test the translation model.""" with tf.Session() as sess: print("Self-test for neural translation model.") # Create model with vocabularies of 10, 2 small buckets, 2 layers of 32. model = seq2seq_model.Seq2SeqModel(10, 10, [(3, 3), (6, 6)], 32, 2, 5.0, 32, 0.3, 0.99, num_samples=8) sess.run(tf.initialize_all_variables()) # Fake data set for both the (3, 3) and (6, 6) bucket. data_set = ([([1, 1], [2, 2]), ([3, 3], [4]), ([5], [6])], [([1, 1, 1, 1, 1], [2, 2, 2, 2, 2]), ([3, 3, 3], [5, 6])]) for _ in xrange(5): # Train the fake model for 5 steps. bucket_id = random.choice([0, 1]) encoder_inputs, decoder_inputs, target_weights = model.get_batch( data_set, bucket_id) model.step(sess, encoder_inputs, decoder_inputs, target_weights, bucket_id, False)
[ "def", "self_test", "(", ")", ":", "with", "tf", ".", "Session", "(", ")", "as", "sess", ":", "print", "(", "\"Self-test for neural translation model.\"", ")", "# Create model with vocabularies of 10, 2 small buckets, 2 layers of 32.", "model", "=", "seq2seq_model", ".", "Seq2SeqModel", "(", "10", ",", "10", ",", "[", "(", "3", ",", "3", ")", ",", "(", "6", ",", "6", ")", "]", ",", "32", ",", "2", ",", "5.0", ",", "32", ",", "0.3", ",", "0.99", ",", "num_samples", "=", "8", ")", "sess", ".", "run", "(", "tf", ".", "initialize_all_variables", "(", ")", ")", "# Fake data set for both the (3, 3) and (6, 6) bucket.", "data_set", "=", "(", "[", "(", "[", "1", ",", "1", "]", ",", "[", "2", ",", "2", "]", ")", ",", "(", "[", "3", ",", "3", "]", ",", "[", "4", "]", ")", ",", "(", "[", "5", "]", ",", "[", "6", "]", ")", "]", ",", "[", "(", "[", "1", ",", "1", ",", "1", ",", "1", ",", "1", "]", ",", "[", "2", ",", "2", ",", "2", ",", "2", ",", "2", "]", ")", ",", "(", "[", "3", ",", "3", ",", "3", "]", ",", "[", "5", ",", "6", "]", ")", "]", ")", "for", "_", "in", "xrange", "(", "5", ")", ":", "# Train the fake model for 5 steps.", "bucket_id", "=", "random", ".", "choice", "(", "[", "0", ",", "1", "]", ")", "encoder_inputs", ",", "decoder_inputs", ",", "target_weights", "=", "model", ".", "get_batch", "(", "data_set", ",", "bucket_id", ")", "model", ".", "step", "(", "sess", ",", "encoder_inputs", ",", "decoder_inputs", ",", "target_weights", ",", "bucket_id", ",", "False", ")" ]
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/models/rnn/translate/translate.py#L250-L267
deepmind/open_spiel
4ca53bea32bb2875c7385d215424048ae92f78c8
open_spiel/python/jax/nfsp.py
python
ReservoirBuffer.sample
(self, num_samples)
return random.sample(self._data, num_samples)
Returns `num_samples` uniformly sampled from the buffer. Args: num_samples: `int`, number of samples to draw. Returns: An iterable over `num_samples` random elements of the buffer. Raises: ValueError: If there are less than `num_samples` elements in the buffer
Returns `num_samples` uniformly sampled from the buffer.
[ "Returns", "num_samples", "uniformly", "sampled", "from", "the", "buffer", "." ]
def sample(self, num_samples): """Returns `num_samples` uniformly sampled from the buffer. Args: num_samples: `int`, number of samples to draw. Returns: An iterable over `num_samples` random elements of the buffer. Raises: ValueError: If there are less than `num_samples` elements in the buffer """ if len(self._data) < num_samples: raise ValueError("{} elements could not be sampled from size {}".format( num_samples, len(self._data))) return random.sample(self._data, num_samples)
[ "def", "sample", "(", "self", ",", "num_samples", ")", ":", "if", "len", "(", "self", ".", "_data", ")", "<", "num_samples", ":", "raise", "ValueError", "(", "\"{} elements could not be sampled from size {}\"", ".", "format", "(", "num_samples", ",", "len", "(", "self", ".", "_data", ")", ")", ")", "return", "random", ".", "sample", "(", "self", ".", "_data", ",", "num_samples", ")" ]
https://github.com/deepmind/open_spiel/blob/4ca53bea32bb2875c7385d215424048ae92f78c8/open_spiel/python/jax/nfsp.py#L361-L376
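The sampling itself is delegated to random.sample, so the contract (uniform, without replacement, ValueError when over-asked) can be seen standalone without the surrounding buffer class.

import random

data = list(range(10))
print(random.sample(data, 3))  # three distinct elements, uniform without replacement
try:
    random.sample(data, 20)    # more samples than elements
except ValueError as e:
    print("raises:", e)        # the buffer's explicit size check mirrors this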
cms-sw/cmssw
fd9de012d503d3405420bcbeec0ec879baa57cf2
Configuration/PyReleaseValidation/python/relval_machine.py
python
convert_keys_to_string
(dictionary)
Recursively converts dictionary keys to strings. Utility to help deal with unicode keys in dictionaries created from json requests. In order to pass dict to function as **kwarg we should transform key/value to str.
Recursively converts dictionary keys to strings. Utility to help deal with unicode keys in dictionaries created from json requests. In order to pass dict to function as **kwarg we should transform key/value to str.
[ "Recursively", "converts", "dictionary", "keys", "to", "strings", ".", "Utility", "to", "help", "deal", "with", "unicode", "keys", "in", "dictionaries", "created", "from", "json", "requests", ".", "In", "order", "to", "pass", "dict", "to", "function", "as", "**", "kwarg", "we", "should", "transform", "key", "/", "value", "to", "str", "." ]
def convert_keys_to_string(dictionary): """ Recursively converts dictionary keys to strings. Utility to help deal with unicode keys in dictionaries created from json requests. In order to pass dict to function as **kwarg we should transform key/value to str. """ if isinstance(dictionary, str): return str(dictionary) elif isinstance(dictionary, collections.Mapping): return dict(map(convert_keys_to_string, dictionary.items())) elif isinstance(dictionary, collections.Iterable): return type(dictionary)(map(convert_keys_to_string, dictionary)) else: return dictionary
[ "def", "convert_keys_to_string", "(", "dictionary", ")", ":", "if", "isinstance", "(", "dictionary", ",", "str", ")", ":", "return", "str", "(", "dictionary", ")", "elif", "isinstance", "(", "dictionary", ",", "collections", ".", "Mapping", ")", ":", "return", "dict", "(", "map", "(", "convert_keys_to_string", ",", "dictionary", ".", "items", "(", ")", ")", ")", "elif", "isinstance", "(", "dictionary", ",", "collections", ".", "Iterable", ")", ":", "return", "type", "(", "dictionary", ")", "(", "map", "(", "convert_keys_to_string", ",", "dictionary", ")", ")", "else", ":", "return", "dictionary" ]
https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/Configuration/PyReleaseValidation/python/relval_machine.py#L35-L47
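A small demonstration of why the recursion matters, assuming the convert_keys_to_string definition above is in scope: on Python 2, where this module was written, **kwargs rejects unicode keys, so every key must become a plain str first.

d = {u"steps": [u"a", u"b"], u"opts": {u"depth": 2}}
clean = convert_keys_to_string(d)

def run(steps=None, opts=None):
    print(steps, opts)

run(**clean)  # works because every key, at every nesting level, is now str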
okex/V3-Open-API-SDK
c5abb0db7e2287718e0055e17e57672ce0ec7fd9
okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/cachecontrol/controller.py
python
CacheController._urlnorm
(cls, uri)
return defrag_uri
Normalize the URL to create a safe key for the cache
Normalize the URL to create a safe key for the cache
[ "Normalize", "the", "URL", "to", "create", "a", "safe", "key", "for", "the", "cache" ]
def _urlnorm(cls, uri): """Normalize the URL to create a safe key for the cache""" (scheme, authority, path, query, fragment) = parse_uri(uri) if not scheme or not authority: raise Exception("Only absolute URIs are allowed. uri = %s" % uri) scheme = scheme.lower() authority = authority.lower() if not path: path = "/" # Could do syntax based normalization of the URI before # computing the digest. See Section 6.2.2 of Std 66. request_uri = query and "?".join([path, query]) or path defrag_uri = scheme + "://" + authority + request_uri return defrag_uri
[ "def", "_urlnorm", "(", "cls", ",", "uri", ")", ":", "(", "scheme", ",", "authority", ",", "path", ",", "query", ",", "fragment", ")", "=", "parse_uri", "(", "uri", ")", "if", "not", "scheme", "or", "not", "authority", ":", "raise", "Exception", "(", "\"Only absolute URIs are allowed. uri = %s\"", "%", "uri", ")", "scheme", "=", "scheme", ".", "lower", "(", ")", "authority", "=", "authority", ".", "lower", "(", ")", "if", "not", "path", ":", "path", "=", "\"/\"", "# Could do syntax based normalization of the URI before", "# computing the digest. See Section 6.2.2 of Std 66.", "request_uri", "=", "query", "and", "\"?\"", ".", "join", "(", "[", "path", ",", "query", "]", ")", "or", "path", "defrag_uri", "=", "scheme", "+", "\"://\"", "+", "authority", "+", "request_uri", "return", "defrag_uri" ]
https://github.com/okex/V3-Open-API-SDK/blob/c5abb0db7e2287718e0055e17e57672ce0ec7fd9/okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/cachecontrol/controller.py#L43-L60
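parse_uri here comes from cachecontrol's vendored httplib2 helpers, so the sketch below reproduces the same normalization with the standard library instead (an assumption, not pip's code): lowercase the scheme and authority, default the path to /, keep the query, drop the fragment.

from urllib.parse import urlsplit

def urlnorm(uri):
    # Rough public-API equivalent of CacheController._urlnorm.
    parts = urlsplit(uri)
    if not parts.scheme or not parts.netloc:
        raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
    path = parts.path or "/"
    request_uri = path + ("?" + parts.query if parts.query else "")
    return parts.scheme.lower() + "://" + parts.netloc.lower() + request_uri

print(urlnorm("HTTP://Example.COM/a/b?x=1#frag"))  # http://example.com/a/b?x=1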
plumonito/dtslam
5994bb9cf7a11981b830370db206bceb654c085d
3rdparty/opencv-git/doc/pattern_tools/svgfig.py
python
Frame.__init__
(self, xmin, xmax, ymin, ymax, *d, **kwds)
Acts like Fig, but draws a coordinate frame around the data. You also need to supply plot ranges. Frame(xmin, xmax, ymin, ymax, obj, obj, obj..., keyword options...) xmin, xmax required minimum and maximum x values (in the objs' coordinates) ymin, ymax required minimum and maximum y values (in the objs' coordinates) obj optional list drawing primitives keyword options keyword list options defined below The following are keyword options, with their default values: x, y 20, 5 upper-left corner of the Frame in SVG coordinates width, height 75, 80 width and height of the Frame in SVG coordinates flipx, flipy False, True flip the sign of the coordinate axis minusInfinity -1000 if an axis is logarithmic and an object is plotted at 0 or a negative value, -1000 will be used as a stand-in for NaN xtitle None if a string, label the x axis xticks -10 request ticks according to the standard tick specification (see help(Ticks)) xminiticks True request miniticks according to the standard minitick specification xlabels True request tick labels according to the standard tick label specification xlogbase None if a number, the axis and transformation are logarithmic with ticks at the given base (10 being the most common) (same for y) text_attr {} a dictionary of attributes for label text axis_attr {} a dictionary of attributes for the axis lines
Acts like Fig, but draws a coordinate frame around the data. You also need to supply plot ranges.
[ "Acts", "like", "Fig", "but", "draws", "a", "coordinate", "frame", "around", "the", "data", ".", "You", "also", "need", "to", "supply", "plot", "ranges", "." ]
def __init__(self, xmin, xmax, ymin, ymax, *d, **kwds): """Acts like Fig, but draws a coordinate frame around the data. You also need to supply plot ranges. Frame(xmin, xmax, ymin, ymax, obj, obj, obj..., keyword options...) xmin, xmax required minimum and maximum x values (in the objs' coordinates) ymin, ymax required minimum and maximum y values (in the objs' coordinates) obj optional list drawing primitives keyword options keyword list options defined below The following are keyword options, with their default values: x, y 20, 5 upper-left corner of the Frame in SVG coordinates width, height 75, 80 width and height of the Frame in SVG coordinates flipx, flipy False, True flip the sign of the coordinate axis minusInfinity -1000 if an axis is logarithmic and an object is plotted at 0 or a negative value, -1000 will be used as a stand-in for NaN xtitle None if a string, label the x axis xticks -10 request ticks according to the standard tick specification (see help(Ticks)) xminiticks True request miniticks according to the standard minitick specification xlabels True request tick labels according to the standard tick label specification xlogbase None if a number, the axis and transformation are logarithmic with ticks at the given base (10 being the most common) (same for y) text_attr {} a dictionary of attributes for label text axis_attr {} a dictionary of attributes for the axis lines """ self.xmin, self.xmax, self.ymin, self.ymax = xmin, xmax, ymin, ymax self.d = list(d) defaults = {"x": 20, "y": 5, "width": 75, "height": 80, "flipx": False, "flipy": True, "minusInfinity": -1000, "xtitle": None, "xticks": -10, "xminiticks": True, "xlabels": True, "x2labels": None, "xlogbase": None, "ytitle": None, "yticks": -10, "yminiticks": True, "ylabels": True, "y2labels": None, "ylogbase": None, "text_attr": {}, "axis_attr": {}, } defaults.update(kwds) kwds = defaults self.x = kwds["x"]; del kwds["x"] self.y = kwds["y"]; del kwds["y"] self.width = kwds["width"]; del kwds["width"] self.height = kwds["height"]; del kwds["height"] self.flipx = kwds["flipx"]; del kwds["flipx"] self.flipy = kwds["flipy"]; del kwds["flipy"] self.minusInfinity = kwds["minusInfinity"]; del kwds["minusInfinity"] self.xtitle = kwds["xtitle"]; del kwds["xtitle"] self.xticks = kwds["xticks"]; del kwds["xticks"] self.xminiticks = kwds["xminiticks"]; del kwds["xminiticks"] self.xlabels = kwds["xlabels"]; del kwds["xlabels"] self.x2labels = kwds["x2labels"]; del kwds["x2labels"] self.xlogbase = kwds["xlogbase"]; del kwds["xlogbase"] self.ytitle = kwds["ytitle"]; del kwds["ytitle"] self.yticks = kwds["yticks"]; del kwds["yticks"] self.yminiticks = kwds["yminiticks"]; del kwds["yminiticks"] self.ylabels = kwds["ylabels"]; del kwds["ylabels"] self.y2labels = kwds["y2labels"]; del kwds["y2labels"] self.ylogbase = kwds["ylogbase"]; del kwds["ylogbase"] self.text_attr = dict(self.text_defaults) self.text_attr.update(kwds["text_attr"]); del kwds["text_attr"] self.axis_attr = dict(self.axis_defaults) self.axis_attr.update(kwds["axis_attr"]); del kwds["axis_attr"] if len(kwds) != 0: raise TypeError, "Frame() got unexpected keyword arguments %s" % kwds.keys()
[ "def", "__init__", "(", "self", ",", "xmin", ",", "xmax", ",", "ymin", ",", "ymax", ",", "*", "d", ",", "*", "*", "kwds", ")", ":", "self", ".", "xmin", ",", "self", ".", "xmax", ",", "self", ".", "ymin", ",", "self", ".", "ymax", "=", "xmin", ",", "xmax", ",", "ymin", ",", "ymax", "self", ".", "d", "=", "list", "(", "d", ")", "defaults", "=", "{", "\"x\"", ":", "20", ",", "\"y\"", ":", "5", ",", "\"width\"", ":", "75", ",", "\"height\"", ":", "80", ",", "\"flipx\"", ":", "False", ",", "\"flipy\"", ":", "True", ",", "\"minusInfinity\"", ":", "-", "1000", ",", "\"xtitle\"", ":", "None", ",", "\"xticks\"", ":", "-", "10", ",", "\"xminiticks\"", ":", "True", ",", "\"xlabels\"", ":", "True", ",", "\"x2labels\"", ":", "None", ",", "\"xlogbase\"", ":", "None", ",", "\"ytitle\"", ":", "None", ",", "\"yticks\"", ":", "-", "10", ",", "\"yminiticks\"", ":", "True", ",", "\"ylabels\"", ":", "True", ",", "\"y2labels\"", ":", "None", ",", "\"ylogbase\"", ":", "None", ",", "\"text_attr\"", ":", "{", "}", ",", "\"axis_attr\"", ":", "{", "}", ",", "}", "defaults", ".", "update", "(", "kwds", ")", "kwds", "=", "defaults", "self", ".", "x", "=", "kwds", "[", "\"x\"", "]", "del", "kwds", "[", "\"x\"", "]", "self", ".", "y", "=", "kwds", "[", "\"y\"", "]", "del", "kwds", "[", "\"y\"", "]", "self", ".", "width", "=", "kwds", "[", "\"width\"", "]", "del", "kwds", "[", "\"width\"", "]", "self", ".", "height", "=", "kwds", "[", "\"height\"", "]", "del", "kwds", "[", "\"height\"", "]", "self", ".", "flipx", "=", "kwds", "[", "\"flipx\"", "]", "del", "kwds", "[", "\"flipx\"", "]", "self", ".", "flipy", "=", "kwds", "[", "\"flipy\"", "]", "del", "kwds", "[", "\"flipy\"", "]", "self", ".", "minusInfinity", "=", "kwds", "[", "\"minusInfinity\"", "]", "del", "kwds", "[", "\"minusInfinity\"", "]", "self", ".", "xtitle", "=", "kwds", "[", "\"xtitle\"", "]", "del", "kwds", "[", "\"xtitle\"", "]", "self", ".", "xticks", "=", "kwds", "[", "\"xticks\"", "]", "del", "kwds", "[", "\"xticks\"", "]", "self", ".", "xminiticks", "=", "kwds", "[", "\"xminiticks\"", "]", "del", "kwds", "[", "\"xminiticks\"", "]", "self", ".", "xlabels", "=", "kwds", "[", "\"xlabels\"", "]", "del", "kwds", "[", "\"xlabels\"", "]", "self", ".", "x2labels", "=", "kwds", "[", "\"x2labels\"", "]", "del", "kwds", "[", "\"x2labels\"", "]", "self", ".", "xlogbase", "=", "kwds", "[", "\"xlogbase\"", "]", "del", "kwds", "[", "\"xlogbase\"", "]", "self", ".", "ytitle", "=", "kwds", "[", "\"ytitle\"", "]", "del", "kwds", "[", "\"ytitle\"", "]", "self", ".", "yticks", "=", "kwds", "[", "\"yticks\"", "]", "del", "kwds", "[", "\"yticks\"", "]", "self", ".", "yminiticks", "=", "kwds", "[", "\"yminiticks\"", "]", "del", "kwds", "[", "\"yminiticks\"", "]", "self", ".", "ylabels", "=", "kwds", "[", "\"ylabels\"", "]", "del", "kwds", "[", "\"ylabels\"", "]", "self", ".", "y2labels", "=", "kwds", "[", "\"y2labels\"", "]", "del", "kwds", "[", "\"y2labels\"", "]", "self", ".", "ylogbase", "=", "kwds", "[", "\"ylogbase\"", "]", "del", "kwds", "[", "\"ylogbase\"", "]", "self", ".", "text_attr", "=", "dict", "(", "self", ".", "text_defaults", ")", "self", ".", "text_attr", ".", "update", "(", "kwds", "[", "\"text_attr\"", "]", ")", "del", "kwds", "[", "\"text_attr\"", "]", "self", ".", "axis_attr", "=", "dict", "(", "self", ".", "axis_defaults", ")", "self", ".", "axis_attr", ".", "update", "(", "kwds", "[", "\"axis_attr\"", "]", ")", "del", "kwds", "[", "\"axis_attr\"", "]", "if", "len", "(", "kwds", ")", "!=", "0", ":", "raise", "TypeError", ",", "\"Frame() got unexpected keyword arguments %s\"", "%", "kwds", ".", "keys", "(", ")" ]
https://github.com/plumonito/dtslam/blob/5994bb9cf7a11981b830370db206bceb654c085d/3rdparty/opencv-git/doc/pattern_tools/svgfig.py#L971-L1042
opencv/opencv
76aff8478883858f0e46746044348ebb16dc3c67
samples/dnn/speech_recognition.py
python
FilterbankFeatures.frame
(self, x, frame_length, hop_length)
return np.lib.stride_tricks.as_strided(x, shape=shape, strides=strides)
Slices a data array into (overlapping) frames. args: x : array to frame frame_length : length of frame hop_length : Number of steps to advance between frames return : A framed view of `x`
Slices a data array into (overlapping) frames. args: x : array to frame frame_length : length of frame hop_length : Number of steps to advance between frames return : A framed view of `x`
[ "Slices", "a", "data", "array", "into", "(", "overlapping", ")", "frames", ".", "args", ":", "x", ":", "array", "to", "frame", "frame_length", ":", "length", "of", "frame", "hop_length", ":", "Number", "of", "steps", "to", "advance", "between", "frames", "return", ":", "A", "framed", "view", "of", "x" ]
def frame(self, x, frame_length, hop_length): ''' Slices a data array into (overlapping) frames. args: x : array to frame frame_length : length of frame hop_length : Number of steps to advance between frames return : A framed view of `x` ''' if x.shape[-1] < frame_length: raise Exception( "Input is too short (n={:d})" " for frame_length={:d}".format(x.shape[-1], frame_length) ) x = np.asfortranarray(x) n_frames = 1 + (x.shape[-1] - frame_length) // hop_length strides = np.asarray(x.strides) new_stride = np.prod(strides[strides > 0] // x.itemsize) * x.itemsize shape = list(x.shape)[:-1] + [frame_length, n_frames] strides = list(strides) + [hop_length * new_stride] return np.lib.stride_tricks.as_strided(x, shape=shape, strides=strides)
[ "def", "frame", "(", "self", ",", "x", ",", "frame_length", ",", "hop_length", ")", ":", "if", "x", ".", "shape", "[", "-", "1", "]", "<", "frame_length", ":", "raise", "Exception", "(", "\"Input is too short (n={:d})\"", "\" for frame_length={:d}\"", ".", "format", "(", "x", ".", "shape", "[", "-", "1", "]", ",", "frame_length", ")", ")", "x", "=", "np", ".", "asfortranarray", "(", "x", ")", "n_frames", "=", "1", "+", "(", "x", ".", "shape", "[", "-", "1", "]", "-", "frame_length", ")", "//", "hop_length", "strides", "=", "np", ".", "asarray", "(", "x", ".", "strides", ")", "new_stride", "=", "np", ".", "prod", "(", "strides", "[", "strides", ">", "0", "]", "//", "x", ".", "itemsize", ")", "*", "x", ".", "itemsize", "shape", "=", "list", "(", "x", ".", "shape", ")", "[", ":", "-", "1", "]", "+", "[", "frame_length", ",", "n_frames", "]", "strides", "=", "list", "(", "strides", ")", "+", "[", "hop_length", "*", "new_stride", "]", "return", "np", ".", "lib", ".", "stride_tricks", ".", "as_strided", "(", "x", ",", "shape", "=", "shape", ",", "strides", "=", "strides", ")" ]
https://github.com/opencv/opencv/blob/76aff8478883858f0e46746044348ebb16dc3c67/samples/dnn/speech_recognition.py#L281-L301
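The stride arithmetic is easier to see on a concrete 1-D signal; a small numpy-only check that column j of the framed view is x[j*hop : j*hop + frame_length], matching what the method computes.

import numpy as np

x = np.arange(10, dtype=np.float32)
frame_length, hop_length = 4, 2
n_frames = 1 + (x.shape[-1] - frame_length) // hop_length   # 4 frames
frames = np.lib.stride_tricks.as_strided(
    np.asfortranarray(x),
    shape=(frame_length, n_frames),
    strides=(x.itemsize, hop_length * x.itemsize))
print(frames.T)  # [[0 1 2 3] [2 3 4 5] [4 5 6 7] [6 7 8 9]]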
BlzFans/wke
b0fa21158312e40c5fbd84682d643022b6c34a93
cygwin/lib/python2.6/pydoc.py
python
HTMLDoc.docother
(self, object, name=None, mod=None, *ignored)
return lhs + self.repr(object)
Produce HTML documentation for a data object.
Produce HTML documentation for a data object.
[ "Produce", "HTML", "documentation", "for", "a", "data", "object", "." ]
def docother(self, object, name=None, mod=None, *ignored): """Produce HTML documentation for a data object.""" lhs = name and '<strong>%s</strong> = ' % name or '' return lhs + self.repr(object)
[ "def", "docother", "(", "self", ",", "object", ",", "name", "=", "None", ",", "mod", "=", "None", ",", "*", "ignored", ")", ":", "lhs", "=", "name", "and", "'<strong>%s</strong> = '", "%", "name", "or", "''", "return", "lhs", "+", "self", ".", "repr", "(", "object", ")" ]
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/pydoc.py#L918-L921
openthread/openthread
9fcdbed9c526c70f1556d1ed84099c1535c7cd32
tools/otci/otci/otci.py
python
OTCI.dns_set_config
(self, server: Tuple[Union[str, ipaddress.IPv6Address], int], response_timeout: int = None, max_tx_attempts: int = None, recursion_desired: bool = None)
Set DNS client query config.
Set DNS client query config.
[ "Set", "DNS", "client", "query", "config", "." ]
def dns_set_config(self, server: Tuple[Union[str, ipaddress.IPv6Address], int], response_timeout: int = None, max_tx_attempts: int = None, recursion_desired: bool = None): """Set DNS client query config.""" cmd = f'dns config {str(server[0])} {server[1]}' if response_timeout is not None: cmd += f' {response_timeout}' assert max_tx_attempts is None or response_timeout is not None, "must specify `response_timeout` if `max_tx_attempts` is specified." if max_tx_attempts is not None: cmd += f' {max_tx_attempts}' assert recursion_desired is None or max_tx_attempts is not None, 'must specify `max_tx_attempts` if `recursion_desired` is specified.' if recursion_desired is not None: cmd += f' {1 if recursion_desired else 0}' self.execute_command(cmd)
[ "def", "dns_set_config", "(", "self", ",", "server", ":", "Tuple", "[", "Union", "[", "str", ",", "ipaddress", ".", "IPv6Address", "]", ",", "int", "]", ",", "response_timeout", ":", "int", "=", "None", ",", "max_tx_attempts", ":", "int", "=", "None", ",", "recursion_desired", ":", "bool", "=", "None", ")", ":", "cmd", "=", "f'dns config {str(server[0])} {server[1]}'", "if", "response_timeout", "is", "not", "None", ":", "cmd", "+=", "f' {response_timeout}'", "assert", "max_tx_attempts", "is", "None", "or", "response_timeout", "is", "not", "None", ",", "\"must specify `response_timeout` if `max_tx_attempts` is specified.\"", "if", "max_tx_attempts", "is", "not", "None", ":", "cmd", "+=", "f' {max_tx_attempts}'", "assert", "recursion_desired", "is", "None", "or", "max_tx_attempts", "is", "not", "None", ",", "'must specify `max_tx_attempts` if `recursion_desired` is specified.'", "if", "recursion_desired", "is", "not", "None", ":", "cmd", "+=", "f' {1 if recursion_desired else 0}'", "self", ".", "execute_command", "(", "cmd", ")" ]
https://github.com/openthread/openthread/blob/9fcdbed9c526c70f1556d1ed84099c1535c7cd32/tools/otci/otci/otci.py#L799-L817
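The chained assertions encode a positional CLI: each optional field may only be appended when everything before it is present. A sketch of the command string this builds, with made-up values.

server = ("2001:db8::1", 53)        # hypothetical DNS server address and port
cmd = "dns config %s %d" % (server[0], server[1])
cmd += " 5000"                      # response_timeout must come first
cmd += " 3"                         # max_tx_attempts only after response_timeout
cmd += " 1"                         # recursion_desired (1/0) only after max_tx_attempts
print(cmd)                          # dns config 2001:db8::1 53 5000 3 1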
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py2/scipy/signal/signaltools.py
python
sosfiltfilt
(sos, x, axis=-1, padtype='odd', padlen=None)
return y
A forward-backward digital filter using cascaded second-order sections. See `filtfilt` for more complete information about this method. Parameters ---------- sos : array_like Array of second-order filter coefficients, must have shape ``(n_sections, 6)``. Each row corresponds to a second-order section, with the first three columns providing the numerator coefficients and the last three providing the denominator coefficients. x : array_like The array of data to be filtered. axis : int, optional The axis of `x` to which the filter is applied. Default is -1. padtype : str or None, optional Must be 'odd', 'even', 'constant', or None. This determines the type of extension to use for the padded signal to which the filter is applied. If `padtype` is None, no padding is used. The default is 'odd'. padlen : int or None, optional The number of elements by which to extend `x` at both ends of `axis` before applying the filter. This value must be less than ``x.shape[axis] - 1``. ``padlen=0`` implies no padding. The default value is:: 3 * (2 * len(sos) + 1 - min((sos[:, 2] == 0).sum(), (sos[:, 5] == 0).sum())) The extra subtraction at the end attempts to compensate for poles and zeros at the origin (e.g. for odd-order filters) to yield equivalent estimates of `padlen` to those of `filtfilt` for second-order section filters built with `scipy.signal` functions. Returns ------- y : ndarray The filtered output with the same shape as `x`. See Also -------- filtfilt, sosfilt, sosfilt_zi, sosfreqz Notes ----- .. versionadded:: 0.18.0 Examples -------- >>> from scipy.signal import sosfiltfilt, butter >>> import matplotlib.pyplot as plt Create an interesting signal to filter. >>> n = 201 >>> t = np.linspace(0, 1, n) >>> np.random.seed(123) >>> x = 1 + (t < 0.5) - 0.25*t**2 + 0.05*np.random.randn(n) Create a lowpass Butterworth filter, and use it to filter `x`. >>> sos = butter(4, 0.125, output='sos') >>> y = sosfiltfilt(sos, x) For comparison, apply an 8th order filter using `sosfilt`. The filter is initialized using the mean of the first four values of `x`. >>> from scipy.signal import sosfilt, sosfilt_zi >>> sos8 = butter(8, 0.125, output='sos') >>> zi = x[:4].mean() * sosfilt_zi(sos8) >>> y2, zo = sosfilt(sos8, x, zi=zi) Plot the results. Note that the phase of `y` matches the input, while `y2` has a significant phase delay. >>> plt.plot(t, x, alpha=0.5, label='x(t)') >>> plt.plot(t, y, label='y(t)') >>> plt.plot(t, y2, label='y2(t)') >>> plt.legend(framealpha=1, shadow=True) >>> plt.grid(alpha=0.25) >>> plt.xlabel('t') >>> plt.show()
A forward-backward digital filter using cascaded second-order sections.
[ "A", "forward", "-", "backward", "digital", "filter", "using", "cascaded", "second", "-", "order", "sections", "." ]
def sosfiltfilt(sos, x, axis=-1, padtype='odd', padlen=None): """ A forward-backward digital filter using cascaded second-order sections. See `filtfilt` for more complete information about this method. Parameters ---------- sos : array_like Array of second-order filter coefficients, must have shape ``(n_sections, 6)``. Each row corresponds to a second-order section, with the first three columns providing the numerator coefficients and the last three providing the denominator coefficients. x : array_like The array of data to be filtered. axis : int, optional The axis of `x` to which the filter is applied. Default is -1. padtype : str or None, optional Must be 'odd', 'even', 'constant', or None. This determines the type of extension to use for the padded signal to which the filter is applied. If `padtype` is None, no padding is used. The default is 'odd'. padlen : int or None, optional The number of elements by which to extend `x` at both ends of `axis` before applying the filter. This value must be less than ``x.shape[axis] - 1``. ``padlen=0`` implies no padding. The default value is:: 3 * (2 * len(sos) + 1 - min((sos[:, 2] == 0).sum(), (sos[:, 5] == 0).sum())) The extra subtraction at the end attempts to compensate for poles and zeros at the origin (e.g. for odd-order filters) to yield equivalent estimates of `padlen` to those of `filtfilt` for second-order section filters built with `scipy.signal` functions. Returns ------- y : ndarray The filtered output with the same shape as `x`. See Also -------- filtfilt, sosfilt, sosfilt_zi, sosfreqz Notes ----- .. versionadded:: 0.18.0 Examples -------- >>> from scipy.signal import sosfiltfilt, butter >>> import matplotlib.pyplot as plt Create an interesting signal to filter. >>> n = 201 >>> t = np.linspace(0, 1, n) >>> np.random.seed(123) >>> x = 1 + (t < 0.5) - 0.25*t**2 + 0.05*np.random.randn(n) Create a lowpass Butterworth filter, and use it to filter `x`. >>> sos = butter(4, 0.125, output='sos') >>> y = sosfiltfilt(sos, x) For comparison, apply an 8th order filter using `sosfilt`. The filter is initialized using the mean of the first four values of `x`. >>> from scipy.signal import sosfilt, sosfilt_zi >>> sos8 = butter(8, 0.125, output='sos') >>> zi = x[:4].mean() * sosfilt_zi(sos8) >>> y2, zo = sosfilt(sos8, x, zi=zi) Plot the results. Note that the phase of `y` matches the input, while `y2` has a significant phase delay. >>> plt.plot(t, x, alpha=0.5, label='x(t)') >>> plt.plot(t, y, label='y(t)') >>> plt.plot(t, y2, label='y2(t)') >>> plt.legend(framealpha=1, shadow=True) >>> plt.grid(alpha=0.25) >>> plt.xlabel('t') >>> plt.show() """ sos, n_sections = _validate_sos(sos) # `method` is "pad"... ntaps = 2 * n_sections + 1 ntaps -= min((sos[:, 2] == 0).sum(), (sos[:, 5] == 0).sum()) edge, ext = _validate_pad(padtype, padlen, x, axis, ntaps=ntaps) # These steps follow the same form as filtfilt with modifications zi = sosfilt_zi(sos) # shape (n_sections, 2) --> (n_sections, ..., 2, ...) zi_shape = [1] * x.ndim zi_shape[axis] = 2 zi.shape = [n_sections] + zi_shape x_0 = axis_slice(ext, stop=1, axis=axis) (y, zf) = sosfilt(sos, ext, axis=axis, zi=zi * x_0) y_0 = axis_slice(y, start=-1, axis=axis) (y, zf) = sosfilt(sos, axis_reverse(y, axis=axis), axis=axis, zi=zi * y_0) y = axis_reverse(y, axis=axis) if edge > 0: y = axis_slice(y, start=edge, stop=-edge, axis=axis) return y
[ "def", "sosfiltfilt", "(", "sos", ",", "x", ",", "axis", "=", "-", "1", ",", "padtype", "=", "'odd'", ",", "padlen", "=", "None", ")", ":", "sos", ",", "n_sections", "=", "_validate_sos", "(", "sos", ")", "# `method` is \"pad\"...", "ntaps", "=", "2", "*", "n_sections", "+", "1", "ntaps", "-=", "min", "(", "(", "sos", "[", ":", ",", "2", "]", "==", "0", ")", ".", "sum", "(", ")", ",", "(", "sos", "[", ":", ",", "5", "]", "==", "0", ")", ".", "sum", "(", ")", ")", "edge", ",", "ext", "=", "_validate_pad", "(", "padtype", ",", "padlen", ",", "x", ",", "axis", ",", "ntaps", "=", "ntaps", ")", "# These steps follow the same form as filtfilt with modifications", "zi", "=", "sosfilt_zi", "(", "sos", ")", "# shape (n_sections, 2) --> (n_sections, ..., 2, ...)", "zi_shape", "=", "[", "1", "]", "*", "x", ".", "ndim", "zi_shape", "[", "axis", "]", "=", "2", "zi", ".", "shape", "=", "[", "n_sections", "]", "+", "zi_shape", "x_0", "=", "axis_slice", "(", "ext", ",", "stop", "=", "1", ",", "axis", "=", "axis", ")", "(", "y", ",", "zf", ")", "=", "sosfilt", "(", "sos", ",", "ext", ",", "axis", "=", "axis", ",", "zi", "=", "zi", "*", "x_0", ")", "y_0", "=", "axis_slice", "(", "y", ",", "start", "=", "-", "1", ",", "axis", "=", "axis", ")", "(", "y", ",", "zf", ")", "=", "sosfilt", "(", "sos", ",", "axis_reverse", "(", "y", ",", "axis", "=", "axis", ")", ",", "axis", "=", "axis", ",", "zi", "=", "zi", "*", "y_0", ")", "y", "=", "axis_reverse", "(", "y", ",", "axis", "=", "axis", ")", "if", "edge", ">", "0", ":", "y", "=", "axis_slice", "(", "y", ",", "start", "=", "edge", ",", "stop", "=", "-", "edge", ",", "axis", "=", "axis", ")", "return", "y" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/signal/signaltools.py#L3299-L3407
echronos/echronos
c996f1d2c8af6c6536205eb319c1bf1d4d84569c
external_tools/ply_info/example/BASIC/basparse.py
python
p_number
(p)
number : INTEGER | FLOAT
number : INTEGER | FLOAT
[ "number", ":", "INTEGER", "|", "FLOAT" ]
def p_number(p): '''number : INTEGER | FLOAT''' p[0] = eval(p[1])
[ "def", "p_number", "(", "p", ")", ":", "p", "[", "0", "]", "=", "eval", "(", "p", "[", "1", "]", ")" ]
https://github.com/echronos/echronos/blob/c996f1d2c8af6c6536205eb319c1bf1d4d84569c/external_tools/ply_info/example/BASIC/basparse.py#L358-L361
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/pip/commands/search.py
python
transform_hits
(hits)
return package_list
The list from pypi is really a list of versions. We want a list of packages with the list of versions stored inline. This converts the list from pypi into one we can use.
The list from pypi is really a list of versions. We want a list of packages with the list of versions stored inline. This converts the list from pypi into one we can use.
[ "The", "list", "from", "pypi", "is", "really", "a", "list", "of", "versions", ".", "We", "want", "a", "list", "of", "packages", "with", "the", "list", "of", "versions", "stored", "inline", ".", "This", "converts", "the", "list", "from", "pypi", "into", "one", "we", "can", "use", "." ]
def transform_hits(hits): """ The list from pypi is really a list of versions. We want a list of packages with the list of versions stored inline. This converts the list from pypi into one we can use. """ packages = {} for hit in hits: name = hit['name'] summary = hit['summary'] version = hit['version'] score = hit['_pypi_ordering'] if score is None: score = 0 if name not in packages.keys(): packages[name] = {'name': name, 'summary': summary, 'versions': [version], 'score': score} else: packages[name]['versions'].append(version) # if this is the highest version, replace summary and score if version == highest_version(packages[name]['versions']): packages[name]['summary'] = summary packages[name]['score'] = score # each record has a unique name now, so we will convert the dict into a list sorted by score package_list = sorted(packages.values(), key=lambda x: x['score'], reverse=True) return package_list
[ "def", "transform_hits", "(", "hits", ")", ":", "packages", "=", "{", "}", "for", "hit", "in", "hits", ":", "name", "=", "hit", "[", "'name'", "]", "summary", "=", "hit", "[", "'summary'", "]", "version", "=", "hit", "[", "'version'", "]", "score", "=", "hit", "[", "'_pypi_ordering'", "]", "if", "score", "is", "None", ":", "score", "=", "0", "if", "name", "not", "in", "packages", ".", "keys", "(", ")", ":", "packages", "[", "name", "]", "=", "{", "'name'", ":", "name", ",", "'summary'", ":", "summary", ",", "'versions'", ":", "[", "version", "]", ",", "'score'", ":", "score", "}", "else", ":", "packages", "[", "name", "]", "[", "'versions'", "]", ".", "append", "(", "version", ")", "# if this is the highest version, replace summary and score", "if", "version", "==", "highest_version", "(", "packages", "[", "name", "]", "[", "'versions'", "]", ")", ":", "packages", "[", "name", "]", "[", "'summary'", "]", "=", "summary", "packages", "[", "name", "]", "[", "'score'", "]", "=", "score", "# each record has a unique name now, so we will convert the dict into a list sorted by score", "package_list", "=", "sorted", "(", "packages", ".", "values", "(", ")", ",", "key", "=", "lambda", "x", ":", "x", "[", "'score'", "]", ",", "reverse", "=", "True", ")", "return", "package_list" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/pip/commands/search.py#L56-L83
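A worked example of the version folding, assuming the transform_hits definition above is in scope; it also calls pip's internal highest_version helper, so a crude max-based stand-in is declared here, and the hit dicts are fabricated in the shape PyPI's XML-RPC search returned.

highest_version = max  # crude stand-in for pip's helper; adequate for these toy versions
hits = [
    {"name": "pkg", "summary": "old", "version": "1.0", "_pypi_ordering": 1},
    {"name": "pkg", "summary": "new", "version": "2.0", "_pypi_ordering": 2},
    {"name": "other", "summary": "misc", "version": "0.1", "_pypi_ordering": 0},
]
for p in transform_hits(hits):
    print(p["name"], p["versions"], p["summary"])
# pkg ['1.0', '2.0'] new    <- summary and score follow the highest version
# other ['0.1'] misc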
miyosuda/TensorFlowAndroidDemo
35903e0221aa5f109ea2dbef27f20b52e317f42d
jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/base.py
python
TensorFlowEstimator.save
(self, path)
Saves checkpoints and graph to given path. Args: path: Folder to save model to.
Saves checkpoints and graph to given path.
[ "Saves", "checkpoints", "and", "graph", "to", "given", "path", "." ]
def save(self, path): """Saves checkpoints and graph to given path. Args: path: Folder to save model to. """ if self._graph is None: raise NotFittedError # Copy model dir into new path. _copy_dir(self.model_dir, path) # Save model definition. all_params = self.get_params() params = {} for key, value in all_params.items(): if not callable(value) and value is not None: params[key] = value params['class_name'] = type(self).__name__ model_def = json.dumps( params, default=lambda o: o.__dict__ if hasattr(o, '__dict__') else None) _write_with_backup(os.path.join(path, 'model.def'), model_def)
[ "def", "save", "(", "self", ",", "path", ")", ":", "if", "self", ".", "_graph", "is", "None", ":", "raise", "NotFittedError", "# Copy model dir into new path.", "_copy_dir", "(", "self", ".", "model_dir", ",", "path", ")", "# Save model definition.", "all_params", "=", "self", ".", "get_params", "(", ")", "params", "=", "{", "}", "for", "key", ",", "value", "in", "all_params", ".", "items", "(", ")", ":", "if", "not", "callable", "(", "value", ")", "and", "value", "is", "not", "None", ":", "params", "[", "key", "]", "=", "value", "params", "[", "'class_name'", "]", "=", "type", "(", "self", ")", ".", "__name__", "model_def", "=", "json", ".", "dumps", "(", "params", ",", "default", "=", "lambda", "o", ":", "o", ".", "__dict__", "if", "hasattr", "(", "o", ",", "'__dict__'", ")", "else", "None", ")", "_write_with_backup", "(", "os", ".", "path", ".", "join", "(", "path", ",", "'model.def'", ")", ",", "model_def", ")" ]
https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/base.py#L272-L294
vslavik/poedit
f7a9daa0a10037e090aa0a86f5ce0f24ececdf6a
deps/boost/libs/metaparse/tools/benchmark/generate.py
python
Mode.description
(self)
The description of the mode
The description of the mode
[ "The", "description", "of", "the", "mode" ]
def description(self): """The description of the mode""" if self.identifier == 'bmp': return 'Using BOOST_METAPARSE_STRING' elif self.identifier == 'man': return 'Generating strings manually'
[ "def", "description", "(", "self", ")", ":", "if", "self", ".", "identifier", "==", "'bmp'", ":", "return", "'Using BOOST_METAPARSE_STRING'", "elif", "self", ".", "identifier", "==", "'man'", ":", "return", "'Generating strings manually'" ]
https://github.com/vslavik/poedit/blob/f7a9daa0a10037e090aa0a86f5ce0f24ececdf6a/deps/boost/libs/metaparse/tools/benchmark/generate.py#L82-L87
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/popen2.py
python
Popen3.__init__
(self, cmd, capturestderr=False, bufsize=-1)
The parameter 'cmd' is the shell command to execute in a sub-process. On UNIX, 'cmd' may be a sequence, in which case arguments will be passed directly to the program without shell intervention (as with os.spawnv()). If 'cmd' is a string it will be passed to the shell (as with os.system()). The 'capturestderr' flag, if true, specifies that the object should capture standard error output of the child process. The default is false. If the 'bufsize' parameter is specified, it specifies the size of the I/O buffers to/from the child process.
The parameter 'cmd' is the shell command to execute in a sub-process. On UNIX, 'cmd' may be a sequence, in which case arguments will be passed directly to the program without shell intervention (as with os.spawnv()). If 'cmd' is a string it will be passed to the shell (as with os.system()). The 'capturestderr' flag, if true, specifies that the object should capture standard error output of the child process. The default is false. If the 'bufsize' parameter is specified, it specifies the size of the I/O buffers to/from the child process.
[ "The", "parameter", "cmd", "is", "the", "shell", "command", "to", "execute", "in", "a", "sub", "-", "process", ".", "On", "UNIX", "cmd", "may", "be", "a", "sequence", "in", "which", "case", "arguments", "will", "be", "passed", "directly", "to", "the", "program", "without", "shell", "intervention", "(", "as", "with", "os", ".", "spawnv", "()", ")", ".", "If", "cmd", "is", "a", "string", "it", "will", "be", "passed", "to", "the", "shell", "(", "as", "with", "os", ".", "system", "()", ")", ".", "The", "capturestderr", "flag", "if", "true", "specifies", "that", "the", "object", "should", "capture", "standard", "error", "output", "of", "the", "child", "process", ".", "The", "default", "is", "false", ".", "If", "the", "bufsize", "parameter", "is", "specified", "it", "specifies", "the", "size", "of", "the", "I", "/", "O", "buffers", "to", "/", "from", "the", "child", "process", "." ]
def __init__(self, cmd, capturestderr=False, bufsize=-1): """The parameter 'cmd' is the shell command to execute in a sub-process. On UNIX, 'cmd' may be a sequence, in which case arguments will be passed directly to the program without shell intervention (as with os.spawnv()). If 'cmd' is a string it will be passed to the shell (as with os.system()). The 'capturestderr' flag, if true, specifies that the object should capture standard error output of the child process. The default is false. If the 'bufsize' parameter is specified, it specifies the size of the I/O buffers to/from the child process.""" _cleanup() self.cmd = cmd p2cread, p2cwrite = os.pipe() c2pread, c2pwrite = os.pipe() if capturestderr: errout, errin = os.pipe() self.pid = os.fork() if self.pid == 0: # Child os.dup2(p2cread, 0) os.dup2(c2pwrite, 1) if capturestderr: os.dup2(errin, 2) self._run_child(cmd) os.close(p2cread) self.tochild = os.fdopen(p2cwrite, 'w', bufsize) os.close(c2pwrite) self.fromchild = os.fdopen(c2pread, 'r', bufsize) if capturestderr: os.close(errin) self.childerr = os.fdopen(errout, 'r', bufsize) else: self.childerr = None
[ "def", "__init__", "(", "self", ",", "cmd", ",", "capturestderr", "=", "False", ",", "bufsize", "=", "-", "1", ")", ":", "_cleanup", "(", ")", "self", ".", "cmd", "=", "cmd", "p2cread", ",", "p2cwrite", "=", "os", ".", "pipe", "(", ")", "c2pread", ",", "c2pwrite", "=", "os", ".", "pipe", "(", ")", "if", "capturestderr", ":", "errout", ",", "errin", "=", "os", ".", "pipe", "(", ")", "self", ".", "pid", "=", "os", ".", "fork", "(", ")", "if", "self", ".", "pid", "==", "0", ":", "# Child", "os", ".", "dup2", "(", "p2cread", ",", "0", ")", "os", ".", "dup2", "(", "c2pwrite", ",", "1", ")", "if", "capturestderr", ":", "os", ".", "dup2", "(", "errin", ",", "2", ")", "self", ".", "_run_child", "(", "cmd", ")", "os", ".", "close", "(", "p2cread", ")", "self", ".", "tochild", "=", "os", ".", "fdopen", "(", "p2cwrite", ",", "'w'", ",", "bufsize", ")", "os", ".", "close", "(", "c2pwrite", ")", "self", ".", "fromchild", "=", "os", ".", "fdopen", "(", "c2pread", ",", "'r'", ",", "bufsize", ")", "if", "capturestderr", ":", "os", ".", "close", "(", "errin", ")", "self", ".", "childerr", "=", "os", ".", "fdopen", "(", "errout", ",", "'r'", ",", "bufsize", ")", "else", ":", "self", ".", "childerr", "=", "None" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/popen2.py#L40-L72
adobe/chromium
cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7
tools/valgrind/suppressions.py
python
GlobToRegex
(glob_pattern, ignore_case=False)
return ''.join(regex)
Translate glob wildcards (*?) into regex syntax. Escape the rest.
Translate glob wildcards (*?) into regex syntax. Escape the rest.
[ "Translate", "glob", "wildcards", "(", "*", "?", ")", "into", "regex", "syntax", ".", "Escape", "the", "rest", "." ]
def GlobToRegex(glob_pattern, ignore_case=False): """Translate glob wildcards (*?) into regex syntax. Escape the rest.""" regex = '' for char in glob_pattern: if char == '*': regex += '.*' elif char == '?': regex += '.' elif ignore_case and char.isalpha(): regex += '[%s%s]' % (char.lower(), char.upper()) else: regex += re.escape(char) return ''.join(regex)
[ "def", "GlobToRegex", "(", "glob_pattern", ",", "ignore_case", "=", "False", ")", ":", "regex", "=", "''", "for", "char", "in", "glob_pattern", ":", "if", "char", "==", "'*'", ":", "regex", "+=", "'.*'", "elif", "char", "==", "'?'", ":", "regex", "+=", "'.'", "elif", "ignore_case", "and", "char", ".", "isalpha", "(", ")", ":", "regex", "+=", "'[%s%s]'", "%", "(", "char", ".", "lower", "(", ")", ",", "char", ".", "upper", "(", ")", ")", "else", ":", "regex", "+=", "re", ".", "escape", "(", "char", ")", "return", "''", ".", "join", "(", "regex", ")" ]
https://github.com/adobe/chromium/blob/cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7/tools/valgrind/suppressions.py#L35-L47
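Assuming the GlobToRegex definition above is in scope (it already needs import re for re.escape), a quick check of the three translation rules: * becomes .*, ? becomes ., and letters become case-folded character classes.

import re

pat = GlobToRegex("lib*.so.?", ignore_case=True)
print(pat)                                        # [lL][iI][bB].*\.[sS][oO]\..
print(bool(re.match(pat + "$", "LIBfoo.SO.2")))   # True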
apache/mesos
97d9a4063332aae3825d78de71611657e05cf5e2
support/cpplint.py
python
ProcessGlobalSuppresions
(lines)
Updates the list of global error suppressions. Parses any lint directives in the file that have global effect. Args: lines: An array of strings, each representing a line of the file, with the last element being empty if the file is terminated with a newline.
Updates the list of global error suppressions.
[ "Updates", "the", "list", "of", "global", "error", "suppressions", "." ]
def ProcessGlobalSuppresions(lines): """Updates the list of global error suppressions. Parses any lint directives in the file that have global effect. Args: lines: An array of strings, each representing a line of the file, with the last element being empty if the file is terminated with a newline. """ for line in lines: if _SEARCH_C_FILE.search(line): for category in _DEFAULT_C_SUPPRESSED_CATEGORIES: _global_error_suppressions[category] = True if _SEARCH_KERNEL_FILE.search(line): for category in _DEFAULT_KERNEL_SUPPRESSED_CATEGORIES: _global_error_suppressions[category] = True
[ "def", "ProcessGlobalSuppresions", "(", "lines", ")", ":", "for", "line", "in", "lines", ":", "if", "_SEARCH_C_FILE", ".", "search", "(", "line", ")", ":", "for", "category", "in", "_DEFAULT_C_SUPPRESSED_CATEGORIES", ":", "_global_error_suppressions", "[", "category", "]", "=", "True", "if", "_SEARCH_KERNEL_FILE", ".", "search", "(", "line", ")", ":", "for", "category", "in", "_DEFAULT_KERNEL_SUPPRESSED_CATEGORIES", ":", "_global_error_suppressions", "[", "category", "]", "=", "True" ]
https://github.com/apache/mesos/blob/97d9a4063332aae3825d78de71611657e05cf5e2/support/cpplint.py#L620-L635
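An invocation sketch (the _SEARCH_* regexes and the _global_error_suppressions dict are module-level state in cpplint.py; the filename is illustrative):

with open('driver.c') as f:
    lines = f.read().split('\n')   # trailing '' if the file ends with a newline
ProcessGlobalSuppresions(lines)
# If any line matched _SEARCH_C_FILE or _SEARCH_KERNEL_FILE, the
# corresponding default categories are now globally suppressed.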
mapnik/mapnik
f3da900c355e1d15059c4a91b00203dcc9d9f0ef
scons/scons-local-4.1.0/SCons/Node/__init__.py
python
Node.is_literal
(self)
return 1
Always pass the string representation of a Node to the command interpreter literally.
Always pass the string representation of a Node to the command interpreter literally.
[ "Always", "pass", "the", "string", "representation", "of", "a", "Node", "to", "the", "command", "interpreter", "literally", "." ]
def is_literal(self): """Always pass the string representation of a Node to the command interpreter literally.""" return 1
[ "def", "is_literal", "(", "self", ")", ":", "return", "1" ]
https://github.com/mapnik/mapnik/blob/f3da900c355e1d15059c4a91b00203dcc9d9f0ef/scons/scons-local-4.1.0/SCons/Node/__init__.py#L1533-L1536
gem5/gem5
141cc37c2d4b93959d4c249b8f7e6a8b2ef75338
ext/ply/example/calc/calc.py
python
p_statement_expr
(p)
statement : expression
statement : expression
[ "statement", ":", "expression" ]
def p_statement_expr(p): 'statement : expression' print(p[1])
[ "def", "p_statement_expr", "(", "p", ")", ":", "print", "(", "p", "[", "1", "]", ")" ]
https://github.com/gem5/gem5/blob/141cc37c2d4b93959d4c249b8f7e6a8b2ef75338/ext/ply/example/calc/calc.py#L58-L60
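For context, a minimal self-contained PLY sketch showing how a rule like this participates in a grammar (the lexer and the expression rule are illustrative assumptions, not part of the calc example shown):

import ply.lex as lex
import ply.yacc as yacc

tokens = ('NUMBER',)
t_ignore = ' \t'

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    t.lexer.skip(1)

def p_statement_expr(p):
    'statement : expression'
    print(p[1])

def p_expression_number(p):
    'expression : NUMBER'
    p[0] = p[1]

def p_error(p):
    print('syntax error')

lex.lex()
yacc.yacc().parse('42')   # prints 42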
google/nucleus
68d3947fafba1337f294c0668a6e1c7f3f1273e3
nucleus/io/bedgraph.py
python
NativeBedGraphReader.query
(self)
Returns an iterator for going through the records in the region. NOTE: This function is not currently implemented by NativeBedGraphReader though it could be implemented for sorted, tabix-indexed BedGraph files.
Returns an iterator for going through the records in the region.
[ "Returns", "an", "iterator", "for", "going", "through", "the", "records", "in", "the", "region", "." ]
def query(self): """Returns an iterator for going through the records in the region. NOTE: This function is not currently implemented by NativeBedGraphReader though it could be implemented for sorted, tabix-indexed BedGraph files. """ raise NotImplementedError('Can not currently query a BedGraph file')
[ "def", "query", "(", "self", ")", ":", "raise", "NotImplementedError", "(", "'Can not currently query a BedGraph file'", ")" ]
https://github.com/google/nucleus/blob/68d3947fafba1337f294c0668a6e1c7f3f1273e3/nucleus/io/bedgraph.py#L87-L93
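Since the stub raises unconditionally, callers have to guard for it (the constructor argument is an assumption):

reader = NativeBedGraphReader('coverage.bedgraph')   # hypothetical path
try:
    reader.query()
except NotImplementedError as err:
    print(err)   # Can not currently query a BedGraph file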
cinder/Cinder
e83f5bb9c01a63eec20168d02953a0879e5100f7
docs/generateDocs.py
python
process_ci_prefix_tag
(bs4, tag, in_path)
Finds the referenced tag's object if existent and adds the path to the prefix file to the class to be parsed later :param tag: The ci tag with a defined prefix attribute :param in_path: The path to the prefix content :return:
Finds the referenced tag's object if existent and adds the path to the prefix file to the class to be parsed later :param tag: The ci tag with a defined prefix attribute :param in_path: The path to the prefix content :return:
[ "Finds", "the", "referenced", "tag", "s", "object", "if", "existent", "and", "adds", "the", "path", "to", "the", "prefix", "file", "to", "the", "class", "to", "be", "parsed", "later", ":", "param", "tag", ":", "The", "ci", "tag", "with", "a", "defined", "prefix", "attribute", ":", "param", "in_path", ":", "The", "path", "to", "the", "prefix", "content", ":", "return", ":" ]
def process_ci_prefix_tag(bs4, tag, in_path): """ Finds the referenced tag's object if existent and adds the path to the prefix file to the class to be parsed later :param tag: The ci tag with a defined prefix attribute :param in_path: The path to the prefix content :return: """ in_path = in_path.replace('\\', '/') in_dir = get_path_dir(in_path) obj_ref = find_ci_tag_ref(tag) if obj_ref and type(obj_ref) is SymbolMap.Class: # get tag content prefix_content = "" for c in tag.contents: content = c.encode("utf-8", errors="replace") prefix_content += content # generate bs4 from content and update links as relative from the template path # could alternatively set the absolute paths of content, which would then be turned into rel paths later new_bs4 = generate_bs4_from_string(prefix_content) update_links(new_bs4, in_dir, in_path, TEMPLATE_PATH) # get updated body content and assign as prefix_content prefix_content = "" for c in new_bs4.body: content = c.encode("utf-8", errors="replace") prefix_content += content obj_ref.define_prefix(prefix_content)
[ "def", "process_ci_prefix_tag", "(", "bs4", ",", "tag", ",", "in_path", ")", ":", "in_path", "=", "in_path", ".", "replace", "(", "'\\\\'", ",", "'/'", ")", "in_dir", "=", "get_path_dir", "(", "in_path", ")", "obj_ref", "=", "find_ci_tag_ref", "(", "tag", ")", "if", "obj_ref", "and", "type", "(", "obj_ref", ")", "is", "SymbolMap", ".", "Class", ":", "# get tag content", "prefix_content", "=", "\"\"", "for", "c", "in", "tag", ".", "contents", ":", "content", "=", "c", ".", "encode", "(", "\"utf-8\"", ",", "errors", "=", "\"replace\"", ")", "prefix_content", "+=", "content", "# generate bs4 from content and update links as relative from the template path", "# could alternatively set the absolute paths of content, which would then be turned into rel paths later", "new_bs4", "=", "generate_bs4_from_string", "(", "prefix_content", ")", "update_links", "(", "new_bs4", ",", "in_dir", ",", "in_path", ",", "TEMPLATE_PATH", ")", "# get updated body content and assign as prefix_content", "prefix_content", "=", "\"\"", "for", "c", "in", "new_bs4", ".", "body", ":", "content", "=", "c", ".", "encode", "(", "\"utf-8\"", ",", "errors", "=", "\"replace\"", ")", "prefix_content", "+=", "content", "obj_ref", ".", "define_prefix", "(", "prefix_content", ")" ]
https://github.com/cinder/Cinder/blob/e83f5bb9c01a63eec20168d02953a0879e5100f7/docs/generateDocs.py#L2733-L2763
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/keras/engine/compile_utils.py
python
MetricsContainer._get_metric_objects
(self, metrics, y_t, y_p)
return [self._get_metric_object(m, y_t, y_p) for m in metrics]
Convert user-supplied metrics to `Metric` objects.
Convert user-supplied metrics to `Metric` objects.
[ "Convert", "user", "-", "supplied", "metrics", "to", "Metric", "objects", "." ]
def _get_metric_objects(self, metrics, y_t, y_p): """Convert user-supplied metrics to `Metric` objects.""" metrics = nest.flatten(metrics) return [self._get_metric_object(m, y_t, y_p) for m in metrics]
[ "def", "_get_metric_objects", "(", "self", ",", "metrics", ",", "y_t", ",", "y_p", ")", ":", "metrics", "=", "nest", ".", "flatten", "(", "metrics", ")", "return", "[", "self", ".", "_get_metric_object", "(", "m", ",", "y_t", ",", "y_p", ")", "for", "m", "in", "metrics", "]" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/keras/engine/compile_utils.py#L482-L485
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/cookielib.py
python
time2netscape
(t=None)
return "%s %02d-%s-%04d %02d:%02d:%02d GMT" % ( DAYS[wday], mday, MONTHS[mon-1], year, hour, min, sec)
Return a string representing time in seconds since epoch, t. If the function is called without an argument, it will use the current time. The format of the returned string is like this: Wed, DD-Mon-YYYY HH:MM:SS GMT
Return a string representing time in seconds since epoch, t.
[ "Return", "a", "string", "representing", "time", "in", "seconds", "since", "epoch", "t", "." ]
def time2netscape(t=None): """Return a string representing time in seconds since epoch, t. If the function is called without an argument, it will use the current time. The format of the returned string is like this: Wed, DD-Mon-YYYY HH:MM:SS GMT """ if t is None: t = time.time() year, mon, mday, hour, min, sec, wday = time.gmtime(t)[:7] return "%s %02d-%s-%04d %02d:%02d:%02d GMT" % ( DAYS[wday], mday, MONTHS[mon-1], year, hour, min, sec)
[ "def", "time2netscape", "(", "t", "=", "None", ")", ":", "if", "t", "is", "None", ":", "t", "=", "time", ".", "time", "(", ")", "year", ",", "mon", ",", "mday", ",", "hour", ",", "min", ",", "sec", ",", "wday", "=", "time", ".", "gmtime", "(", "t", ")", "[", ":", "7", "]", "return", "\"%s %02d-%s-%04d %02d:%02d:%02d GMT\"", "%", "(", "DAYS", "[", "wday", "]", ",", "mday", ",", "MONTHS", "[", "mon", "-", "1", "]", ",", "year", ",", "hour", ",", "min", ",", "sec", ")" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/cookielib.py#L103-L117
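A worked example; note that the format string above emits no comma after the weekday, even though the docstring's sample shows 'Wed,':

print(time2netscape(0))   # Thu 01-Jan-1970 00:00:00 GMT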
apple/swift-lldb
d74be846ef3e62de946df343e8c234bde93a8912
utils/vim-lldb/python-vim-lldb/vim_ui.py
python
UI.hideWindow
(self, name)
return True
Hides window pane specified by name
Hides window pane specified by name
[ "Hides", "window", "pane", "specified", "by", "name" ]
def hideWindow(self, name): """ Hides window pane specified by name """ if not self.paneCol.havePane(name): sys.stderr.write("unknown window: %s" % name) return False self.paneCol.hide([name]) return True
[ "def", "hideWindow", "(", "self", ",", "name", ")", ":", "if", "not", "self", ".", "paneCol", ".", "havePane", "(", "name", ")", ":", "sys", ".", "stderr", ".", "write", "(", "\"unknown window: %s\"", "%", "name", ")", "return", "False", "self", ".", "paneCol", ".", "hide", "(", "[", "name", "]", ")", "return", "True" ]
https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/utils/vim-lldb/python-vim-lldb/vim_ui.py#L248-L254
google/certificate-transparency
2588562fd306a447958471b6f06c1069619c1641
python/ct/crypto/asn1/types.py
python
Any._convert_value
(cls, value)
The value of an Any is the undecoded value.
The value of an Any is the undecoded value.
[ "The", "value", "of", "an", "Any", "is", "the", "undecoded", "value", "." ]
def _convert_value(cls, value): """The value of an Any is the undecoded value.""" # Always return the undecoded value for consistency; the # decoded/decoded_value properties can be used to retrieve the # decoded contents. if isinstance(value, Any): # This gets ambiguous real fast (do we keep the original tags or # replace with our own tags?) so we ban it. raise TypeError("Instantiating Any from another Any is illegal") elif isinstance(value, Abstract): return value.encode() else: raise TypeError("Cannot convert %s to %s" % (type(value), cls.__name__))
[ "def", "_convert_value", "(", "cls", ",", "value", ")", ":", "# Always return the undecoded value for consistency; the", "# decoded/decoded_value properties can be used to retrieve the", "# decoded contents.", "if", "isinstance", "(", "value", ",", "Any", ")", ":", "# This gets ambiguous real fast (do we keep the original tags or", "# replace with our own tags?) so we ban it.", "raise", "TypeError", "(", "\"Instantiating Any from another Any is illegal\"", ")", "elif", "isinstance", "(", "value", ",", "Abstract", ")", ":", "return", "value", ".", "encode", "(", ")", "else", ":", "raise", "TypeError", "(", "\"Cannot convert %s to %s\"", "%", "(", "type", "(", "value", ")", ",", "cls", ".", "__name__", ")", ")" ]
https://github.com/google/certificate-transparency/blob/2588562fd306a447958471b6f06c1069619c1641/python/ct/crypto/asn1/types.py#L1026-L1039
pytorch/ELF
e851e786ced8d26cf470f08a6b9bf7e413fc63f7
src_py/rlpytorch/sampler/sample_methods.py
python
epsilon_greedy
(state_curr, args, node="pi")
return sample_multinomial(state_curr, args, node=node, greedy=True)
epsilon greedy sampling Args: state_curr(dict): current state containing all data args(dict): customized arguments for sampling. `epsilon` is used node(str): name string for policy, default is "pi" Returns: A list of actions using epsilon greedy sampling.
epsilon greedy sampling
[ "epsilon", "greedy", "sampling" ]
def epsilon_greedy(state_curr, args, node="pi"): ''' epsilon greedy sampling Args: state_curr(dict): current state containing all data args(dict): customized arguments for sampling. `epsilon` is used node(str): name string for policy, default is "pi" Returns: A list of actions using epsilon greedy sampling. ''' return sample_multinomial(state_curr, args, node=node, greedy=True)
[ "def", "epsilon_greedy", "(", "state_curr", ",", "args", ",", "node", "=", "\"pi\"", ")", ":", "return", "sample_multinomial", "(", "state_curr", ",", "args", ",", "node", "=", "node", ",", "greedy", "=", "True", ")" ]
https://github.com/pytorch/ELF/blob/e851e786ced8d26cf470f08a6b9bf7e413fc63f7/src_py/rlpytorch/sampler/sample_methods.py#L128-L139
ricardoquesada/Spidermonkey
4a75ea2543408bd1b2c515aa95901523eeef7858
python/mozbuild/mozbuild/frontend/sandbox.py
python
Sandbox.__init__
(self, context, builtins=None)
Initialize a Sandbox ready for execution.
Initialize a Sandbox ready for execution.
[ "Initialize", "a", "Sandbox", "ready", "for", "execution", "." ]
def __init__(self, context, builtins=None): """Initialize a Sandbox ready for execution. """ self._builtins = builtins or self.BUILTINS dict.__setitem__(self, '__builtins__', self._builtins) assert isinstance(self._builtins, ReadOnlyDict) assert isinstance(context, Context) self._context = context self._execution_stack = [] # We need to record this because it gets swallowed as part of # evaluation. self._last_name_error = None
[ "def", "__init__", "(", "self", ",", "context", ",", "builtins", "=", "None", ")", ":", "self", ".", "_builtins", "=", "builtins", "or", "self", ".", "BUILTINS", "dict", ".", "__setitem__", "(", "self", ",", "'__builtins__'", ",", "self", ".", "_builtins", ")", "assert", "isinstance", "(", "self", ".", "_builtins", ",", "ReadOnlyDict", ")", "assert", "isinstance", "(", "context", ",", "Context", ")", "self", ".", "_context", "=", "context", "self", ".", "_execution_stack", "=", "[", "]", "# We need to record this because it gets swallowed as part of", "# evaluation.", "self", ".", "_last_name_error", "=", "None" ]
https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/python/mozbuild/mozbuild/frontend/sandbox.py#L110-L124
raymondlu/super-animation-samples
04234269112ff0dc32447f27a761dbbb00b8ba17
samples/cocos2d-x-3.1/CocosLuaGame2/frameworks/cocos2d-x/tools/bindings-generator/generator.py
python
build_namespace
(cursor, namespaces=[])
return namespaces
build the full namespace for a specific cursor
build the full namespace for a specific cursor
[ "build", "the", "full", "namespace", "for", "a", "specific", "cursor" ]
def build_namespace(cursor, namespaces=[]): ''' build the full namespace for a specific cursor ''' if cursor: parent = cursor.semantic_parent if parent: if parent.kind == cindex.CursorKind.NAMESPACE or parent.kind == cindex.CursorKind.CLASS_DECL: namespaces.append(parent.displayname) build_namespace(parent, namespaces) return namespaces
[ "def", "build_namespace", "(", "cursor", ",", "namespaces", "=", "[", "]", ")", ":", "if", "cursor", ":", "parent", "=", "cursor", ".", "semantic_parent", "if", "parent", ":", "if", "parent", ".", "kind", "==", "cindex", ".", "CursorKind", ".", "NAMESPACE", "or", "parent", ".", "kind", "==", "cindex", ".", "CursorKind", ".", "CLASS_DECL", ":", "namespaces", ".", "append", "(", "parent", ".", "displayname", ")", "build_namespace", "(", "parent", ",", "namespaces", ")", "return", "namespaces" ]
https://github.com/raymondlu/super-animation-samples/blob/04234269112ff0dc32447f27a761dbbb00b8ba17/samples/cocos2d-x-3.1/CocosLuaGame2/frameworks/cocos2d-x/tools/bindings-generator/generator.py#L104-L115
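One caveat worth flagging: the mutable default argument namespaces=[] is shared across calls, so successive top-level invocations accumulate earlier results. A behavior-preserving sketch that avoids the pitfall (the name is hypothetical):

def build_namespace_safe(cursor, namespaces=None):
    '''Same traversal, but with a fresh list per top-level call.'''
    if namespaces is None:
        namespaces = []
    if cursor:
        parent = cursor.semantic_parent
        if parent and parent.kind in (cindex.CursorKind.NAMESPACE,
                                      cindex.CursorKind.CLASS_DECL):
            namespaces.append(parent.displayname)
            build_namespace_safe(parent, namespaces)
    return namespaces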
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
third_party/re2/re2/unicode.py
python
Scripts
(unicode_dir=_UNICODE_DIR)
return scripts
Returns dict mapping script names to code lists. Args: unicode_dir: Unicode data directory Returns: dict mapping script names to code lists
Returns dict mapping script names to code lists.
[ "Returns", "dict", "mapping", "script", "names", "to", "code", "lists", "." ]
def Scripts(unicode_dir=_UNICODE_DIR): """Returns dict mapping script names to code lists. Args: unicode_dir: Unicode data directory Returns: dict mapping script names to code lists """ scripts = {} def DoLine(codes, fields): """Process single Scripts.txt line, updating scripts.""" (_, name) = fields scripts.setdefault(name, []).extend(codes) ReadUnicodeTable(unicode_dir+"/Scripts.txt", 2, DoLine) return scripts
[ "def", "Scripts", "(", "unicode_dir", "=", "_UNICODE_DIR", ")", ":", "scripts", "=", "{", "}", "def", "DoLine", "(", "codes", ",", "fields", ")", ":", "\"\"\"Process single Scripts.txt line, updating scripts.\"\"\"", "(", "_", ",", "name", ")", "=", "fields", "scripts", ".", "setdefault", "(", "name", ",", "[", "]", ")", ".", "extend", "(", "codes", ")", "ReadUnicodeTable", "(", "unicode_dir", "+", "\"/Scripts.txt\"", ",", "2", ",", "DoLine", ")", "return", "scripts" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/re2/re2/unicode.py#L253-L271
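A usage sketch (requires a directory holding the Unicode Scripts.txt table; the path is an assumption):

scripts = Scripts(unicode_dir='/usr/share/unicode')   # hypothetical path
print(len(scripts), 'scripts loaded')
print(scripts['Greek'][:5])   # first few Greek code entries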
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/python/profiler/pprof_profiler.py
python
profile
(graph, run_metadata, output_dir=None)
return profile_files
Generate profiles in pprof format. See https://github.com/google/pprof/blob/master/proto/profile.proto for pprof proto format. Args: graph: A `Graph` object. run_metadata: A `RunMetadata` proto. output_dir: (string) Directory to output pprof profile to. Profile files for each device will be stored in compressed serialized proto format. If output_dir is None, profile protos will be printed to stdout instead. Returns: List of output files created by this profile call. (Note: this list will be empty if output_dir is None)
Generate profiles in pprof format.
[ "Generate", "profiles", "in", "pprof", "format", "." ]
def profile(graph, run_metadata, output_dir=None): """Generate profiles in pprof format. See https://github.com/google/pprof/blob/master/proto/profile.proto for pprof proto format. Args: graph: A `Graph` object. run_metadata: A `RunMetadata` proto. output_dir: (string) Directory to output pprof profile to. Profile files for each device will be stored in compressed serialized proto format. If output_dir is None, profile protos will be printed to stdout instead. Returns: List of output files created by this profile call. (Note: this list will be empty if output_dir is None) """ profiles = get_profiles(graph, run_metadata) output_file_template = None if output_dir: if not os.path.isdir(output_dir): os.makedirs(output_dir) time_suffix = time.strftime('%Y%m%d%H%M%S') output_file_template = os.path.join( output_dir, '%s_' + time_suffix + '.pb.gz') profile_files = [] for device, pprof_proto in profiles.items(): if output_file_template is None: print('No output directory specified, printing to stdout instead.') print(pprof_proto) else: device_name = str(device).strip('/').translate( maketrans('/:', '__')) profile_file = output_file_template % device_name profile_files.append(profile_file) with gzip.open(profile_file, 'w') as output_file: print('Writing profile to %s...' % profile_file) output_file.write(pprof_proto.SerializeToString()) return profile_files
[ "def", "profile", "(", "graph", ",", "run_metadata", ",", "output_dir", "=", "None", ")", ":", "profiles", "=", "get_profiles", "(", "graph", ",", "run_metadata", ")", "output_file_template", "=", "None", "if", "output_dir", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "output_dir", ")", ":", "os", ".", "makedirs", "(", "output_dir", ")", "time_suffix", "=", "time", ".", "strftime", "(", "'%Y%m%d%H%M%S'", ")", "output_file_template", "=", "os", ".", "path", ".", "join", "(", "output_dir", ",", "'%s_'", "+", "time_suffix", "+", "'.pb.gz'", ")", "profile_files", "=", "[", "]", "for", "device", ",", "pprof_proto", "in", "profiles", ".", "items", "(", ")", ":", "if", "output_file_template", "is", "None", ":", "print", "(", "'No output directory specified, printing to stdout instead.'", ")", "print", "(", "pprof_proto", ")", "else", ":", "device_name", "=", "str", "(", "device", ")", ".", "strip", "(", "'/'", ")", ".", "translate", "(", "maketrans", "(", "'/:'", ",", "'__'", ")", ")", "profile_file", "=", "output_file_template", "%", "device_name", "profile_files", ".", "append", "(", "profile_file", ")", "with", "gzip", ".", "open", "(", "profile_file", ",", "'w'", ")", "as", "output_file", ":", "print", "(", "'Writing profile to %s...'", "%", "profile_file", ")", "output_file", ".", "write", "(", "pprof_proto", ".", "SerializeToString", "(", ")", ")", "return", "profile_files" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/profiler/pprof_profiler.py#L405-L445
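A hedged TF1-style usage sketch (fetches and the output directory are illustrative; collecting RunMetadata with FULL_TRACE is the standard way to obtain step stats):

import tensorflow as tf

run_metadata = tf.RunMetadata()
options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
with tf.Session() as sess:
    sess.run(fetches, options=options, run_metadata=run_metadata)  # fetches: your ops
    files = profile(sess.graph, run_metadata, output_dir='/tmp/pprof')
    print(files)   # one .pb.gz file per device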
MythTV/mythtv
d282a209cb8be85d036f85a62a8ec971b67d45f4
mythtv/programs/scripts/internetcontent/nv_python_libs/xsltfunctions/tedtalksXSL_api.py
python
xpathFunctions.stripSubstring
(self, string, startText, terminatorChar)
return string[:index].strip()
Return a substring terminated by specific character(s).
Return a substring terminated by specific character(s).
[ "Return", "a", "substring", "terminated", "by", "specific", "character", "(", "s", ")" ]
def stripSubstring(self, string, startText, terminatorChar): '''Return a substring terminated by specific character(s) return a substring ''' index = string.find(startText) if index == -1: return '' string = string[index+len(startText):] index = string.find(terminatorChar) if index == -1: return '' return string[:index].strip()
[ "def", "stripSubstring", "(", "self", ",", "string", ",", "startText", ",", "terminatorChar", ")", ":", "index", "=", "string", ".", "find", "(", "startText", ")", "if", "index", "==", "-", "1", ":", "return", "''", "string", "=", "string", "[", "index", "+", "len", "(", "startText", ")", ":", "]", "index", "=", "string", ".", "find", "(", "terminatorChar", ")", "if", "index", "==", "-", "1", ":", "return", "''", "return", "string", "[", ":", "index", "]", ".", "strip", "(", ")" ]
https://github.com/MythTV/mythtv/blob/d282a209cb8be85d036f85a62a8ec971b67d45f4/mythtv/programs/scripts/internetcontent/nv_python_libs/xsltfunctions/tedtalksXSL_api.py#L231-L242
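A worked example of the two-step slicing; since self is unused, the method can be demonstrated unbound:

s = '<b>  TED Talk  </b>'
# self is unused; calling via the class works under Python 3 semantics
print(xpathFunctions.stripSubstring(None, s, '<b>', '<'))   # TED Talk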
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/gslib/translation_helper.py
python
LifecycleTranslation.BotoLifecycleFromMessage
(cls, lifecycle_message)
return boto_lifecycle
Translates an apitools message to a boto lifecycle object.
Translates an apitools message to a boto lifecycle object.
[ "Translates", "an", "apitools", "message", "to", "a", "boto", "lifecycle", "object", "." ]
def BotoLifecycleFromMessage(cls, lifecycle_message): """Translates an apitools message to a boto lifecycle object.""" boto_lifecycle = boto.gs.lifecycle.LifecycleConfig() if lifecycle_message: for rule_message in lifecycle_message.rule: boto_rule = boto.gs.lifecycle.Rule() if (rule_message.action and rule_message.action.type and rule_message.action.type.lower() == 'delete'): boto_rule.action = boto.gs.lifecycle.DELETE if rule_message.condition: if rule_message.condition.age: boto_rule.conditions[boto.gs.lifecycle.AGE] = ( str(rule_message.condition.age)) if rule_message.condition.createdBefore: boto_rule.conditions[boto.gs.lifecycle.CREATED_BEFORE] = ( str(rule_message.condition.createdBefore)) if rule_message.condition.isLive: boto_rule.conditions[boto.gs.lifecycle.IS_LIVE] = ( str(rule_message.condition.isLive)) if rule_message.condition.numNewerVersions: boto_rule.conditions[boto.gs.lifecycle.NUM_NEWER_VERSIONS] = ( str(rule_message.condition.numNewerVersions)) boto_lifecycle.append(boto_rule) return boto_lifecycle
[ "def", "BotoLifecycleFromMessage", "(", "cls", ",", "lifecycle_message", ")", ":", "boto_lifecycle", "=", "boto", ".", "gs", ".", "lifecycle", ".", "LifecycleConfig", "(", ")", "if", "lifecycle_message", ":", "for", "rule_message", "in", "lifecycle_message", ".", "rule", ":", "boto_rule", "=", "boto", ".", "gs", ".", "lifecycle", ".", "Rule", "(", ")", "if", "(", "rule_message", ".", "action", "and", "rule_message", ".", "action", ".", "type", "and", "rule_message", ".", "action", ".", "type", ".", "lower", "(", ")", "==", "'delete'", ")", ":", "boto_rule", ".", "action", "=", "boto", ".", "gs", ".", "lifecycle", ".", "DELETE", "if", "rule_message", ".", "condition", ":", "if", "rule_message", ".", "condition", ".", "age", ":", "boto_rule", ".", "conditions", "[", "boto", ".", "gs", ".", "lifecycle", ".", "AGE", "]", "=", "(", "str", "(", "rule_message", ".", "condition", ".", "age", ")", ")", "if", "rule_message", ".", "condition", ".", "createdBefore", ":", "boto_rule", ".", "conditions", "[", "boto", ".", "gs", ".", "lifecycle", ".", "CREATED_BEFORE", "]", "=", "(", "str", "(", "rule_message", ".", "condition", ".", "createdBefore", ")", ")", "if", "rule_message", ".", "condition", ".", "isLive", ":", "boto_rule", ".", "conditions", "[", "boto", ".", "gs", ".", "lifecycle", ".", "IS_LIVE", "]", "=", "(", "str", "(", "rule_message", ".", "condition", ".", "isLive", ")", ")", "if", "rule_message", ".", "condition", ".", "numNewerVersions", ":", "boto_rule", ".", "conditions", "[", "boto", ".", "gs", ".", "lifecycle", ".", "NUM_NEWER_VERSIONS", "]", "=", "(", "str", "(", "rule_message", ".", "condition", ".", "numNewerVersions", ")", ")", "boto_lifecycle", ".", "append", "(", "boto_rule", ")", "return", "boto_lifecycle" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/gslib/translation_helper.py#L448-L471
miyosuda/TensorFlowAndroidMNIST
7b5a4603d2780a8a2834575706e9001977524007
jni-build/jni/include/tensorflow/python/ops/control_flow_ops.py
python
GradLoopState.history_map
(self)
return self._history_map
The map that records all the tensors needed for backprop.
The map that records all the tensors needed for backprop.
[ "The", "map", "that", "records", "all", "the", "tensors", "needed", "for", "backprop", "." ]
def history_map(self): """The map that records all the tensors needed for backprop.""" return self._history_map
[ "def", "history_map", "(", "self", ")", ":", "return", "self", ".", "_history_map" ]
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/python/ops/control_flow_ops.py#L614-L616
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py2/scipy/io/netcdf.py
python
netcdf_variable._get_missing_value
(self)
return missing_value
Returns the value denoting "no data" for this variable. If this variable does not have a missing/fill value, returns None. If both _FillValue and missing_value are given, give precedence to _FillValue. The netCDF standard gives special meaning to _FillValue; missing_value is just used for compatibility with old datasets.
Returns the value denoting "no data" for this variable.
[ "Returns", "the", "value", "denoting", "no", "data", "for", "this", "variable", "." ]
def _get_missing_value(self): """ Returns the value denoting "no data" for this variable. If this variable does not have a missing/fill value, returns None. If both _FillValue and missing_value are given, give precedence to _FillValue. The netCDF standard gives special meaning to _FillValue; missing_value is just used for compatibility with old datasets. """ if '_FillValue' in self._attributes: missing_value = self._attributes['_FillValue'] elif 'missing_value' in self._attributes: missing_value = self._attributes['missing_value'] else: missing_value = None return missing_value
[ "def", "_get_missing_value", "(", "self", ")", ":", "if", "'_FillValue'", "in", "self", ".", "_attributes", ":", "missing_value", "=", "self", ".", "_attributes", "[", "'_FillValue'", "]", "elif", "'missing_value'", "in", "self", ".", "_attributes", ":", "missing_value", "=", "self", ".", "_attributes", "[", "'missing_value'", "]", "else", ":", "missing_value", "=", "None", "return", "missing_value" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/io/netcdf.py#L1047-L1065
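A behavior sketch of the documented precedence (the stand-in object and the Python 3 unbound-method call are simplifications for illustration):

class _FakeVar(object):
    pass

v = _FakeVar()
v._attributes = {'_FillValue': -999.0, 'missing_value': -1.0}
print(netcdf_variable._get_missing_value(v))   # -999.0, _FillValue wins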
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_core.py
python
Window.MacIsWindowScrollbar
(*args, **kwargs)
return _core_.Window_MacIsWindowScrollbar(*args, **kwargs)
MacIsWindowScrollbar(self, Window sb) -> bool
MacIsWindowScrollbar(self, Window sb) -> bool
[ "MacIsWindowScrollbar", "(", "self", "Window", "sb", ")", "-", ">", "bool" ]
def MacIsWindowScrollbar(*args, **kwargs): """MacIsWindowScrollbar(self, Window sb) -> bool""" return _core_.Window_MacIsWindowScrollbar(*args, **kwargs)
[ "def", "MacIsWindowScrollbar", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "Window_MacIsWindowScrollbar", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L11333-L11335