def create_pool(dsn=None, *,
                min_size=10,
                max_size=10,
                max_queries=50000,
                max_inactive_connection_lifetime=300.0,
                setup=None,
                init=None,
                loop=None,
                connection_class=connection.Connection,
                **connect_kwargs):
    r"""Create a connection pool.

    Can be used either with an ``async with`` block:

    .. code-block:: python

        async with asyncpg.create_pool(user='postgres',
                                       command_timeout=60) as pool:
            async with pool.acquire() as con:
                await con.fetch('SELECT 1')

    Or directly with ``await``:

    .. code-block:: python

        pool = await asyncpg.create_pool(user='postgres', command_timeout=60)
        con = await pool.acquire()
        try:
            await con.fetch('SELECT 1')
        finally:
            await pool.release(con)

    .. warning::
        Prepared statements and cursors returned by
        :meth:`Connection.prepare() <connection.Connection.prepare>` and
        :meth:`Connection.cursor() <connection.Connection.cursor>` become
        invalid once the connection is released.  Likewise, all notification
        and log listeners are removed, and ``asyncpg`` will issue a warning
        if there are any listener callbacks registered on a connection that
        is being released to the pool.

    :param str dsn:
        Connection arguments specified as a single string in the following
        format: ``postgres://user:pass@host:port/database?option=value``.

    :param \*\*connect_kwargs:
        Keyword arguments for the :func:`~asyncpg.connection.connect`
        function.

    :param Connection connection_class:
        The class to use for connections.  Must be a subclass of
        :class:`~asyncpg.connection.Connection`.

    :param int min_size:
        Number of connections the pool will be initialized with.

    :param int max_size:
        Max number of connections in the pool.

    :param int max_queries:
        Number of queries after which a connection is closed and replaced
        with a new connection.

    :param float max_inactive_connection_lifetime:
        Number of seconds after which inactive connections in the
        pool will be closed.  Pass ``0`` to disable this mechanism.

    :param coroutine setup:
        A coroutine to prepare a connection right before it is returned
        from :meth:`Pool.acquire() <pool.Pool.acquire>`.  An example use
        case would be to automatically set up notification listeners for
        all connections of a pool.

    :param coroutine init:
        A coroutine to initialize a connection when it is created.
        An example use case would be to set up type codecs with
        :meth:`Connection.set_builtin_type_codec() <\
        asyncpg.connection.Connection.set_builtin_type_codec>`
        or :meth:`Connection.set_type_codec() <\
        asyncpg.connection.Connection.set_type_codec>`.

    :param loop:
        An asyncio event loop instance.  If ``None``, the default
        event loop will be used.

    :return: An instance of :class:`~asyncpg.pool.Pool`.

    .. versionchanged:: 0.10.0
        An :exc:`~asyncpg.exceptions.InterfaceError` will be raised on any
        attempted operation on a released connection.

    .. versionchanged:: 0.13.0
        An :exc:`~asyncpg.exceptions.InterfaceError` will be raised on any
        attempted operation on a prepared statement or a cursor created
        on a connection that has been released to the pool.

    .. versionchanged:: 0.13.0
        An :exc:`~asyncpg.exceptions.InterfaceWarning` will be produced
        if there are any active listeners (added via
        :meth:`Connection.add_listener() <connection.Connection.add_listener>`
        or :meth:`Connection.add_log_listener()
        <connection.Connection.add_log_listener>`) present on the connection
        at the moment of its release to the pool.
    """
    return Pool(
        dsn,
        connection_class=connection_class,
        min_size=min_size, max_size=max_size,
        max_queries=max_queries, loop=loop, setup=setup, init=init,
        max_inactive_connection_lifetime=max_inactive_connection_lifetime,
        **connect_kwargs)

def discard_local_changes(cwd,
                          path='.',
                          user=None,
                          password=None,
                          ignore_retcode=False,
                          output_encoding=None):
    '''
    .. versionadded:: 2019.2.0

    Runs a ``git checkout -- <path>`` from the directory specified by ``cwd``.

    cwd
        The path to the git checkout

    path
        path relative to cwd (defaults to ``.``)

    user
        User under which to run the git command. By default, the command is
        run by the user under which the minion is running.

    password
        Windows only. Required when specifying ``user``. This parameter will
        be ignored on non-Windows platforms.

    ignore_retcode : False
        If ``True``, do not log an error to the minion log if the git command
        returns a nonzero exit status.

    output_encoding
        Use this option to specify which encoding to use to decode the output
        from any git commands which are run. This should not be needed in most
        cases.

        .. note::
            This should only be needed if the files in the repository were
            created with filenames using an encoding other than UTF-8 to
            handle Unicode characters.

    CLI Example:

    .. code-block:: bash

        salt myminion git.discard_local_changes /path/to/repo
        salt myminion git.discard_local_changes /path/to/repo path=foo
    '''
    cwd = _expand_path(cwd, user)
    command = ['git', 'checkout', '--', path]
    # Checkout message goes to stderr
    return _git_run(command,
                    cwd=cwd,
                    user=user,
                    password=password,
                    ignore_retcode=ignore_retcode,
                    redirect_stderr=True,
                    output_encoding=output_encoding)['stdout']

def extend(func):
    """Allow to extend a bot:

    Create a module with some useful routines:

    .. literalinclude:: ../examples/myextends.py
    ..
        >>> import sys
        >>> sys.path.append('examples')
        >>> from irc3 import IrcBot
        >>> IrcBot.defaults.update(asynchronous=False, testing=True)

    Now you can use those routines in your bot::

        >>> bot = IrcBot()
        >>> bot.include('myextends')
        >>> print(bot.my_usefull_function(1))
        my_usefull_function(*(1,))
        >>> print(bot.my_usefull_method(2))
        my_usefull_method(*(2,))
    """
    def callback(context, name, ob):
        obj = context.context
        if info.scope == 'class':
            f = getattr(obj.get_plugin(ob), func.__name__)
        else:
            f = func
        setattr(obj, f.__name__, f.__get__(obj, obj.__class__))
    info = venusian.attach(func, callback, category='irc3.extend')
    return func

def s3am_upload_job(job, file_id, file_name, s3_dir, s3_key_path=None):
    """Job version of s3am_upload"""
    work_dir = job.fileStore.getLocalTempDir()
    fpath = job.fileStore.readGlobalFile(file_id, os.path.join(work_dir, file_name))
    s3am_upload(job=job, fpath=fpath, s3_dir=s3_dir,
                num_cores=job.cores, s3_key_path=s3_key_path)

def _apply_pipeline_and_get_build_instance(self,
                                           X_factory,
                                           mX_factory,
                                           category_idx_store,
                                           df,
                                           parse_pipeline,
                                           term_idx_store,
                                           metadata_idx_store,
                                           y):
    '''
    Parameters
    ----------
    X_factory
    mX_factory
    category_idx_store
    df
    parse_pipeline
    term_idx_store
    metadata_idx_store
    y

    Returns
    -------
    CorpusDF
    '''
    df.apply(parse_pipeline.parse, axis=1)
    y = np.array(y)
    X, mX = build_sparse_matrices(y, X_factory, mX_factory)
    return CorpusDF(df,
                    X,
                    mX,
                    y,
                    self._text_col,
                    term_idx_store,
                    category_idx_store,
                    metadata_idx_store)

def valid(self, instance, schema):
    """Validate schema."""
    try:
        jsonschema.validate(instance, schema)
    except jsonschema.exceptions.ValidationError as ex:
        self.stderr.write("    VALIDATION ERROR: {}".format(instance['name'] if 'name' in instance else ''))
        self.stderr.write("        path:       {}".format(ex.path))
        self.stderr.write("        message:    {}".format(ex.message))
        self.stderr.write("        validator:  {}".format(ex.validator))
        self.stderr.write("        val. value: {}".format(ex.validator_value))
        return False

    try:
        # Check that default values fit field schema.
        for field in ['input', 'output', 'schema']:
            for schema, _, path in iterate_schema({}, instance.get(field, {})):
                if 'default' in schema:
                    validate_schema({schema['name']: schema['default']}, [schema])
    except ValidationError:
        self.stderr.write("    VALIDATION ERROR: {}".format(instance['name']))
        self.stderr.write("        Default value of field '{}' is not valid.".format(path))
        return False

    return True

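# A standalone sketch (not part of the original module) of the
# jsonschema.validate() behaviour that valid() above relies on: a schema
# violation raises jsonschema.exceptions.ValidationError, which valid()
# catches, reports to stderr, and turns into a False return value.
import jsonschema

demo_schema = {"type": "object", "required": ["name"]}
jsonschema.validate({"name": "proc"}, demo_schema)  # valid: returns None
try:
    jsonschema.validate({}, demo_schema)  # invalid: 'name' is missing
except jsonschema.exceptions.ValidationError as ex:
    print(ex.message)  # "'name' is a required property"
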
def go(fn, *args, **kwargs):
    """Launch an operation on a thread and get a handle to its future result.

    >>> from time import sleep
    >>> def print_sleep_print(duration):
    ...     sleep(duration)
    ...     print('hello from background thread')
    ...     sleep(duration)
    ...     print('goodbye from background thread')
    ...     return 'return value'
    ...
    >>> future = go(print_sleep_print, 0.1)
    >>> sleep(0.15)
    hello from background thread
    >>> print('main thread')
    main thread
    >>> result = future()
    goodbye from background thread
    >>> result
    'return value'
    """
    if not callable(fn):
        raise TypeError('go() requires a function, not %r' % (fn,))
    result = [None]
    error = []

    def target():
        try:
            result[0] = fn(*args, **kwargs)
        except Exception:
            # Are we in interpreter shutdown?
            if sys:
                error.extend(sys.exc_info())

    t = threading.Thread(target=target)
    t.daemon = True
    t.start()

    def get_result(timeout=10):
        t.join(timeout)
        if t.is_alive():
            raise AssertionError('timed out waiting for %r' % fn)
        if error:
            reraise(*error)
        return result[0]

    return get_result

def update_catalog_extent(self, current_extent):
    # type: (int) -> None
    '''
    A method to update the extent associated with this Boot Catalog.

    Parameters:
     current_extent - New extent to associate with this Boot Catalog
    Returns:
     Nothing.
    '''
    if not self._initialized:
        raise pycdlibexception.PyCdlibInternalError('El Torito Boot Catalog not yet initialized')

    self.br.update_boot_system_use(struct.pack('=L', current_extent))

def bind(self, form):
    """Bind to filters form."""
    field = self.field(default=self.default, **self.field_kwargs)
    form._fields[self.name] = field.bind(form, self.name, prefix=form._prefix)

def fit(self, X, y=None):
    '''
    Learn the linear transformation to flipped eigenvalues.

    Parameters
    ----------
    X : array, shape [n, n]
        The *symmetric* input similarities. If X is asymmetric, it will be
        treated as if it were symmetric based on its lower-triangular part.
    '''
    n = X.shape[0]
    if X.shape != (n, n):
        raise TypeError("Input must be a square matrix.")

    # TODO: only get negative eigs somehow?
    memory = get_memory(self.memory)
    vals, vecs = memory.cache(scipy.linalg.eigh, ignore=['overwrite_a'])(
        X, overwrite_a=not self.copy)
    vals = vals[:, None]
    self.flip_ = np.dot(vecs, np.sign(vals) * vecs.T)
    return self

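# A numpy-only sketch (hypothetical data, no caching) of the transform that
# fit() above learns: flip_ maps x -> V sign(L) V^T x, so applying it to the
# original matrix flips its negative eigenvalues to positive ones.
import numpy as np
import scipy.linalg

rng = np.random.RandomState(0)
A = rng.randn(5, 5)
X_demo = (A + A.T) / 2  # a symmetric matrix with mixed-sign eigenvalues
vals, vecs = scipy.linalg.eigh(X_demo)
flip = vecs.dot(np.sign(vals[:, None]) * vecs.T)
flipped = flip.dot(X_demo)
print(np.allclose(np.sort(scipy.linalg.eigvalsh(flipped)),
                  np.sort(np.abs(vals))))  # True: eigenvalues are now |lambda|
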
def start_output (self):
    """
    Write start of checking info as sql comment.
    """
    super(SQLLogger, self).start_output()
    if self.has_part("intro"):
        self.write_intro()
        self.writeln()
    self.flush()

def check_dimensionless_vertical_coordinate(self, ds):
    '''
    Check the validity of dimensionless coordinates under CF

    CF §4.3.2 The units attribute is not required for dimensionless
    coordinates.

    The standard_name attribute associates a coordinate with its definition
    from Appendix D, Dimensionless Vertical Coordinates. The definition
    provides a mapping between the dimensionless coordinate values and
    dimensional values that can positively and uniquely indicate the
    location of the data.

    A new attribute, formula_terms, is used to associate terms in the
    definitions with variables in a netCDF file. To maintain backwards
    compatibility with COARDS the use of these attributes is not required,
    but is strongly recommended.

    :param netCDF4.Dataset ds: An open netCDF dataset
    :rtype: list
    :return: List of results
    '''
    ret_val = []
    z_variables = cfutil.get_z_variables(ds)
    deprecated_units = [
        'level',
        'layer',
        'sigma_level'
    ]

    for name in z_variables:
        variable = ds.variables[name]
        standard_name = getattr(variable, 'standard_name', None)
        units = getattr(variable, 'units', None)
        formula_terms = getattr(variable, 'formula_terms', None)
        # Skip the variable if it's dimensional
        if (formula_terms is None
                and standard_name not in dimless_vertical_coordinates):
            continue

        is_not_deprecated = TestCtx(BaseCheck.LOW, self.section_titles["4.3"])

        is_not_deprecated.assert_true(units not in deprecated_units,
                                      "§4.3.2: units are deprecated by CF in variable {}: {}"
                                      "".format(name, units))
        ret_val.append(is_not_deprecated.to_result())
        ret_val.append(self._check_formula_terms(ds, name))

    return ret_val

def _do_conjunction(self, _and=("and", "e", "en", "et", "und", "y")):
    """ Attach conjunctions.
        CC-words like "and" and "or" between two chunks indicate a conjunction.
    """
    w = self.words
    if len(w) > 2 and w[-2].type == "CC" and w[-2].chunk is None:
        cc = w[-2].string.lower() in _and and AND or OR
        ch1 = w[-3].chunk
        ch2 = w[-1].chunk
        if ch1 is not None and \
           ch2 is not None:
            ch1.conjunctions.append(ch2, cc)
            ch2.conjunctions.append(ch1, cc)

def recv(self, bufsize):
    """Buffers up to _chunk_size bytes when the internal buffer has
    less than `bufsize` bytes."""
    assert bufsize > 0, 'a positive bufsize is required'

    if len(self._buffer) < bufsize:
        readable, _, _ = safe_select([self._socket], [], [], self._select_timeout)
        if readable:
            recvd = self._socket.recv(max(self._chunk_size, bufsize))
            self._buffer = self._buffer + recvd

    return_buf, self._buffer = self._buffer[:bufsize], self._buffer[bufsize:]
    return return_buf

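# A tiny illustration (no sockets involved) of the buffer-slicing invariant
# used by recv() above: the first `bufsize` bytes go to the caller and the
# remainder is retained for the next call.
demo_buffer = b"hello world"
bufsize = 5
return_buf, demo_buffer = demo_buffer[:bufsize], demo_buffer[bufsize:]
assert return_buf == b"hello"
assert demo_buffer == b" world"
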
def print_matching_trees(arg_dict, tree_format, exact, verbose):
    """The `TreeRef` instance returned by the
    oti.find_trees(... wrap_response=True) can be used as an argument to the
    phylesystem_api.get call. If you pass in a string (instead of a TreeRef),
    the string will be interpreted as a study ID.
    """
    from peyotl.sugar import phylesystem_api
    tree_list = ot_find_tree(arg_dict, exact=exact, verbose=verbose)
    for tree_ref in tree_list:
        print(tree_ref)
        print(phylesystem_api.get(tree_ref, format=tree_format))

def _validate_input_data(self, data, request):
    """ Validate input data.

    :param request: the HTTP request
    :param data: the parsed data
    :return: if validation is performed and succeeds the data is converted
             into whatever format the validation uses (by default Django's
             Forms) If not, the data is returned unchanged.
    :raises: HttpStatusCodeError if data is not valid
    """
    validator = self._get_input_validator(request)
    if isinstance(data, (list, tuple)):
        return map(validator.validate, data)
    else:
        return validator.validate(data)

def rg(self):
    """
    Brazilian RG, return plain numbers.
    Check:
    https://www.ngmatematica.com/2014/02/como-determinar-o-digito-verificador-do.html
    """
    digits = self.generator.random.sample(range(0, 9), 8)
    checksum = sum(i * digits[i - 2] for i in range(2, 10))
    last_digit = 11 - (checksum % 11)

    if last_digit == 10:
        digits.append('X')
    elif last_digit == 11:
        digits.append(0)
    else:
        digits.append(last_digit)

    return ''.join(map(str, digits))

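# A standalone sketch (assumed base digits, no Faker generator) of the RG
# check-digit rule implemented by rg() above: weight the 8 base digits by
# 2..9, then map a result of 10 to 'X' and 11 to 0.
demo_digits = [3, 9, 6, 1, 8, 2, 0, 5]
checksum = sum(i * demo_digits[i - 2] for i in range(2, 10))
last_digit = 11 - (checksum % 11)
if last_digit == 10:
    demo_digits.append('X')
elif last_digit == 11:
    demo_digits.append(0)
else:
    demo_digits.append(last_digit)
print(''.join(map(str, demo_digits)))  # 396182057 for these digits
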
def optimizer(name):
    """Get pre-registered optimizer keyed by name.

    `name` should be snake case, though SGD -> sgd, RMSProp -> rms_prop and
    UpperCamelCase -> snake_case conversions included for legacy support.

    Args:
      name: name of optimizer used in registration. This should be a snake
        case identifier, though others supported for legacy reasons.

    Returns:
      optimizer
    """
    warn_msg = ("Please update `registry.optimizer` callsite "
                "(likely due to a `HParams.optimizer` value)")
    if name == "SGD":
        name = "sgd"
        tf.logging.warning("'SGD' optimizer now keyed by 'sgd'. %s" % warn_msg)
    elif name == "RMSProp":
        name = "rms_prop"
        tf.logging.warning(
            "'RMSProp' optimizer now keyed by 'rms_prop'. %s" % warn_msg)
    else:
        snake_name = misc_utils.camelcase_to_snakecase(name)
        if name != snake_name:
            tf.logging.warning(
                "optimizer names now keyed by snake_case names. %s" % warn_msg)
            name = snake_name
    return Registries.optimizers[name]

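# A simplified standalone sketch (no TF logging, no registry) of the name
# normalisation performed by optimizer() above; _camel_to_snake is a
# hypothetical stand-in for misc_utils.camelcase_to_snakecase.
import re

def _camel_to_snake(name):
    # Insert '_' before each interior uppercase letter, then lowercase.
    return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower()

for demo_name in ('SGD', 'RMSProp', 'AdaFactor', 'adam'):
    if demo_name == 'SGD':
        key = 'sgd'
    elif demo_name == 'RMSProp':
        key = 'rms_prop'
    else:
        key = _camel_to_snake(demo_name)
    print(demo_name, '->', key)  # e.g. AdaFactor -> ada_factor
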
def get_sdc_by_ip(self, ip):
    """
    Get ScaleIO SDC object by its ip
    :param ip: IP address of SDC
    :return: ScaleIO SDC object
    :raise KeyError: No SDC with specified IP found
    :rtype: SDC object
    """
    if self.conn.is_ip_addr(ip):
        for sdc in self.sdc:
            if sdc.sdcIp == ip:
                return sdc
        raise KeyError("SDC with that IP not found")
    else:
        raise ValueError("Malformed IP address - get_sdc_by_ip()")

def dmat(c, nocc):
    "Form the density matrix from the first nocc orbitals of c"
    return np.dot(c[:, :nocc], c[:, :nocc].T)

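# A small usage sketch (random orbital coefficients, hypothetical sizes) of
# dmat() above: for an n-basis system with nocc occupied orbitals, the
# density matrix is the n x n product of the occupied block with its
# transpose, and is therefore symmetric.
import numpy as np

c_demo = np.random.rand(4, 4)  # 4 basis functions, 4 orbitals
D = np.dot(c_demo[:, :2], c_demo[:, :2].T)  # keep the 2 occupied orbitals
print(D.shape)              # (4, 4)
print(np.allclose(D, D.T))  # True
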
def set(self, key, value):
    """set key data"""
    if self._db:
        self._db.hset(self.index, key, value)

def delete_taskrun(taskrun_id):
    """Delete the given taskrun.

    :param taskrun_id: PYBOSSA taskrun ID
    """
    try:
        res = _pybossa_req('delete', 'taskrun', taskrun_id)
        if type(res).__name__ == 'bool':
            return True
        else:
            return res
    except:  # pragma: no cover
        raise

def run_with_gunicorn(self, **options):
    """Run with gunicorn."""
    import gunicorn.app.base
    from gunicorn.six import iteritems
    import multiprocessing

    class GourdeApplication(gunicorn.app.base.BaseApplication):
        def __init__(self, app, options=None):
            self.options = options or {}
            self.application = app
            super(GourdeApplication, self).__init__()

        def load_config(self):
            config = dict([(key, value) for key, value in iteritems(self.options)
                           if key in self.cfg.settings and value is not None])
            for key, value in iteritems(config):
                self.cfg.set(key.lower(), value)

        def load(self):
            return self.application

    options = {
        'bind': '%s:%s' % (self.host, self.port),
        'workers': self.threads or ((multiprocessing.cpu_count() * 2) + 1),
        'debug': self.debug,
        **options,
    }
    GourdeApplication(self.app, options).run()

def routers_removed_from_hosting_device(self, context, router_ids,
                                        hosting_device):
    """Notify cfg agent that routers have been removed from hosting device.

    @param: context - information about tenant, user etc
    @param: router_ids - list of ids
    @param: hosting_device - device hosting the routers
    """
    self._agent_notification_bulk(
        context, 'router_removed_from_hosting_device', router_ids,
        hosting_device, operation=None)

def get_state(self, force_update=False):
    """
    Returns 0 if off and 1 if on.
    """
    if force_update or self._state is None:
        return int(self.basicevent.GetBinaryState()['BinaryState'])
    return self._state

def get_compass_raw(self):
    """
    Magnetometer x y z raw data in uT (micro teslas)
    """
    raw = self._get_raw_data('compassValid', 'compass')

    if raw is not None:
        self._last_compass_raw = raw

    return deepcopy(self._last_compass_raw)

def read_header(headerlabels, options):
    """ read csv header, returns prop_indices dictionary,
    {'label' : index} where label is the csv column label,
    and index the column index
    """
    # set input variables
    status_field = options.status_field

    # confirm that status_field exists
    status_field_matcher = re.compile(status_field)
    if not any(x for x in headerlabels if status_field_matcher.match(x)):
        print("\n The status_field column '{s}' doesn't exist\n".format(s=status_field))
        sys.exit(1)

    # initiate prop_column dictionary and csv column index
    prop_indices = {}
    index = 0

    for label in headerlabels:
        prop_indices[label] = index
        index += 1

    return prop_indices

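# A usage sketch (hypothetical CSV header and a stand-in options object) for
# read_header() above; `re` and `sys` are assumed imported in the original
# module, so they are imported here to keep the sketch self-contained.
import argparse
import re
import sys

demo_header = ['hostname', 'status', 'owner']
demo_options = argparse.Namespace(status_field='status')
print(read_header(demo_header, demo_options))
# -> {'hostname': 0, 'status': 1, 'owner': 2}
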
def join_host_port(host, port):
    """Joins a hostname and port together. This is a minimal
    implementation intended to cope with IPv6 literals. For example,
    _join_host_port('::1', 80) == '[::1]:80'.

    :Args:
        - host - A hostname.
        - port - An integer port.
    """
    if ':' in host and not host.startswith('['):
        return '[%s]:%d' % (host, port)
    return '%s:%d' % (host, port)

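# Usage sketch for join_host_port() above: IPv6 literals are bracketed,
# hostnames and IPv4 addresses are not, and already-bracketed hosts are
# left alone.
print(join_host_port('example.com', 80))  # example.com:80
print(join_host_port('::1', 80))          # [::1]:80
print(join_host_port('[::1]', 80))        # [::1]:80 (already bracketed)
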
def get_local_client(
        c_path=os.path.join(syspaths.CONFIG_DIR, 'master'),
        mopts=None,
        skip_perm_errors=False,
        io_loop=None,
        auto_reconnect=False):
    '''
    .. versionadded:: 2014.7.0

    Read in the config and return the correct LocalClient object based on
    the configured transport

    :param IOLoop io_loop: io_loop used for events.
        Pass in an io_loop if you want asynchronous
        operation for obtaining events. Eg use of
        set_event_handler() API. Otherwise, operation
        will be synchronous.
    '''
    if mopts:
        opts = mopts
    else:
        # Late import to prevent circular import
        import salt.config
        opts = salt.config.client_config(c_path)

    # TODO: AIO core is separate from transport
    return LocalClient(
        mopts=opts,
        skip_perm_errors=skip_perm_errors,
        io_loop=io_loop,
        auto_reconnect=auto_reconnect)

def process_input_data(filename, imager, grid_data, grid_norm, grid_weights):
    """Reads visibility data from a Measurement Set.

    The visibility grid or weights grid is updated accordingly.

    Visibility data are read from disk in blocks of size num_baselines.

    Args:
        filename (str):                    Name of Measurement Set to open.
        imager (oskar.Imager):             Handle to configured imager.
        grid_data (numpy.ndarray or None): Visibility grid to populate.
        grid_norm (float):                 Current grid normalisation.
        grid_weights (numpy.ndarray):      Weights grid to populate or read.

    Returns:
        grid_norm (float): Updated grid normalisation.
    """
    # Get data from the input Measurement Set.
    ms = oskar.MeasurementSet.open(filename)
    block_start = 0
    num_rows = ms.num_rows
    num_baselines = ms.num_stations * (ms.num_stations - 1) // 2

    # Loop over data blocks of size num_baselines.
    while block_start < num_rows:
        block_size = num_rows - block_start
        if block_size > num_baselines:
            block_size = num_baselines

        # Get the baseline coordinates. (Replace this with a query to LTS.)
        uvw = ms.read_column('UVW', block_start, block_size)

        # Read the Stokes-I visibility weights.
        vis_weights = ms.read_column('WEIGHT', block_start, block_size)
        if ms.num_pols == 4:
            vis_weights = 0.5 * (vis_weights[:, 0] + vis_weights[:, 3])

        # Loop over frequency channels.
        # (We expect there to be only one channel here, but loop just in case.)
        for j in range(ms.num_channels):
            # Get coordinates in wavelengths.
            coords = uvw * (ms.freq_start_hz + j * ms.freq_inc_hz) / 299792458.

            # Get the Stokes-I visibilities for this channel.
            vis_data = None
            if not imager.coords_only:
                vis_data = ms.read_vis(block_start, j, 1, block_size)
                if ms.num_pols == 4:
                    vis_data = 0.5 * (vis_data[0, :, 0] + vis_data[0, :, 3])

            # Update the grid plane with this visibility block.
            grid_norm = imager.update_plane(
                coords[:, 0], coords[:, 1], coords[:, 2], vis_data,
                vis_weights, grid_data, grid_norm, grid_weights)

        # Increment start row by block size.
        block_start += block_size

    # Return updated grid normalisation.
    return grid_norm

def inferObjectWithRandomMovements(self,
                                   objectDescription,
                                   numSensations=None,
                                   randomLocation=False,
                                   checkFalseConvergence=True):
    """
    Attempt to recognize the specified object with the network. Randomly move
    the sensor over the object until the object is recognized.

    @param objectDescription (dict)
    For example:
    {"name": "Object 1",
     "features": [{"top": 0, "left": 0, "width": 10, "height": 10, "name": "A"},
                  {"top": 0, "left": 10, "width": 10, "height": 10, "name": "B"}]}

    @param numSensations (int or None)
    Set this to run the network for a fixed number of sensations. Otherwise
    this method will run until the object is recognized or until maxTraversals
    is reached.

    @return (int or None)
    The sensation number at which the object was inferred, or None if
    inference failed.
    """
    self.reset()

    for monitor in self.monitors.values():
        monitor.beforeInferObject(objectDescription)

    currentStep = 0
    finished = False
    inferred = False
    inferredStep = None
    prevTouchSequence = None

    for _ in xrange(self.maxTraversals):
        # Choose touch sequence.
        while True:
            touchSequence = range(len(objectDescription["features"]))
            random.shuffle(touchSequence)

            # Make sure the first touch will cause a movement.
            if (prevTouchSequence is not None and
                    touchSequence[0] == prevTouchSequence[-1]):
                continue

            break

        for iFeature in touchSequence:
            currentStep += 1
            feature = objectDescription["features"][iFeature]
            self._move(feature, randomLocation=randomLocation)

            featureSDR = self.features[feature["name"]]
            self._sense(featureSDR, learn=False, waitForSettle=False)

            if not inferred:
                # Use the sensory-activated cells to detect whether the object
                # has been recognized. In some models, this set of cells is
                # equivalent to the active cells. In others, a set of cells
                # around the sensory-activated cells become active. In either
                # case, if these sensory-activated cells are correct, it
                # implies that the input layer's representation is
                # classifiable -- the location layer just correctly
                # classified it.
                representation = self.column.getSensoryAssociatedLocationRepresentation()
                target_representations = set(np.concatenate(
                    self.locationRepresentations[
                        (objectDescription["name"], iFeature)]))
                inferred = (set(representation) <= target_representations)
                if inferred:
                    inferredStep = currentStep

                if not inferred and tuple(representation) in self.representationSet:
                    # We have converged to an incorrect representation -
                    # declare failure.
                    print("Converged to an incorrect representation!")
                    return None

            finished = ((inferred and numSensations is None) or
                        (numSensations is not None and
                         currentStep == numSensations))

            if finished:
                break

        prevTouchSequence = touchSequence

        if finished:
            break

    for monitor in self.monitors.values():
        monitor.afterInferObject(objectDescription, inferredStep)

    return inferredStep

def get_instances(name, lifecycle_state="InService", health_status="Healthy",
                  attribute="private_ip_address", attributes=None, region=None,
                  key=None, keyid=None, profile=None):
    '''
    return attribute of all instances in the named autoscale group.

    CLI example::

        salt-call boto_asg.get_instances my_autoscale_group_name
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    ec2_conn = _get_ec2_conn(region=region, key=key, keyid=keyid, profile=profile)
    retries = 30
    while True:
        try:
            asgs = conn.get_all_groups(names=[name])
            break
        except boto.exception.BotoServerError as e:
            if retries and e.code == 'Throttling':
                log.debug('Throttled by AWS API, retrying in 5 seconds...')
                time.sleep(5)
                retries -= 1
                continue
            log.error(e)
            return False
    if len(asgs) != 1:
        log.debug("name '%s' returns multiple ASGs: %s",
                  name, [asg.name for asg in asgs])
        return False
    asg = asgs[0]
    instance_ids = []
    # match lifecycle_state and health_status
    for i in asg.instances:
        if lifecycle_state is not None and i.lifecycle_state != lifecycle_state:
            continue
        if health_status is not None and i.health_status != health_status:
            continue
        instance_ids.append(i.instance_id)
    # get full instance info, so that we can return the attribute
    instances = ec2_conn.get_only_instances(instance_ids=instance_ids)
    if attributes:
        return [[_convert_attribute(instance, attr) for attr in attributes]
                for instance in instances]
    else:
        # properly handle case when not all instances have the requested
        # attribute
        return [_convert_attribute(instance, attribute)
                for instance in instances if getattr(instance, attribute)]

def sils(T, f, c, d, h):
    """sils -- LP lotsizing for the single item lot sizing problem
    Parameters:
        - T: number of periods
        - f[t]: set-up costs (on period t)
        - c[t]: variable costs
        - d[t]: demand values
        - h[t]: holding costs
    Returns a model, ready to be solved.
    """
    model = Model("single item lotsizing")
    Ts = range(1, T + 1)
    M = sum(d[t] for t in Ts)
    y, x, I = {}, {}, {}
    for t in Ts:
        y[t] = model.addVar(vtype="I", ub=1, name="y(%s)" % t)
        x[t] = model.addVar(vtype="C", ub=M, name="x(%s)" % t)
        I[t] = model.addVar(vtype="C", name="I(%s)" % t)
    I[0] = 0

    for t in Ts:
        model.addCons(x[t] <= M * y[t], "ConstrUB(%s)" % t)
        model.addCons(I[t-1] + x[t] == I[t] + d[t], "FlowCons(%s)" % t)

    model.setObjective(
        quicksum(f[t]*y[t] + c[t]*x[t] + h[t]*I[t] for t in Ts),
        "minimize")

    model.data = y, x, I
    return model

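# A usage sketch (toy data; assumes PySCIPOpt's Model and quicksum are
# imported as in the original module) for sils() above: four periods with a
# fixed set-up cost of 50 per producing period.
T_demo = 4
f_demo = {t: 50 for t in range(1, T_demo + 1)}      # set-up costs
c_demo = {t: 2 for t in range(1, T_demo + 1)}       # variable costs
d_demo = {t: 10 * t for t in range(1, T_demo + 1)}  # demands 10, 20, 30, 40
h_demo = {t: 1 for t in range(1, T_demo + 1)}       # holding costs
model = sils(T_demo, f_demo, c_demo, d_demo, h_demo)
model.optimize()
print("optimal cost:", model.getObjVal())
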
def init_app(self, app, sessionstore=None, register_blueprint=True):
    """Flask application initialization.

    :param app: The Flask application.
    :param sessionstore: store for sessions. Passed to
        ``flask-kvsession``. If ``None`` then Redis is configured.
        (Default: ``None``)
    :param register_blueprint: If ``True``, the application registers the
        blueprints. (Default: ``True``)
    """
    self.make_session_permanent(app)
    return super(InvenioAccountsUI, self).init_app(
        app, sessionstore=sessionstore,
        register_blueprint=register_blueprint
    )

def _get_permutations(num_results, dims, seed=None):
    """Uniform iid sample from the space of permutations.

    Draws a sample of size `num_results` from the group of permutations of
    degrees specified by the `dims` tensor. These are packed together into one
    tensor such that each row is one sample from each of the dimensions in
    `dims`. For example, if dims = [2,3] and num_results = 2, the result is a
    tensor of shape [2, 2 + 3] and the first row of the result might look like:
    [1, 0, 2, 0, 1]. The first two elements are a permutation over 2 elements
    while the next three are a permutation over 3 elements.

    Args:
      num_results: A positive scalar `Tensor` of integral type. The number of
        draws from the discrete uniform distribution over the permutation
        groups.
      dims: A 1D `Tensor` of the same dtype as `num_results`. The degree of
        the permutation groups from which to sample.
      seed: (Optional) Python integer to seed the random number generator.

    Returns:
      permutations: A `Tensor` of shape `[num_results, sum(dims)]` and the
        same dtype as `dims`.
    """
    sample_range = tf.range(num_results)
    stream = distributions.SeedStream(seed, salt='MCMCSampleHaltonSequence3')

    def generate_one(d):
        seed = stream()
        fn = lambda _: tf.random.shuffle(tf.range(d), seed=seed)
        return tf.map_fn(
            fn,
            sample_range,
            parallel_iterations=1 if seed is not None else 10)

    return tf.concat([generate_one(d) for d in tf.unstack(dims)], axis=-1)

[ "def", "_get_permutations", "(", "num_results", ",", "dims", ",", "seed", "=", "None", ")", ":", "sample_range", "=", "tf", ".", "range", "(", "num_results", ")", "stream", "=", "distributions", ".", "SeedStream", "(", "seed", ",", "salt", "=", "'MCMCSampleHaltonSequence3'", ")", "def", "generate_one", "(", "d", ")", ":", "seed", "=", "stream", "(", ")", "fn", "=", "lambda", "_", ":", "tf", ".", "random", ".", "shuffle", "(", "tf", ".", "range", "(", "d", ")", ",", "seed", "=", "seed", ")", "return", "tf", ".", "map_fn", "(", "fn", ",", "sample_range", ",", "parallel_iterations", "=", "1", "if", "seed", "is", "not", "None", "else", "10", ")", "return", "tf", ".", "concat", "(", "[", "generate_one", "(", "d", ")", "for", "d", "in", "tf", ".", "unstack", "(", "dims", ")", "]", ",", "axis", "=", "-", "1", ")" ]
45.272727
25.515152
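A shape check matching the docstring's dims = [2, 3], num_results = 2 example; this sketch assumes TensorFlow 2.x eager execution and the snippet's distributions.SeedStream dependency are available:

import tensorflow as tf

perms = _get_permutations(num_results=2, dims=tf.constant([2, 3]), seed=17)
print(perms.shape)   # (2, 5): each row packs a 2-permutation then a 3-permutation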
def get_result(self): """ Build compiled SQL expression from the bottom up and return as a string """ translated = self.translate(self.expr) if self._needs_name(self.expr): # TODO: this could fail in various ways name = self.expr.get_name() translated = self.name(translated, name) return translated
[ "def", "get_result", "(", "self", ")", ":", "translated", "=", "self", ".", "translate", "(", "self", ".", "expr", ")", "if", "self", ".", "_needs_name", "(", "self", ".", "expr", ")", ":", "# TODO: this could fail in various ways", "name", "=", "self", ".", "expr", ".", "get_name", "(", ")", "translated", "=", "self", ".", "name", "(", "translated", ",", "name", ")", "return", "translated" ]
37.4
10.4
def bounding_boxes(self): """ A list of minimal bounding boxes (`~photutils.BoundingBox`), one for each position, enclosing the exact elliptical apertures. """ cos_theta = np.cos(self.theta) sin_theta = np.sin(self.theta) ax = self.a_out * cos_theta ay = self.a_out * sin_theta bx = self.b_out * -sin_theta by = self.b_out * cos_theta dx = np.sqrt(ax*ax + bx*bx) dy = np.sqrt(ay*ay + by*by) xmin = self.positions[:, 0] - dx xmax = self.positions[:, 0] + dx ymin = self.positions[:, 1] - dy ymax = self.positions[:, 1] + dy return [BoundingBox._from_float(x0, x1, y0, y1) for x0, x1, y0, y1 in zip(xmin, xmax, ymin, ymax)]
[ "def", "bounding_boxes", "(", "self", ")", ":", "cos_theta", "=", "np", ".", "cos", "(", "self", ".", "theta", ")", "sin_theta", "=", "np", ".", "sin", "(", "self", ".", "theta", ")", "ax", "=", "self", ".", "a_out", "*", "cos_theta", "ay", "=", "self", ".", "a_out", "*", "sin_theta", "bx", "=", "self", ".", "b_out", "*", "-", "sin_theta", "by", "=", "self", ".", "b_out", "*", "cos_theta", "dx", "=", "np", ".", "sqrt", "(", "ax", "*", "ax", "+", "bx", "*", "bx", ")", "dy", "=", "np", ".", "sqrt", "(", "ay", "*", "ay", "+", "by", "*", "by", ")", "xmin", "=", "self", ".", "positions", "[", ":", ",", "0", "]", "-", "dx", "xmax", "=", "self", ".", "positions", "[", ":", ",", "0", "]", "+", "dx", "ymin", "=", "self", ".", "positions", "[", ":", ",", "1", "]", "-", "dy", "ymax", "=", "self", ".", "positions", "[", ":", ",", "1", "]", "+", "dy", "return", "[", "BoundingBox", ".", "_from_float", "(", "x0", ",", "x1", ",", "y0", ",", "y1", ")", "for", "x0", ",", "x1", ",", "y0", ",", "y1", "in", "zip", "(", "xmin", ",", "xmax", ",", "ymin", ",", "ymax", ")", "]" ]
34.318182
12.227273
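The dx/dy terms above are the tight half-extents of a rotated ellipse; a standalone numeric check with NumPy only (the values are made up):

import numpy as np

theta, a_out, b_out = 0.3, 3.0, 1.0
dx = np.hypot(a_out * np.cos(theta), b_out * -np.sin(theta))
dy = np.hypot(a_out * np.sin(theta), b_out * np.cos(theta))
print(dx, dy)   # at theta = 0 this reduces to (a_out, b_out)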
def _parse_query(self, source): """Parse one of the rules as either objectfilter or dottysql. Example: _parse_query("5 + 5") # Returns Sum(Literal(5), Literal(5)) Arguments: source: A rule in either objectfilter or dottysql syntax. Returns: The AST to represent the rule. """ if self.OBJECTFILTER_WORDS.search(source): syntax_ = "objectfilter" else: syntax_ = None # Default it is. return query.Query(source, syntax=syntax_)
[ "def", "_parse_query", "(", "self", ",", "source", ")", ":", "if", "self", ".", "OBJECTFILTER_WORDS", ".", "search", "(", "source", ")", ":", "syntax_", "=", "\"objectfilter\"", "else", ":", "syntax_", "=", "None", "# Default it is.", "return", "query", ".", "Query", "(", "source", ",", "syntax", "=", "syntax_", ")" ]
28.789474
17.947368
def get_attribute(self): """Gets the appropriate module attribute name for a collection corresponding to the context's element type.""" attributes = ['dependencies', 'publics', 'members', 'types', 'executables'] #Find the correct attribute based on the type of the context if self.context.el_type in [Function, Subroutine]: attribute = attributes[4] elif self.context.el_type == CustomType: attribute = attributes[3] else: attribute = attributes[2] return attribute
[ "def", "get_attribute", "(", "self", ")", ":", "attributes", "=", "[", "'dependencies'", ",", "'publics'", ",", "'members'", ",", "'types'", ",", "'executables'", "]", "#Find the correct attribute based on the type of the context", "if", "self", ".", "context", ".", "el_type", "in", "[", "Function", ",", "Subroutine", "]", ":", "attribute", "=", "attributes", "[", "4", "]", "elif", "self", ".", "context", ".", "el_type", "==", "CustomType", ":", "attribute", "=", "attributes", "[", "3", "]", "else", ":", "attribute", "=", "attributes", "[", "2", "]", "return", "attribute" ]
41.5
13.642857
def si_round(val):
    '''
    round to a "scientific notation" tuple of (factor, exponent)
    such that 1 <= abs(factor) < 1000, and factor * 10 ** exponent == val
    '''
    if val < 0:
        neg = True
        val = -val
    elif val == 0:
        return 0, 0
    else:
        neg = False
    exp = math.log(val) / math.log(1000)
    if exp < 0:
        exp = int(exp) - 1
    else:
        exp = int(exp)
    val = val / 1000.0 ** exp
    if neg:
        val = -val
    return val, 3 * exp
[ "def", "si_round", "(", "val", ")", ":", "if", "val", "<", "0", ":", "neg", "=", "True", "val", "=", "-", "val", "elif", "val", "==", "0", ":", "return", "0", ",", "0", "else", ":", "neg", "=", "False", "exp", "=", "math", ".", "log", "(", "val", ")", "/", "math", ".", "log", "(", "1000", ")", "if", "exp", "<", "0", ":", "exp", "=", "int", "(", "exp", ")", "-", "1", "else", ":", "exp", "=", "int", "(", "exp", ")", "val", "=", "val", "/", "1000.0", "**", "exp", "if", "neg", ":", "val", "=", "-", "val", "return", "val", ",", "3", "*", "exp" ]
22.47619
22.380952
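Worked examples for si_round(); each result satisfies factor * 10 ** exponent == val with 1 <= abs(factor) < 1000 (the math module must be imported where si_round is defined):

print(si_round(12345))      # (12.345, 3)
print(si_round(0.004))      # (4.0, -3)
print(si_round(-2500000))   # (-2.5, 6)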
def collision_warning(self, item): """ Given a string, print a warning if this could collide with a Zappa core package module. Use for app functions and events. """ namespace_collisions = [ "zappa.", "wsgi.", "middleware.", "handler.", "util.", "letsencrypt.", "cli." ] for namespace_collision in namespace_collisions: if item.startswith(namespace_collision): click.echo(click.style("Warning!", fg="red", bold=True) + " You may have a namespace collision between " + click.style(item, bold=True) + " and " + click.style(namespace_collision, bold=True) + "! You may want to rename that file.")
[ "def", "collision_warning", "(", "self", ",", "item", ")", ":", "namespace_collisions", "=", "[", "\"zappa.\"", ",", "\"wsgi.\"", ",", "\"middleware.\"", ",", "\"handler.\"", ",", "\"util.\"", ",", "\"letsencrypt.\"", ",", "\"cli.\"", "]", "for", "namespace_collision", "in", "namespace_collisions", ":", "if", "item", ".", "startswith", "(", "namespace_collision", ")", ":", "click", ".", "echo", "(", "click", ".", "style", "(", "\"Warning!\"", ",", "fg", "=", "\"red\"", ",", "bold", "=", "True", ")", "+", "\" You may have a namespace collision between \"", "+", "click", ".", "style", "(", "item", ",", "bold", "=", "True", ")", "+", "\" and \"", "+", "click", ".", "style", "(", "namespace_collision", ",", "bold", "=", "True", ")", "+", "\"! You may want to rename that file.\"", ")" ]
42.894737
19.526316
def encryption(self): """ Property for accessing :class:`EncryptionManager` instance, which is used to manage encryption. :rtype: yagocd.resources.encryption.EncryptionManager """ if self._encryption_manager is None: self._encryption_manager = EncryptionManager(session=self._session) return self._encryption_manager
[ "def", "encryption", "(", "self", ")", ":", "if", "self", ".", "_encryption_manager", "is", "None", ":", "self", ".", "_encryption_manager", "=", "EncryptionManager", "(", "session", "=", "self", ".", "_session", ")", "return", "self", ".", "_encryption_manager" ]
37.6
15.4
def revrank_dict(dict, key=lambda t: t[1], as_tuple=False):
    """ Reverse sorts a #dict by a given key, optionally returning it as a
        #tuple. By default, the @dict is sorted by its value.

        @dict: the #dict you wish to sort
        @key: the #sorted key to use
        @as_tuple: returns result as a #tuple ((k, v),...)

        -> :class:OrderedDict or #tuple
    """
    sorted_list = sorted(dict.items(), key=key, reverse=True)
    return OrderedDict(sorted_list) if not as_tuple else tuple(sorted_list)
[ "def", "revrank_dict", "(", "dict", ",", "key", "=", "lambda", "t", ":", "t", "[", "1", "]", ",", "as_tuple", "=", "False", ")", ":", "sorted_list", "=", "sorted", "(", "dict", ".", "items", "(", ")", ",", "key", "=", "key", ",", "reverse", "=", "True", ")", "return", "OrderedDict", "(", "sorted_list", ")", "if", "not", "as_tuple", "else", "tuple", "(", "sorted_list", ")" ]
42.75
16.833333
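Example of the default value-descending sort (OrderedDict is assumed imported where revrank_dict is defined):

scores = {'a': 1, 'b': 3, 'c': 2}
print(revrank_dict(scores))                 # OrderedDict([('b', 3), ('c', 2), ('a', 1)])
print(revrank_dict(scores, as_tuple=True))  # (('b', 3), ('c', 2), ('a', 1))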
def get_matching_kwargs(func, kwargs): """Takes a function and keyword arguments and returns the ones that can be passed.""" args, uses_startstar = _get_argspec(func) if uses_startstar: return kwargs.copy() else: matching_kwargs = dict((k, kwargs[k]) for k in args if k in kwargs) return matching_kwargs
[ "def", "get_matching_kwargs", "(", "func", ",", "kwargs", ")", ":", "args", ",", "uses_startstar", "=", "_get_argspec", "(", "func", ")", "if", "uses_startstar", ":", "return", "kwargs", ".", "copy", "(", ")", "else", ":", "matching_kwargs", "=", "dict", "(", "(", "k", ",", "kwargs", "[", "k", "]", ")", "for", "k", "in", "args", "if", "k", "in", "kwargs", ")", "return", "matching_kwargs" ]
42
14.125
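Illustration of the filtering, assuming _get_argspec() returns the function's argument names plus a flag for **kwargs acceptance, as the code implies:

def greet(name, excited=False):
    return name + ('!' if excited else '')

kwargs = {'name': 'Ada', 'excited': True, 'unused': 42}
print(greet(**get_matching_kwargs(greet, kwargs)))   # Ada!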
def _enable_notifications_failed(self, dbus_error): """ Called when notification enabling has failed. """ if ((dbus_error.get_dbus_name() == 'org.bluez.Error.Failed') and ((dbus_error.get_dbus_message() == "Already notifying") or (dbus_error.get_dbus_message() == "No notify session started"))): # Ignore cases where notifications were already enabled or already disabled return error = _error_from_dbus_error(dbus_error) self.service.device.characteristic_enable_notifications_failed(characteristic=self, error=error)
[ "def", "_enable_notifications_failed", "(", "self", ",", "dbus_error", ")", ":", "if", "(", "(", "dbus_error", ".", "get_dbus_name", "(", ")", "==", "'org.bluez.Error.Failed'", ")", "and", "(", "(", "dbus_error", ".", "get_dbus_message", "(", ")", "==", "\"Already notifying\"", ")", "or", "(", "dbus_error", ".", "get_dbus_message", "(", ")", "==", "\"No notify session started\"", ")", ")", ")", ":", "# Ignore cases where notifications where already enabled or already disabled", "return", "error", "=", "_error_from_dbus_error", "(", "dbus_error", ")", "self", ".", "service", ".", "device", ".", "characteristic_enable_notifications_failed", "(", "characteristic", "=", "self", ",", "error", "=", "error", ")" ]
55.090909
24.363636
def get_available_columns(self, dataset_ids): """ Retrieves the set of columns from the combination of dataset ids given :param dataset_ids: The id of the dataset to retrieve columns from :type dataset_ids: list of int :return: A list of column names from the dataset ids given. :rtype: list of str """ if not isinstance(dataset_ids, list): dataset_ids = [dataset_ids] data = { "dataset_ids": dataset_ids } failure_message = "Failed to get available columns in dataset(s) {}".format(dataset_ids) return self._get_success_json(self._post_json( 'v1/datasets/get-available-columns', data, failure_message=failure_message))['data']
[ "def", "get_available_columns", "(", "self", ",", "dataset_ids", ")", ":", "if", "not", "isinstance", "(", "dataset_ids", ",", "list", ")", ":", "dataset_ids", "=", "[", "dataset_ids", "]", "data", "=", "{", "\"dataset_ids\"", ":", "dataset_ids", "}", "failure_message", "=", "\"Failed to get available columns in dataset(s) {}\"", ".", "format", "(", "dataset_ids", ")", "return", "self", ".", "_get_success_json", "(", "self", ".", "_post_json", "(", "'v1/datasets/get-available-columns'", ",", "data", ",", "failure_message", "=", "failure_message", ")", ")", "[", "'data'", "]" ]
36.142857
23.47619
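A hypothetical call against a client instance exposing the method above (`client` is an assumption for illustration); note a bare id also works because non-lists are wrapped:

columns = client.get_available_columns([42, 43])
same_columns = client.get_available_columns(42)   # wrapped into [42] internally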
def create_bayesian_tear_sheet(returns, benchmark_rets=None, live_start_date=None, samples=2000, return_fig=False, stoch_vol=False, progressbar=True): """ Generate a number of Bayesian distributions and a Bayesian cone plot of returns. Plots: Sharpe distribution, annual volatility distribution, annual alpha distribution, beta distribution, predicted 1 and 5 day returns distributions, and a cumulative returns cone plot. Parameters ---------- returns : pd.Series Daily returns of the strategy, noncumulative. - See full explanation in create_full_tear_sheet. benchmark_rets : pd.Series, optional Daily noncumulative returns of the benchmark. - This is in the same style as returns. live_start_date : datetime, optional The point in time when the strategy began live trading, after its backtest period. samples : int, optional Number of posterior samples to draw. return_fig : boolean, optional If True, returns the figure that was plotted on. stoch_vol : boolean, optional If True, run and plot the stochastic volatility model progressbar : boolean, optional If True, show a progress bar """ if not have_bayesian: raise NotImplementedError( "Bayesian tear sheet requirements not found.\n" "Run 'pip install pyfolio[bayesian]' to install " "bayesian requirements." ) if live_start_date is None: raise NotImplementedError( 'Bayesian tear sheet requires setting of live_start_date' ) live_start_date = ep.utils.get_utc_timestamp(live_start_date) df_train = returns.loc[returns.index < live_start_date] df_test = returns.loc[returns.index >= live_start_date] # Run T model with missing data print("Running T model") previous_time = time() # track the total run time of the Bayesian tear sheet start_time = previous_time trace_t, ppc_t = bayesian.run_model('t', df_train, returns_test=df_test, samples=samples, ppc=True, progressbar=progressbar) previous_time = timer("T model", previous_time) # Compute BEST model print("\nRunning BEST model") trace_best = bayesian.run_model('best', df_train, returns_test=df_test, samples=samples, progressbar=progressbar) previous_time = timer("BEST model", previous_time) # Plot results fig = plt.figure(figsize=(14, 10 * 2)) gs = gridspec.GridSpec(9, 2, wspace=0.3, hspace=0.3) axs = [] row = 0 # Plot Bayesian cone ax_cone = plt.subplot(gs[row, :]) bayesian.plot_bayes_cone(df_train, df_test, ppc_t, ax=ax_cone) previous_time = timer("plotting Bayesian cone", previous_time) # Plot BEST results row += 1 axs.append(plt.subplot(gs[row, 0])) axs.append(plt.subplot(gs[row, 1])) row += 1 axs.append(plt.subplot(gs[row, 0])) axs.append(plt.subplot(gs[row, 1])) row += 1 axs.append(plt.subplot(gs[row, 0])) axs.append(plt.subplot(gs[row, 1])) row += 1 # Effect size across two axs.append(plt.subplot(gs[row, :])) bayesian.plot_best(trace=trace_best, axs=axs) previous_time = timer("plotting BEST results", previous_time) # Compute Bayesian predictions row += 1 ax_ret_pred_day = plt.subplot(gs[row, 0]) ax_ret_pred_week = plt.subplot(gs[row, 1]) day_pred = ppc_t[:, 0] p5 = scipy.stats.scoreatpercentile(day_pred, 5) sns.distplot(day_pred, ax=ax_ret_pred_day ) ax_ret_pred_day.axvline(p5, linestyle='--', linewidth=3.) 
ax_ret_pred_day.set_xlabel('Predicted returns 1 day') ax_ret_pred_day.set_ylabel('Frequency') ax_ret_pred_day.text(0.4, 0.9, 'Bayesian VaR = %.2f' % p5, verticalalignment='bottom', horizontalalignment='right', transform=ax_ret_pred_day.transAxes) previous_time = timer("computing Bayesian predictions", previous_time) # Plot Bayesian VaRs week_pred = ( np.cumprod(ppc_t[:, :5] + 1, 1) - 1)[:, -1] p5 = scipy.stats.scoreatpercentile(week_pred, 5) sns.distplot(week_pred, ax=ax_ret_pred_week ) ax_ret_pred_week.axvline(p5, linestyle='--', linewidth=3.) ax_ret_pred_week.set_xlabel('Predicted cum returns 5 days') ax_ret_pred_week.set_ylabel('Frequency') ax_ret_pred_week.text(0.4, 0.9, 'Bayesian VaR = %.2f' % p5, verticalalignment='bottom', horizontalalignment='right', transform=ax_ret_pred_week.transAxes) previous_time = timer("plotting Bayesian VaRs estimate", previous_time) # Run alpha beta model if benchmark_rets is not None: print("\nRunning alpha beta model") benchmark_rets = benchmark_rets.loc[df_train.index] trace_alpha_beta = bayesian.run_model('alpha_beta', df_train, bmark=benchmark_rets, samples=samples, progressbar=progressbar) previous_time = timer("running alpha beta model", previous_time) # Plot alpha and beta row += 1 ax_alpha = plt.subplot(gs[row, 0]) ax_beta = plt.subplot(gs[row, 1]) sns.distplot((1 + trace_alpha_beta['alpha'][100:])**252 - 1, ax=ax_alpha) sns.distplot(trace_alpha_beta['beta'][100:], ax=ax_beta) ax_alpha.set_xlabel('Annual Alpha') ax_alpha.set_ylabel('Belief') ax_beta.set_xlabel('Beta') ax_beta.set_ylabel('Belief') previous_time = timer("plotting alpha beta model", previous_time) if stoch_vol: # run stochastic volatility model returns_cutoff = 400 print( "\nRunning stochastic volatility model on " "most recent {} days of returns.".format(returns_cutoff) ) if df_train.size > returns_cutoff: df_train_truncated = df_train[-returns_cutoff:] _, trace_stoch_vol = bayesian.model_stoch_vol(df_train_truncated) previous_time = timer( "running stochastic volatility model", previous_time) # plot latent volatility row += 1 ax_volatility = plt.subplot(gs[row, :]) bayesian.plot_stoch_vol( df_train_truncated, trace=trace_stoch_vol, ax=ax_volatility) previous_time = timer( "plotting stochastic volatility model", previous_time) total_time = time() - start_time print("\nTotal runtime was {:.2f} seconds.".format(total_time)) gs.tight_layout(fig) if return_fig: return fig
[ "def", "create_bayesian_tear_sheet", "(", "returns", ",", "benchmark_rets", "=", "None", ",", "live_start_date", "=", "None", ",", "samples", "=", "2000", ",", "return_fig", "=", "False", ",", "stoch_vol", "=", "False", ",", "progressbar", "=", "True", ")", ":", "if", "not", "have_bayesian", ":", "raise", "NotImplementedError", "(", "\"Bayesian tear sheet requirements not found.\\n\"", "\"Run 'pip install pyfolio[bayesian]' to install \"", "\"bayesian requirements.\"", ")", "if", "live_start_date", "is", "None", ":", "raise", "NotImplementedError", "(", "'Bayesian tear sheet requires setting of live_start_date'", ")", "live_start_date", "=", "ep", ".", "utils", ".", "get_utc_timestamp", "(", "live_start_date", ")", "df_train", "=", "returns", ".", "loc", "[", "returns", ".", "index", "<", "live_start_date", "]", "df_test", "=", "returns", ".", "loc", "[", "returns", ".", "index", ">=", "live_start_date", "]", "# Run T model with missing data", "print", "(", "\"Running T model\"", ")", "previous_time", "=", "time", "(", ")", "# track the total run time of the Bayesian tear sheet", "start_time", "=", "previous_time", "trace_t", ",", "ppc_t", "=", "bayesian", ".", "run_model", "(", "'t'", ",", "df_train", ",", "returns_test", "=", "df_test", ",", "samples", "=", "samples", ",", "ppc", "=", "True", ",", "progressbar", "=", "progressbar", ")", "previous_time", "=", "timer", "(", "\"T model\"", ",", "previous_time", ")", "# Compute BEST model", "print", "(", "\"\\nRunning BEST model\"", ")", "trace_best", "=", "bayesian", ".", "run_model", "(", "'best'", ",", "df_train", ",", "returns_test", "=", "df_test", ",", "samples", "=", "samples", ",", "progressbar", "=", "progressbar", ")", "previous_time", "=", "timer", "(", "\"BEST model\"", ",", "previous_time", ")", "# Plot results", "fig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "14", ",", "10", "*", "2", ")", ")", "gs", "=", "gridspec", ".", "GridSpec", "(", "9", ",", "2", ",", "wspace", "=", "0.3", ",", "hspace", "=", "0.3", ")", "axs", "=", "[", "]", "row", "=", "0", "# Plot Bayesian cone", "ax_cone", "=", "plt", ".", "subplot", "(", "gs", "[", "row", ",", ":", "]", ")", "bayesian", ".", "plot_bayes_cone", "(", "df_train", ",", "df_test", ",", "ppc_t", ",", "ax", "=", "ax_cone", ")", "previous_time", "=", "timer", "(", "\"plotting Bayesian cone\"", ",", "previous_time", ")", "# Plot BEST results", "row", "+=", "1", "axs", ".", "append", "(", "plt", ".", "subplot", "(", "gs", "[", "row", ",", "0", "]", ")", ")", "axs", ".", "append", "(", "plt", ".", "subplot", "(", "gs", "[", "row", ",", "1", "]", ")", ")", "row", "+=", "1", "axs", ".", "append", "(", "plt", ".", "subplot", "(", "gs", "[", "row", ",", "0", "]", ")", ")", "axs", ".", "append", "(", "plt", ".", "subplot", "(", "gs", "[", "row", ",", "1", "]", ")", ")", "row", "+=", "1", "axs", ".", "append", "(", "plt", ".", "subplot", "(", "gs", "[", "row", ",", "0", "]", ")", ")", "axs", ".", "append", "(", "plt", ".", "subplot", "(", "gs", "[", "row", ",", "1", "]", ")", ")", "row", "+=", "1", "# Effect size across two", "axs", ".", "append", "(", "plt", ".", "subplot", "(", "gs", "[", "row", ",", ":", "]", ")", ")", "bayesian", ".", "plot_best", "(", "trace", "=", "trace_best", ",", "axs", "=", "axs", ")", "previous_time", "=", "timer", "(", "\"plotting BEST results\"", ",", "previous_time", ")", "# Compute Bayesian predictions", "row", "+=", "1", "ax_ret_pred_day", "=", "plt", ".", "subplot", "(", "gs", "[", "row", ",", "0", "]", ")", "ax_ret_pred_week", "=", "plt", ".", 
"subplot", "(", "gs", "[", "row", ",", "1", "]", ")", "day_pred", "=", "ppc_t", "[", ":", ",", "0", "]", "p5", "=", "scipy", ".", "stats", ".", "scoreatpercentile", "(", "day_pred", ",", "5", ")", "sns", ".", "distplot", "(", "day_pred", ",", "ax", "=", "ax_ret_pred_day", ")", "ax_ret_pred_day", ".", "axvline", "(", "p5", ",", "linestyle", "=", "'--'", ",", "linewidth", "=", "3.", ")", "ax_ret_pred_day", ".", "set_xlabel", "(", "'Predicted returns 1 day'", ")", "ax_ret_pred_day", ".", "set_ylabel", "(", "'Frequency'", ")", "ax_ret_pred_day", ".", "text", "(", "0.4", ",", "0.9", ",", "'Bayesian VaR = %.2f'", "%", "p5", ",", "verticalalignment", "=", "'bottom'", ",", "horizontalalignment", "=", "'right'", ",", "transform", "=", "ax_ret_pred_day", ".", "transAxes", ")", "previous_time", "=", "timer", "(", "\"computing Bayesian predictions\"", ",", "previous_time", ")", "# Plot Bayesian VaRs", "week_pred", "=", "(", "np", ".", "cumprod", "(", "ppc_t", "[", ":", ",", ":", "5", "]", "+", "1", ",", "1", ")", "-", "1", ")", "[", ":", ",", "-", "1", "]", "p5", "=", "scipy", ".", "stats", ".", "scoreatpercentile", "(", "week_pred", ",", "5", ")", "sns", ".", "distplot", "(", "week_pred", ",", "ax", "=", "ax_ret_pred_week", ")", "ax_ret_pred_week", ".", "axvline", "(", "p5", ",", "linestyle", "=", "'--'", ",", "linewidth", "=", "3.", ")", "ax_ret_pred_week", ".", "set_xlabel", "(", "'Predicted cum returns 5 days'", ")", "ax_ret_pred_week", ".", "set_ylabel", "(", "'Frequency'", ")", "ax_ret_pred_week", ".", "text", "(", "0.4", ",", "0.9", ",", "'Bayesian VaR = %.2f'", "%", "p5", ",", "verticalalignment", "=", "'bottom'", ",", "horizontalalignment", "=", "'right'", ",", "transform", "=", "ax_ret_pred_week", ".", "transAxes", ")", "previous_time", "=", "timer", "(", "\"plotting Bayesian VaRs estimate\"", ",", "previous_time", ")", "# Run alpha beta model", "if", "benchmark_rets", "is", "not", "None", ":", "print", "(", "\"\\nRunning alpha beta model\"", ")", "benchmark_rets", "=", "benchmark_rets", ".", "loc", "[", "df_train", ".", "index", "]", "trace_alpha_beta", "=", "bayesian", ".", "run_model", "(", "'alpha_beta'", ",", "df_train", ",", "bmark", "=", "benchmark_rets", ",", "samples", "=", "samples", ",", "progressbar", "=", "progressbar", ")", "previous_time", "=", "timer", "(", "\"running alpha beta model\"", ",", "previous_time", ")", "# Plot alpha and beta", "row", "+=", "1", "ax_alpha", "=", "plt", ".", "subplot", "(", "gs", "[", "row", ",", "0", "]", ")", "ax_beta", "=", "plt", ".", "subplot", "(", "gs", "[", "row", ",", "1", "]", ")", "sns", ".", "distplot", "(", "(", "1", "+", "trace_alpha_beta", "[", "'alpha'", "]", "[", "100", ":", "]", ")", "**", "252", "-", "1", ",", "ax", "=", "ax_alpha", ")", "sns", ".", "distplot", "(", "trace_alpha_beta", "[", "'beta'", "]", "[", "100", ":", "]", ",", "ax", "=", "ax_beta", ")", "ax_alpha", ".", "set_xlabel", "(", "'Annual Alpha'", ")", "ax_alpha", ".", "set_ylabel", "(", "'Belief'", ")", "ax_beta", ".", "set_xlabel", "(", "'Beta'", ")", "ax_beta", ".", "set_ylabel", "(", "'Belief'", ")", "previous_time", "=", "timer", "(", "\"plotting alpha beta model\"", ",", "previous_time", ")", "if", "stoch_vol", ":", "# run stochastic volatility model", "returns_cutoff", "=", "400", "print", "(", "\"\\nRunning stochastic volatility model on \"", "\"most recent {} days of returns.\"", ".", "format", "(", "returns_cutoff", ")", ")", "if", "df_train", ".", "size", ">", "returns_cutoff", ":", "df_train_truncated", "=", "df_train", "[", "-", "returns_cutoff", ":", 
"]", "_", ",", "trace_stoch_vol", "=", "bayesian", ".", "model_stoch_vol", "(", "df_train_truncated", ")", "previous_time", "=", "timer", "(", "\"running stochastic volatility model\"", ",", "previous_time", ")", "# plot latent volatility", "row", "+=", "1", "ax_volatility", "=", "plt", ".", "subplot", "(", "gs", "[", "row", ",", ":", "]", ")", "bayesian", ".", "plot_stoch_vol", "(", "df_train_truncated", ",", "trace", "=", "trace_stoch_vol", ",", "ax", "=", "ax_volatility", ")", "previous_time", "=", "timer", "(", "\"plotting stochastic volatility model\"", ",", "previous_time", ")", "total_time", "=", "time", "(", ")", "-", "start_time", "print", "(", "\"\\nTotal runtime was {:.2f} seconds.\"", ".", "format", "(", "total_time", ")", ")", "gs", ".", "tight_layout", "(", "fig", ")", "if", "return_fig", ":", "return", "fig" ]
37.559783
18.668478
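A hedged invocation sketch for the tear sheet above, assuming the pyfolio[bayesian] extras are installed and `returns` is a daily pd.Series of noncumulative strategy returns; the date and sample count below are made up:

fig = create_bayesian_tear_sheet(returns,
                                 live_start_date='2017-01-01',
                                 samples=500,      # fewer draws for a quick run
                                 return_fig=True)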
def find_example_dir(): """ Find examples dir .. a little bit ugly.. """ # Replace %s with directory to check for shoebot menus. code_stub = textwrap.dedent(""" from pkg_resources import resource_filename, Requirement, DistributionNotFound try: print(resource_filename(Requirement.parse('shoebot'), '%s')) except DistributionNotFound: pass """) # Needs to run in same python env as shoebot (may be different to gedits) code = code_stub % 'share/shoebot/examples' cmd = ["python", "-c", code] p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) output, errors = p.communicate() if errors: print('Shoebot experienced errors searching for install and examples.') print('Errors:\n{0}'.format(errors.decode('utf-8'))) return None else: examples_dir = output.decode('utf-8').strip() if os.path.isdir(examples_dir): return examples_dir # If user is running 'setup.py develop' then examples could be right here #code = "from pkg_resources import resource_filename, Requirement; print resource_filename(Requirement.parse('shoebot'), 'examples/')" code = code_stub % 'examples/' cmd = ["python", "-c", code] p = subprocess.Popen(cmd, stdout=subprocess.PIPE) output, errors = p.communicate() examples_dir = output.decode('utf-8').strip() if os.path.isdir(examples_dir): return examples_dir if examples_dir: print('Shoebot could not find examples at: {0}'.format(examples_dir)) else: print('Shoebot could not find install dir and examples.')
[ "def", "find_example_dir", "(", ")", ":", "# Replace %s with directory to check for shoebot menus.", "code_stub", "=", "textwrap", ".", "dedent", "(", "\"\"\"\n from pkg_resources import resource_filename, Requirement, DistributionNotFound\n try:\n print(resource_filename(Requirement.parse('shoebot'), '%s'))\n except DistributionNotFound:\n pass\n\n \"\"\"", ")", "# Needs to run in same python env as shoebot (may be different to gedits)", "code", "=", "code_stub", "%", "'share/shoebot/examples'", "cmd", "=", "[", "\"python\"", ",", "\"-c\"", ",", "code", "]", "p", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "output", ",", "errors", "=", "p", ".", "communicate", "(", ")", "if", "errors", ":", "print", "(", "'Shoebot experienced errors searching for install and examples.'", ")", "print", "(", "'Errors:\\n{0}'", ".", "format", "(", "errors", ".", "decode", "(", "'utf-8'", ")", ")", ")", "return", "None", "else", ":", "examples_dir", "=", "output", ".", "decode", "(", "'utf-8'", ")", ".", "strip", "(", ")", "if", "os", ".", "path", ".", "isdir", "(", "examples_dir", ")", ":", "return", "examples_dir", "# If user is running 'setup.py develop' then examples could be right here", "#code = \"from pkg_resources import resource_filename, Requirement; print resource_filename(Requirement.parse('shoebot'), 'examples/')\"", "code", "=", "code_stub", "%", "'examples/'", "cmd", "=", "[", "\"python\"", ",", "\"-c\"", ",", "code", "]", "p", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "output", ",", "errors", "=", "p", ".", "communicate", "(", ")", "examples_dir", "=", "output", ".", "decode", "(", "'utf-8'", ")", ".", "strip", "(", ")", "if", "os", ".", "path", ".", "isdir", "(", "examples_dir", ")", ":", "return", "examples_dir", "if", "examples_dir", ":", "print", "(", "'Shoebot could not find examples at: {0}'", ".", "format", "(", "examples_dir", ")", ")", "else", ":", "print", "(", "'Shoebot could not find install dir and examples.'", ")" ]
38.651163
21.697674
def check_reservation_rooms(self):
    '''
    This method is used to validate the reservation_line.
    -----------------------------------------------------
    @param self: object pointer
    @return: raise a warning depending on the validation
    '''
    ctx = dict(self._context) or {}
    for reservation in self:
        cap = 0
        for rec in reservation.reservation_line:
            if len(rec.reserve) == 0:
                raise ValidationError(_(
                    'Please Select Rooms For Reservation.'))
            for room in rec.reserve:
                cap += room.capacity
        if not ctx.get('duplicate'):
            if (reservation.adults + reservation.children) > cap:
                raise ValidationError(_(
                    'Room Capacity Exceeded \n'
                    ' Please Select Rooms According to'
                    ' Members Accommodation.'))
        if reservation.adults <= 0:
            raise ValidationError(_('Adults must be more than 0'))
[ "def", "check_reservation_rooms", "(", "self", ")", ":", "ctx", "=", "dict", "(", "self", ".", "_context", ")", "or", "{", "}", "for", "reservation", "in", "self", ":", "cap", "=", "0", "for", "rec", "in", "reservation", ".", "reservation_line", ":", "if", "len", "(", "rec", ".", "reserve", ")", "==", "0", ":", "raise", "ValidationError", "(", "_", "(", "'Please Select Rooms For Reservation.'", ")", ")", "for", "room", "in", "rec", ".", "reserve", ":", "cap", "+=", "room", ".", "capacity", "if", "not", "ctx", ".", "get", "(", "'duplicate'", ")", ":", "if", "(", "reservation", ".", "adults", "+", "reservation", ".", "children", ")", ">", "cap", ":", "raise", "ValidationError", "(", "_", "(", "'Room Capacity Exceeded \\n'", "' Please Select Rooms According to'", "' Members Accomodation.'", ")", ")", "if", "reservation", ".", "adults", "<=", "0", ":", "raise", "ValidationError", "(", "_", "(", "'Adults must be more than 0'", ")", ")" ]
44.416667
12.75
def get_direction(self, direction, rev=False): """ Translate a direction in compass degrees into 'up' or 'down'. """ if (direction < 90.0) or (direction >= 270.0): if not rev: return 'up' else: return 'down' elif (90.0 <= direction < 270.0): if not rev: return 'down' else: return 'up' else: return 'none'
[ "def", "get_direction", "(", "self", ",", "direction", ",", "rev", "=", "False", ")", ":", "if", "(", "direction", "<", "90.0", ")", "or", "(", "direction", ">=", "270.0", ")", ":", "if", "not", "rev", ":", "return", "'up'", "else", ":", "return", "'down'", "elif", "(", "90.0", "<=", "direction", "<", "270.0", ")", ":", "if", "not", "rev", ":", "return", "'down'", "else", ":", "return", "'up'", "else", ":", "return", "'none'" ]
28.875
13.75
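Boundary behaviour of get_direction(); the throwaway holder class exists only because the method takes self:

class _Demo:
    get_direction = get_direction   # borrow the function for illustration

d = _Demo()
print(d.get_direction(45.0))              # 'up'
print(d.get_direction(90.0))              # 'down' (90.0 falls in [90, 270))
print(d.get_direction(300.0, rev=True))   # 'down' (rev flips the 'up' answer)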
def project_meta(self, attributes): """ Projects the specified metadata attributes to new region fields :param attributes: a list of metadata attributes :return: a new GDataframe with additional region fields """ if not isinstance(attributes, list): raise TypeError('attributes must be a list') meta_to_project = self.meta[attributes].applymap(lambda l: ", ".join(l)) new_regs = self.regs.merge(meta_to_project, left_index=True, right_index=True) return GDataframe(regs=new_regs, meta=self.meta)
[ "def", "project_meta", "(", "self", ",", "attributes", ")", ":", "if", "not", "isinstance", "(", "attributes", ",", "list", ")", ":", "raise", "TypeError", "(", "'attributes must be a list'", ")", "meta_to_project", "=", "self", ".", "meta", "[", "attributes", "]", ".", "applymap", "(", "lambda", "l", ":", "\", \"", ".", "join", "(", "l", ")", ")", "new_regs", "=", "self", ".", "regs", ".", "merge", "(", "meta_to_project", ",", "left_index", "=", "True", ",", "right_index", "=", "True", ")", "return", "GDataframe", "(", "regs", "=", "new_regs", ",", "meta", "=", "self", ".", "meta", ")" ]
51.090909
18.727273
def mse(mean, estimator): """ Description: Calculates the Mean Squared Error (MSE) of an estimation on flat numpy ndarrays. Parameters: mean: actual value (numpy ndarray) estimator: estimated value of the mean (numpy ndarray) """ return np.mean((np.asarray(estimator) - np.asarray(mean)) ** 2, axis=0)
[ "def", "mse", "(", "mean", ",", "estimator", ")", ":", "return", "np", ".", "mean", "(", "(", "np", ".", "asarray", "(", "estimator", ")", "-", "np", ".", "asarray", "(", "mean", ")", ")", "**", "2", ",", "axis", "=", "0", ")" ]
35.8
14.4
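A numeric check of mse() (NumPy required):

import numpy as np

truth = np.array([1.0, 2.0, 3.0])
est = np.array([1.5, 2.0, 2.0])
print(mse(truth, est))   # mean of [0.25, 0.0, 1.0] -> 0.41666...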
def find(soup, name=None, attrs=None, recursive=True, text=None, **kwargs): """Modified find method; see `find_all`, above. """ tags = find_all( soup, name, attrs or {}, recursive, text, 1, **kwargs ) if tags: return tags[0]
[ "def", "find", "(", "soup", ",", "name", "=", "None", ",", "attrs", "=", "None", ",", "recursive", "=", "True", ",", "text", "=", "None", ",", "*", "*", "kwargs", ")", ":", "tags", "=", "find_all", "(", "soup", ",", "name", ",", "attrs", "or", "{", "}", ",", "recursive", ",", "text", ",", "1", ",", "*", "*", "kwargs", ")", "if", "tags", ":", "return", "tags", "[", "0", "]" ]
28.111111
21.888889
async def rcpt(self, recipient, options=None):
    """
    Sends an SMTP 'RCPT' command. - Indicates a recipient for the e-mail.

    For further details, please check out `RFC 5321 § 4.1.1.3`_ and `§ 3.3`_.

    Args:
        recipient (str): E-mail address of one recipient.
        options (list of str or None, optional): Additional options to send
            along with the *RCPT* command.

    Raises:
        ConnectionResetError: If the connection with the server is
            unexpectedly lost.
        SMTPCommandFailedError: If the RCPT command fails.

    Returns:
        (int, str): A (code, message) 2-tuple containing the server response.

    .. _`RFC 5321 § 4.1.1.3`: https://tools.ietf.org/html/rfc5321#section-4.1.1.3
    .. _`§ 3.3`: https://tools.ietf.org/html/rfc5321#section-3.3
    """
    if options is None:
        options = []

    to_addr = "TO:{}".format(quoteaddr(recipient))

    code, message = await self.do_cmd("RCPT", to_addr, *options)

    return code, message
[ "async", "def", "rcpt", "(", "self", ",", "recipient", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "[", "]", "to_addr", "=", "\"TO:{}\"", ".", "format", "(", "quoteaddr", "(", "recipient", ")", ")", "code", ",", "message", "=", "await", "self", ".", "do_cmd", "(", "\"RCPT\"", ",", "to_addr", ",", "*", "options", ")", "return", "code", ",", "message" ]
34.806452
25.064516
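A hedged transaction sketch; the connected `client` object and its mail() companion are assumptions modeled on typical SMTP client APIs, and only rcpt() comes from the snippet above:

async def send_one(client, sender, recipient):
    await client.mail(sender)                      # assumed MAIL FROM step
    code, message = await client.rcpt(recipient)   # method from the snippet
    return code, message                           # e.g. (250, 'OK')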
def user(self, user):
    """
    Sets the user of this WebCredentials.
    The name of the account to log in to.

    :param user: The user of this WebCredentials.
    :type: str
    """
    if user is None:
        raise ValueError("Invalid value for `user`, must not be `None`")
    if user is not None and len(user) > 1024:
        raise ValueError("Invalid value for `user`, length must be less than or equal to `1024`")

    self._user = user
[ "def", "user", "(", "self", ",", "user", ")", ":", "if", "user", "is", "None", ":", "raise", "ValueError", "(", "\"Invalid value for `user`, must not be `None`\"", ")", "if", "user", "is", "not", "None", "and", "len", "(", "user", ")", ">", "1024", ":", "raise", "ValueError", "(", "\"Invalid value for `user`, length must be less than or equal to `1024`\"", ")", "self", ".", "_user", "=", "user" ]
34.142857
20
def inject():
    """Injects pout into the builtins module so it can be called from anywhere
    without having to be explicitly imported; this is really just for
    convenience when debugging

    https://stackoverflow.com/questions/142545/python-how-to-make-a-cross-module-variable
    """
    try:
        from .compat import builtins

        module = sys.modules[__name__]
        setattr(builtins, __name__, module)
        #builtins.pout = pout

    except ImportError:
        pass
[ "def", "inject", "(", ")", ":", "try", ":", "from", ".", "compat", "import", "builtins", "module", "=", "sys", ".", "modules", "[", "__name__", "]", "setattr", "(", "builtins", ",", "__name__", ",", "module", ")", "#builtins.pout = pout", "except", "ImportError", ":", "pass" ]
29.75
22.4375
def drop(self, async_=False, if_exists=False, **kw): """ Drop this table. :param async_: run asynchronously if True :return: None """ async_ = kw.get('async', async_) return self.parent.delete(self, async_=async_, if_exists=if_exists)
[ "def", "drop", "(", "self", ",", "async_", "=", "False", ",", "if_exists", "=", "False", ",", "*", "*", "kw", ")", ":", "async_", "=", "kw", ".", "get", "(", "'async'", ",", "async_", ")", "return", "self", ".", "parent", ".", "delete", "(", "self", ",", "async_", "=", "async_", ",", "if_exists", "=", "if_exists", ")" ]
31.444444
14.555556
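Why the kw.get('async', async_) line: `async` became a reserved word in Python 3.7, so the parameter was renamed to async_ while legacy call sites keep working through **kw; `table` below is an assumed instance of the class this method belongs to:

table.drop(if_exists=True)        # modern spelling, synchronous drop
table.drop(**{'async': True})     # legacy keyword still honoured via **kw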
def wrap_in_ndarray(value): """Wraps the argument in a numpy.ndarray. If value is a scalar, it is converted in a list first. If value is array-like, the shape is conserved. """ if hasattr(value, "__len__"): return np.array(value) else: return np.array([value])
[ "def", "wrap_in_ndarray", "(", "value", ")", ":", "if", "hasattr", "(", "value", ",", "\"__len__\"", ")", ":", "return", "np", ".", "array", "(", "value", ")", "else", ":", "return", "np", ".", "array", "(", "[", "value", "]", ")" ]
24.833333
18.666667
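Behaviour on scalars versus array-likes (NumPy required):

import numpy as np

print(wrap_in_ndarray(3.0))         # array([3.])  -- scalar wrapped in a list first
print(wrap_in_ndarray([1, 2, 3]))   # array([1, 2, 3])  -- shape preserved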
def to_vars_dict(self): """ Return local state which is relevant for the cluster setup process. """ return { 'aws_access_key_id': self._access_key, 'aws_secret_access_key': self._secret_key, 'aws_region': self._region_name, 'aws_vpc_name': (self._vpc or ''), 'aws_vpc_id': (self._vpc_id or ''), }
[ "def", "to_vars_dict", "(", "self", ")", ":", "return", "{", "'aws_access_key_id'", ":", "self", ".", "_access_key", ",", "'aws_secret_access_key'", ":", "self", ".", "_secret_key", ",", "'aws_region'", ":", "self", ".", "_region_name", ",", "'aws_vpc_name'", ":", "(", "self", ".", "_vpc", "or", "''", ")", ",", "'aws_vpc_id'", ":", "(", "self", ".", "_vpc_id", "or", "''", ")", ",", "}" ]
38.727273
17.090909
def _from_dict(cls, _dict): """Initialize a LogQueryResponseResult object from a json dictionary.""" args = {} if 'environment_id' in _dict: args['environment_id'] = _dict.get('environment_id') if 'customer_id' in _dict: args['customer_id'] = _dict.get('customer_id') if 'document_type' in _dict: args['document_type'] = _dict.get('document_type') if 'natural_language_query' in _dict: args['natural_language_query'] = _dict.get('natural_language_query') if 'document_results' in _dict: args[ 'document_results'] = LogQueryResponseResultDocuments._from_dict( _dict.get('document_results')) if 'created_timestamp' in _dict: args['created_timestamp'] = string_to_datetime( _dict.get('created_timestamp')) if 'client_timestamp' in _dict: args['client_timestamp'] = string_to_datetime( _dict.get('client_timestamp')) if 'query_id' in _dict: args['query_id'] = _dict.get('query_id') if 'session_token' in _dict: args['session_token'] = _dict.get('session_token') if 'collection_id' in _dict: args['collection_id'] = _dict.get('collection_id') if 'display_rank' in _dict: args['display_rank'] = _dict.get('display_rank') if 'document_id' in _dict: args['document_id'] = _dict.get('document_id') if 'event_type' in _dict: args['event_type'] = _dict.get('event_type') if 'result_type' in _dict: args['result_type'] = _dict.get('result_type') return cls(**args)
[ "def", "_from_dict", "(", "cls", ",", "_dict", ")", ":", "args", "=", "{", "}", "if", "'environment_id'", "in", "_dict", ":", "args", "[", "'environment_id'", "]", "=", "_dict", ".", "get", "(", "'environment_id'", ")", "if", "'customer_id'", "in", "_dict", ":", "args", "[", "'customer_id'", "]", "=", "_dict", ".", "get", "(", "'customer_id'", ")", "if", "'document_type'", "in", "_dict", ":", "args", "[", "'document_type'", "]", "=", "_dict", ".", "get", "(", "'document_type'", ")", "if", "'natural_language_query'", "in", "_dict", ":", "args", "[", "'natural_language_query'", "]", "=", "_dict", ".", "get", "(", "'natural_language_query'", ")", "if", "'document_results'", "in", "_dict", ":", "args", "[", "'document_results'", "]", "=", "LogQueryResponseResultDocuments", ".", "_from_dict", "(", "_dict", ".", "get", "(", "'document_results'", ")", ")", "if", "'created_timestamp'", "in", "_dict", ":", "args", "[", "'created_timestamp'", "]", "=", "string_to_datetime", "(", "_dict", ".", "get", "(", "'created_timestamp'", ")", ")", "if", "'client_timestamp'", "in", "_dict", ":", "args", "[", "'client_timestamp'", "]", "=", "string_to_datetime", "(", "_dict", ".", "get", "(", "'client_timestamp'", ")", ")", "if", "'query_id'", "in", "_dict", ":", "args", "[", "'query_id'", "]", "=", "_dict", ".", "get", "(", "'query_id'", ")", "if", "'session_token'", "in", "_dict", ":", "args", "[", "'session_token'", "]", "=", "_dict", ".", "get", "(", "'session_token'", ")", "if", "'collection_id'", "in", "_dict", ":", "args", "[", "'collection_id'", "]", "=", "_dict", ".", "get", "(", "'collection_id'", ")", "if", "'display_rank'", "in", "_dict", ":", "args", "[", "'display_rank'", "]", "=", "_dict", ".", "get", "(", "'display_rank'", ")", "if", "'document_id'", "in", "_dict", ":", "args", "[", "'document_id'", "]", "=", "_dict", ".", "get", "(", "'document_id'", ")", "if", "'event_type'", "in", "_dict", ":", "args", "[", "'event_type'", "]", "=", "_dict", ".", "get", "(", "'event_type'", ")", "if", "'result_type'", "in", "_dict", ":", "args", "[", "'result_type'", "]", "=", "_dict", ".", "get", "(", "'result_type'", ")", "return", "cls", "(", "*", "*", "args", ")" ]
46.916667
12.972222
def log(self, n=None, template=None, **kwargs): """ Run the repository log command Returns: str: output of log command (``svn log -l <n> <--kwarg=value>``) """ cmd = ['svn', 'log'] if n: cmd.append('-l%d' % n) cmd.extend( (('--%s=%s' % (k, v)) for (k, v) in kwargs.items()) ) return self.sh(cmd, shell=False)
[ "def", "log", "(", "self", ",", "n", "=", "None", ",", "template", "=", "None", ",", "*", "*", "kwargs", ")", ":", "cmd", "=", "[", "'svn'", ",", "'log'", "]", "if", "n", ":", "cmd", ".", "append", "(", "'-l%d'", "%", "n", ")", "cmd", ".", "extend", "(", "(", "(", "'--%s=%s'", "%", "(", "k", ",", "v", ")", ")", "for", "(", "k", ",", "v", ")", "in", "kwargs", ".", "items", "(", ")", ")", ")", "return", "self", ".", "sh", "(", "cmd", ",", "shell", "=", "False", ")" ]
28.857143
16.285714
def _instantiate_app(self, target_cls, kwargs): """For App targets, convert BundleAdaptor to BundleProps.""" parse_context = ParseContext(kwargs['address'].spec_path, dict()) bundleprops_factory = Bundle(parse_context) kwargs['bundles'] = [ bundleprops_factory.create_bundle_props(bundle) for bundle in kwargs['bundles'] ] return target_cls(build_graph=self, **kwargs)
[ "def", "_instantiate_app", "(", "self", ",", "target_cls", ",", "kwargs", ")", ":", "parse_context", "=", "ParseContext", "(", "kwargs", "[", "'address'", "]", ".", "spec_path", ",", "dict", "(", ")", ")", "bundleprops_factory", "=", "Bundle", "(", "parse_context", ")", "kwargs", "[", "'bundles'", "]", "=", "[", "bundleprops_factory", ".", "create_bundle_props", "(", "bundle", ")", "for", "bundle", "in", "kwargs", "[", "'bundles'", "]", "]", "return", "target_cls", "(", "build_graph", "=", "self", ",", "*", "*", "kwargs", ")" ]
39.6
15.8
def main(sample_id, fastq_pair, gsize, minimum_coverage, opts): """ Main executor of the integrity_coverage template. Parameters ---------- sample_id : str Sample Identification string. fastq_pair : list Two element list containing the paired FastQ files. gsize : float or int Estimate of genome size in Mb. minimum_coverage : float or int Minimum coverage required for a sample to pass the coverage check opts : list List of arbitrary options. See `Expected input`_. """ logger.info("Starting integrity coverage main") # Check for runtime options if "-e" in opts: skip_encoding = True else: skip_encoding = False # Information for encoding guess gmin, gmax = 99, 0 encoding = [] phred = None # Information for coverage estimation chars = 0 nreads = 0 # Information on maximum read length max_read_length = 0 # Get compression of each FastQ pair file file_objects = [] for fastq in fastq_pair: logger.info("Processing file {}".format(fastq)) logger.info("[{}] Guessing file compression".format(fastq)) ftype = guess_file_compression(fastq) # This can guess the compression of gz, bz2 and zip. If it cannot # find the compression type, it tries to open a regular file if ftype: logger.info("[{}] Found file compression: {}".format( fastq, ftype)) file_objects.append(COPEN[ftype](fastq, "rt")) else: logger.info("[{}] File compression not found. Assuming an " "uncompressed file".format(fastq)) file_objects.append(open(fastq)) logger.info("Starting FastQ file parsing") # The '*_encoding' file stores a string with the encoding ('Sanger') # If no encoding is guessed, 'None' should be stored # The '*_phred' file stores a string with the phred score ('33') # If no phred is guessed, 'None' should be stored # The '*_coverage' file stores the estimated coverage ('88') # The '*_report' file stores a csv report of the file # The '*_max_len' file stores a string with the maximum contig len ('155') with open("{}_encoding".format(sample_id), "w") as enc_fh, \ open("{}_phred".format(sample_id), "w") as phred_fh, \ open("{}_coverage".format(sample_id), "w") as cov_fh, \ open("{}_report".format(sample_id), "w") as cov_rep, \ open("{}_max_len".format(sample_id), "w") as len_fh, \ open(".report.json", "w") as json_report, \ open(".status", "w") as status_fh, \ open(".fail", "w") as fail_fh: try: # Iterate over both pair files sequentially using itertools.chain for i, line in enumerate(chain(*file_objects)): # Parse only every 4th line of the file for the encoding # e.g.: AAAA/EEEEEEEEEEE<EEEEEEEEEEEEEEEEEEEEEEEEE (...) if (i + 1) % 4 == 0 and not skip_encoding: # It is important to strip() the line so that any newline # character is removed and not accounted for in the # encoding guess lmin, lmax = get_qual_range(line.strip()) # Guess new encoding if the range expands the previously # set boundaries of gmin and gmax if lmin < gmin or lmax > gmax: gmin, gmax = min(lmin, gmin), max(lmax, gmax) encoding, phred = get_encodings_in_range(gmin, gmax) logger.debug( "Updating estimates at line {} with range {} to" " '{}' (encoding) and '{}' (phred)".format( i, [lmin, lmax], encoding, phred)) # Parse only every 2nd line of the file for the coverage # e.g.: GGATAATCTACCTTGACGATTTGTACTGGCGTTGGTTTCTTA (...) 
if (i + 3) % 4 == 0: read_len = len(line.strip()) chars += read_len nreads += 1 # Evaluate maximum read length for sample if read_len > max_read_length: logger.debug("Updating maximum read length at line " "{} to {}".format(i, read_len)) max_read_length = read_len # End of FastQ parsing logger.info("Finished FastQ file parsing") # The minimum expected coverage for a sample to pass exp_coverage = round(chars / (gsize * 1e6), 2) # Set json report if "-e" not in opts: json_dic = { "tableRow": [{ "sample": sample_id, "data": [ {"header": "Raw BP", "value": chars, "table": "qc", "columnBar": True}, {"header": "Reads", "value": nreads, "table": "qc", "columnBar": True}, {"header": "Coverage", "value": exp_coverage, "table": "qc", "columnBar": True, "failThreshold": minimum_coverage } ] }], "plotData": [{ "sample": sample_id, "data": { "sparkline": chars } }], } else: json_dic = { "tableRow": [{ "sample": sample_id, "data": [ {"header": "Coverage", "value": exp_coverage, "table": "qc", "columnBar": True, "failThreshold": minimum_coverage } ], }], } # Get encoding if len(encoding) > 0: encoding = set(encoding) phred = set(phred) # Get encoding and phred as strings # e.g. enc: Sanger, Illumina-1.8 # e.g. phred: 64 enc = "{}".format(",".join([x for x in encoding])) phred = "{}".format(",".join(str(x) for x in phred)) logger.info("Encoding set to {}".format(enc)) logger.info("Phred set to {}".format(enc)) enc_fh.write(enc) phred_fh.write(phred) # Encoding not found else: if not skip_encoding: encoding_msg = "Could not guess encoding and phred from " \ "FastQ" logger.warning(encoding_msg) json_dic["warnings"] = [{ "sample": sample_id, "table": "qc", "value": [encoding_msg] }] enc_fh.write("None") phred_fh.write("None") # Estimate coverage logger.info("Estimating coverage based on a genome size of " "{}".format(gsize)) logger.info("Expected coverage is {}".format(exp_coverage)) if exp_coverage >= minimum_coverage: cov_rep.write("{},{},{}\\n".format( sample_id, str(exp_coverage), "PASS")) cov_fh.write(str(exp_coverage)) status_fh.write("pass") # Estimated coverage does not pass minimum threshold else: fail_msg = "Sample with low coverage ({}), below the {} " \ "threshold".format(exp_coverage, minimum_coverage) logger.error(fail_msg) fail_fh.write(fail_msg) cov_fh.write("fail") status_fh.write("fail") cov_rep.write("{},{},{}\\n".format( sample_id, str(exp_coverage), "FAIL")) json_dic["fail"] = [{ "sample": sample_id, "table": "qc", "value": [fail_msg] }] json_report.write(json.dumps(json_dic, separators=(",", ":"))) # Maximum read length len_fh.write("{}".format(max_read_length)) # This exception is raised when the input FastQ files are corrupted except EOFError: logger.error("The FastQ files could not be correctly " "parsed. They may be corrupt") for fh in [enc_fh, phred_fh, cov_fh, cov_rep, len_fh]: fh.write("corrupt") status_fh.write("fail") fail_fh.write("Could not read/parse FastQ. " "Possibly corrupt file")
[ "def", "main", "(", "sample_id", ",", "fastq_pair", ",", "gsize", ",", "minimum_coverage", ",", "opts", ")", ":", "logger", ".", "info", "(", "\"Starting integrity coverage main\"", ")", "# Check for runtime options", "if", "\"-e\"", "in", "opts", ":", "skip_encoding", "=", "True", "else", ":", "skip_encoding", "=", "False", "# Information for encoding guess", "gmin", ",", "gmax", "=", "99", ",", "0", "encoding", "=", "[", "]", "phred", "=", "None", "# Information for coverage estimation", "chars", "=", "0", "nreads", "=", "0", "# Information on maximum read length", "max_read_length", "=", "0", "# Get compression of each FastQ pair file", "file_objects", "=", "[", "]", "for", "fastq", "in", "fastq_pair", ":", "logger", ".", "info", "(", "\"Processing file {}\"", ".", "format", "(", "fastq", ")", ")", "logger", ".", "info", "(", "\"[{}] Guessing file compression\"", ".", "format", "(", "fastq", ")", ")", "ftype", "=", "guess_file_compression", "(", "fastq", ")", "# This can guess the compression of gz, bz2 and zip. If it cannot", "# find the compression type, it tries to open a regular file", "if", "ftype", ":", "logger", ".", "info", "(", "\"[{}] Found file compression: {}\"", ".", "format", "(", "fastq", ",", "ftype", ")", ")", "file_objects", ".", "append", "(", "COPEN", "[", "ftype", "]", "(", "fastq", ",", "\"rt\"", ")", ")", "else", ":", "logger", ".", "info", "(", "\"[{}] File compression not found. Assuming an \"", "\"uncompressed file\"", ".", "format", "(", "fastq", ")", ")", "file_objects", ".", "append", "(", "open", "(", "fastq", ")", ")", "logger", ".", "info", "(", "\"Starting FastQ file parsing\"", ")", "# The '*_encoding' file stores a string with the encoding ('Sanger')", "# If no encoding is guessed, 'None' should be stored", "# The '*_phred' file stores a string with the phred score ('33')", "# If no phred is guessed, 'None' should be stored", "# The '*_coverage' file stores the estimated coverage ('88')", "# The '*_report' file stores a csv report of the file", "# The '*_max_len' file stores a string with the maximum contig len ('155')", "with", "open", "(", "\"{}_encoding\"", ".", "format", "(", "sample_id", ")", ",", "\"w\"", ")", "as", "enc_fh", ",", "open", "(", "\"{}_phred\"", ".", "format", "(", "sample_id", ")", ",", "\"w\"", ")", "as", "phred_fh", ",", "open", "(", "\"{}_coverage\"", ".", "format", "(", "sample_id", ")", ",", "\"w\"", ")", "as", "cov_fh", ",", "open", "(", "\"{}_report\"", ".", "format", "(", "sample_id", ")", ",", "\"w\"", ")", "as", "cov_rep", ",", "open", "(", "\"{}_max_len\"", ".", "format", "(", "sample_id", ")", ",", "\"w\"", ")", "as", "len_fh", ",", "open", "(", "\".report.json\"", ",", "\"w\"", ")", "as", "json_report", ",", "open", "(", "\".status\"", ",", "\"w\"", ")", "as", "status_fh", ",", "open", "(", "\".fail\"", ",", "\"w\"", ")", "as", "fail_fh", ":", "try", ":", "# Iterate over both pair files sequentially using itertools.chain", "for", "i", ",", "line", "in", "enumerate", "(", "chain", "(", "*", "file_objects", ")", ")", ":", "# Parse only every 4th line of the file for the encoding", "# e.g.: AAAA/EEEEEEEEEEE<EEEEEEEEEEEEEEEEEEEEEEEEE (...)", "if", "(", "i", "+", "1", ")", "%", "4", "==", "0", "and", "not", "skip_encoding", ":", "# It is important to strip() the line so that any newline", "# character is removed and not accounted for in the", "# encoding guess", "lmin", ",", "lmax", "=", "get_qual_range", "(", "line", ".", "strip", "(", ")", ")", "# Guess new encoding if the range expands the previously", "# set 
boundaries of gmin and gmax", "if", "lmin", "<", "gmin", "or", "lmax", ">", "gmax", ":", "gmin", ",", "gmax", "=", "min", "(", "lmin", ",", "gmin", ")", ",", "max", "(", "lmax", ",", "gmax", ")", "encoding", ",", "phred", "=", "get_encodings_in_range", "(", "gmin", ",", "gmax", ")", "logger", ".", "debug", "(", "\"Updating estimates at line {} with range {} to\"", "\" '{}' (encoding) and '{}' (phred)\"", ".", "format", "(", "i", ",", "[", "lmin", ",", "lmax", "]", ",", "encoding", ",", "phred", ")", ")", "# Parse only every 2nd line of the file for the coverage", "# e.g.: GGATAATCTACCTTGACGATTTGTACTGGCGTTGGTTTCTTA (...)", "if", "(", "i", "+", "3", ")", "%", "4", "==", "0", ":", "read_len", "=", "len", "(", "line", ".", "strip", "(", ")", ")", "chars", "+=", "read_len", "nreads", "+=", "1", "# Evaluate maximum read length for sample", "if", "read_len", ">", "max_read_length", ":", "logger", ".", "debug", "(", "\"Updating maximum read length at line \"", "\"{} to {}\"", ".", "format", "(", "i", ",", "read_len", ")", ")", "max_read_length", "=", "read_len", "# End of FastQ parsing", "logger", ".", "info", "(", "\"Finished FastQ file parsing\"", ")", "# The minimum expected coverage for a sample to pass", "exp_coverage", "=", "round", "(", "chars", "/", "(", "gsize", "*", "1e6", ")", ",", "2", ")", "# Set json report", "if", "\"-e\"", "not", "in", "opts", ":", "json_dic", "=", "{", "\"tableRow\"", ":", "[", "{", "\"sample\"", ":", "sample_id", ",", "\"data\"", ":", "[", "{", "\"header\"", ":", "\"Raw BP\"", ",", "\"value\"", ":", "chars", ",", "\"table\"", ":", "\"qc\"", ",", "\"columnBar\"", ":", "True", "}", ",", "{", "\"header\"", ":", "\"Reads\"", ",", "\"value\"", ":", "nreads", ",", "\"table\"", ":", "\"qc\"", ",", "\"columnBar\"", ":", "True", "}", ",", "{", "\"header\"", ":", "\"Coverage\"", ",", "\"value\"", ":", "exp_coverage", ",", "\"table\"", ":", "\"qc\"", ",", "\"columnBar\"", ":", "True", ",", "\"failThreshold\"", ":", "minimum_coverage", "}", "]", "}", "]", ",", "\"plotData\"", ":", "[", "{", "\"sample\"", ":", "sample_id", ",", "\"data\"", ":", "{", "\"sparkline\"", ":", "chars", "}", "}", "]", ",", "}", "else", ":", "json_dic", "=", "{", "\"tableRow\"", ":", "[", "{", "\"sample\"", ":", "sample_id", ",", "\"data\"", ":", "[", "{", "\"header\"", ":", "\"Coverage\"", ",", "\"value\"", ":", "exp_coverage", ",", "\"table\"", ":", "\"qc\"", ",", "\"columnBar\"", ":", "True", ",", "\"failThreshold\"", ":", "minimum_coverage", "}", "]", ",", "}", "]", ",", "}", "# Get encoding", "if", "len", "(", "encoding", ")", ">", "0", ":", "encoding", "=", "set", "(", "encoding", ")", "phred", "=", "set", "(", "phred", ")", "# Get encoding and phred as strings", "# e.g. enc: Sanger, Illumina-1.8", "# e.g. 
phred: 64", "enc", "=", "\"{}\"", ".", "format", "(", "\",\"", ".", "join", "(", "[", "x", "for", "x", "in", "encoding", "]", ")", ")", "phred", "=", "\"{}\"", ".", "format", "(", "\",\"", ".", "join", "(", "str", "(", "x", ")", "for", "x", "in", "phred", ")", ")", "logger", ".", "info", "(", "\"Encoding set to {}\"", ".", "format", "(", "enc", ")", ")", "logger", ".", "info", "(", "\"Phred set to {}\"", ".", "format", "(", "enc", ")", ")", "enc_fh", ".", "write", "(", "enc", ")", "phred_fh", ".", "write", "(", "phred", ")", "# Encoding not found", "else", ":", "if", "not", "skip_encoding", ":", "encoding_msg", "=", "\"Could not guess encoding and phred from \"", "\"FastQ\"", "logger", ".", "warning", "(", "encoding_msg", ")", "json_dic", "[", "\"warnings\"", "]", "=", "[", "{", "\"sample\"", ":", "sample_id", ",", "\"table\"", ":", "\"qc\"", ",", "\"value\"", ":", "[", "encoding_msg", "]", "}", "]", "enc_fh", ".", "write", "(", "\"None\"", ")", "phred_fh", ".", "write", "(", "\"None\"", ")", "# Estimate coverage", "logger", ".", "info", "(", "\"Estimating coverage based on a genome size of \"", "\"{}\"", ".", "format", "(", "gsize", ")", ")", "logger", ".", "info", "(", "\"Expected coverage is {}\"", ".", "format", "(", "exp_coverage", ")", ")", "if", "exp_coverage", ">=", "minimum_coverage", ":", "cov_rep", ".", "write", "(", "\"{},{},{}\\\\n\"", ".", "format", "(", "sample_id", ",", "str", "(", "exp_coverage", ")", ",", "\"PASS\"", ")", ")", "cov_fh", ".", "write", "(", "str", "(", "exp_coverage", ")", ")", "status_fh", ".", "write", "(", "\"pass\"", ")", "# Estimated coverage does not pass minimum threshold", "else", ":", "fail_msg", "=", "\"Sample with low coverage ({}), below the {} \"", "\"threshold\"", ".", "format", "(", "exp_coverage", ",", "minimum_coverage", ")", "logger", ".", "error", "(", "fail_msg", ")", "fail_fh", ".", "write", "(", "fail_msg", ")", "cov_fh", ".", "write", "(", "\"fail\"", ")", "status_fh", ".", "write", "(", "\"fail\"", ")", "cov_rep", ".", "write", "(", "\"{},{},{}\\\\n\"", ".", "format", "(", "sample_id", ",", "str", "(", "exp_coverage", ")", ",", "\"FAIL\"", ")", ")", "json_dic", "[", "\"fail\"", "]", "=", "[", "{", "\"sample\"", ":", "sample_id", ",", "\"table\"", ":", "\"qc\"", ",", "\"value\"", ":", "[", "fail_msg", "]", "}", "]", "json_report", ".", "write", "(", "json", ".", "dumps", "(", "json_dic", ",", "separators", "=", "(", "\",\"", ",", "\":\"", ")", ")", ")", "# Maximum read length", "len_fh", ".", "write", "(", "\"{}\"", ".", "format", "(", "max_read_length", ")", ")", "# This exception is raised when the input FastQ files are corrupted", "except", "EOFError", ":", "logger", ".", "error", "(", "\"The FastQ files could not be correctly \"", "\"parsed. They may be corrupt\"", ")", "for", "fh", "in", "[", "enc_fh", ",", "phred_fh", ",", "cov_fh", ",", "cov_rep", ",", "len_fh", "]", ":", "fh", ".", "write", "(", "\"corrupt\"", ")", "status_fh", ".", "write", "(", "\"fail\"", ")", "fail_fh", ".", "write", "(", "\"Could not read/parse FastQ. \"", "\"Possibly corrupt file\"", ")" ]
39.868996
18.279476
def _replace_placeholder(sql_statement, variable): """ Return the string obtained by replacing the specified placeholders by their corresponding values. @param sql_statement: the string expression of a SQL statement to replace placeholders with their corresponding values. @param variable: the variable to use to replace the corresponding placeholder(s) in the SQL statement. * ``name``: name of the variable. * ``type``: an instance of ``PlaceholderType``. * ``value``: the value of this variable to replace the corresponding placeholder(s) of this variable in the SQL statement. @return: a string expression of the SQL statement where the placeholders of the specified variable have been replaced by the value of this variable, depending on the type of this variable. """ (variable_name, variable_type, variable_value) = variable sql_value = RdbmsConnection._expand_placeholder_value(variable_value) if variable_type == PlaceholderType.simple_list else ','.join([ '(%s)' % RdbmsConnection._expand_placeholder_value(v) for v in variable_value ]) return re.sub(PATTERN_SQL_PLACEHOLDER_EXPRESSIONS[variable_type] % variable_name, sql_value, sql_statement)
[ "def", "_replace_placeholder", "(", "sql_statement", ",", "variable", ")", ":", "(", "variable_name", ",", "variable_type", ",", "variable_value", ")", "=", "variable", "sql_value", "=", "RdbmsConnection", ".", "_expand_placeholder_value", "(", "variable_value", ")", "if", "variable_type", "==", "PlaceholderType", ".", "simple_list", "else", "','", ".", "join", "(", "[", "'(%s)'", "%", "RdbmsConnection", ".", "_expand_placeholder_value", "(", "v", ")", "for", "v", "in", "variable_value", "]", ")", "return", "re", ".", "sub", "(", "PATTERN_SQL_PLACEHOLDER_EXPRESSIONS", "[", "variable_type", "]", "%", "variable_name", ",", "sql_value", ",", "sql_statement", ")" ]
44.1
33.233333
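The record above leans on two module-level objects that are not shown (`PATTERN_SQL_PLACEHOLDER_EXPRESSIONS` and `RdbmsConnection._expand_placeholder_value`). A minimal self-contained sketch of the same substitution idea, where both the `%(name)s` placeholder syntax and the naive value quoting are assumptions:

import re

# Hypothetical stand-ins for the module-level objects used in the record:
# placeholders look like "%(name)s" and values are naively quoted.
PATTERN = r'%%\(%s\)s'

def expand_value(value):
    # Quote strings, pass everything else through str() -- deliberately naive.
    return "'%s'" % value if isinstance(value, str) else str(value)

def replace_simple_list(sql, name, values):
    sql_value = ','.join(expand_value(v) for v in values)
    return re.sub(PATTERN % name, sql_value, sql)

print(replace_simple_list(
    "SELECT * FROM account WHERE account_id IN (%(account_ids)s)",
    'account_ids', [1, 2, 3]))
# SELECT * FROM account WHERE account_id IN (1,2,3)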
def _list_availability_zones(vm_=None): ''' List all availability zones in the current region ''' ret = {} params = {'Action': 'DescribeAvailabilityZones', 'Filter.0.Name': 'region-name', 'Filter.0.Value.0': get_location(vm_)} result = aws.query(params, location=get_location(vm_), provider=get_provider(), opts=__opts__, sigver='4') for zone in result: ret[zone['zoneName']] = zone['zoneState'] return ret
[ "def", "_list_availability_zones", "(", "vm_", "=", "None", ")", ":", "ret", "=", "{", "}", "params", "=", "{", "'Action'", ":", "'DescribeAvailabilityZones'", ",", "'Filter.0.Name'", ":", "'region-name'", ",", "'Filter.0.Value.0'", ":", "get_location", "(", "vm_", ")", "}", "result", "=", "aws", ".", "query", "(", "params", ",", "location", "=", "get_location", "(", "vm_", ")", ",", "provider", "=", "get_provider", "(", ")", ",", "opts", "=", "__opts__", ",", "sigver", "=", "'4'", ")", "for", "zone", "in", "result", ":", "ret", "[", "zone", "[", "'zoneName'", "]", "]", "=", "zone", "[", "'zoneState'", "]", "return", "ret" ]
29
18.157895
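For comparison, the same availability-zone query through boto3 rather than salt's `aws.query` helper -- a sketch that assumes configured AWS credentials; 'us-east-1' is an example region:

import boto3

# Equivalent DescribeAvailabilityZones call with a region-name filter.
ec2 = boto3.client('ec2', region_name='us-east-1')
response = ec2.describe_availability_zones(
    Filters=[{'Name': 'region-name', 'Values': ['us-east-1']}])
print({z['ZoneName']: z['State'] for z in response['AvailabilityZones']})
# e.g. {'us-east-1a': 'available', ...}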
def find_message_handler(self, handler_name, handler_type='primary'): """Returns the MessageHandler given its name and type for this class.""" ret = lib.EnvFindDefmessageHandler( self._env, self._cls, handler_name.encode(), handler_type.encode()) if ret == 0: raise CLIPSError(self._env) return MessageHandler(self._env, self._cls, ret)
[ "def", "find_message_handler", "(", "self", ",", "handler_name", ",", "handler_type", "=", "'primary'", ")", ":", "ret", "=", "lib", ".", "EnvFindDefmessageHandler", "(", "self", ".", "_env", ",", "self", ".", "_cls", ",", "handler_name", ".", "encode", "(", ")", ",", "handler_type", ".", "encode", "(", ")", ")", "if", "ret", "==", "0", ":", "raise", "CLIPSError", "(", "self", ".", "_env", ")", "return", "MessageHandler", "(", "self", ".", "_env", ",", "self", ".", "_cls", ",", "ret", ")" ]
48.25
18.5
async def connect(self, host, port=DEFAULT_PORT): """ :py:func:`asyncio.coroutine` Connect to server. :param host: host name for connection :type host: :py:class:`str` :param port: port number for connection :type port: :py:class:`int` """ await super().connect(host, port) code, info = await self.command(None, "220", "120") return info
[ "async", "def", "connect", "(", "self", ",", "host", ",", "port", "=", "DEFAULT_PORT", ")", ":", "await", "super", "(", ")", ".", "connect", "(", "host", ",", "port", ")", "code", ",", "info", "=", "await", "self", ".", "command", "(", "None", ",", "\"220\"", ",", "\"120\"", ")", "return", "info" ]
27.6
14
def parseLinkAttrs(html): """Find all link tags in a string representing an HTML document and return a list of their attributes. @param html: the text to parse @type html: str or unicode @return: A list of dictionaries of attributes, one for each link tag @rtype: [[(type(html), type(html))]] """ stripped = removed_re.sub('', html) html_mo = html_find.search(stripped) if html_mo is None or html_mo.start('contents') == -1: return [] start, end = html_mo.span('contents') head_mo = head_find.search(stripped, start, end) if head_mo is None or head_mo.start('contents') == -1: return [] start, end = head_mo.span('contents') link_mos = link_find.finditer(stripped, head_mo.start(), head_mo.end()) matches = [] for link_mo in link_mos: start = link_mo.start() + 5 link_attrs = {} for attr_mo in attr_find.finditer(stripped, start): if attr_mo.lastgroup == 'end_link': break # Either q_val or unq_val must be present, but not both # unq_val is a True (non-empty) value if it is present attr_name, q_val, unq_val = attr_mo.group( 'attr_name', 'q_val', 'unq_val') attr_val = ent_replace.sub(replaceEnt, unq_val or q_val) link_attrs[attr_name] = attr_val matches.append(link_attrs) return matches
[ "def", "parseLinkAttrs", "(", "html", ")", ":", "stripped", "=", "removed_re", ".", "sub", "(", "''", ",", "html", ")", "html_mo", "=", "html_find", ".", "search", "(", "stripped", ")", "if", "html_mo", "is", "None", "or", "html_mo", ".", "start", "(", "'contents'", ")", "==", "-", "1", ":", "return", "[", "]", "start", ",", "end", "=", "html_mo", ".", "span", "(", "'contents'", ")", "head_mo", "=", "head_find", ".", "search", "(", "stripped", ",", "start", ",", "end", ")", "if", "head_mo", "is", "None", "or", "head_mo", ".", "start", "(", "'contents'", ")", "==", "-", "1", ":", "return", "[", "]", "start", ",", "end", "=", "head_mo", ".", "span", "(", "'contents'", ")", "link_mos", "=", "link_find", ".", "finditer", "(", "stripped", ",", "head_mo", ".", "start", "(", ")", ",", "head_mo", ".", "end", "(", ")", ")", "matches", "=", "[", "]", "for", "link_mo", "in", "link_mos", ":", "start", "=", "link_mo", ".", "start", "(", ")", "+", "5", "link_attrs", "=", "{", "}", "for", "attr_mo", "in", "attr_find", ".", "finditer", "(", "stripped", ",", "start", ")", ":", "if", "attr_mo", ".", "lastgroup", "==", "'end_link'", ":", "break", "# Either q_val or unq_val must be present, but not both", "# unq_val is a True (non-empty) value if it is present", "attr_name", ",", "q_val", ",", "unq_val", "=", "attr_mo", ".", "group", "(", "'attr_name'", ",", "'q_val'", ",", "'unq_val'", ")", "attr_val", "=", "ent_replace", ".", "sub", "(", "replaceEnt", ",", "unq_val", "or", "q_val", ")", "link_attrs", "[", "attr_name", "]", "=", "attr_val", "matches", ".", "append", "(", "link_attrs", ")", "return", "matches" ]
32.904762
18.928571
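The regex machinery above (`removed_re`, `html_find`, `head_find`, `link_find`, `attr_find`) lives at module level and is not shown. A stdlib sketch of the same contract -- attribute dicts for every <link> inside <head> -- using html.parser instead of regexes:

from html.parser import HTMLParser

# Collect attribute dicts for each <link> tag found inside <head>.
class LinkCollector(HTMLParser):
    def __init__(self):
        super().__init__()
        self.in_head = False
        self.links = []

    def handle_starttag(self, tag, attrs):
        if tag == 'head':
            self.in_head = True
        elif tag == 'link' and self.in_head:
            self.links.append(dict(attrs))

    def handle_endtag(self, tag):
        if tag == 'head':
            self.in_head = False

p = LinkCollector()
p.feed('<html><head><link rel="openid.server" href="https://example.com/">'
       '</head><body></body></html>')
print(p.links)  # [{'rel': 'openid.server', 'href': 'https://example.com/'}]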
def render(self, context, instance, placeholder): """ Update the context with plugin's data """ entries = Entry.published.search(instance.query) if instance.number_of_entries: entries = entries[:instance.number_of_entries] context = super(CMSQueryEntriesPlugin, self).render( context, instance, placeholder) context['entries'] = entries return context
[ "def", "render", "(", "self", ",", "context", ",", "instance", ",", "placeholder", ")", ":", "entries", "=", "Entry", ".", "published", ".", "search", "(", "instance", ".", "query", ")", "if", "instance", ".", "number_of_entries", ":", "entries", "=", "entries", "[", ":", "instance", ".", "number_of_entries", "]", "context", "=", "super", "(", "CMSQueryEntriesPlugin", ",", "self", ")", ".", "render", "(", "context", ",", "instance", ",", "placeholder", ")", "context", "[", "'entries'", "]", "=", "entries", "return", "context" ]
35.75
11.25
def _read_holidays(self, filename): """ Read holidays from an iCalendar-format file. """ cal = Calendar.from_ical(open(filename, 'rb').read()) holidays = [] for component in cal.walk('VEVENT'): start = component.decoded('DTSTART') try: end = component.decoded('DTEND') except KeyError: # RFC allows DTEND to be missing if isinstance(start, datetime): # For DATETIME instances, the event ends immediately. end = start elif isinstance(start, date): # For DATE instances, the event ends tomorrow end = start + timedelta(days=1) else: raise KeyError('DTEND is missing and DTSTART is not of DATE or DATETIME type') if isinstance(start, date) and not isinstance(start, datetime): assert (isinstance(end, date) and not isinstance(end, datetime)), \ 'DTSTART is of DATE type but DTEND is not of DATE type (got %r instead)' % type(end) # All-day event, set times to midnight local time start = datetime.combine(start, time.min) end = datetime.combine(end, time.min) # check for TZ data if start.tzinfo is None or end.tzinfo is None: # One of them is missing tzinfo, replace both with this office's # local time. Assume standard time if ambiguous. start = self.tz.localize(start, is_dst=False) end = self.tz.localize(end, is_dst=False) yield (start, end)
[ "def", "_read_holidays", "(", "self", ",", "filename", ")", ":", "cal", "=", "Calendar", ".", "from_ical", "(", "open", "(", "filename", ",", "'rb'", ")", ".", "read", "(", ")", ")", "holidays", "=", "[", "]", "for", "component", "in", "cal", ".", "walk", "(", "'VEVENT'", ")", ":", "start", "=", "component", ".", "decoded", "(", "'DTSTART'", ")", "try", ":", "end", "=", "component", ".", "decoded", "(", "'DTEND'", ")", "except", "KeyError", ":", "# RFC allows DTEND to be missing", "if", "isinstance", "(", "start", ",", "datetime", ")", ":", "# For DATETIME instances, the event ends immediately.", "end", "=", "start", "elif", "isinstance", "(", "start", ",", "date", ")", ":", "# For DATE instances, the event ends tomorrow", "end", "=", "start", "+", "timedelta", "(", "days", "=", "1", ")", "else", ":", "raise", "KeyError", ",", "'DTEND is missing and DTSTART is not of DATE or DATETIME type'", "if", "isinstance", "(", "start", ",", "date", ")", "and", "not", "isinstance", "(", "start", ",", "datetime", ")", ":", "assert", "(", "isinstance", "(", "end", ",", "date", ")", "and", "not", "isinstance", "(", "end", ",", "datetime", ")", ")", ",", "'DTSTART is of DATE type but DTEND is not of DATE type (got %r instead)'", "%", "type", "(", "end", ")", "# All-day event, set times to midnight local time", "start", "=", "datetime", ".", "combine", "(", "start", ",", "time", ".", "min", ")", "end", "=", "datetime", ".", "combine", "(", "end", ",", "time", ".", "min", ")", "# check for TZ data", "if", "start", ".", "tzinfo", "is", "None", "or", "end", ".", "tzinfo", "is", "None", ":", "# One of them is missing tzinfo, replace both with this office's", "# local time. Assume standard time if ambiguous.", "start", "=", "self", ".", "tz", ".", "localize", "(", "start", ",", "is_dst", "=", "False", ")", "end", "=", "self", ".", "tz", ".", "localize", "(", "end", ",", "is_dst", "=", "False", ")", "yield", "(", "start", ",", "end", ")" ]
35.891892
17.405405
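The DTEND-defaulting rule in the middle of that function is the subtle part. Isolated with plain stdlib types (per RFC 5545 semantics: a missing DTEND means "instantaneous" for DATETIME starts and "one full day" for DATE starts):

from datetime import date, datetime, timedelta

# datetime is checked first because datetime subclasses date.
def default_end(start):
    if isinstance(start, datetime):
        return start
    if isinstance(start, date):
        return start + timedelta(days=1)
    raise KeyError('DTEND is missing and DTSTART is not of DATE or DATETIME type')

print(default_end(date(2024, 1, 1)))            # 2024-01-02
print(default_end(datetime(2024, 1, 1, 9, 0)))  # 2024-01-01 09:00:00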
def ULT(self, o): """ Unsigned less than. :param o: The other operand :return: TrueResult(), FalseResult(), or MaybeResult() """ unsigned_bounds_1 = self._unsigned_bounds() unsigned_bounds_2 = o._unsigned_bounds() ret = [] for lb_1, ub_1 in unsigned_bounds_1: for lb_2, ub_2 in unsigned_bounds_2: if ub_1 < lb_2: ret.append(TrueResult()) elif lb_1 >= ub_2: ret.append(FalseResult()) else: ret.append(MaybeResult()) if all(r.identical(TrueResult()) for r in ret): return TrueResult() elif all(r.identical(FalseResult()) for r in ret): return FalseResult() else: return MaybeResult()
[ "def", "ULT", "(", "self", ",", "o", ")", ":", "unsigned_bounds_1", "=", "self", ".", "_unsigned_bounds", "(", ")", "unsigned_bounds_2", "=", "o", ".", "_unsigned_bounds", "(", ")", "ret", "=", "[", "]", "for", "lb_1", ",", "ub_1", "in", "unsigned_bounds_1", ":", "for", "lb_2", ",", "ub_2", "in", "unsigned_bounds_2", ":", "if", "ub_1", "<", "lb_2", ":", "ret", ".", "append", "(", "TrueResult", "(", ")", ")", "elif", "lb_1", ">=", "ub_2", ":", "ret", ".", "append", "(", "FalseResult", "(", ")", ")", "else", ":", "ret", ".", "append", "(", "MaybeResult", "(", ")", ")", "if", "all", "(", "r", ".", "identical", "(", "TrueResult", "(", ")", ")", "for", "r", "in", "ret", ")", ":", "return", "TrueResult", "(", ")", "elif", "all", "(", "r", ".", "identical", "(", "FalseResult", "(", ")", ")", "for", "r", "in", "ret", ")", ":", "return", "FalseResult", "(", ")", "else", ":", "return", "MaybeResult", "(", ")" ]
30.037037
15.222222
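The per-interval comparison above reduces to three cases. A sketch with plain strings standing in for TrueResult/FalseResult/MaybeResult:

# Compare two unsigned intervals [lb1, ub1] and [lb2, ub2].
def ult_interval(lb1, ub1, lb2, ub2):
    if ub1 < lb2:
        return 'true'   # every value in [lb1, ub1] is below [lb2, ub2]
    if lb1 >= ub2:
        return 'false'  # every value in [lb1, ub1] is at or above [lb2, ub2]
    return 'maybe'      # the intervals overlap

print(ult_interval(0, 3, 5, 9))  # true
print(ult_interval(5, 9, 0, 3))  # false
print(ult_interval(0, 6, 5, 9))  # maybe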
def get_pull_requests(self): "https://developer.github.com/v3/pulls/#list-pull-requests" g = self.github query = {'state': 'all'} if self.args.github_token: query['access_token'] = g['token'] def f(pull): if self.args.ignore_closed: return (pull['state'] == 'open' or (pull['state'] == 'closed' and pull['merged_at'])) else: return True pulls = filter(f, self.get(g['url'] + "/repos/" + g['repo'] + "/pulls", query, self.args.cache)) return dict([(str(pull['number']), pull) for pull in pulls])
[ "def", "get_pull_requests", "(", "self", ")", ":", "g", "=", "self", ".", "github", "query", "=", "{", "'state'", ":", "'all'", "}", "if", "self", ".", "args", ".", "github_token", ":", "query", "[", "'access_token'", "]", "=", "g", "[", "'token'", "]", "def", "f", "(", "pull", ")", ":", "if", "self", ".", "args", ".", "ignore_closed", ":", "return", "(", "pull", "[", "'state'", "]", "==", "'opened'", "or", "(", "pull", "[", "'state'", "]", "==", "'closed'", "and", "pull", "[", "'merged_at'", "]", ")", ")", "else", ":", "return", "True", "pulls", "=", "filter", "(", "f", ",", "self", ".", "get", "(", "g", "[", "'url'", "]", "+", "\"/repos/\"", "+", "g", "[", "'repo'", "]", "+", "\"/pulls\"", ",", "query", ",", "self", ".", "args", ".", "cache", ")", ")", "return", "dict", "(", "[", "(", "str", "(", "pull", "[", "'number'", "]", ")", ",", "pull", ")", "for", "pull", "in", "pulls", "]", ")" ]
40.235294
18.470588
def detect_view_name(self, environ: Dict[str, Any]) -> str: """ get view name from routing args """ urlvars = environ.get('wsgiorg.routing_args', [(), {}])[1] return urlvars.get(self.action_var_name)
[ "def", "detect_view_name", "(", "self", ",", "environ", ":", "Dict", "[", "str", ",", "Any", "]", ")", "->", "str", ":", "urlvars", "=", "environ", ".", "get", "(", "'wsgiorg.routing_args'", ",", "[", "(", ")", ",", "{", "}", "]", ")", "[", "1", "]", "return", "urlvars", ".", "get", "(", "self", ".", "action_var_name", ")" ]
55
13.25
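`wsgiorg.routing_args` is a WSGI convention: the router leaves an (args, kwargs) pair in the environ. A minimal illustration, where 'action' is an assumed value for `self.action_var_name`:

# Hypothetical environ as a router might leave it.
environ = {'wsgiorg.routing_args': ((), {'action': 'list_users'})}
urlvars = environ.get('wsgiorg.routing_args', [(), {}])[1]
print(urlvars.get('action'))  # list_users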
async def gather_candidates(self): """ Gather local candidates. You **must** call this coroutine before calling :meth:`connect`. """ if not self._local_candidates_start: self._local_candidates_start = True addresses = get_host_addresses(use_ipv4=self._use_ipv4, use_ipv6=self._use_ipv6) for component in self._components: self._local_candidates += await self.get_component_candidates( component=component, addresses=addresses) self._local_candidates_end = True
[ "async", "def", "gather_candidates", "(", "self", ")", ":", "if", "not", "self", ".", "_local_candidates_start", ":", "self", ".", "_local_candidates_start", "=", "True", "addresses", "=", "get_host_addresses", "(", "use_ipv4", "=", "self", ".", "_use_ipv4", ",", "use_ipv6", "=", "self", ".", "_use_ipv6", ")", "for", "component", "in", "self", ".", "_components", ":", "self", ".", "_local_candidates", "+=", "await", "self", ".", "get_component_candidates", "(", "component", "=", "component", ",", "addresses", "=", "addresses", ")", "self", ".", "_local_candidates_end", "=", "True" ]
42.285714
14.142857
def footrule_dist(params1, params2=None): r"""Compute Spearman's footrule distance between two models. This function computes Spearman's footrule distance between the rankings induced by two parameter vectors. Let :math:`\sigma_i` be the rank of item ``i`` in the model described by ``params1``, and :math:`\tau_i` be its rank in the model described by ``params2``. Spearman's footrule distance is defined by .. math:: \sum_{i=1}^N | \sigma_i - \tau_i | By convention, items with the lowest parameters are ranked first (i.e., sorted using the natural order). If the argument ``params2`` is ``None``, the second model is assumed to rank the items by their index: item ``0`` has rank 1, item ``1`` has rank 2, etc. Parameters ---------- params1 : array_like Parameters of the first model. params2 : array_like, optional Parameters of the second model. Returns ------- dist : float Spearman's footrule distance. """ assert params2 is None or len(params1) == len(params2) ranks1 = rankdata(params1, method="average") if params2 is None: ranks2 = np.arange(1, len(params1) + 1, dtype=float) else: ranks2 = rankdata(params2, method="average") return np.sum(np.abs(ranks1 - ranks2))
[ "def", "footrule_dist", "(", "params1", ",", "params2", "=", "None", ")", ":", "assert", "params2", "is", "None", "or", "len", "(", "params1", ")", "==", "len", "(", "params2", ")", "ranks1", "=", "rankdata", "(", "params1", ",", "method", "=", "\"average\"", ")", "if", "params2", "is", "None", ":", "ranks2", "=", "np", ".", "arange", "(", "1", ",", "len", "(", "params1", ")", "+", "1", ",", "dtype", "=", "float", ")", "else", ":", "ranks2", "=", "rankdata", "(", "params2", ",", "method", "=", "\"average\"", ")", "return", "np", ".", "sum", "(", "np", ".", "abs", "(", "ranks1", "-", "ranks2", ")", ")" ]
33.076923
22.846154
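A worked example of the distance above for two 3-item models:

import numpy as np
from scipy.stats import rankdata

params1 = np.array([0.5, 0.1, 0.9])     # induced ranks: [2, 1, 3]
params2 = np.array([0.3, 0.4, 0.2])     # induced ranks: [2, 3, 1]
ranks1 = rankdata(params1, method="average")
ranks2 = rankdata(params2, method="average")
print(np.sum(np.abs(ranks1 - ranks2)))  # |2-2| + |1-3| + |3-1| = 4.0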
def eigh(a, eigvec=True, rcond=None): """ Eigenvalues and eigenvectors of symmetric matrix ``a``. Args: a: Two-dimensional, square Hermitian matrix/array of numbers and/or :class:`gvar.GVar`\s. Array elements must be real-valued if `gvar.GVar`\s are involved (i.e., symmetric matrix). eigvec (bool): If ``True`` (default), method returns a tuple of arrays ``(val, vec)`` where ``val[i]`` are the eigenvalues of ``a`` (in ascending order), and ``vec[:, i]`` are the corresponding eigenvectors of ``a``. Only ``val`` is returned if ``eigvec=False``. rcond (float): Eigenvalues whose difference is smaller than ``rcond`` times their sum are assumed to be degenerate (and ignored) when computing variances for the eigenvectors. Default (``rcond=None``) is ``max(M,N)`` times machine precision. Returns: Tuple ``(val,vec)`` of eigenvalues and eigenvectors of matrix ``a`` if parameter ``eigvec==True`` (default). The eigenvalues ``val[i]`` are in ascending order and ``vec[:, i]`` are the corresponding eigenvectors. Only the eigenvalues ``val`` are returned if ``eigvec=False``. Raises: ValueError: If the matrix is not a two-dimensional square matrix. """ a = numpy.asarray(a) if a.dtype != object: val, vec = numpy.linalg.eigh(a) return (val, vec) if eigvec else val amean = gvar.mean(a) if amean.ndim != 2 or amean.shape[0] != amean.shape[1]: raise ValueError('bad matrix shape: ' + str(a.shape)) if rcond is None: rcond = numpy.finfo(float).eps * max(a.shape) da = a - amean val0, vec0 = numpy.linalg.eigh(amean) val = val0 + [ vec0[:, i].conjugate().dot(da.dot(vec0[:, i])) for i in range(vec0.shape[1]) ] if eigvec == True: if vec0.dtype == complex: raise ValueError('cannot evaluate eigenvectors when a is complex') vec = numpy.array(vec0, dtype=object) for i in range(len(val)): for j in range(len(val)): dval = val0[i] - val0[j] if abs(dval) < rcond * abs(val0[j] + val0[i]) or dval == 0.0: continue vec[:, i] += vec0[:, j] * ( vec0[:, j].dot(da.dot(vec0[:, i])) / dval ) return val, vec else: return val
[ "def", "eigh", "(", "a", ",", "eigvec", "=", "True", ",", "rcond", "=", "None", ")", ":", "a", "=", "numpy", ".", "asarray", "(", "a", ")", "if", "a", ".", "dtype", "!=", "object", ":", "val", ",", "vec", "=", "numpy", ".", "linalg", ".", "eigh", "(", "a", ")", "return", "(", "val", ",", "vec", ")", "if", "eigvec", "else", "val", "amean", "=", "gvar", ".", "mean", "(", "a", ")", "if", "amean", ".", "ndim", "!=", "2", "or", "amean", ".", "shape", "[", "0", "]", "!=", "amean", ".", "shape", "[", "1", "]", ":", "raise", "ValueError", "(", "'bad matrix shape: '", "+", "str", "(", "a", ".", "shape", ")", ")", "if", "rcond", "is", "None", ":", "rcond", "=", "numpy", ".", "finfo", "(", "float", ")", ".", "eps", "*", "max", "(", "a", ".", "shape", ")", "da", "=", "a", "-", "amean", "val0", ",", "vec0", "=", "numpy", ".", "linalg", ".", "eigh", "(", "amean", ")", "val", "=", "val0", "+", "[", "vec0", "[", ":", ",", "i", "]", ".", "conjugate", "(", ")", ".", "dot", "(", "da", ".", "dot", "(", "vec0", "[", ":", ",", "i", "]", ")", ")", "for", "i", "in", "range", "(", "vec0", ".", "shape", "[", "1", "]", ")", "]", "if", "eigvec", "==", "True", ":", "if", "vec0", ".", "dtype", "==", "complex", ":", "raise", "ValueError", "(", "'cannot evaluate eigenvectors when a is complex'", ")", "vec", "=", "numpy", ".", "array", "(", "vec0", ",", "dtype", "=", "object", ")", "for", "i", "in", "range", "(", "len", "(", "val", ")", ")", ":", "for", "j", "in", "range", "(", "len", "(", "val", ")", ")", ":", "dval", "=", "val0", "[", "i", "]", "-", "val0", "[", "j", "]", "if", "abs", "(", "dval", ")", "<", "rcond", "*", "abs", "(", "val0", "[", "j", "]", "+", "val0", "[", "i", "]", ")", "or", "dval", "==", "0.0", ":", "continue", "vec", "[", ":", ",", "i", "]", "+=", "vec0", "[", ":", ",", "j", "]", "*", "(", "vec0", "[", ":", ",", "j", "]", ".", "dot", "(", "da", ".", "dot", "(", "vec0", "[", ":", ",", "i", "]", ")", ")", "/", "dval", ")", "return", "val", ",", "vec", "else", ":", "return", "val" ]
42.403509
20.438596
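A plain-float sanity check of the contract described above; the gvar-specific error propagation needs the gvar package and is omitted:

import numpy as np

a = np.array([[2.0, 1.0], [1.0, 2.0]])
val, vec = np.linalg.eigh(a)
print(val)  # [1. 3.] -- ascending order
print(np.allclose(a @ vec[:, 0], val[0] * vec[:, 0]))  # True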
def unary_operator(op): """ Factory function for making unary operator methods for Factors. """ # Only negate is currently supported. valid_ops = {'-'} if op not in valid_ops: raise ValueError("Invalid unary operator %s." % op) @with_doc("Unary Operator: '%s'" % op) @with_name(unary_op_name(op)) def unary_operator(self): if self.dtype != float64_dtype: raise TypeError( "Can't apply unary operator {op!r} to instance of " "{typename!r} with dtype {dtypename!r}.\n" "{op!r} is only supported for Factors of dtype " "'float64'.".format( op=op, typename=type(self).__name__, dtypename=self.dtype.name, ) ) # This can't be hoisted up a scope because the types returned by # unary_op_return_type aren't defined when the top-level function is # invoked. if isinstance(self, NumericalExpression): return NumExprFactor( "{op}({expr})".format(op=op, expr=self._expr), self.inputs, dtype=float64_dtype, ) else: return NumExprFactor( "{op}x_0".format(op=op), (self,), dtype=float64_dtype, ) return unary_operator
[ "def", "unary_operator", "(", "op", ")", ":", "# Only negate is currently supported.", "valid_ops", "=", "{", "'-'", "}", "if", "op", "not", "in", "valid_ops", ":", "raise", "ValueError", "(", "\"Invalid unary operator %s.\"", "%", "op", ")", "@", "with_doc", "(", "\"Unary Operator: '%s'\"", "%", "op", ")", "@", "with_name", "(", "unary_op_name", "(", "op", ")", ")", "def", "unary_operator", "(", "self", ")", ":", "if", "self", ".", "dtype", "!=", "float64_dtype", ":", "raise", "TypeError", "(", "\"Can't apply unary operator {op!r} to instance of \"", "\"{typename!r} with dtype {dtypename!r}.\\n\"", "\"{op!r} is only supported for Factors of dtype \"", "\"'float64'.\"", ".", "format", "(", "op", "=", "op", ",", "typename", "=", "type", "(", "self", ")", ".", "__name__", ",", "dtypename", "=", "self", ".", "dtype", ".", "name", ",", ")", ")", "# This can't be hoisted up a scope because the types returned by", "# unary_op_return_type aren't defined when the top-level function is", "# invoked.", "if", "isinstance", "(", "self", ",", "NumericalExpression", ")", ":", "return", "NumExprFactor", "(", "\"{op}({expr})\"", ".", "format", "(", "op", "=", "op", ",", "expr", "=", "self", ".", "_expr", ")", ",", "self", ".", "inputs", ",", "dtype", "=", "float64_dtype", ",", ")", "else", ":", "return", "NumExprFactor", "(", "\"{op}x_0\"", ".", "format", "(", "op", "=", "op", ")", ",", "(", "self", ",", ")", ",", "dtype", "=", "float64_dtype", ",", ")", "return", "unary_operator" ]
34.25
15.7
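The same factory pattern, stripped down: build a dunder method from an operator symbol. The zipline-specific types (NumericalExpression, NumExprFactor, the dtype checks) are replaced here by a plain wrapper class:

def make_unary(op):
    # Only negate is supported, mirroring the record above.
    if op != '-':
        raise ValueError("Invalid unary operator %s." % op)

    def method(self):
        return type(self)(-self.value)

    method.__name__ = '__neg__'
    return method

class Box:
    def __init__(self, value):
        self.value = value

    __neg__ = make_unary('-')

print((-Box(3.0)).value)  # -3.0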
def _is_local_filter(filter_block): """Return True if the Filter block references no non-local fields, and False otherwise.""" # We need the "result" value of this function to be mutated within the "visitor_fn". # Since we support both Python 2 and Python 3, we can't use the "nonlocal" keyword here: # https://www.python.org/dev/peps/pep-3104/ # Instead, we use a dict to store the value we need mutated, since the "visitor_fn" # can mutate state in the parent scope, but not rebind variables in it without "nonlocal". # TODO(predrag): Revisit this if we drop support for Python 2. result = { 'is_local_filter': True } filter_predicate = filter_block.predicate def visitor_fn(expression): """Expression visitor function that looks for uses of non-local fields.""" non_local_expression_types = (ContextField, ContextFieldExistence) if isinstance(expression, non_local_expression_types): result['is_local_filter'] = False # Don't change the expression. return expression filter_predicate.visit_and_update(visitor_fn) return result['is_local_filter']
[ "def", "_is_local_filter", "(", "filter_block", ")", ":", "# We need the \"result\" value of this function to be mutated within the \"visitor_fn\".", "# Since we support both Python 2 and Python 3, we can't use the \"nonlocal\" keyword here:", "# https://www.python.org/dev/peps/pep-3104/", "# Instead, we use a dict to store the value we need mutated, since the \"visitor_fn\"", "# can mutate state in the parent scope, but not rebind variables in it without \"nonlocal\".", "# TODO(predrag): Revisit this if we drop support for Python 2.", "result", "=", "{", "'is_local_filter'", ":", "True", "}", "filter_predicate", "=", "filter_block", ".", "predicate", "def", "visitor_fn", "(", "expression", ")", ":", "\"\"\"Expression visitor function that looks for uses of non-local fields.\"\"\"", "non_local_expression_types", "=", "(", "ContextField", ",", "ContextFieldExistence", ")", "if", "isinstance", "(", "expression", ",", "non_local_expression_types", ")", ":", "result", "[", "'is_local_filter'", "]", "=", "False", "# Don't change the expression.", "return", "expression", "filter_predicate", ".", "visit_and_update", "(", "visitor_fn", ")", "return", "result", "[", "'is_local_filter'", "]" ]
43.846154
23.615385
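The mutable-dict trick from that function in isolation: a py2/py3-compatible substitute for "nonlocal" when a nested function must set a flag in its enclosing scope:

def any_negative(values):
    result = {'found': False}

    def visit(v):
        if v < 0:
            result['found'] = True  # mutate the dict, don't rebind a name
        return v

    for v in values:
        visit(v)
    return result['found']

print(any_negative([1, 2, -3]))  # True
print(any_negative([1, 2, 3]))   # False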
def cloud_init_interface(name, vm_=None, **kwargs): ''' Interface between salt.cloud.lxc driver and lxc.init ``vm_`` is a mapping of vm opts in the salt.cloud format as documented for the lxc driver. This can be used either: - from the salt cloud driver - because you find it easier to give the arguments here than to use lxc.init directly .. warning:: BE REALLY CAREFUL CHANGING DEFAULTS !!! IT'S A RETRO COMPATIBLE INTERFACE WITH THE SALT CLOUD DRIVER (ask kiorky). name name of the lxc container to create pub_key public key to preseed the minion with. Can be the keycontent or a filepath priv_key private key to preseed the minion with. Can be the keycontent or a filepath path path to the container parent directory (default: /var/lib/lxc) .. versionadded:: 2015.8.0 profile :ref:`profile <tutorial-lxc-profiles-container>` selection network_profile :ref:`network profile <tutorial-lxc-profiles-network>` selection nic_opts per-interface settings compatible with the network profile (ipv4/ipv6/link/gateway/mac/netmask) eg:: - {'eth0': {'mac': '00:16:3e:01:29:40', 'gateway': None, (default) 'link': 'br0', (default) 'netmask': '', (default) 'ip': '22.1.4.25'}} unconditional_install given to lxc.bootstrap (see relative doc) force_install given to lxc.bootstrap (see relative doc) config any extra argument for the salt minion config dnsservers list of DNS servers to set inside the container dns_via_dhcp do not set the DNS servers; let them be set by DHCP. autostart autostart the container at boot time password administrative password for the container bootstrap_delay delay before launching bootstrap script at container init .. warning:: Legacy but still supported options: from_container which container we use as a template when running lxc.clone image which template do we use when we are using lxc.create. This is the default mode unless you specify something in from_container backing which backing store to use. Values can be: overlayfs, dir (default), lvm, zfs, btrfs fstype When using a blockdevice-level backing store, which filesystem to use size When using a blockdevice-level backing store, what size to use for the filesystem snapshot Use snapshot when cloning the container source vgname if using LVM: vgname lvname if using LVM: lvname thinpool if using LVM: thinpool ip ip for the primary nic mac mac address for the primary nic netmask netmask for the primary nic (24) = ``vm_.get('netmask', '24')`` bridge bridge for the primary nic (lxcbr0) gateway network gateway for the container additional_ips additional IPs which will be wired on the main bridge (br0), which is connected to the internet. Be aware that you may use manual virtual MAC addresses provided by your provider (Online, OVH, etc.). This is a list of mappings {ip: '', mac: '', netmask: ''}. Set gateway to None and put a gateway on another interface to escape through an interface other than eth0. eg:: - {'mac': '00:16:3e:01:29:40', 'gateway': None, (default) 'link': 'br0', (default) 'netmask': '', (default) 'ip': '22.1.4.25'} users administrative users for the container default: [root] and [root, ubuntu] on ubuntu default_nic name of the first interface, you should really not override this CLI Example: ..
code-block:: bash salt '*' lxc.cloud_init_interface foo ''' if vm_ is None: vm_ = {} vm_ = copy.deepcopy(vm_) vm_ = salt.utils.dictupdate.update(vm_, kwargs) profile_data = copy.deepcopy( vm_.get('lxc_profile', vm_.get('profile', {}))) if not isinstance(profile_data, (dict, six.string_types)): profile_data = {} profile = get_container_profile(profile_data) def _cloud_get(k, default=None): return vm_.get(k, profile.get(k, default)) if name is None: name = vm_['name'] # if we are on ubuntu, default to ubuntu default_template = '' if __grains__.get('os', '') in ['Ubuntu']: default_template = 'ubuntu' image = _cloud_get('image') if not image: _cloud_get('template', default_template) backing = _cloud_get('backing', 'dir') if image: profile['template'] = image vgname = _cloud_get('vgname', None) if vgname: profile['vgname'] = vgname if backing: profile['backing'] = backing snapshot = _cloud_get('snapshot', False) autostart = bool(_cloud_get('autostart', True)) dnsservers = _cloud_get('dnsservers', []) dns_via_dhcp = _cloud_get('dns_via_dhcp', True) password = _cloud_get('password', 's3cr3t') password_encrypted = _cloud_get('password_encrypted', False) fstype = _cloud_get('fstype', None) lvname = _cloud_get('lvname', None) thinpool = _cloud_get('thinpool', None) pub_key = _cloud_get('pub_key', None) priv_key = _cloud_get('priv_key', None) size = _cloud_get('size', '20G') script = _cloud_get('script', None) script_args = _cloud_get('script_args', None) users = _cloud_get('users', None) if users is None: users = [] ssh_username = _cloud_get('ssh_username', None) if ssh_username and (ssh_username not in users): users.append(ssh_username) network_profile = _cloud_get('network_profile', None) nic_opts = kwargs.get('nic_opts', None) netmask = _cloud_get('netmask', '24') path = _cloud_get('path', None) bridge = _cloud_get('bridge', None) gateway = _cloud_get('gateway', None) unconditional_install = _cloud_get('unconditional_install', False) force_install = _cloud_get('force_install', True) config = _get_salt_config(_cloud_get('config', {}), **vm_) default_nic = _cloud_get('default_nic', DEFAULT_NIC) # do the interface with lxc.init mainly via nic_opts # to avoid extra and confusing extra use cases. if not isinstance(nic_opts, dict): nic_opts = salt.utils.odict.OrderedDict() # have a reference to the default nic eth0 = nic_opts.setdefault(default_nic, salt.utils.odict.OrderedDict()) # lxc config is based of ifc order, be sure to use odicts. if not isinstance(nic_opts, salt.utils.odict.OrderedDict): bnic_opts = salt.utils.odict.OrderedDict() bnic_opts.update(nic_opts) nic_opts = bnic_opts gw = None # legacy salt.cloud scheme for network interfaces settings support bridge = _cloud_get('bridge', None) ip = _cloud_get('ip', None) mac = _cloud_get('mac', None) if ip: fullip = ip if netmask: fullip += '/{0}'.format(netmask) eth0['ipv4'] = fullip if mac is not None: eth0['mac'] = mac for ix, iopts in enumerate(_cloud_get("additional_ips", [])): ifh = "eth{0}".format(ix+1) ethx = nic_opts.setdefault(ifh, {}) if gw is None: gw = iopts.get('gateway', ethx.get('gateway', None)) if gw: # only one and only one default gateway is allowed ! eth0.pop('gateway', None) gateway = None # even if the gateway if on default "eth0" nic # and we popped it will work # as we reinject or set it here. 
ethx['gateway'] = gw elink = iopts.get('link', ethx.get('link', None)) if elink: ethx['link'] = elink # allow dhcp aip = iopts.get('ipv4', iopts.get('ip', None)) if aip: ethx['ipv4'] = aip nm = iopts.get('netmask', '') if nm: ethx['ipv4'] += '/{0}'.format(nm) for i in ('mac', 'hwaddr'): if i in iopts: ethx['mac'] = iopts[i] break if 'mac' not in ethx: ethx['mac'] = salt.utils.network.gen_mac() # last round checking for unique gateway and such gw = None for ethx in [a for a in nic_opts]: ndata = nic_opts[ethx] if gw: ndata.pop('gateway', None) if 'gateway' in ndata: gw = ndata['gateway'] gateway = None # only use a default bridge / gateway if we configured them # via the legacy salt cloud configuration style. # On other cases, we should rely on settings provided by the new # salt lxc network profile style configuration which can # be also be overridden or a per interface basis via the nic_opts dict. if bridge: eth0['link'] = bridge if gateway: eth0['gateway'] = gateway # lxc_init_interface = {} lxc_init_interface['name'] = name lxc_init_interface['config'] = config lxc_init_interface['memory'] = _cloud_get('memory', 0) # nolimit lxc_init_interface['pub_key'] = pub_key lxc_init_interface['priv_key'] = priv_key lxc_init_interface['nic_opts'] = nic_opts for clone_from in ['clone_from', 'clone', 'from_container']: # clone_from should default to None if not available lxc_init_interface['clone_from'] = _cloud_get(clone_from, None) if lxc_init_interface['clone_from'] is not None: break lxc_init_interface['profile'] = profile lxc_init_interface['snapshot'] = snapshot lxc_init_interface['dnsservers'] = dnsservers lxc_init_interface['fstype'] = fstype lxc_init_interface['path'] = path lxc_init_interface['vgname'] = vgname lxc_init_interface['size'] = size lxc_init_interface['lvname'] = lvname lxc_init_interface['thinpool'] = thinpool lxc_init_interface['force_install'] = force_install lxc_init_interface['unconditional_install'] = ( unconditional_install ) lxc_init_interface['bootstrap_url'] = script lxc_init_interface['bootstrap_args'] = script_args lxc_init_interface['bootstrap_shell'] = _cloud_get('bootstrap_shell', 'sh') lxc_init_interface['bootstrap_delay'] = _cloud_get('bootstrap_delay', None) lxc_init_interface['autostart'] = autostart lxc_init_interface['users'] = users lxc_init_interface['password'] = password lxc_init_interface['password_encrypted'] = password_encrypted # be sure not to let objects goes inside the return # as this return will be msgpacked for use in the runner ! lxc_init_interface['network_profile'] = network_profile for i in ['cpu', 'cpuset', 'cpushare']: if _cloud_get(i, None): try: lxc_init_interface[i] = vm_[i] except KeyError: lxc_init_interface[i] = profile[i] return lxc_init_interface
[ "def", "cloud_init_interface", "(", "name", ",", "vm_", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "vm_", "is", "None", ":", "vm_", "=", "{", "}", "vm_", "=", "copy", ".", "deepcopy", "(", "vm_", ")", "vm_", "=", "salt", ".", "utils", ".", "dictupdate", ".", "update", "(", "vm_", ",", "kwargs", ")", "profile_data", "=", "copy", ".", "deepcopy", "(", "vm_", ".", "get", "(", "'lxc_profile'", ",", "vm_", ".", "get", "(", "'profile'", ",", "{", "}", ")", ")", ")", "if", "not", "isinstance", "(", "profile_data", ",", "(", "dict", ",", "six", ".", "string_types", ")", ")", ":", "profile_data", "=", "{", "}", "profile", "=", "get_container_profile", "(", "profile_data", ")", "def", "_cloud_get", "(", "k", ",", "default", "=", "None", ")", ":", "return", "vm_", ".", "get", "(", "k", ",", "profile", ".", "get", "(", "k", ",", "default", ")", ")", "if", "name", "is", "None", ":", "name", "=", "vm_", "[", "'name'", "]", "# if we are on ubuntu, default to ubuntu", "default_template", "=", "''", "if", "__grains__", ".", "get", "(", "'os'", ",", "''", ")", "in", "[", "'Ubuntu'", "]", ":", "default_template", "=", "'ubuntu'", "image", "=", "_cloud_get", "(", "'image'", ")", "if", "not", "image", ":", "_cloud_get", "(", "'template'", ",", "default_template", ")", "backing", "=", "_cloud_get", "(", "'backing'", ",", "'dir'", ")", "if", "image", ":", "profile", "[", "'template'", "]", "=", "image", "vgname", "=", "_cloud_get", "(", "'vgname'", ",", "None", ")", "if", "vgname", ":", "profile", "[", "'vgname'", "]", "=", "vgname", "if", "backing", ":", "profile", "[", "'backing'", "]", "=", "backing", "snapshot", "=", "_cloud_get", "(", "'snapshot'", ",", "False", ")", "autostart", "=", "bool", "(", "_cloud_get", "(", "'autostart'", ",", "True", ")", ")", "dnsservers", "=", "_cloud_get", "(", "'dnsservers'", ",", "[", "]", ")", "dns_via_dhcp", "=", "_cloud_get", "(", "'dns_via_dhcp'", ",", "True", ")", "password", "=", "_cloud_get", "(", "'password'", ",", "'s3cr3t'", ")", "password_encrypted", "=", "_cloud_get", "(", "'password_encrypted'", ",", "False", ")", "fstype", "=", "_cloud_get", "(", "'fstype'", ",", "None", ")", "lvname", "=", "_cloud_get", "(", "'lvname'", ",", "None", ")", "thinpool", "=", "_cloud_get", "(", "'thinpool'", ",", "None", ")", "pub_key", "=", "_cloud_get", "(", "'pub_key'", ",", "None", ")", "priv_key", "=", "_cloud_get", "(", "'priv_key'", ",", "None", ")", "size", "=", "_cloud_get", "(", "'size'", ",", "'20G'", ")", "script", "=", "_cloud_get", "(", "'script'", ",", "None", ")", "script_args", "=", "_cloud_get", "(", "'script_args'", ",", "None", ")", "users", "=", "_cloud_get", "(", "'users'", ",", "None", ")", "if", "users", "is", "None", ":", "users", "=", "[", "]", "ssh_username", "=", "_cloud_get", "(", "'ssh_username'", ",", "None", ")", "if", "ssh_username", "and", "(", "ssh_username", "not", "in", "users", ")", ":", "users", ".", "append", "(", "ssh_username", ")", "network_profile", "=", "_cloud_get", "(", "'network_profile'", ",", "None", ")", "nic_opts", "=", "kwargs", ".", "get", "(", "'nic_opts'", ",", "None", ")", "netmask", "=", "_cloud_get", "(", "'netmask'", ",", "'24'", ")", "path", "=", "_cloud_get", "(", "'path'", ",", "None", ")", "bridge", "=", "_cloud_get", "(", "'bridge'", ",", "None", ")", "gateway", "=", "_cloud_get", "(", "'gateway'", ",", "None", ")", "unconditional_install", "=", "_cloud_get", "(", "'unconditional_install'", ",", "False", ")", "force_install", "=", "_cloud_get", "(", "'force_install'", ",", "True", ")", "config", "=", 
"_get_salt_config", "(", "_cloud_get", "(", "'config'", ",", "{", "}", ")", ",", "*", "*", "vm_", ")", "default_nic", "=", "_cloud_get", "(", "'default_nic'", ",", "DEFAULT_NIC", ")", "# do the interface with lxc.init mainly via nic_opts", "# to avoid extra and confusing extra use cases.", "if", "not", "isinstance", "(", "nic_opts", ",", "dict", ")", ":", "nic_opts", "=", "salt", ".", "utils", ".", "odict", ".", "OrderedDict", "(", ")", "# have a reference to the default nic", "eth0", "=", "nic_opts", ".", "setdefault", "(", "default_nic", ",", "salt", ".", "utils", ".", "odict", ".", "OrderedDict", "(", ")", ")", "# lxc config is based of ifc order, be sure to use odicts.", "if", "not", "isinstance", "(", "nic_opts", ",", "salt", ".", "utils", ".", "odict", ".", "OrderedDict", ")", ":", "bnic_opts", "=", "salt", ".", "utils", ".", "odict", ".", "OrderedDict", "(", ")", "bnic_opts", ".", "update", "(", "nic_opts", ")", "nic_opts", "=", "bnic_opts", "gw", "=", "None", "# legacy salt.cloud scheme for network interfaces settings support", "bridge", "=", "_cloud_get", "(", "'bridge'", ",", "None", ")", "ip", "=", "_cloud_get", "(", "'ip'", ",", "None", ")", "mac", "=", "_cloud_get", "(", "'mac'", ",", "None", ")", "if", "ip", ":", "fullip", "=", "ip", "if", "netmask", ":", "fullip", "+=", "'/{0}'", ".", "format", "(", "netmask", ")", "eth0", "[", "'ipv4'", "]", "=", "fullip", "if", "mac", "is", "not", "None", ":", "eth0", "[", "'mac'", "]", "=", "mac", "for", "ix", ",", "iopts", "in", "enumerate", "(", "_cloud_get", "(", "\"additional_ips\"", ",", "[", "]", ")", ")", ":", "ifh", "=", "\"eth{0}\"", ".", "format", "(", "ix", "+", "1", ")", "ethx", "=", "nic_opts", ".", "setdefault", "(", "ifh", ",", "{", "}", ")", "if", "gw", "is", "None", ":", "gw", "=", "iopts", ".", "get", "(", "'gateway'", ",", "ethx", ".", "get", "(", "'gateway'", ",", "None", ")", ")", "if", "gw", ":", "# only one and only one default gateway is allowed !", "eth0", ".", "pop", "(", "'gateway'", ",", "None", ")", "gateway", "=", "None", "# even if the gateway if on default \"eth0\" nic", "# and we popped it will work", "# as we reinject or set it here.", "ethx", "[", "'gateway'", "]", "=", "gw", "elink", "=", "iopts", ".", "get", "(", "'link'", ",", "ethx", ".", "get", "(", "'link'", ",", "None", ")", ")", "if", "elink", ":", "ethx", "[", "'link'", "]", "=", "elink", "# allow dhcp", "aip", "=", "iopts", ".", "get", "(", "'ipv4'", ",", "iopts", ".", "get", "(", "'ip'", ",", "None", ")", ")", "if", "aip", ":", "ethx", "[", "'ipv4'", "]", "=", "aip", "nm", "=", "iopts", ".", "get", "(", "'netmask'", ",", "''", ")", "if", "nm", ":", "ethx", "[", "'ipv4'", "]", "+=", "'/{0}'", ".", "format", "(", "nm", ")", "for", "i", "in", "(", "'mac'", ",", "'hwaddr'", ")", ":", "if", "i", "in", "iopts", ":", "ethx", "[", "'mac'", "]", "=", "iopts", "[", "i", "]", "break", "if", "'mac'", "not", "in", "ethx", ":", "ethx", "[", "'mac'", "]", "=", "salt", ".", "utils", ".", "network", ".", "gen_mac", "(", ")", "# last round checking for unique gateway and such", "gw", "=", "None", "for", "ethx", "in", "[", "a", "for", "a", "in", "nic_opts", "]", ":", "ndata", "=", "nic_opts", "[", "ethx", "]", "if", "gw", ":", "ndata", ".", "pop", "(", "'gateway'", ",", "None", ")", "if", "'gateway'", "in", "ndata", ":", "gw", "=", "ndata", "[", "'gateway'", "]", "gateway", "=", "None", "# only use a default bridge / gateway if we configured them", "# via the legacy salt cloud configuration style.", "# On other cases, we should rely on settings provided by the 
new", "# salt lxc network profile style configuration which can", "# be also be overridden or a per interface basis via the nic_opts dict.", "if", "bridge", ":", "eth0", "[", "'link'", "]", "=", "bridge", "if", "gateway", ":", "eth0", "[", "'gateway'", "]", "=", "gateway", "#", "lxc_init_interface", "=", "{", "}", "lxc_init_interface", "[", "'name'", "]", "=", "name", "lxc_init_interface", "[", "'config'", "]", "=", "config", "lxc_init_interface", "[", "'memory'", "]", "=", "_cloud_get", "(", "'memory'", ",", "0", ")", "# nolimit", "lxc_init_interface", "[", "'pub_key'", "]", "=", "pub_key", "lxc_init_interface", "[", "'priv_key'", "]", "=", "priv_key", "lxc_init_interface", "[", "'nic_opts'", "]", "=", "nic_opts", "for", "clone_from", "in", "[", "'clone_from'", ",", "'clone'", ",", "'from_container'", "]", ":", "# clone_from should default to None if not available", "lxc_init_interface", "[", "'clone_from'", "]", "=", "_cloud_get", "(", "clone_from", ",", "None", ")", "if", "lxc_init_interface", "[", "'clone_from'", "]", "is", "not", "None", ":", "break", "lxc_init_interface", "[", "'profile'", "]", "=", "profile", "lxc_init_interface", "[", "'snapshot'", "]", "=", "snapshot", "lxc_init_interface", "[", "'dnsservers'", "]", "=", "dnsservers", "lxc_init_interface", "[", "'fstype'", "]", "=", "fstype", "lxc_init_interface", "[", "'path'", "]", "=", "path", "lxc_init_interface", "[", "'vgname'", "]", "=", "vgname", "lxc_init_interface", "[", "'size'", "]", "=", "size", "lxc_init_interface", "[", "'lvname'", "]", "=", "lvname", "lxc_init_interface", "[", "'thinpool'", "]", "=", "thinpool", "lxc_init_interface", "[", "'force_install'", "]", "=", "force_install", "lxc_init_interface", "[", "'unconditional_install'", "]", "=", "(", "unconditional_install", ")", "lxc_init_interface", "[", "'bootstrap_url'", "]", "=", "script", "lxc_init_interface", "[", "'bootstrap_args'", "]", "=", "script_args", "lxc_init_interface", "[", "'bootstrap_shell'", "]", "=", "_cloud_get", "(", "'bootstrap_shell'", ",", "'sh'", ")", "lxc_init_interface", "[", "'bootstrap_delay'", "]", "=", "_cloud_get", "(", "'bootstrap_delay'", ",", "None", ")", "lxc_init_interface", "[", "'autostart'", "]", "=", "autostart", "lxc_init_interface", "[", "'users'", "]", "=", "users", "lxc_init_interface", "[", "'password'", "]", "=", "password", "lxc_init_interface", "[", "'password_encrypted'", "]", "=", "password_encrypted", "# be sure not to let objects goes inside the return", "# as this return will be msgpacked for use in the runner !", "lxc_init_interface", "[", "'network_profile'", "]", "=", "network_profile", "for", "i", "in", "[", "'cpu'", ",", "'cpuset'", ",", "'cpushare'", "]", ":", "if", "_cloud_get", "(", "i", ",", "None", ")", ":", "try", ":", "lxc_init_interface", "[", "i", "]", "=", "vm_", "[", "i", "]", "except", "KeyError", ":", "lxc_init_interface", "[", "i", "]", "=", "profile", "[", "i", "]", "return", "lxc_init_interface" ]
36.3
15.796774
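An isolated sketch of the legacy-network translation performed above: a flat ip/netmask/mac config becomes a per-interface nic_opts mapping. The function name is made up; only the eth0 branch of the record's logic is shown:

from collections import OrderedDict

def legacy_to_nic_opts(vm_, default_nic='eth0'):
    nic_opts = OrderedDict()
    eth0 = nic_opts.setdefault(default_nic, OrderedDict())
    ip = vm_.get('ip')
    netmask = vm_.get('netmask', '24')
    mac = vm_.get('mac')
    if ip:
        eth0['ipv4'] = ip + ('/{0}'.format(netmask) if netmask else '')
    if mac is not None:
        eth0['mac'] = mac
    return nic_opts

print(legacy_to_nic_opts({'ip': '22.1.4.25', 'mac': '00:16:3e:01:29:40'}))
# -> eth0 gets ipv4 '22.1.4.25/24' and the given MAC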
def yticksize(self, size, index=1): """Set the y-axis tick font size. Parameters ---------- size : int Tick font size. index : int, optional Index of the y-axis to modify (default 1). Returns ------- Chart """ self.layout['yaxis' + str(index)]['tickfont']['size'] = size return self
[ "def", "yticksize", "(", "self", ",", "size", ",", "index", "=", "1", ")", ":", "self", ".", "layout", "[", "'yaxis'", "+", "str", "(", "index", ")", "]", "[", "'tickfont'", "]", "[", "'size'", "]", "=", "size", "return", "self" ]
18.857143
22.642857
def calc_density(self, density_standard=None): """ Calculates the density of the SpectralColor. By default, Status T density is used, and the correct density distribution (Red, Green, or Blue) is chosen by comparing the Red, Green, and Blue components of the spectral sample (the values being read in via "filters"). """ if density_standard is not None: return density.ansi_density(self, density_standard) else: return density.auto_density(self)
[ "def", "calc_density", "(", "self", ",", "density_standard", "=", "None", ")", ":", "if", "density_standard", "is", "not", "None", ":", "return", "density", ".", "ansi_density", "(", "self", ",", "density_standard", ")", "else", ":", "return", "density", ".", "auto_density", "(", "self", ")" ]
47.454545
17.636364
def pixel_coords(self, latlon, reverse=False): '''return pixel coordinates in the map image for a (lat,lon) if reverse is set, then return lat/lon for a pixel coordinate ''' state = self.state if reverse: (x,y) = latlon return self.coordinates(x,y) (lat,lon) = (latlon[0], latlon[1]) return state.mt.coord_to_pixel(state.lat, state.lon, state.width, state.ground_width, lat, lon)
[ "def", "pixel_coords", "(", "self", ",", "latlon", ",", "reverse", "=", "False", ")", ":", "state", "=", "self", ".", "state", "if", "reverse", ":", "(", "x", ",", "y", ")", "=", "latlon", "return", "self", ".", "coordinates", "(", "x", ",", "y", ")", "(", "lat", ",", "lon", ")", "=", "(", "latlon", "[", "0", "]", ",", "latlon", "[", "1", "]", ")", "return", "state", ".", "mt", ".", "coord_to_pixel", "(", "state", ".", "lat", ",", "state", ".", "lon", ",", "state", ".", "width", ",", "state", ".", "ground_width", ",", "lat", ",", "lon", ")" ]
45
20.6
def get_waveset(model): """Get optimal wavelengths for sampling a given model. Parameters ---------- model : `~astropy.modeling.Model` Model. Returns ------- waveset : array-like or `None` Optimal wavelengths. `None` if undefined. Raises ------ synphot.exceptions.SynphotError Invalid model. """ if not isinstance(model, Model): raise SynphotError('{0} is not a model.'.format(model)) if isinstance(model, _CompoundModel): waveset = model._tree.evaluate(WAVESET_OPERATORS, getter=None) else: waveset = _get_sampleset(model) return waveset
[ "def", "get_waveset", "(", "model", ")", ":", "if", "not", "isinstance", "(", "model", ",", "Model", ")", ":", "raise", "SynphotError", "(", "'{0} is not a model.'", ".", "format", "(", "model", ")", ")", "if", "isinstance", "(", "model", ",", "_CompoundModel", ")", ":", "waveset", "=", "model", ".", "_tree", ".", "evaluate", "(", "WAVESET_OPERATORS", ",", "getter", "=", "None", ")", "else", ":", "waveset", "=", "_get_sampleset", "(", "model", ")", "return", "waveset" ]
22.321429
21.642857
def optgroups(self, name, value, attrs=None): """Add empty option for clearable selects.""" if not self.is_required and not self.allow_multiple_selected: self.choices = list(chain([('', '')], self.choices)) return super(Select2Mixin, self).optgroups(name, value, attrs=attrs)
[ "def", "optgroups", "(", "self", ",", "name", ",", "value", ",", "attrs", "=", "None", ")", ":", "if", "not", "self", ".", "is_required", "and", "not", "self", ".", "allow_multiple_selected", ":", "self", ".", "choices", "=", "list", "(", "chain", "(", "[", "(", "''", ",", "''", ")", "]", ",", "self", ".", "choices", ")", ")", "return", "super", "(", "Select2Mixin", ",", "self", ")", ".", "optgroups", "(", "name", ",", "value", ",", "attrs", "=", "attrs", ")" ]
61.4
18.8
def all_exist(filepaths): """Returns true if all files in the list exist.""" for fname in filepaths: if not tf.gfile.Exists(fname): return False return True
[ "def", "all_exist", "(", "filepaths", ")", ":", "for", "fname", "in", "filepaths", ":", "if", "not", "tf", ".", "gfile", ".", "Exists", "(", "fname", ")", ":", "return", "False", "return", "True" ]
27.833333
14.166667
def append(self, item): """Append item to end of model""" self.beginInsertRows(QtCore.QModelIndex(), self.rowCount(), self.rowCount()) self.items.append(item) self.endInsertRows()
[ "def", "append", "(", "self", ",", "item", ")", ":", "self", ".", "beginInsertRows", "(", "QtCore", ".", "QModelIndex", "(", ")", ",", "self", ".", "rowCount", "(", ")", ",", "self", ".", "rowCount", "(", ")", ")", "self", ".", "items", ".", "append", "(", "item", ")", "self", ".", "endInsertRows", "(", ")" ]
32.875
12.25
def build_list_regex(self): """Return the regex for the folder which contains the list of builds.""" regex = 'tinderbox-builds/%(BRANCH)s-%(PLATFORM)s%(L10N)s%(DEBUG)s/' return regex % { 'BRANCH': self.branch, 'PLATFORM': '' if self.locale_build else self.platform_regex, 'L10N': 'l10n' if self.locale_build else '', 'DEBUG': '-debug' if self.debug_build else ''}
[ "def", "build_list_regex", "(", "self", ")", ":", "regex", "=", "'tinderbox-builds/%(BRANCH)s-%(PLATFORM)s%(L10N)s%(DEBUG)s/'", "return", "regex", "%", "{", "'BRANCH'", ":", "self", ".", "branch", ",", "'PLATFORM'", ":", "''", "if", "self", ".", "locale_build", "else", "self", ".", "platform_regex", ",", "'L10N'", ":", "'l10n'", "if", "self", ".", "locale_build", "else", "''", ",", "'DEBUG'", ":", "'-debug'", "if", "self", ".", "debug_build", "else", "''", "}" ]
47.555556
19.777778
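What the template above expands to for a hypothetical non-debug, non-locale build ('mozilla-central' and 'linux64' are example values; in the record the PLATFORM substitution is a regex, not a plain string):

regex = 'tinderbox-builds/%(BRANCH)s-%(PLATFORM)s%(L10N)s%(DEBUG)s/'
print(regex % {'BRANCH': 'mozilla-central', 'PLATFORM': 'linux64',
               'L10N': '', 'DEBUG': ''})
# tinderbox-builds/mozilla-central-linux64/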
def on_finished(self): """Finished signal handler""" self.controller.is_running = False error = self.controller.current_error if error is not None: self.info(self.tr("Stopped due to error(s), see Terminal.")) else: self.info(self.tr("Finished successfully!"))
[ "def", "on_finished", "(", "self", ")", ":", "self", ".", "controller", ".", "is_running", "=", "False", "error", "=", "self", ".", "controller", ".", "current_error", "if", "error", "is", "not", "None", ":", "self", ".", "info", "(", "self", ".", "tr", "(", "\"Stopped due to error(s), see Terminal.\"", ")", ")", "else", ":", "self", ".", "info", "(", "self", ".", "tr", "(", "\"Finished successfully!\"", ")", ")" ]
35.111111
16.777778
def _write_with_fallback(s, write, fileobj): """Write the supplied string with the given write function like ``write(s)``, but use a writer for the locale's preferred encoding in case of a UnicodeEncodeError. Failing that, attempt to write with 'utf-8' or 'latin-1'. """ if IPythonIOStream is not None and isinstance(fileobj, IPythonIOStream): # If the output stream is an IPython.utils.io.IOStream object that's # not going to be very helpful to us since it doesn't raise any # exceptions when an error occurs writing to its underlying stream. # There's no advantage to us using IOStream.write directly though; # instead just write directly to its underlying stream: write = fileobj.stream.write try: write(s) return write except UnicodeEncodeError: # Let's try the next approach... pass enc = locale.getpreferredencoding() try: Writer = codecs.getwriter(enc) except LookupError: Writer = codecs.getwriter(_DEFAULT_ENCODING) f = Writer(fileobj) write = f.write try: write(s) return write except UnicodeEncodeError: Writer = codecs.getwriter('latin-1') f = Writer(fileobj) write = f.write # If this doesn't work let the exception bubble up; I'm out of ideas write(s) return write
[ "def", "_write_with_fallback", "(", "s", ",", "write", ",", "fileobj", ")", ":", "if", "IPythonIOStream", "is", "not", "None", "and", "isinstance", "(", "fileobj", ",", "IPythonIOStream", ")", ":", "# If the output stream is an IPython.utils.io.IOStream object that's", "# not going to be very helpful to us since it doesn't raise any", "# exceptions when an error occurs writing to its underlying stream.", "# There's no advantage to us using IOStream.write directly though;", "# instead just write directly to its underlying stream:", "write", "=", "fileobj", ".", "stream", ".", "write", "try", ":", "write", "(", "s", ")", "return", "write", "except", "UnicodeEncodeError", ":", "# Let's try the next approach...", "pass", "enc", "=", "locale", ".", "getpreferredencoding", "(", ")", "try", ":", "Writer", "=", "codecs", ".", "getwriter", "(", "enc", ")", "except", "LookupError", ":", "Writer", "=", "codecs", ".", "getwriter", "(", "_DEFAULT_ENCODING", ")", "f", "=", "Writer", "(", "fileobj", ")", "write", "=", "f", ".", "write", "try", ":", "write", "(", "s", ")", "return", "write", "except", "UnicodeEncodeError", ":", "Writer", "=", "codecs", ".", "getwriter", "(", "'latin-1'", ")", "f", "=", "Writer", "(", "fileobj", ")", "write", "=", "f", ".", "write", "# If this doesn't work let the exception bubble up; I'm out of ideas", "write", "(", "s", ")", "return", "write" ]
32.190476
22.904762
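A minimal sketch of that fallback chain, written against a binary stream: try the locale's preferred encoding, then latin-1, re-raising the last error if both fail. (The original juggles already-wrapped, possibly text-mode streams, which is messier; only the fallback idea is kept.)

import codecs
import io
import locale

def write_with_fallback(s, raw):
    last_err = None
    for enc in (locale.getpreferredencoding(), 'latin-1'):
        try:
            codecs.getwriter(enc)(raw).write(s)
            return enc
        except (LookupError, UnicodeEncodeError) as exc:
            last_err = exc
    raise last_err

buf = io.BytesIO()
print(write_with_fallback('café', buf))  # e.g. 'UTF-8'
print(buf.getvalue())                    # b'caf\xc3\xa9' under UTF-8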
def sync_fetch(self, task): '''Synchronous fetch, usually used in the xmlrpc thread''' if not self._running: return self.ioloop.run_sync(functools.partial(self.async_fetch, task, lambda t, _, r: True)) wait_result = threading.Condition() _result = {} def callback(type, task, result): wait_result.acquire() _result['type'] = type _result['task'] = task _result['result'] = result wait_result.notify() wait_result.release() wait_result.acquire() self.ioloop.add_callback(self.fetch, task, callback) while 'result' not in _result: wait_result.wait() wait_result.release() return _result['result']
[ "def", "sync_fetch", "(", "self", ",", "task", ")", ":", "if", "not", "self", ".", "_running", ":", "return", "self", ".", "ioloop", ".", "run_sync", "(", "functools", ".", "partial", "(", "self", ".", "async_fetch", ",", "task", ",", "lambda", "t", ",", "_", ",", "r", ":", "True", ")", ")", "wait_result", "=", "threading", ".", "Condition", "(", ")", "_result", "=", "{", "}", "def", "callback", "(", "type", ",", "task", ",", "result", ")", ":", "wait_result", ".", "acquire", "(", ")", "_result", "[", "'type'", "]", "=", "type", "_result", "[", "'task'", "]", "=", "task", "_result", "[", "'result'", "]", "=", "result", "wait_result", ".", "notify", "(", ")", "wait_result", ".", "release", "(", ")", "wait_result", ".", "acquire", "(", ")", "self", ".", "ioloop", ".", "add_callback", "(", "self", ".", "fetch", ",", "task", ",", "callback", ")", "while", "'result'", "not", "in", "_result", ":", "wait_result", ".", "wait", "(", ")", "wait_result", ".", "release", "(", ")", "return", "_result", "[", "'result'", "]" ]
34.181818
16.181818
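The Condition-based rendezvous used above, in isolation: a timer thread stands in for the IOLoop callback that eventually delivers a result.

import threading

def sync_call(async_start):
    cond = threading.Condition()
    box = {}

    def callback(result):
        with cond:
            box['result'] = result
            cond.notify()

    with cond:
        async_start(callback)       # schedule the async work
        while 'result' not in box:  # guard against spurious wakeups
            cond.wait()
    return box['result']

print(sync_call(lambda cb: threading.Timer(0.1, cb, args=(42,)).start()))  # 42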
def add_sequence_flow_to_diagram(self, process_id, source_ref_id, target_ref_id, sequence_flow_name=""): """ Adds a SequenceFlow element to BPMN diagram. Requires that user passes a sourceRef and targetRef as parameters. User-defined attributes: - name :param process_id: string object. ID of parent process, :param source_ref_id: string object. ID of source node, :param target_ref_id: string object. ID of target node, :param sequence_flow_name: string object. Name of sequence flow. :return: a tuple, where first value is sequenceFlow ID, second a reference to created object. """ sequence_flow_id = BpmnDiagramGraph.id_prefix + str(uuid.uuid4()) self.sequence_flows[sequence_flow_id] = {consts.Consts.name: sequence_flow_name, consts.Consts.source_ref: source_ref_id, consts.Consts.target_ref: target_ref_id} self.diagram_graph.add_edge(source_ref_id, target_ref_id) flow = self.diagram_graph[source_ref_id][target_ref_id] flow[consts.Consts.id] = sequence_flow_id flow[consts.Consts.name] = sequence_flow_name flow[consts.Consts.process] = process_id flow[consts.Consts.source_ref] = source_ref_id flow[consts.Consts.target_ref] = target_ref_id source_node = self.diagram_graph.node[source_ref_id] target_node = self.diagram_graph.node[target_ref_id] flow[consts.Consts.waypoints] = \ [(source_node[consts.Consts.x], source_node[consts.Consts.y]), (target_node[consts.Consts.x], target_node[consts.Consts.y])] # add target node (target_ref_id) as outgoing node from source node (source_ref_id) source_node[consts.Consts.outgoing_flow].append(sequence_flow_id) # add source node (source_ref_id) as incoming node to target node (target_ref_id) target_node[consts.Consts.incoming_flow].append(sequence_flow_id) return sequence_flow_id, flow
[ "def", "add_sequence_flow_to_diagram", "(", "self", ",", "process_id", ",", "source_ref_id", ",", "target_ref_id", ",", "sequence_flow_name", "=", "\"\"", ")", ":", "sequence_flow_id", "=", "BpmnDiagramGraph", ".", "id_prefix", "+", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "self", ".", "sequence_flows", "[", "sequence_flow_id", "]", "=", "{", "consts", ".", "Consts", ".", "name", ":", "sequence_flow_name", ",", "consts", ".", "Consts", ".", "source_ref", ":", "source_ref_id", ",", "consts", ".", "Consts", ".", "target_ref", ":", "target_ref_id", "}", "self", ".", "diagram_graph", ".", "add_edge", "(", "source_ref_id", ",", "target_ref_id", ")", "flow", "=", "self", ".", "diagram_graph", "[", "source_ref_id", "]", "[", "target_ref_id", "]", "flow", "[", "consts", ".", "Consts", ".", "id", "]", "=", "sequence_flow_id", "flow", "[", "consts", ".", "Consts", ".", "name", "]", "=", "sequence_flow_name", "flow", "[", "consts", ".", "Consts", ".", "process", "]", "=", "process_id", "flow", "[", "consts", ".", "Consts", ".", "source_ref", "]", "=", "source_ref_id", "flow", "[", "consts", ".", "Consts", ".", "target_ref", "]", "=", "target_ref_id", "source_node", "=", "self", ".", "diagram_graph", ".", "node", "[", "source_ref_id", "]", "target_node", "=", "self", ".", "diagram_graph", ".", "node", "[", "target_ref_id", "]", "flow", "[", "consts", ".", "Consts", ".", "waypoints", "]", "=", "[", "(", "source_node", "[", "consts", ".", "Consts", ".", "x", "]", ",", "source_node", "[", "consts", ".", "Consts", ".", "y", "]", ")", ",", "(", "target_node", "[", "consts", ".", "Consts", ".", "x", "]", ",", "target_node", "[", "consts", ".", "Consts", ".", "y", "]", ")", "]", "# add target node (target_ref_id) as outgoing node from source node (source_ref_id)", "source_node", "[", "consts", ".", "Consts", ".", "outgoing_flow", "]", ".", "append", "(", "sequence_flow_id", ")", "# add source node (source_ref_id) as incoming node to target node (target_ref_id)", "target_node", "[", "consts", ".", "Consts", ".", "incoming_flow", "]", ".", "append", "(", "sequence_flow_id", ")", "return", "sequence_flow_id", ",", "flow" ]
55.702703
27.702703
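A hedged usage sketch, assuming the companion BpmnDiagramGraph helpers (create_new_diagram_graph, add_process_to_diagram, add_task_to_diagram) behave as in bpmn_python; exact signatures may differ between versions:

# Build two tasks in a process and connect them with a sequence flow.
diagram = BpmnDiagramGraph()
diagram.create_new_diagram_graph(diagram_name="example")
process_id = diagram.add_process_to_diagram()
task1_id, _ = diagram.add_task_to_diagram(process_id, task_name="First")
task2_id, _ = diagram.add_task_to_diagram(process_id, task_name="Second")
flow_id, flow = diagram.add_sequence_flow_to_diagram(
    process_id, task1_id, task2_id, sequence_flow_name="go")
print(flow_id, flow[consts.Consts.waypoints])   # straight line between node centres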
def sendhello(self):
    try:
        # send hello
        cli_hello_msg = "<hello>\n" +\
                        "  <capabilities>\n" +\
                        "    <capability>urn:ietf:params:netconf:base:1.0</capability>\n" +\
                        "  </capabilities>\n" +\
                        "</hello>\n"
        self._cParams.set('cli_hello', cli_hello_msg)
        self._hConn.sendmsg(cli_hello_msg)
        # recv hello
        ser_hello_msg = self._hConn.recvmsg()
        self._cParams.set('ser_hello', ser_hello_msg)
    except:
        print 'BNClient: Call sendhello fail'
        sys.exit()
""" end of function sendhello """
[ "def", "sendhello", "(", "self", ")", ":", "try", ":", "# send hello\r", "cli_hello_msg", "=", "\"<hello>\\n\"", "+", "\" <capabilities>\\n\"", "+", "\" <capability>urn:ietf:params:netconf:base:1.0</capability>\\n\"", "+", "\" </capabilities>\\n\"", "+", "\"</hello>\\n\"", "self", ".", "_cParams", ".", "set", "(", "'cli_hello'", ",", "cli_hello_msg", ")", "self", ".", "_hConn", ".", "sendmsg", "(", "cli_hello_msg", ")", "# recv hello\r", "ser_hello_msg", "=", "self", ".", "_hConn", ".", "recvmsg", "(", ")", "self", ".", "_cParams", ".", "set", "(", "'ser_hello'", ",", "ser_hello_msg", ")", "except", ":", "print", "'BNClient: Call sendhello fail'", "sys", ".", "exit", "(", ")" ]
37.631579
18.105263
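A hedged Python 3 rendering of the same NETCONF hello exchange, with the message as a single literal and a narrower except clause; `_cParams` and `_hConn` are assumed to expose the same set/sendmsg/recvmsg interface as in the class above:

import sys

CLI_HELLO = (
    "<hello>\n"
    "  <capabilities>\n"
    "    <capability>urn:ietf:params:netconf:base:1.0</capability>\n"
    "  </capabilities>\n"
    "</hello>\n"
)

def sendhello(self):
    try:
        self._cParams.set('cli_hello', CLI_HELLO)       # send hello
        self._hConn.sendmsg(CLI_HELLO)
        ser_hello_msg = self._hConn.recvmsg()           # recv hello
        self._cParams.set('ser_hello', ser_hello_msg)
    except Exception as exc:   # narrower than a bare except; keeps SystemExit/KeyboardInterrupt intact
        print('BNClient: sendhello failed: %s' % exc)
        sys.exit(1)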
def gc():
    """Deletes old stellar tables that are not used anymore"""
    def after_delete(database):
        click.echo("Deleted table %s" % database)

    app = get_app()
    upgrade_from_old_version(app)
    app.delete_orphan_snapshots(after_delete)
[ "def", "gc", "(", ")", ":", "def", "after_delete", "(", "database", ")", ":", "click", ".", "echo", "(", "\"Deleted table %s\"", "%", "database", ")", "app", "=", "get_app", "(", ")", "upgrade_from_old_version", "(", "app", ")", "app", ".", "delete_orphan_snapshots", "(", "after_delete", ")" ]
31
15.25
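A sketch of exercising the command from a test, assuming `gc` is registered as a click command in the source module (the click.echo call suggests it is; the decorator is not shown above):

from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(gc)           # assumes @click.command() on gc
print(result.output)                 # one "Deleted table ..." line per orphan snapshot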
def list_task_definitions(self):
    """
    Filtering not implemented
    """
    task_arns = []
    for task_definition_list in self.task_definitions.values():
        task_arns.extend(
            [task_definition.arn for task_definition in task_definition_list])
    return task_arns
[ "def", "list_task_definitions", "(", "self", ")", ":", "task_arns", "=", "[", "]", "for", "task_definition_list", "in", "self", ".", "task_definitions", ".", "values", "(", ")", ":", "task_arns", ".", "extend", "(", "[", "task_definition", ".", "arn", "for", "task_definition", "in", "task_definition_list", "]", ")", "return", "task_arns" ]
34.555556
14.333333
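A self-contained illustration of the flattening the method performs, with stand-in objects; the family names and ARNs below are hypothetical:

from collections import namedtuple

TaskDefinition = namedtuple('TaskDefinition', ['arn'])

# Hypothetical family -> revisions mapping, mirroring self.task_definitions.
task_definitions = {
    'web': [TaskDefinition('arn:aws:ecs:us-east-1:123456789012:task-definition/web:1'),
            TaskDefinition('arn:aws:ecs:us-east-1:123456789012:task-definition/web:2')],
    'worker': [TaskDefinition('arn:aws:ecs:us-east-1:123456789012:task-definition/worker:1')],
}

task_arns = []
for task_definition_list in task_definitions.values():
    task_arns.extend(td.arn for td in task_definition_list)
print(task_arns)    # all three ARNs, families flattened together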
def _setup_segments(self):
    """
    Parses the database file to determine what kind of database is
    being used and setup segment sizes and start points that will
    be used by the seek*() methods later.
    """
    self._databaseType = const.COUNTRY_EDITION
    self._recordLength = const.STANDARD_RECORD_LENGTH
    self._databaseSegments = const.COUNTRY_BEGIN

    filepos = self._fp.tell()
    self._fp.seek(-3, os.SEEK_END)

    for i in range(const.STRUCTURE_INFO_MAX_SIZE):
        chars = chr(255) * 3
        delim = self._fp.read(3)

        if PY3 and type(delim) is bytes:
            delim = delim.decode(ENCODING)

        if PY2:
            chars = chars.decode(ENCODING)
            if type(delim) is str:
                delim = delim.decode(ENCODING)

        if delim == chars:
            byte = self._fp.read(1)
            self._databaseType = ord(byte)

            # Compatibility with databases from April 2003 and earlier
            if self._databaseType >= 106:
                self._databaseType -= 105

            if self._databaseType == const.REGION_EDITION_REV0:
                self._databaseSegments = const.STATE_BEGIN_REV0
            elif self._databaseType == const.REGION_EDITION_REV1:
                self._databaseSegments = const.STATE_BEGIN_REV1
            elif self._databaseType in (const.CITY_EDITION_REV0,
                                        const.CITY_EDITION_REV1,
                                        const.CITY_EDITION_REV1_V6,
                                        const.ORG_EDITION,
                                        const.ISP_EDITION,
                                        const.NETSPEED_EDITION_REV1,
                                        const.NETSPEED_EDITION_REV1_V6,
                                        const.ASNUM_EDITION,
                                        const.ASNUM_EDITION_V6):
                self._databaseSegments = 0
                buf = self._fp.read(const.SEGMENT_RECORD_LENGTH)

                if PY3 and type(buf) is bytes:
                    buf = buf.decode(ENCODING)

                for j in range(const.SEGMENT_RECORD_LENGTH):
                    self._databaseSegments += (ord(buf[j]) << (j * 8))

                LONG_RECORDS = (const.ORG_EDITION, const.ISP_EDITION)
                if self._databaseType in LONG_RECORDS:
                    self._recordLength = const.ORG_RECORD_LENGTH
            break
        else:
            self._fp.seek(-4, os.SEEK_CUR)

    self._fp.seek(filepos, os.SEEK_SET)
[ "def", "_setup_segments", "(", "self", ")", ":", "self", ".", "_databaseType", "=", "const", ".", "COUNTRY_EDITION", "self", ".", "_recordLength", "=", "const", ".", "STANDARD_RECORD_LENGTH", "self", ".", "_databaseSegments", "=", "const", ".", "COUNTRY_BEGIN", "filepos", "=", "self", ".", "_fp", ".", "tell", "(", ")", "self", ".", "_fp", ".", "seek", "(", "-", "3", ",", "os", ".", "SEEK_END", ")", "for", "i", "in", "range", "(", "const", ".", "STRUCTURE_INFO_MAX_SIZE", ")", ":", "chars", "=", "chr", "(", "255", ")", "*", "3", "delim", "=", "self", ".", "_fp", ".", "read", "(", "3", ")", "if", "PY3", "and", "type", "(", "delim", ")", "is", "bytes", ":", "delim", "=", "delim", ".", "decode", "(", "ENCODING", ")", "if", "PY2", ":", "chars", "=", "chars", ".", "decode", "(", "ENCODING", ")", "if", "type", "(", "delim", ")", "is", "str", ":", "delim", "=", "delim", ".", "decode", "(", "ENCODING", ")", "if", "delim", "==", "chars", ":", "byte", "=", "self", ".", "_fp", ".", "read", "(", "1", ")", "self", ".", "_databaseType", "=", "ord", "(", "byte", ")", "# Compatibility with databases from April 2003 and earlier", "if", "self", ".", "_databaseType", ">=", "106", ":", "self", ".", "_databaseType", "-=", "105", "if", "self", ".", "_databaseType", "==", "const", ".", "REGION_EDITION_REV0", ":", "self", ".", "_databaseSegments", "=", "const", ".", "STATE_BEGIN_REV0", "elif", "self", ".", "_databaseType", "==", "const", ".", "REGION_EDITION_REV1", ":", "self", ".", "_databaseSegments", "=", "const", ".", "STATE_BEGIN_REV1", "elif", "self", ".", "_databaseType", "in", "(", "const", ".", "CITY_EDITION_REV0", ",", "const", ".", "CITY_EDITION_REV1", ",", "const", ".", "CITY_EDITION_REV1_V6", ",", "const", ".", "ORG_EDITION", ",", "const", ".", "ISP_EDITION", ",", "const", ".", "NETSPEED_EDITION_REV1", ",", "const", ".", "NETSPEED_EDITION_REV1_V6", ",", "const", ".", "ASNUM_EDITION", ",", "const", ".", "ASNUM_EDITION_V6", ")", ":", "self", ".", "_databaseSegments", "=", "0", "buf", "=", "self", ".", "_fp", ".", "read", "(", "const", ".", "SEGMENT_RECORD_LENGTH", ")", "if", "PY3", "and", "type", "(", "buf", ")", "is", "bytes", ":", "buf", "=", "buf", ".", "decode", "(", "ENCODING", ")", "for", "j", "in", "range", "(", "const", ".", "SEGMENT_RECORD_LENGTH", ")", ":", "self", ".", "_databaseSegments", "+=", "(", "ord", "(", "buf", "[", "j", "]", ")", "<<", "(", "j", "*", "8", ")", ")", "LONG_RECORDS", "=", "(", "const", ".", "ORG_EDITION", ",", "const", ".", "ISP_EDITION", ")", "if", "self", ".", "_databaseType", "in", "LONG_RECORDS", ":", "self", ".", "_recordLength", "=", "const", ".", "ORG_RECORD_LENGTH", "break", "else", ":", "self", ".", "_fp", ".", "seek", "(", "-", "4", ",", "os", ".", "SEEK_CUR", ")", "self", ".", "_fp", ".", "seek", "(", "filepos", ",", "os", ".", "SEEK_SET", ")" ]
40.984615
21.292308
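A stand-alone illustration of the little-endian decode used for the segment record above; the 3-byte buffer is made up, and a 3-byte record length is an assumption. On Python 3, indexing bytes yields ints, so ord() is unnecessary here:

buf = b'\x1f\xa0\x00'                  # hypothetical segment record
segments = 0
for j in range(len(buf)):
    segments += buf[j] << (j * 8)      # byte j contributes bits j*8 .. j*8+7
print(segments)                        # 40991 == 0x00a01f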
def get_serializer_class(self):
    """Gets the serializer class for this view.

    :return: `rest_framework.Serializer`
    """
    klass = None

    lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
    if lookup_url_kwarg in self.kwargs:
        # Looks like this is a detail...
        klass = self.get_object().__class__
    elif "doctype" in self.request.REQUEST:
        base = self.model.get_base_class()
        doctypes = indexable_registry.families[base]
        try:
            klass = doctypes[self.request.REQUEST["doctype"]]
        except KeyError:
            raise Http404
    if hasattr(klass, "get_serializer_class"):
        return klass.get_serializer_class()

    # TODO: fix deprecation warning here -- `get_serializer_class` is going away soon!
    return super(ContentViewSet, self).get_serializer_class()
[ "def", "get_serializer_class", "(", "self", ")", ":", "klass", "=", "None", "lookup_url_kwarg", "=", "self", ".", "lookup_url_kwarg", "or", "self", ".", "lookup_field", "if", "lookup_url_kwarg", "in", "self", ".", "kwargs", ":", "# Looks like this is a detail...", "klass", "=", "self", ".", "get_object", "(", ")", ".", "__class__", "elif", "\"doctype\"", "in", "self", ".", "request", ".", "REQUEST", ":", "base", "=", "self", ".", "model", ".", "get_base_class", "(", ")", "doctypes", "=", "indexable_registry", ".", "families", "[", "base", "]", "try", ":", "klass", "=", "doctypes", "[", "self", ".", "request", ".", "REQUEST", "[", "\"doctype\"", "]", "]", "except", "KeyError", ":", "raise", "Http404", "if", "hasattr", "(", "klass", ",", "\"get_serializer_class\"", ")", ":", "return", "klass", ".", "get_serializer_class", "(", ")", "# TODO: fix deprecation warning here -- `get_serializer_class` is going away soon!", "return", "super", "(", "ContentViewSet", ",", "self", ")", ".", "get_serializer_class", "(", ")" ]
37.333333
17.875
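An isolated, dependency-free sketch of the doctype dispatch above: look a requested doctype name up in a registry and turn a miss into a 404-style error. The registry contents and the Http404 stand-in are hypothetical:

class Http404(Exception):
    """Stand-in for django.http.Http404."""

doctypes = {"article": dict, "video": list}   # hypothetical doctype -> class map

def resolve_doctype(name):
    try:
        return doctypes[name]
    except KeyError:
        raise Http404("unknown doctype: %r" % name)

print(resolve_doctype("article"))   # <class 'dict'>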
def initialize(self, host):
    """
    The EventProcessorHost can't pass itself to the AzureStorageCheckpointLeaseManager
    constructor because it is still being constructed, so it is passed in here instead.
    Other initialization that might throw is also done here to keep it out of the
    constructor.
    """
    self.host = host
    self.storage_client = BlockBlobService(account_name=self.storage_account_name,
                                           account_key=self.storage_account_key,
                                           sas_token=self.storage_sas_token,
                                           endpoint_suffix=self.endpoint_suffix,
                                           connection_string=self.connection_string,
                                           request_session=self.request_session)
    self.consumer_group_directory = self.storage_blob_prefix + self.host.eh_config.consumer_group
[ "def", "initialize", "(", "self", ",", "host", ")", ":", "self", ".", "host", "=", "host", "self", ".", "storage_client", "=", "BlockBlobService", "(", "account_name", "=", "self", ".", "storage_account_name", ",", "account_key", "=", "self", ".", "storage_account_key", ",", "sas_token", "=", "self", ".", "storage_sas_token", ",", "endpoint_suffix", "=", "self", ".", "endpoint_suffix", ",", "connection_string", "=", "self", ".", "connection_string", ",", "request_session", "=", "self", ".", "request_session", ")", "self", ".", "consumer_group_directory", "=", "self", ".", "storage_blob_prefix", "+", "self", ".", "host", ".", "eh_config", ".", "consumer_group" ]
67.071429
35.357143
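A dependency-free sketch of the two-phase construction the docstring describes: the manager is built first with credentials only, and the fully constructed host is bound (and fallible setup run) afterwards. All names here are illustrative:

class Manager:
    def __init__(self, account_name):
        self.account_name = account_name
        self.host = None                  # bound later in initialize()

    def initialize(self, host):
        self.host = host                  # safe: host is fully built by now
        # fallible client construction goes here, not in __init__

class Host:
    def __init__(self, manager):
        self.manager = manager

manager = Manager("myaccount")            # hypothetical account name
host = Host(manager)
manager.initialize(host)                  # late binding, after both objects exist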
def convert_softmax_output(node, **kwargs):
    """Map MXNet's SoftmaxOutput operator attributes to onnx's Softmax operator
    and return the created node.
    """
    name = node["name"]
    input1_idx = kwargs["index_lookup"][node["inputs"][0][0]]
    input1 = kwargs["proc_nodes"][input1_idx]

    softmax_node = onnx.helper.make_node(
        "Softmax",
        [input1.name],
        [name],
        axis=1,
        name=name
    )
    return [softmax_node]
[ "def", "convert_softmax_output", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", "=", "node", "[", "\"name\"", "]", "input1_idx", "=", "kwargs", "[", "\"index_lookup\"", "]", "[", "node", "[", "\"inputs\"", "]", "[", "0", "]", "[", "0", "]", "]", "input1", "=", "kwargs", "[", "\"proc_nodes\"", "]", "[", "input1_idx", "]", "softmax_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Softmax\"", ",", "[", "input1", ".", "name", "]", ",", "[", "name", "]", ",", "axis", "=", "1", ",", "name", "=", "name", ")", "return", "[", "softmax_node", "]" ]
24.888889
18.777778
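A stand-alone sketch of the node this converter emits: a Softmax over axis 1 wiring a hypothetical upstream tensor name to the layer name (requires the onnx package):

from onnx import helper

node = helper.make_node(
    "Softmax", ["fc1_output"], ["softmax0"], axis=1, name="softmax0")
print(node)   # NodeProto with input "fc1_output", output "softmax0", axis = 1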
def interpolate_delta_t(delta_t_table, tt):
    """Return interpolated Delta T values for the times in `tt`.

    The 2xN table should provide TT values as element 0 and
    corresponding Delta T values for element 1.  For times outside
    the range of the table, a long-term formula is used instead.
    """
    tt_array, delta_t_array = delta_t_table
    delta_t = _to_array(interp(tt, tt_array, delta_t_array, nan, nan))

    missing = isnan(delta_t)
    if missing.any():
        # Test if we are dealing with an array and proceed appropriately
        if missing.shape:
            tt = tt[missing]
            delta_t[missing] = delta_t_formula_morrison_and_stephenson_2004(tt)
        else:
            delta_t = delta_t_formula_morrison_and_stephenson_2004(tt)
    return delta_t
[ "def", "interpolate_delta_t", "(", "delta_t_table", ",", "tt", ")", ":", "tt_array", ",", "delta_t_array", "=", "delta_t_table", "delta_t", "=", "_to_array", "(", "interp", "(", "tt", ",", "tt_array", ",", "delta_t_array", ",", "nan", ",", "nan", ")", ")", "missing", "=", "isnan", "(", "delta_t", ")", "if", "missing", ".", "any", "(", ")", ":", "# Test if we are dealing with an array and proceed appropriately", "if", "missing", ".", "shape", ":", "tt", "=", "tt", "[", "missing", "]", "delta_t", "[", "missing", "]", "=", "delta_t_formula_morrison_and_stephenson_2004", "(", "tt", ")", "else", ":", "delta_t", "=", "delta_t_formula_morrison_and_stephenson_2004", "(", "tt", ")", "return", "delta_t" ]
38.5
21.65
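A sketch of the interpolation-with-fallback idea using a toy table; real Delta T tables come from observational data, and these numbers are invented for illustration:

from numpy import array, interp

tt_array = array([2450000.0, 2455000.0, 2460000.0])
delta_t_array = array([63.0, 66.0, 69.0])

tt = array([2452500.0, 2470000.0])             # second time falls outside the table
delta_t = interp(tt, tt_array, delta_t_array, float('nan'), float('nan'))
print(delta_t)                                 # [64.5  nan]; the nan marks where
                                               # the long-term formula takes over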
def from_pydate(cls, pydate):
    """
    Creates a SQL date object from a Python date object.

    @param pydate: Python date
    @return: sql date
    """
    return cls(days=(datetime.datetime.combine(pydate, datetime.time(0, 0, 0)) -
                     _datetime2_base_date).days)
[ "def", "from_pydate", "(", "cls", ",", "pydate", ")", ":", "return", "cls", "(", "days", "=", "(", "datetime", ".", "datetime", ".", "combine", "(", "pydate", ",", "datetime", ".", "time", "(", "0", ",", "0", ",", "0", ")", ")", "-", "_datetime2_base_date", ")", ".", "days", ")" ]
39.714286
17.142857
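A sketch of the day-count arithmetic above; the base date here is an assumption (SQL Server's datetime2 range starts at 0001-01-01, which matches Python's datetime.min):

import datetime

_datetime2_base_date = datetime.datetime(1, 1, 1)   # assumed epoch

pydate = datetime.date(2000, 1, 1)
days = (datetime.datetime.combine(pydate, datetime.time(0, 0, 0))
        - _datetime2_base_date).days
print(days)   # 730119 whole days from 0001-01-01 to 2000-01-01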