Columns: input (string, lengths 11 to 7.65k characters); target (string, lengths 22 to 8.26k characters)
def emit(self, level, message): raise NotImplementedError('Please implement an emit method')
def __init__(self, *args, **kwargs):
    BaseConverter.__init__(self, *args, **kwargs)
    self.type_ = "string"
    self.regex = '[^(/;)]+'
def test_user_login(self):
    c = Client()
    # User points the browser to the landing page
    res = c.post('/', follow=True)
    # the user is not logged in
    self.assertFalse(res.context['user'].is_authenticated)
    # and is redirected to the login page
    self.assertRedirects(res, '/login/')
    # The login page is being rendered by the correct template
    self.assertTemplateUsed(res, 'registration/login.html')
    # asks the user to login using a set of valid credentials
    res = c.post('/login/', data=self.credentials, follow=True)
    # The system acknowledges him
    self.assertTrue(res.context['user'].is_authenticated)
    # and moves him at the dashboard
    self.assertTemplateUsed(res, 'app/dashboard.html')
def __init__(self, *args, **kwargs):
    FloatConverter.__init__(self, *args, **kwargs)
    self.type_ = "float"
    self.regex = '-?\\d+(\\.\\d+)?'
def test_public_views(client, expectedStatus):
    res = client.get('/public/task/{}/map/'.format(task.id))
    self.assertTrue(res.status_code == expectedStatus)
    res = client.get('/public/task/{}/3d/'.format(task.id))
    self.assertTrue(res.status_code == expectedStatus)
    res = client.get('/public/task/{}/iframe/3d/'.format(task.id))
    self.assertTrue(res.status_code == expectedStatus)
    res = client.get('/public/task/{}/iframe/map/'.format(task.id))
    self.assertTrue(res.status_code == expectedStatus)
    res = client.get('/public/task/{}/json/'.format(task.id))
    self.assertTrue(res.status_code == expectedStatus)
def test_admin_views(self):
    c = Client()
    c.login(username='testsuperuser', password='test1234')
    settingId = Setting.objects.all()[0].id  # During tests, sometimes this is != 1
    themeId = Theme.objects.all()[0].id  # During tests, sometimes this is != 1
    # Can access admin menu items
    admin_menu_items = ['/admin/app/setting/{}/change/'.format(settingId),
                        '/admin/app/theme/{}/change/'.format(themeId),
                        '/admin/',
                        '/admin/app/plugin/',
                        '/admin/auth/user/',
                        '/admin/auth/group/',
                        ]
    for url in admin_menu_items:
        res = c.get(url)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
    # Cannot access dev tools (not in dev mode)
    settings.DEV = False
    self.assertEqual(c.get('/dev-tools/').status_code, status.HTTP_404_NOT_FOUND)
    settings.DEV = True
def __init__(self, *args, **kwargs):
    PathConverter.__init__(self, *args, **kwargs)
    self.type_ = "string"
def test_default_group(self):
    # It exists
    self.assertTrue(Group.objects.filter(name='Default').count() == 1)
    # Verify that all new users are assigned to default group
    u = User.objects.create_user(username="default_user")
    u.refresh_from_db()
    self.assertTrue(u.groups.filter(name='Default').count() == 1)
def __init__(self, *args, **kwargs):
    BaseConverter.__init__(self, *args, **kwargs)
    self.type_ = "string"
def test_projects(self):
    # Get a normal user
    user = User.objects.get(username="testuser")
    self.assertFalse(user.is_superuser)
    # Create a new project
    p = Project.objects.create(owner=user, name="test")
    # Have the proper permissions been set?
    self.assertTrue(user.has_perm("view_project", p))
    self.assertTrue(user.has_perm("add_project", p))
    self.assertTrue(user.has_perm("change_project", p))
    self.assertTrue(user.has_perm("delete_project", p))
    # Get a superuser
    superUser = User.objects.get(username="testsuperuser")
    self.assertTrue(superUser.is_superuser)
    # He should also have permissions, although not explicitly set
    self.assertTrue(superUser.has_perm("delete_project", p))
    # Get another user
    anotherUser = User.objects.get(username="testuser2")
    self.assertFalse(anotherUser.is_superuser)
    # Should not have permission
    self.assertFalse(anotherUser.has_perm("delete_project", p))
def __init__(self, api, name):
    super(V1Routing, self).__init__(api, name,
                                    description='Current version of navitia API',
                                    status='current',
                                    index_endpoint='index')
def looks_like_hash(sha): return bool(HASH_REGEX.match(sha))
def _get_unique_constraints(self, table):
    """Retrieve information about existing unique constraints of the table

    This feature is needed for _recreate_table() to work properly.
    Unfortunately, it's not available in sqlalchemy 0.7.x/0.8.x.
    """
    data = table.metadata.bind.execute(
        """SELECT sql FROM sqlite_master
           WHERE type='table' AND name=:table_name""",
        table_name=table.name
    ).fetchone()[0]
    UNIQUE_PATTERN = "CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)"
    return [
        UniqueConstraint(
            *[getattr(table.columns, c.strip(' "')) for c in cols.split(",")],
            name=name
        )
        for name, cols in re.findall(UNIQUE_PATTERN, data)
    ]
def get_base_rev_args(rev): return [rev]
def _recreate_table(self, table, column=None, delta=None, omit_uniques=None):
    """Recreate the table properly

    Unlike the corresponding original method of sqlalchemy-migrate
    this one doesn't drop existing unique constraints when creating
    a new one.
    """
    table_name = self.preparer.format_table(table)
    # we remove all indexes so as not to have
    # problems during copy and re-create
    for index in table.indexes:
        index.drop()
    # reflect existing unique constraints
    for uc in self._get_unique_constraints(table):
        table.append_constraint(uc)
    # omit given unique constraints when creating a new table if required
    table.constraints = set([
        cons for cons in table.constraints
        if omit_uniques is None or cons.name not in omit_uniques
    ])
    self.append('ALTER TABLE %s RENAME TO migration_tmp' % table_name)
    self.execute()
    insertion_string = self._modify_table(table, column, delta)
    table.create(bind=self.connection)
    self.append(insertion_string % {'table_name': table_name})
    self.execute()
    self.append('DROP TABLE migration_tmp')
    self.execute()
def is_immutable_rev_checkout(self, url, dest):
    # type: (str, str) -> bool
    _, rev_options = self.get_url_rev_options(hide_url(url))
    if not rev_options.rev:
        return False
    if not self.is_commit_id_equal(dest, rev_options.rev):
        # the current commit is different from rev,
        # which means rev was something else than a commit hash
        return False
    # return False in the rare case rev is both a commit hash
    # and a tag or a branch; we don't want to cache in that case
    # because that branch/tag could point to something else in the future
    is_tag_or_branch = bool(
        self.get_revision_sha(dest, rev_options.rev)[0]
    )
    return not is_tag_or_branch
def _visit_migrate_unique_constraint(self, *p, **k):
    """Drop the given unique constraint

    The corresponding original method of sqlalchemy-migrate just
    raises NotImplemented error
    """
    self.recreate_table(p[0].table, omit_uniques=[p[0].name])
def get_git_version(self):
    VERSION_PFX = 'git version '
    version = self.run_command(
        ['version'], show_stdout=False, stdout_only=True
    )
    if version.startswith(VERSION_PFX):
        version = version[len(VERSION_PFX):].split()[0]
    else:
        version = ''
    # get first 3 positions of the git version because
    # on windows it is x.y.z.windows.t, and this parses as
    # LegacyVersion which always smaller than a Version.
    version = '.'.join(version.split('.')[:3])
    return parse_version(version)
def patch_migrate():
    """A workaround for SQLite's inability to alter things

    SQLite abilities to alter tables are very limited (please read
    http://www.sqlite.org/lang_altertable.html for more details).
    E. g. one can't drop a column or a constraint in SQLite. The
    workaround for this is to recreate the original table omitting
    the corresponding constraint (or column).

    sqlalchemy-migrate library has recreate_table() method that
    implements this workaround, but it does it wrong:

    - information about unique constraints of a table is not retrieved.
      So if you have a table with one unique constraint and a migration
      adding another one you will end up with a table that has only the
      latter unique constraint, and the former will be lost

    - dropping of unique constraints is not supported at all

    The proper way to fix this is to provide a pull-request to
    sqlalchemy-migrate, but the project seems to be dead. So we can go
    on with monkey-patching of the lib at least for now.
    """
    # this patch is needed to ensure that recreate_table() doesn't drop
    # existing unique constraints of the table when creating a new one
    helper_cls = sqlite.SQLiteHelper
    helper_cls.recreate_table = _recreate_table
    helper_cls._get_unique_constraints = _get_unique_constraints
    # this patch is needed to be able to drop existing unique constraints
    constraint_cls = sqlite.SQLiteConstraintDropper
    constraint_cls.visit_migrate_unique_constraint = \
        _visit_migrate_unique_constraint
    constraint_cls.__bases__ = (ansisql.ANSIColumnDropper,
                                sqlite.SQLiteConstraintGenerator)
def get_current_branch(cls, location):
    """
    Return the current branch, or None if HEAD isn't at a branch
    (e.g. detached HEAD).
    """
    # git-symbolic-ref exits with empty stdout if "HEAD" is a detached
    # HEAD rather than a symbolic ref. In addition, the -q causes the
    # command to exit with status code 1 instead of 128 in this case
    # and to suppress the message to stderr.
    args = ['symbolic-ref', '-q', 'HEAD']
    output = cls.run_command(
        args,
        extra_ok_returncodes=(1, ),
        show_stdout=False,
        stdout_only=True,
        cwd=location,
    )
    ref = output.strip()
    if ref.startswith('refs/heads/'):
        return ref[len('refs/heads/'):]
    return None
def db_sync(engine, abs_path, version=None, init_version=0, sanity_check=True):
    """Upgrade or downgrade a database.

    Function runs the upgrade() or downgrade() functions in change scripts.

    :param engine: SQLAlchemy engine instance for a given database
    :param abs_path: Absolute path to migrate repository.
    :param version: Database will upgrade/downgrade until this version.
                    If None - database will update to the latest
                    available version.
    :param init_version: Initial database version
    :param sanity_check: Require schema sanity checking for all tables
    """
    if version is not None:
        try:
            version = int(version)
        except ValueError:
            raise exception.DbMigrationError(
                message=_("version should be an integer"))
    current_version = db_version(engine, abs_path, init_version)
    repository = _find_migrate_repo(abs_path)
    if sanity_check:
        _db_schema_sanity_check(engine)
    if version is None or version > current_version:
        return versioning_api.upgrade(engine, repository, version)
    else:
        return versioning_api.downgrade(engine, repository, version)
def export(self, location, url):
    # type: (str, HiddenText) -> None
    """Export the Git repository at the url to the destination location"""
    if not location.endswith('/'):
        location = location + '/'
    with TempDirectory(kind="export") as temp_dir:
        self.unpack(temp_dir.path, url=url)
        self.run_command(
            ['checkout-index', '-a', '-f', '--prefix', location],
            show_stdout=False, cwd=temp_dir.path
        )
def _db_schema_sanity_check(engine):
    """Ensure all database tables were created with required parameters.

    :param engine: SQLAlchemy engine instance for a given database
    """
    if engine.name == 'mysql':
        onlyutf8_sql = ('SELECT TABLE_NAME,TABLE_COLLATION '
                        'from information_schema.TABLES '
                        'where TABLE_SCHEMA=%s and '
                        'TABLE_COLLATION NOT LIKE "%%utf8%%"')
        table_names = [res[0] for res in engine.execute(onlyutf8_sql,
                                                        engine.url.database)]
        if len(table_names) > 0:
            raise ValueError(_('Tables "%s" have non utf8 collation, '
                               'please make sure all tables are CHARSET=utf8'
                               ) % ','.join(table_names))
def get_revision_sha(cls, dest, rev):
    """
    Return (sha_or_none, is_branch), where sha_or_none is a commit hash
    if the revision names a remote branch or tag, otherwise None.

    Args:
      dest: the repository directory.
      rev: the revision name.
    """
    # Pass rev to pre-filter the list.
    output = cls.run_command(
        ['show-ref', rev],
        cwd=dest,
        show_stdout=False,
        stdout_only=True,
        on_returncode='ignore',
    )
    refs = {}
    for line in output.strip().splitlines():
        try:
            sha, ref = line.split()
        except ValueError:
            # Include the offending line to simplify troubleshooting if
            # this error ever occurs.
            raise ValueError('unexpected show-ref line: {!r}'.format(line))
        refs[ref] = sha
    branch_ref = 'refs/remotes/origin/{}'.format(rev)
    tag_ref = 'refs/tags/{}'.format(rev)
    sha = refs.get(branch_ref)
    if sha is not None:
        return (sha, True)
    sha = refs.get(tag_ref)
    return (sha, False)
def db_version(engine, abs_path, init_version):
    """Show the current version of the repository.

    :param engine: SQLAlchemy engine instance for a given database
    :param abs_path: Absolute path to migrate repository
    :param version: Initial database version
    """
    repository = _find_migrate_repo(abs_path)
    try:
        return versioning_api.db_version(engine, repository)
    except versioning_exceptions.DatabaseNotControlledError:
        meta = sqlalchemy.MetaData()
        meta.reflect(bind=engine)
        tables = meta.tables
        if len(tables) == 0 or 'alembic_version' in tables:
            db_version_control(engine, abs_path, version=init_version)
            return versioning_api.db_version(engine, repository)
        else:
            raise exception.DbMigrationError(
                message=_(
                    "The database is not under version control, but has "
                    "tables. Please stamp the current version of the schema "
                    "manually."))
def _should_fetch(cls, dest, rev):
    """
    Return true if rev is a ref or is a commit that we don't have locally.

    Branches and tags are not considered in this method because they are
    assumed to be always available locally (which is a normal outcome of
    ``git clone`` and ``git fetch --tags``).
    """
    if rev.startswith("refs/"):
        # Always fetch remote refs.
        return True
    if not looks_like_hash(rev):
        # Git fetch would fail with abbreviated commits.
        return False
    if cls.has_commit(dest, rev):
        # Don't fetch if we have the commit locally.
        return False
    return True
def db_version_control(engine, abs_path, version=None):
    """Mark a database as under this repository's version control.

    Once a database is under version control, schema changes should
    only be done via change scripts in this repository.

    :param engine: SQLAlchemy engine instance for a given database
    :param abs_path: Absolute path to migrate repository
    :param version: Initial database version
    """
    repository = _find_migrate_repo(abs_path)
    versioning_api.version_control(engine, repository, version)
    return version
def resolve_revision(cls, dest, url, rev_options):
    # type: (str, HiddenText, RevOptions) -> RevOptions
    """
    Resolve a revision to a new RevOptions object with the SHA1 of the
    branch, tag, or ref if found.

    Args:
      rev_options: a RevOptions object.
    """
    rev = rev_options.arg_rev
    # The arg_rev property's implementation for Git ensures that the
    # rev return value is always non-None.
    assert rev is not None
    sha, is_branch = cls.get_revision_sha(dest, rev)
    if sha is not None:
        rev_options = rev_options.make_new(sha)
        rev_options.branch_name = rev if is_branch else None
        return rev_options
    # Do not show a warning for the common case of something that has
    # the form of a Git commit hash.
    if not looks_like_hash(rev):
        logger.warning(
            "Did not find branch or tag '%s', assuming revision or ref.",
            rev,
        )
    if not cls._should_fetch(dest, rev):
        return rev_options
    # fetch the requested revision
    cls.run_command(
        make_command('fetch', '-q', url, rev_options.to_args()),
        cwd=dest,
    )
    # Change the revision to the SHA of the ref we fetched
    sha = cls.get_revision(dest, rev='FETCH_HEAD')
    rev_options = rev_options.make_new(sha)
    return rev_options
def is_commit_id_equal(cls, dest, name):
    """
    Return whether the current commit hash equals the given name.

    Args:
      dest: the repository directory.
      name: a string name.
    """
    if not name:
        # Then avoid an unnecessary subprocess call.
        return False
    return cls.get_revision(dest) == name
def fetch_new(self, dest, url, rev_options):
    # type: (str, HiddenText, RevOptions) -> None
    rev_display = rev_options.to_display()
    logger.info('Cloning %s%s to %s', url, rev_display, display_path(dest))
    self.run_command(make_command('clone', '-q', url, dest))
    if rev_options.rev:
        # Then a specific revision was requested.
        rev_options = self.resolve_revision(dest, url, rev_options)
        branch_name = getattr(rev_options, 'branch_name', None)
        if branch_name is None:
            # Only do a checkout if the current commit id doesn't match
            # the requested revision.
            if not self.is_commit_id_equal(dest, rev_options.rev):
                cmd_args = make_command(
                    'checkout', '-q', rev_options.to_args(),
                )
                self.run_command(cmd_args, cwd=dest)
        elif self.get_current_branch(dest) != branch_name:
            # Then a specific branch was requested, and that branch
            # is not yet checked out.
            track_branch = 'origin/{}'.format(branch_name)
            cmd_args = [
                'checkout', '-b', branch_name, '--track', track_branch,
            ]
            self.run_command(cmd_args, cwd=dest)
    #: repo may contain submodules
    self.update_submodules(dest)
def switch(self, dest, url, rev_options):
    # type: (str, HiddenText, RevOptions) -> None
    self.run_command(
        make_command('config', 'remote.origin.url', url),
        cwd=dest,
    )
    cmd_args = make_command('checkout', '-q', rev_options.to_args())
    self.run_command(cmd_args, cwd=dest)
    self.update_submodules(dest)
def update(self, dest, url, rev_options):
    # type: (str, HiddenText, RevOptions) -> None
    # First fetch changes from the default remote
    if self.get_git_version() >= parse_version('1.9.0'):
        # fetch tags in addition to everything else
        self.run_command(['fetch', '-q', '--tags'], cwd=dest)
    else:
        self.run_command(['fetch', '-q'], cwd=dest)
    # Then reset to wanted revision (maybe even origin/master)
    rev_options = self.resolve_revision(dest, url, rev_options)
    cmd_args = make_command('reset', '--hard', '-q', rev_options.to_args())
    self.run_command(cmd_args, cwd=dest)
    #: update submodules
    self.update_submodules(dest)
def get_remote_url(cls, location):
    """
    Return URL of the first remote encountered.

    Raises RemoteNotFoundError if the repository does not have a remote
    url configured.
    """
    # We need to pass 1 for extra_ok_returncodes since the command
    # exits with return code 1 if there are no matching lines.
    stdout = cls.run_command(
        ['config', '--get-regexp', r'remote\..*\.url'],
        extra_ok_returncodes=(1, ),
        show_stdout=False,
        stdout_only=True,
        cwd=location,
    )
    remotes = stdout.splitlines()
    try:
        found_remote = remotes[0]
    except IndexError:
        raise RemoteNotFoundError
    for remote in remotes:
        if remote.startswith('remote.origin.url '):
            found_remote = remote
            break
    url = found_remote.split(' ')[1]
    return url.strip()
def has_commit(cls, location, rev):
    """
    Check if rev is a commit that is available in the local repository.
    """
    try:
        cls.run_command(
            ['rev-parse', '-q', '--verify', "sha^" + rev],
            cwd=location,
            log_failed_cmd=False,
        )
    except InstallationError:
        return False
    else:
        return True
def get_revision(cls, location, rev=None):
    if rev is None:
        rev = 'HEAD'
    current_rev = cls.run_command(
        ['rev-parse', rev],
        show_stdout=False,
        stdout_only=True,
        cwd=location,
    )
    return current_rev.strip()
def get_subdirectory(cls, location):
    """
    Return the path to setup.py, relative to the repo root.
    Return None if setup.py is in the repo root.
    """
    # find the repo root
    git_dir = cls.run_command(
        ['rev-parse', '--git-dir'],
        show_stdout=False,
        stdout_only=True,
        cwd=location,
    ).strip()
    if not os.path.isabs(git_dir):
        git_dir = os.path.join(location, git_dir)
    repo_root = os.path.abspath(os.path.join(git_dir, '..'))
    return find_path_to_setup_from_repo_root(location, repo_root)
def get_url_rev_and_auth(cls, url):
    # type: (str) -> Tuple[str, Optional[str], AuthInfo]
    """
    Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
    That's required because although they use SSH they sometimes don't
    work with a ssh:// scheme (e.g. GitHub). But we need a scheme for
    parsing. Hence we remove it again afterwards and return it as a stub.
    """
    # Works around an apparent Git bug
    # (see https://article.gmane.org/gmane.comp.version-control.git/146500)
    scheme, netloc, path, query, fragment = urlsplit(url)
    if scheme.endswith('file'):
        initial_slashes = path[:-len(path.lstrip('/'))]
        newpath = (
            initial_slashes +
            urllib_request.url2pathname(path)
            .replace('\\', '/').lstrip('/')
        )
        after_plus = scheme.find('+') + 1
        url = scheme[:after_plus] + urlunsplit(
            (scheme[after_plus:], netloc, newpath, query, fragment),
        )
    if '://' not in url:
        assert 'file:' not in url
        url = url.replace('git+', 'git+ssh://')
        url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)
        url = url.replace('ssh://', '')
    else:
        url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)
    return url, rev, user_pass
def update_submodules(cls, location):
    if not os.path.exists(os.path.join(location, '.gitmodules')):
        return
    cls.run_command(
        ['submodule', 'update', '--init', '--recursive', '-q'],
        cwd=location,
    )
def get_repository_root(cls, location):
    loc = super(Git, cls).get_repository_root(location)
    if loc:
        return loc
    try:
        r = cls.run_command(
            ['rev-parse', '--show-toplevel'],
            cwd=location,
            show_stdout=False,
            stdout_only=True,
            on_returncode='raise',
            log_failed_cmd=False,
        )
    except BadCommand:
        logger.debug("could not determine if %s is under git control "
                     "because git is not available", location)
        return None
    except InstallationError:
        return None
    return os.path.normpath(r.rstrip('\r\n'))
def get_spec(field, limit=10, query='', query_dsl=''):
    """Returns aggregation specs for a term of filtered events.

    The aggregation spec will summarize values of an attribute
    whose events fall under a filter.

    Args:
        field (str): this denotes the event attribute that is used
            for aggregation.
        limit (int): How many buckets to return, defaults to 10.
        query (str): the query field to run on all documents prior to
            aggregating the results.
        query_dsl (str): the query DSL field to run on all documents prior
            to aggregating the results (optional). Either a query string
            or a query DSL has to be present.

    Raises:
        ValueError: if neither query_string or query_dsl is provided.

    Returns:
        a dict value that can be used as an aggregation spec.
    """
    if query:
        query_filter = {
            'bool': {
                'must': [
                    {
                        'query_string': {
                            'query': query
                        }
                    }
                ]
            }
        }
    elif query_dsl:
        query_filter = query_dsl
    else:
        raise ValueError('Neither query nor query DSL provided.')
    return {
        'query': query_filter,
        'aggs': {
            'aggregation': {
                'terms': {
                    'field': field,
                    'size': limit
                }
            }
        }
    }
def __call__(self, env, start_response): return env
def chart_title(self):
    """Returns a title for the chart."""
    if self.field:
        return 'Top filtered results for "{0:s}"'.format(self.field)
    return 'Top results for an unknown field after filtering'
def read(self, path): raise Exception('read called with %r' % path)
def run(
        self, field, query_string='', query_dsl='',
        supported_charts='table', start_time='', end_time='', limit=10):
    """Run the aggregation.

    Args:
        field (str): this denotes the event attribute that is used
            for aggregation.
        query_string (str): the query field to run on all documents prior
            to aggregating the results.
        query_dsl (str): the query DSL field to run on all documents prior
            to aggregating the results. Either a query string or a query
            DSL has to be present.
        supported_charts: Chart type to render. Defaults to table.
        start_time: Optional ISO formatted date string that limits the
            time range for the aggregation.
        end_time: Optional ISO formatted date string that limits the
            time range for the aggregation.
        limit (int): How many buckets to return, defaults to 10.

    Returns:
        Instance of interface.AggregationResult with aggregation result.

    Raises:
        ValueError: if neither query_string or query_dsl is provided.
    """
    if not (query_string or query_dsl):
        raise ValueError('Both query_string and query_dsl are missing')

    self.field = field
    formatted_field_name = self.format_field_by_type(field)

    aggregation_spec = get_spec(
        field=formatted_field_name, limit=limit, query=query_string,
        query_dsl=query_dsl)
    aggregation_spec = self._add_query_to_aggregation_spec(
        aggregation_spec, start_time=start_time, end_time=end_time)

    # Encoding information for Vega-Lite.
    encoding = {
        'x': {
            'field': field,
            'type': 'nominal',
            'sort': {
                'op': 'sum',
                'field': 'count',
                'order': 'descending'
            }
        },
        'y': {'field': 'count', 'type': 'quantitative'},
        'tooltip': [
            {'field': field, 'type': 'nominal'},
            {'field': 'count', 'type': 'quantitative'}],
    }

    response = self.opensearch_aggregation(aggregation_spec)
    aggregations = response.get('aggregations', {})
    aggregation = aggregations.get('aggregation', {})
    buckets = aggregation.get('buckets', [])

    values = []
    for bucket in buckets:
        d = {
            field: bucket.get('key', 'N/A'),
            'count': bucket.get('doc_count', 0)
        }
        values.append(d)

    if query_string:
        extra_query_url = 'AND {0:s}'.format(query_string)
    else:
        extra_query_url = ''

    return interface.AggregationResult(
        encoding=encoding, values=values, chart_type=supported_charts,
        sketch_url=self._sketch_url, field=field,
        extra_query_url=extra_query_url)
def read(self, path): return False
def items(self, section_name):
    if section_name != section:
        raise NoSectionError(section_name)
    return {
        'memcache_servers': memcache_servers,
        'memcache_serialization_support': memcache_serialization_support,
        'memcache_max_connections': memcache_max_connections,
    }
def read(self, path): return True
def get(self, section, option):
    if _section == section:
        if option == 'memcache_servers':
            if _srvs == 'error':
                raise NoOptionError(option, section)
            return _srvs
        elif option == 'memcache_serialization_support':
            if _sers == 'error':
                raise NoOptionError(option, section)
            return _sers
        elif option in ('memcache_max_connections', 'max_connections'):
            if _maxc == 'error':
                raise NoOptionError(option, section)
            return _maxc
        else:
            raise NoOptionError(option, section)
    else:
        raise NoSectionError(option)
def start_response(*args): pass
def setUp(self): self.app = memcache.MemcacheMiddleware(FakeApp(), {})
def test_cache_middleware(self):
    req = Request.blank('/something', environ={'REQUEST_METHOD': 'GET'})
    resp = self.app(req.environ, start_response)
    self.assertTrue('swift.cache' in resp)
    self.assertTrue(isinstance(resp['swift.cache'], MemcacheRing))
def test_conf_inline_ratelimiting(self):
    with mock.patch.object(memcache, 'ConfigParser', get_config_parser()):
        app = memcache.MemcacheMiddleware(
            FakeApp(),
            {'error_suppression_limit': '5',
             'error_suppression_interval': '2.5'})
        self.assertEqual(app.memcache._error_limit_count, 5)
        self.assertEqual(app.memcache._error_limit_time, 2.5)
        self.assertEqual(app.memcache._error_limit_duration, 2.5)
def test_conf_inline_tls(self):
    fake_context = mock.Mock()
    with mock.patch.object(ssl, 'create_default_context',
                           return_value=fake_context):
        with mock.patch.object(memcache, 'ConfigParser',
                               get_config_parser()):
            memcache.MemcacheMiddleware(
                FakeApp(),
                {'tls_enabled': 'true',
                 'tls_cafile': 'cafile',
                 'tls_certfile': 'certfile',
                 'tls_keyfile': 'keyfile'})
        ssl.create_default_context.assert_called_with(cafile='cafile')
        fake_context.load_cert_chain.assert_called_with('certfile', 'keyfile')
def test_filter_factory(self):
    factory = memcache.filter_factory({'max_connections': '3'},
                                      memcache_servers='10.10.10.10:10',
                                      memcache_serialization_support='1')
    thefilter = factory('myapp')
    self.assertEqual(thefilter.app, 'myapp')
    self.assertEqual(thefilter.memcache_servers, '10.10.10.10:10')
    self.assertEqual(thefilter.memcache._allow_pickle, False)
    self.assertEqual(thefilter.memcache._allow_unpickle, True)
    self.assertEqual(
        thefilter.memcache._client_cache['10.10.10.10:10'].max_size, 3)
def _loadapp(self, proxy_config_path):
    """
    Load a proxy from an app.conf to get the memcache_ring

    :returns: the memcache_ring of the memcache middleware filter
    """
    with mock.patch('swift.proxy.server.Ring'):
        app = loadapp(proxy_config_path)
    memcache_ring = None
    while True:
        memcache_ring = getattr(app, 'memcache', None)
        if memcache_ring:
            break
        app = app.app
    return memcache_ring
def test_real_config(self, tempdir):
    config = """
    [pipeline:main]
    pipeline = cache proxy-server

    [app:proxy-server]
    use = egg:swift#proxy

    [filter:cache]
    use = egg:swift#memcache
    """
    config_path = os.path.join(tempdir, 'test.conf')
    with open(config_path, 'w') as f:
        f.write(dedent(config))
    memcache_ring = self._loadapp(config_path)
    # only one server by default
    self.assertEqual(list(memcache_ring._client_cache.keys()),
                     ['127.0.0.1:11211'])
    # extra options
    self.assertEqual(memcache_ring._connect_timeout, 0.3)
    self.assertEqual(memcache_ring._pool_timeout, 1.0)
    # tries is limited to server count
    self.assertEqual(memcache_ring._tries, 1)
    self.assertEqual(memcache_ring._io_timeout, 2.0)
def test_real_config_with_options(self, tempdir):
    config = """
    [pipeline:main]
    pipeline = cache proxy-server

    [app:proxy-server]
    use = egg:swift#proxy

    [filter:cache]
    use = egg:swift#memcache
    memcache_servers = 10.0.0.1:11211,10.0.0.2:11211,10.0.0.3:11211,
        10.0.0.4:11211
    connect_timeout = 1.0
    pool_timeout = 0.5
    tries = 4
    io_timeout = 1.0
    tls_enabled = true
    """
    config_path = os.path.join(tempdir, 'test.conf')
    with open(config_path, 'w') as f:
        f.write(dedent(config))
    memcache_ring = self._loadapp(config_path)
    self.assertEqual(sorted(memcache_ring._client_cache.keys()),
                     ['10.0.0.%d:11211' % i for i in range(1, 5)])
    # extra options
    self.assertEqual(memcache_ring._connect_timeout, 1.0)
    self.assertEqual(memcache_ring._pool_timeout, 0.5)
    # tries is limited to server count
    self.assertEqual(memcache_ring._tries, 4)
    self.assertEqual(memcache_ring._io_timeout, 1.0)
    self.assertEqual(memcache_ring._error_limit_count, 10)
    self.assertEqual(memcache_ring._error_limit_time, 60)
    self.assertEqual(memcache_ring._error_limit_duration, 60)
    self.assertIsInstance(
        list(memcache_ring._client_cache.values())[0]._tls_context,
        ssl.SSLContext)
def __init__(self):
    # Fix some key bindings
    self.bind("<Control-Key-a>", self.select_all)
    # We will need Ctrl-/ for the "stroke", but it cannot be unbound, so
    # let's prevent it from being passed to the standard handler
    self.bind("<Control-Key-/>", lambda event: "break")
    # Diacritical bindings
    for a, k in self.accents:
        # Little-known feature of Tk, it allows to bind an event to
        # multiple keystrokes
        self.bind("<Control-Key-%s><Key>" % k,
                  lambda event, a=a: self.insert_accented(event.char, a))
def call_vision_api(image_filename, api_keys):
    api_key = api_keys['microsoft']
    post_url = "https://api.projectoxford.ai/vision/v1.0/analyze?visualFeatures=Categories,Tags,Description,Faces,ImageType,Color,Adult&subscription-key=" + api_key
    image_data = open(image_filename, 'rb').read()
    result = requests.post(post_url, data=image_data,
                           headers={'Content-Type': 'application/octet-stream'})
    result.raise_for_status()
    return result.text
def insert_accented(self, c, accent):
    if c.isalpha():
        if c.isupper():
            cap = 'capital'
        else:
            cap = 'small'
        try:
            c = lookup("latin %s letter %c with %s" % (cap, c, accent))
            self.insert(INSERT, c)
            # Prevent plain letter from being inserted too, tell Tk to
            # stop handling this event
            return "break"
        except KeyError:
            pass
def get_standardized_result(api_result):
    output = {
        'tags' : [],
        'captions' : [],
def __init__(self, master=None, **kwargs):
    Entry.__init__(self, master=None, **kwargs)
    Diacritical.__init__(self)
def select_all(self, event=None):
    self.selection_range(0, END)
    return "break"
def __init__(self, master=None, **kwargs):
    ScrolledText.__init__(self, master=None, **kwargs)
    Diacritical.__init__(self)
def select_all(self, event=None):
    self.tag_add(SEL, "1.0", "end-1c")
    self.mark_set(INSERT, "1.0")
    self.see(INSERT)
    return "break"
def test():
    frame = Frame()
    frame.pack(fill=BOTH, expand=YES)
    if os.name == "nt":
        # Set default font for all widgets; use Windows typical default
        frame.option_add("*font", "Tahoma 8")
    # The editors
    entry = DiacriticalEntry(frame)
    entry.pack(fill=BOTH, expand=YES)
    text = DiacriticalText(frame, width=76, height=25, wrap=WORD)
    if os.name == "nt":
        # But this looks better than the default set above
        text.config(font="Arial 10")
    text.pack(fill=BOTH, expand=YES)
    text.focus()
    frame.master.title("Diacritical Editor")
    frame.mainloop()
def loaded(cls): return 'cudf' in sys.modules
def applies(cls, obj):
    if not cls.loaded():
        return False
    import cudf
    return isinstance(obj, (cudf.DataFrame, cudf.Series))
def init(cls, eltype, data, kdims, vdims):
    import cudf
    import pandas as pd

    element_params = eltype.param.objects()
    kdim_param = element_params['kdims']
    vdim_param = element_params['vdims']

    if isinstance(data, (cudf.Series, pd.Series)):
        data = data.to_frame()

    if not isinstance(data, cudf.DataFrame):
        data, _, _ = PandasInterface.init(eltype, data, kdims, vdims)
        data = cudf.from_pandas(data)

    columns = list(data.columns)
    ncols = len(columns)

    index_names = [data.index.name]
    if index_names == [None]:
        index_names = ['index']

    if eltype._auto_indexable_1d and ncols == 1 and kdims is None:
        kdims = list(index_names)

    if isinstance(kdim_param.bounds[1], int):
        ndim = min([kdim_param.bounds[1], len(kdim_param.default)])
    else:
        ndim = None
    nvdim = vdim_param.bounds[1] if isinstance(vdim_param.bounds[1], int) else None
    if kdims and vdims is None:
        vdims = [c for c in columns if c not in kdims]
    elif vdims and kdims is None:
        kdims = [c for c in columns if c not in vdims][:ndim]
    elif kdims is None:
        kdims = list(columns[:ndim])
        if vdims is None:
            vdims = [d for d in columns[ndim:((ndim+nvdim) if nvdim else None)]
                     if d not in kdims]
    elif kdims == [] and vdims is None:
        vdims = list(columns[:nvdim if nvdim else None])

    # Handle reset of index if kdims reference index by name
    for kd in kdims:
        kd = dimension_name(kd)
        if kd in columns:
            continue
        if any(kd == ('index' if name is None else name)
               for name in index_names):
            data = data.reset_index()
            break
    if any(isinstance(d, (np.int64, int)) for d in kdims+vdims):
        raise DataError("cudf DataFrame column names used as dimensions "
                        "must be strings not integers.", cls)

    if kdims:
        kdim = dimension_name(kdims[0])
        if eltype._auto_indexable_1d and ncols == 1 and kdim not in columns:
            data = data.copy()
            data.insert(0, kdim, np.arange(len(data)))

    for d in kdims+vdims:
        d = dimension_name(d)
        if len([c for c in columns if c == d]) > 1:
            raise DataError('Dimensions may not reference duplicated DataFrame '
                            'columns (found duplicate %r columns). If you want to plot '
                            'a column against itself simply declare two dimensions '
                            'with the same name. ' % d, cls)

    return data, {'kdims': kdims, 'vdims': vdims}, {}
def range(cls, dataset, dimension):
    dimension = dataset.get_dimension(dimension, strict=True)
    column = dataset.data[dimension.name]
    if dimension.nodata is not None:
        column = cls.replace_value(column, dimension.nodata)
    if column.dtype.kind == 'O':
        return np.NaN, np.NaN
    else:
        return finite_range(column, column.min(), column.max())
def values(cls, dataset, dim, expanded=True, flat=True, compute=True,
           keep_index=False):
    dim = dataset.get_dimension(dim, strict=True)
    data = dataset.data[dim.name]
    if not expanded:
        data = data.unique()
        return data.values_host if compute else data.values
    elif keep_index:
        return data
    elif compute:
        return data.values_host
    try:
        return data.values
    except Exception:
        return data.values_host
def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs):
    # Get dimensions information
    dimensions = [dataset.get_dimension(d).name for d in dimensions]
    kdims = [kdim for kdim in dataset.kdims if kdim not in dimensions]

    # Update the kwargs appropriately for Element group types
    group_kwargs = {}
    group_type = dict if group_type == 'raw' else group_type
    if issubclass(group_type, Element):
        group_kwargs.update(util.get_param_values(dataset))
        group_kwargs['kdims'] = kdims
    group_kwargs.update(kwargs)

    # Propagate dataset
    group_kwargs['dataset'] = dataset.dataset

    # Find all the keys along supplied dimensions
    keys = product(*(dataset.data[dimensions[0]].unique().values_host
                     for d in dimensions))

    # Iterate over the unique entries applying selection masks
    grouped_data = []
    for unique_key in util.unique_iterator(keys):
        group_data = dataset.select(**dict(zip(dimensions, unique_key)))
        if not len(group_data):
            continue
        group_data = group_type(group_data, **group_kwargs)
        grouped_data.append((unique_key, group_data))

    if issubclass(container_type, NdMapping):
        with item_check(False), sorted_context(False):
            kdims = [dataset.get_dimension(d) for d in dimensions]
            return container_type(grouped_data, kdims=kdims)
    else:
        return container_type(grouped_data)
def select_mask(cls, dataset, selection):
    """
    Given a Dataset object and a dictionary with dimension keys and
    selection keys (i.e. tuple ranges, slices, sets, lists, or literals)
    return a boolean mask over the rows in the Dataset object that
    have been selected.
    """
    mask = None
    for dim, sel in selection.items():
        if isinstance(sel, tuple):
            sel = slice(*sel)
        arr = cls.values(dataset, dim, keep_index=True)
        if util.isdatetime(arr) and util.pd:
            try:
                sel = util.parse_datetime_selection(sel)
            except:
                pass

        new_masks = []
        if isinstance(sel, slice):
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore', r'invalid value encountered')
                if sel.start is not None:
                    new_masks.append(sel.start <= arr)
                if sel.stop is not None:
                    new_masks.append(arr < sel.stop)
            if not new_masks:
                continue
            new_mask = new_masks[0]
            for imask in new_masks[1:]:
                new_mask &= imask
        elif isinstance(sel, (set, list)):
            for v in sel:
                new_masks.append(arr == v)
            if not new_masks:
                continue
            new_mask = new_masks[0]
            for imask in new_masks[1:]:
                new_mask |= imask
        elif callable(sel):
            new_mask = sel(arr)
        else:
            new_mask = arr == sel

        if mask is None:
            mask = new_mask
        else:
            mask &= new_mask
    return mask
def select(cls, dataset, selection_mask=None, **selection):
    df = dataset.data
    if selection_mask is None:
        selection_mask = cls.select_mask(dataset, selection)
    indexed = cls.indexed(dataset, selection)
    if selection_mask is not None:
        df = df.loc[selection_mask]
    if indexed and len(df) == 1 and len(dataset.vdims) == 1:
        return df[dataset.vdims[0].name].iloc[0]
    return df
def concat_fn(cls, dataframes, **kwargs):
    import cudf
    return cudf.concat(dataframes, **kwargs)
def add_dimension(cls, dataset, dimension, dim_pos, values, vdim):
    data = dataset.data.copy()
    if dimension.name not in data:
        data[dimension.name] = values
    return data
def aggregate(cls, dataset, dimensions, function, **kwargs):
    data = dataset.data
    cols = [d.name for d in dataset.kdims if d in dimensions]
    vdims = dataset.dimensions('value', label='name')
    reindexed = data[cols+vdims]
    agg = function.__name__
    if len(dimensions):
        agg_map = {'amin': 'min', 'amax': 'max'}
        agg = agg_map.get(agg, agg)
        grouped = reindexed.groupby(cols, sort=False)
        if not hasattr(grouped, agg):
            raise ValueError('%s aggregation is not supported on cudf DataFrame.' % agg)
        df = getattr(grouped, agg)().reset_index()
    else:
        agg_map = {'amin': 'min', 'amax': 'max', 'size': 'count'}
        agg = agg_map.get(agg, agg)
        if not hasattr(reindexed, agg):
            raise ValueError('%s aggregation is not supported on cudf DataFrame.' % agg)
        agg = getattr(reindexed, agg)()
        data = dict(((col, [v]) for col, v in
                     zip(agg.index.values_host, agg.to_array())))
        df = util.pd.DataFrame(data, columns=list(agg.index.values_host))

    dropped = []
    for vd in vdims:
        if vd not in df.columns:
            dropped.append(vd)

    return df, dropped
def iloc(cls, dataset, index):
    import cudf

    rows, cols = index
    scalar = False
    columns = list(dataset.data.columns)
    if isinstance(cols, slice):
        cols = [d.name for d in dataset.dimensions()][cols]
    elif np.isscalar(cols):
        scalar = np.isscalar(rows)
        cols = [dataset.get_dimension(cols).name]
    else:
        cols = [dataset.get_dimension(d).name for d in index[1]]
    col_index = [columns.index(c) for c in cols]
    if np.isscalar(rows):
        rows = [rows]

    if scalar:
        return dataset.data[cols[0]].iloc[rows[0]]
    result = dataset.data.iloc[rows, col_index]

    # cuDF does not handle single rows and cols indexing correctly
    # as of cudf=0.10.0 so we have to convert Series back to DataFrame
    if isinstance(result, cudf.Series):
        if len(cols) == 1:
            result = result.to_frame(cols[0])
        else:
            result = result.to_frame().T
    return result
def sort(cls, dataset, by=[], reverse=False):
    cols = [dataset.get_dimension(d, strict=True).name for d in by]
    return dataset.data.sort_values(by=cols, ascending=not reverse)
def dframe(cls, dataset, dimensions):
    if dimensions:
        return dataset.data[dimensions].to_pandas()
    else:
        return dataset.data.to_pandas()
def __init__(self, key):
    self.key = key
    self.prio = random.randint(0, 1000000000)
    self.size = 1
    self.left = None
    self.right = None
def GetCompletions_Basic_test( app ):
  filepath = PathToTestFile( 'basic.py' )
  completion_data = BuildRequest( filepath = filepath,
                                  filetype = 'python',
                                  contents = ReadFile( filepath ),
                                  line_num = 7,
                                  column_num = 3)
  results = app.post_json( '/completions',
                           completion_data ).json[ 'completions' ]
  assert_that( results,
               has_items(
                 CompletionEntryMatcher( 'a' ),
                 CompletionEntryMatcher( 'b' ),
                 CompletionLocationMatcher( 'line_num', 3 ),
                 CompletionLocationMatcher( 'line_num', 4 ),
                 CompletionLocationMatcher( 'column_num', 10 ),
                 CompletionLocationMatcher( 'filepath', filepath ) ) )
def update(self): self.size = 1 + size(self.left) + size(self.right)
def GetCompletions_UnicodeDescription_test( app ):
  filepath = PathToTestFile( 'unicode.py' )
  completion_data = BuildRequest( filepath = filepath,
                                  filetype = 'python',
                                  contents = ReadFile( filepath ),
                                  force_semantic = True,
                                  line_num = 5,
                                  column_num = 3)
  results = app.post_json( '/completions',
                           completion_data ).json[ 'completions' ]
  assert_that( results, has_item(
    has_entry( 'detailed_info', contains_string( u'aafäö' ) ) ) )
def size(treap): return 0 if treap is None else treap.size
def CombineRequest( request, data ):
  kw = request
  request.update( data )
  return BuildRequest( **kw )
def split(root, minRight):
    # Split the treap into (keys < minRight, keys >= minRight)
    if root is None:
        return None, None
    if root.key >= minRight:
        left, right = split(root.left, minRight)
        root.left = right
        root.update()
        return left, root
    else:
        left, right = split(root.right, minRight)
        root.right = left
        root.update()
        return root, right
def GetCompletions_NoSuggestions_Fallback_test( app ):
  # Python completer doesn't raise NO_COMPLETIONS_MESSAGE, so this is a
  # different code path to the Clang completer cases

  # TESTCASE2 (general_fallback/lang_python.py)
  RunTest( app, {
    'description': 'param jedi does not know about (id). query="a_p"',
    'request': {
      'filetype'  : 'python',
      'filepath'  : PathToTestFile( 'general_fallback', 'lang_python.py' ),
      'line_num'  : 28,
      'column_num': 20,
      'force_semantic': False,
    },
    'expect': {
      'response': http.client.OK,
      'data': has_entries( {
        'completions': contains(
          CompletionEntryMatcher( 'a_parameter', '[ID]' ),
          CompletionEntryMatcher( 'another_parameter', '[ID]' ),
        ),
        'errors': empty(),
      } )
    },
  } )
def merge(left, right):
    # Merge two treaps where every key in left is smaller than every key in right
    if left is None:
        return right
    if right is None:
        return left
    if left.prio > right.prio:
        left.right = merge(left.right, right)
        left.update()
        return left
    else:
        right.left = merge(left, right.left)
        right.update()
        return right
def insert(root, key):
    left, right = split(root, key)
    return merge(merge(left, Treap(key)), right)
def remove(root, key):
    left, right = split(root, key)
    return merge(left, split(right, key + 1)[1])
def kth(root, k):
    if k < size(root.left):
        return kth(root.left, k)
    elif k > size(root.left):
        return kth(root.right, k - size(root.left) - 1)
    return root.key
def dfs_print(root):
    if root is None:
        return
    dfs_print(root.left)
    print(str(root.key) + ' ', end='')
    dfs_print(root.right)
def test():
    start = time.time()
    treap = None
    s = set()
    for i in range(100000):
        key = random.randint(0, 10000)
        if random.randint(0, 1) == 0:
            if key in s:
                treap = remove(treap, key)
                s.remove(key)
        elif key not in s:
            treap = insert(treap, key)
            s.add(key)
    assert len(s) == size(treap)
    for i in range(size(treap)):
        assert kth(treap, i) in s
    print(time.time() - start)
def __init__(self, region, name, retention_in_days=7):
    super(LogGroup, self).__init__()
    self.region = region
    self.name = name
    self.retention_in_days = retention_in_days
def _Create(self):
    """Create the log group."""
    create_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'logs', 'create-log-group',
        '--log-group-name', self.name
    ]
    vm_util.IssueCommand(create_cmd)
def _Delete(self):
    """Delete the log group."""
    delete_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'logs', 'delete-log-group',
        '--log-group-name', self.name
    ]
    vm_util.IssueCommand(delete_cmd, raise_on_failure=False)
def Exists(self):
    """Returns True if the log group exists."""
    describe_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'logs', 'describe-log-groups',
        '--log-group-name-prefix', self.name,
        '--no-paginate'
    ]
    stdout, _, _ = vm_util.IssueCommand(describe_cmd)
    log_groups = json.loads(stdout)['logGroups']
    group = next((group for group in log_groups
                  if group['logGroupName'] == self.name), None)
    return bool(group)
def _PostCreate(self):
    """Set the retention policy."""
    put_cmd = util.AWS_PREFIX + [
        '--region', self.region,
        'logs', 'put-retention-policy',
        '--log-group-name', self.name,
        '--retention-in-days', str(self.retention_in_days)
    ]
    vm_util.IssueCommand(put_cmd)
def GetLogs(region, stream_name, group_name, token=None):
    """Fetches the JSON formatted log stream starting at the token."""
    get_cmd = util.AWS_PREFIX + [
        '--region', region,
        'logs', 'get-log-events',
        '--start-from-head',
        '--log-group-name', group_name,
        '--log-stream-name', stream_name,
    ]
    if token:
        get_cmd.extend(['--next-token', token])
    stdout, _, _ = vm_util.IssueCommand(get_cmd)
    return json.loads(stdout)
def begin(self): self.append({'cbs': [], 'dirty': False})