Dataset columns:

repository_name       string, lengths 7-107
function_path         string, lengths 4-190
function_identifier   string, lengths 1-236
language              string, 1 class
function              string, lengths 9-647k
docstring             string, lengths 5-488k
function_url          string, lengths 71-285
context               string, lengths 0-2.51M
license               string, 5 classes
ducksboard/libsaas
libsaas/services/bitbucket/repositories.py
Repo.privileges
python
def privileges(self, specific_user=None):
    return privileges.RepoPrivileges(
        self, self.user, self.repo, specific_user)
Return a resource corresponding to all privileges from this repo, either for everyone or for a specific user.
https://github.com/ducksboard/libsaas/blob/615981a3336f65be9d51ae95a48aed9ad3bd1c3c/libsaas/services/bitbucket/repositories.py#L136-L142
from libsaas import http, parsers from libsaas.services import base from . import resource, privileges, issues, links, changesets, services class Repos(resource.BitBucketResource): path = 'repositories' @base.apimethod def get(self, *args, **kwargs): url = '{0}/user/repositories/'.format(self.parent.get_url()) return http.Request('GET', url), parsers.parse_json @base.apimethod def search(self, name=None): params = base.get_params(('name', ), locals()) request = http.Request('GET', self.get_url(), params) return request, parsers.parse_json @base.apimethod def create(self, name, scm=None, is_private=False): params = base.get_params(('name', 'scm'), locals()) params['is_private'] = 'true' if is_private else 'false' request = http.Request('POST', self.get_url(), params) return request, parsers.parse_json @base.apimethod def delete(self): request = http.Request('DELETE', self.get_url()) return request, parsers.parse_json class Repo(resource.BitBucketResource): def __init__(self, parent, user, repo): self.parent = parent self.user = user self.repo = repo def get_url(self): return '{0}/repositories/{1}/{2}'.format(self.parent.get_url(), self.user, self.repo) def require_item(self): pass def require_collection(self): raise base.MethodNotSupported() @base.apimethod def tags(self): url = '{0}/tags/'.format(self.get_url()) request = http.Request('GET', url) return request, parsers.parse_json @base.apimethod def branches(self): url = '{0}/branches/'.format(self.get_url()) request = http.Request('GET', url) return request, parsers.parse_json @base.apimethod def invite(self, user, permission): url = '{0}/invitations/{1}/{2}/{3}'.format(self.parent.get_url(), self.user, self.repo, user) params = base.get_params(('permission', ), locals()) request = http.Request('POST', url, params) return request, parsers.parse_json @base.apimethod def followers(self): request = http.Request('GET', '{0}/followers/'.format(self.get_url())) return request, parsers.parse_json @base.apimethod def events(self, start=0, limit=15, etype=None): params = base.get_params(('start', 'limit', 'etype'), locals()) url = '{0}/events/'.format(self.get_url()) request = http.Request('GET', url, params) return request, parsers.parse_json @base.resource(privileges.RepoPrivileges)
MIT License
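A minimal usage sketch (not part of the dataset row) of how the Repo resource above exposes its privileges sub-resource; the stub parent and the example user/repo names are assumptions for illustration, while Repo and privileges() come from the record itself.

from libsaas.services.bitbucket import repositories

class _StubParent(object):
    # Hypothetical parent exposing get_url(), which Repo.get_url() expects.
    def get_url(self):
        return 'https://api.bitbucket.org/1.0'

repo = repositories.Repo(_StubParent(), 'someuser', 'somerepo')
all_privs = repo.privileges()                        # privileges for everyone
user_privs = repo.privileges(specific_user='other')  # narrowed to one user
print(repo.get_url())  # https://api.bitbucket.org/1.0/repositories/someuser/somerepo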
beancount/fava
src/fava/ext/__init__.py
find_extensions
python
def find_extensions(
    base_path: str, name: str
) -> Tuple[List[Type[FavaExtensionBase]], List[FavaExtensionError]]:
    classes = []
    sys.path.insert(0, base_path)
    try:
        module = importlib.import_module(name)
    except ImportError:
        error = FavaExtensionError(
            None, f'Importing module "{name}" failed.', None
        )
        return (
            [],
            [error],
        )
    for _, obj in inspect.getmembers(module, inspect.isclass):
        if issubclass(obj, FavaExtensionBase) and obj != FavaExtensionBase:
            classes.append(obj)
    sys.path.pop(0)
    if not classes:
        error = FavaExtensionError(
            None,
            f'Module "{name}" contains no extensions.',
            None,
        )
        return (
            [],
            [error],
        )
    return classes, []
Find extensions in a module.

Args:
    base_path: The module can be relative to this path.
    name: The name of the module containing the extensions.

Returns:
    A tuple (classes, errors) where classes is a list of subclasses of
    :class:`FavaExtensionBase` found in ``name``.
https://github.com/beancount/fava/blob/703497d31c467702c59004dc2143b04ba178dce8/src/fava/ext/__init__.py#L69-L111
import ast import importlib import inspect import sys from typing import Any from typing import List from typing import Optional from typing import Tuple from typing import Type from typing import TYPE_CHECKING from beancount.core.data import Directive from fava.helpers import BeancountError if TYPE_CHECKING: from fava.core import FavaLedger class FavaExtensionError(BeancountError): class FavaExtensionBase: report_title: Optional[str] = None config: Any def __init__( self, ledger: "FavaLedger", config: Optional[str] = None ) -> None: self.ledger = ledger try: self.config = ast.literal_eval(config) if config else None except ValueError: self.config = None self.name = self.__class__.__qualname__ def after_entry_modified(self, entry: Directive, new_lines: str) -> None: def after_insert_entry(self, entry: Directive) -> None: def after_insert_metadata( self, entry: Directive, key: str, value: str ) -> None: def after_write_source(self, path: str, source: str) -> None:
MIT License
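A minimal sketch of calling the find_extensions function above, assuming fava is importable; the directory and module name are placeholders. On a failed import the function returns an empty class list plus a single FavaExtensionError, as the record shows.

from fava.ext import find_extensions

classes, errors = find_extensions('/path/to/extensions', 'my_fava_extension')
for cls in classes:
    print(cls.__qualname__)   # each FavaExtensionBase subclass that was found
for error in errors:
    print(error.message)      # import errors or "contains no extensions"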
demisto/demisto-sdk
demisto_sdk/commands/common/hook_validations/docker.py
DockerImageValidator.parse_docker_image
python
def parse_docker_image(self, docker_image):
    if docker_image:
        tag = ''
        image = ''
        try:
            image_regex = re.findall(r'(demisto\/.+)', docker_image, re.IGNORECASE)
            if image_regex:
                image = image_regex[0]
            if ':' in image:
                image_split = image.split(':')
                image = image_split[0]
                tag = image_split[1]
            else:
                error_message, error_code = Errors.no_docker_tag(docker_image)
                self.handle_error(error_message, error_code, file_path=self.file_path)
        except IndexError:
            error_message, error_code = Errors.docker_not_formatted_correctly(docker_image)
            self.handle_error(error_message, error_code, file_path=self.file_path)
        return image, tag
    else:
        if self.py_version == 'python2':
            return 'demisto/python', self.get_docker_image_latest_tag('demisto/python', None)
        else:
            return 'demisto/python3', self.get_docker_image_latest_tag('demisto/python3', None)
Verify that the docker image is of demisto format & parse the name and tag

Args:
    docker_image: String representation of the docker image name and tag

Returns:
    The name and the tag of the docker image
https://github.com/demisto/demisto-sdk/blob/8d8767c2dfec77b67c35f4e1022e30ed2893e864/demisto_sdk/commands/common/hook_validations/docker.py#L310-L344
import re from datetime import datetime, timedelta from functools import lru_cache from typing import Optional import requests from pkg_resources import parse_version from demisto_sdk.commands.common.constants import IronBankDockers from demisto_sdk.commands.common.errors import Errors from demisto_sdk.commands.common.hook_validations.base_validator import BaseValidator from demisto_sdk.commands.common.tools import get_yaml requests.packages.urllib3.disable_warnings() ACCEPT_HEADER = { 'Accept': 'application/json, ' 'application/vnd.docker.distribution.manifest.v2+json, ' 'application/vnd.docker.distribution.manifest.list.v2+json' } TIMEOUT = 10 DEFAULT_REGISTRY = 'registry-1.docker.io' class DockerImageValidator(BaseValidator): def __init__(self, yml_file_path, is_modified_file, is_integration, ignored_errors=None, print_as_warnings=False, suppress_print: bool = False, json_file_path: Optional[str] = None, is_iron_bank: bool = False): super().__init__(ignored_errors=ignored_errors, print_as_warnings=print_as_warnings, suppress_print=suppress_print, json_file_path=json_file_path) self.is_valid = True self.is_modified_file = is_modified_file self.is_integration = is_integration self.file_path = yml_file_path self.yml_file = get_yaml(yml_file_path) self.py_version = self.get_python_version() self.code_type = self.get_code_type() self.yml_docker_image = self.get_docker_image_from_yml() self.from_version = self.yml_file.get('fromversion', '0') self.docker_image_name, self.docker_image_tag = self.parse_docker_image(self.yml_docker_image) self.is_latest_tag = True self.is_iron_bank = is_iron_bank self.docker_image_latest_tag = self.get_docker_image_latest_tag(self.docker_image_name, self.yml_docker_image, self.is_iron_bank) def is_docker_image_valid(self): if self.code_type == 'javascript': return True if not self.yml_docker_image: error_message, error_code = Errors.dockerimage_not_in_yml_file(self.file_path) if self.handle_error(error_message, error_code, file_path=self.file_path): self.is_valid = False if not self.docker_image_latest_tag: error_message, error_code = Errors.non_existing_docker(self.yml_docker_image) if self.handle_error(error_message, error_code, file_path=self.file_path): self.is_valid = False elif not self.is_docker_image_latest_tag(): self.is_valid = False return self.is_valid def is_docker_image_latest_tag(self): if 'demisto/python:1.3-alpine' == f'{self.docker_image_name}:{self.docker_image_tag}': error_message, error_code = Errors.default_docker_error() if self.handle_error(error_message, error_code, file_path=self.file_path): self.is_latest_tag = False return self.is_latest_tag if self.docker_image_latest_tag == "no-tag-required": return self.is_latest_tag if not self.docker_image_name or not self.docker_image_latest_tag: self.is_latest_tag = False return self.is_latest_tag if self.docker_image_latest_tag != self.docker_image_tag: error_message, error_code = Errors.docker_not_on_the_latest_tag(self.docker_image_tag, self.docker_image_latest_tag, self.is_iron_bank) suggested_fix = Errors.suggest_docker_fix(self.docker_image_name, self.file_path, self.is_iron_bank) if self.handle_error(error_message, error_code, file_path=self.file_path, suggested_fix=suggested_fix): self.is_latest_tag = False else: self.handle_error(error_message, error_code, file_path=self.file_path, warning=True) if self.docker_image_latest_tag == "latest": error_message, error_code = Errors.latest_docker_error(self.docker_image_tag, self.docker_image_name) if self.handle_error(error_message, 
error_code, file_path=self.file_path): self.is_latest_tag = False return self.is_latest_tag def get_code_type(self): if self.is_integration: code_type = self.yml_file.get('script').get('type', 'python') else: code_type = self.yml_file.get('type', 'python') return code_type def get_python_version(self): if self.is_integration: python_version = self.yml_file.get('script').get('subtype', 'python2') else: python_version = self.yml_file.get('subtype', 'python2') return python_version def get_docker_image_from_yml(self): if self.is_integration: docker_image = self.yml_file.get('script').get('dockerimage', '') else: docker_image = self.yml_file.get('dockerimage', '') return docker_image @staticmethod def parse_www_auth(www_auth): match = re.match(r'.*realm="(.+)",service="(.+)".*', www_auth, re.IGNORECASE) if not match: return () return match.groups() @staticmethod def docker_auth(image_name, verify_ssl=True, registry=DEFAULT_REGISTRY): res = requests.get( 'https://{}/v2/'.format(registry), headers=ACCEPT_HEADER, timeout=TIMEOUT, verify=verify_ssl ) if res.status_code == 401: realm = 'https://auth.docker.io/token' service = 'registry.docker.io' www_auth = res.headers.get('www-authenticate') if www_auth: parse_auth = DockerImageValidator.parse_www_auth(www_auth) if parse_auth: realm, service = parse_auth params = { 'scope': 'repository:{}:pull'.format(image_name), 'service': service } res = requests.get( url=realm, params=params, headers=ACCEPT_HEADER, timeout=TIMEOUT, verify=verify_ssl ) res.raise_for_status() res_json = res.json() return res_json.get('token') else: res.raise_for_status() return None @staticmethod def clear_non_numbered_tags(tags): return [tag for tag in tags if re.match(r'^(?:\d+\.)*\d+$', tag) is not None] @staticmethod def lexical_find_latest_tag(tags): only_numbered_tags = DockerImageValidator.clear_non_numbered_tags(tags) if len(only_numbered_tags) == 0: return tags[-1] max_tag = only_numbered_tags[0] for num_tag in only_numbered_tags: if parse_version(max_tag) < parse_version(num_tag): max_tag = num_tag return max_tag @staticmethod def find_latest_tag_by_date(tags: list) -> str: latest_tag_name = 'latest' latest_tag_date = datetime.now() - timedelta(days=400000) for tag in tags: tag_date = datetime.strptime(tag.get('last_updated'), '%Y-%m-%dT%H:%M:%S.%fZ') if tag_date >= latest_tag_date and tag.get('name') != 'latest': latest_tag_date = tag_date latest_tag_name = tag.get('name') return latest_tag_name @staticmethod @lru_cache(256) def get_docker_image_latest_tag_request(docker_image_name: str) -> str: tag = '' auth_token = DockerImageValidator.docker_auth(docker_image_name, False, DEFAULT_REGISTRY) headers = ACCEPT_HEADER.copy() if auth_token: headers['Authorization'] = 'Bearer {}'.format(auth_token) res = requests.get( url='https://hub.docker.com/v2/repositories/{}/tags'.format(docker_image_name), verify=False, timeout=TIMEOUT, ) if res.status_code == 200: tags = res.json().get('results', []) if tags: tag = DockerImageValidator.find_latest_tag_by_date(tags) else: res = requests.get( 'https://{}/v2/{}/tags/list'.format(DEFAULT_REGISTRY, docker_image_name), headers=headers, timeout=TIMEOUT, verify=False ) res.raise_for_status() tags = res.json().get('tags', []) if tags: tag = DockerImageValidator.lexical_find_latest_tag(tags) return tag def get_docker_image_latest_tag(self, docker_image_name, yml_docker_image, is_iron_bank=False): if yml_docker_image: if not yml_docker_image.startswith('demisto/'): error_message, error_code = Errors.not_demisto_docker() if 
self.handle_error(error_message, error_code, file_path=self.file_path): return '' return "no-tag-required" try: if is_iron_bank: return self.get_docker_image_latest_tag_from_iron_bank_request(docker_image_name) return self.get_docker_image_latest_tag_request(docker_image_name) except (requests.exceptions.RequestException, Exception) as e: if not docker_image_name: docker_image_name = yml_docker_image error_message, error_code = Errors.docker_tag_not_fetched(docker_image_name, str(e)) if self.handle_error(error_message, error_code, file_path=self.file_path): return '' return "no-tag-required"
MIT License
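A standalone sketch of the parsing behaviour in parse_docker_image above, reusing only its regex and split logic; the helper name and the example image strings are hypothetical.

import re

def _split_demisto_image(docker_image):
    # Name is everything before ':', tag is everything after it.
    image, tag = '', ''
    match = re.findall(r'(demisto\/.+)', docker_image, re.IGNORECASE)
    if match:
        image = match[0]
    if ':' in image:
        image, tag = image.split(':', 1)
    return image, tag

print(_split_demisto_image('demisto/python3:3.10.12.27695'))  # ('demisto/python3', '3.10.12.27695')
print(_split_demisto_image('demisto/pandas'))                  # ('demisto/pandas', '')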
cpqd/routeflow
pox/pox/messenger/__init__.py
DefaultChannelBot._exec_cmd_invite
python
def _exec_cmd_invite (self, event):
    botname = event.msg.get('bot')
    botclass = self._bots.get(botname)
    channel = event.msg.get('channel')
    new_channel = False
    if channel is None:
        new_channel = True
        channel = self._gen_channel_name(event.msg.get("prefix", "temp"))
    chan = self._nexus.get_channel(channel, create=True, temporary=True)
    if chan is None:
        log.warning("A bot was invited to a nonexistent channel (%s)" % (channel,))
        return
    if botclass is None:
        log.warning("A nonexistent bot (%s) was invited to a channel" % (botname,))
        return
    bot = botclass(channel, self._nexus)
    if new_channel:
        self.reply(event, new_channel = new_channel)
Invites a bot that has been registered with add_bot() to a channel. Note that you can invite a bot to an empty (new) temporary channel. It will stay until the first member leaves.
https://github.com/cpqd/routeflow/blob/3f406b9c1a0796f40a86eb1194990cdd2c955f4d/pox/pox/messenger/__init__.py#L523-L551
from pox.lib.revent.revent import * from pox.core import core as core import json import time import random import hashlib from base64 import b32encode log = core.getLogger() defaultDecoder = json.JSONDecoder() class ChannelJoin (Event): def __init__ (self, connection, channel, msg = {}): Event.__init__(self) self.con = connection self.channel = channel self.msg = msg class ConnectionClosed (Event): def __init__ (self, connection): Event.__init__(self) self.con = connection class ChannelLeave (Event): def __init__ (self, connection, channel): Event.__init__(self) self.con = connection self.channel = channel class ChannelCreate (Event): def __init__ (self, channel): Event.__init__(self) self.channel = channel class ChannelDestroy (Event): def __init__ (self, channel): Event.__init__(self) self.channel = channel self.keep = False class ChannelDestroyed (Event): def __init__ (self, channel): Event.__init__(self) self.channel = channel class MissingChannel (Event): def __init__ (self, connection, channel_name, msg): Event.__init__(self) self.con = connection self.channel_name = channel_name self.msg = msg class MessageReceived (Event): def __init__ (self, connection, channel, msg): Event.__init__(self) self.con = connection self.msg = msg self.channel = channel def is_to_channel (self, channel): if isinstance(channel, Channel): channel = channel.name if channel == self.channel: return True if channel in self.channel: return True return False def _invoke (self, handler, *args, **kw): return handler(self, self.msg, *args, **kw) def _get_nexus (nexus): if nexus is None: nexus = "MessengerNexus" if isinstance(nexus, str): if not core.hasComponent(nexus): s = "MessengerNexus %s is not available" % (nexus,) log.error(s) raise RuntimeError(s) return getattr(core, nexus) assert isinstance(nexus, MessengerNexus) return nexus class Transport (object): def __init__ (self, nexus): self._nexus = _get_nexus(nexus) def _forget (self, connection): raise RuntimeError("Not implemented") class Connection (EventMixin): _eventMixin_events = set([ MessageReceived, ConnectionClosed, ]) def __init__ (self, transport): EventMixin.__init__(self) self._is_connected = True self._transport = transport self._newlines = False self._buf = bytes() key,num = self._transport._nexus.generate_session() self._session_id,self._session_num = key,num def _send_welcome (self): self.send({"CHANNEL":"","cmd":"welcome","session_id":self._session_id}) def _close (self): if self._is_connected is False: return self._transport._forget(self) self._is_connected = False for name,chan in self._transport._nexus._channels.items(): chan._remove_member(self) self.raiseEventNoErrors(ConnectionClosed, self) def send (self, whatever): if self._is_connected is False: return False s = json.dumps(whatever, default=str) if self._newlines: s += "\n" self.send_raw(s) return True def send_raw (self, data): raise RuntimeError("Not implemented") @property def is_connected (self): return self._is_connected def _rx_message (self, msg): e = self.raiseEventNoErrors(MessageReceived,self,msg.get('CHANNEL'),msg) self._transport._nexus._rx_message(self, msg) def _rx_raw (self, data): if len(data) == 0: return if len(self._buf) == 0: if data[0].isspace(): self._buf = data.lstrip() else: self._buf = data else: self._buf += data while len(self._buf) > 0: try: msg, l = defaultDecoder.raw_decode(self._buf) except: return self._buf = self._buf[l:] if len(self._buf) != 0 and self._buf[0].isspace(): self._buf = self._buf.lstrip() self._rx_message(msg) def __str__ (self): 
return "<%s/%s/%i>" % (self.__class__.__name__, self._session_id, self._session_num) def close (self): self._close() class Channel (EventMixin): _eventMixin_events = set([ MessageReceived, ChannelJoin, ChannelLeave, ChannelDestroy, ChannelDestroyed, ]) def __init__ (self, name, nexus = None, temporary = False): EventMixin.__init__(self) assert isinstance(name, basestring) self._name = name self._nexus = _get_nexus(nexus) self._nexus._channels[name] = self self.temporary = temporary self._members = set() @property def name (self): return self._name def _destroy (self): e = self.raiseEvent(ChannelDestroy, self) if e: if e.keep: return False self._nexus.raiseEvent(e) if e.keep: return False del self._nexus._channels[self._name] for sub in set(self._members): self._remove_member(sub, allow_destroy = False) e = ChannelDestroyed(self) self.raiseEvent(e) self._nexus.raiseEvent(e) def _add_member (self, con, msg = {}): if con in self._members: return self._members.add(con) self.raiseEvent(ChannelJoin, con, self, msg) def _remove_member (self, con, allow_destroy = True): if con not in self._members: return self._members.remove(con) self.raiseEvent(ChannelLeave, con, self) if not allow_destroy: return if self.temporary is True: if len(self._members) == 0: self._destroy() def send (self, msg): d = dict(msg) d['CHANNEL'] = self._name for r in self._members: if not r.is_connected: continue r.send(d) def __str__ (self): return "<Channel " + self.name + ">" def reply (_msg, **kw): if not isinstance(_msg, dict): _msg = _msg.msg kw['CHANNEL'] = _msg.get('CHANNEL') if 'XID' in _msg: kw['XID'] = _msg.get('XID') return kw class ChannelBot (object): def __str__ (self): return "<%s@%s>" % (self.__class__.__name__, self.channel) def __init__ (self, channel, nexus = None, weak = False, extra = {}): self._startup(channel, nexus, weak, extra) def _startup (self, channel, nexus = None, weak = False, extra = {}): self._nexus = _get_nexus(nexus) if isinstance(channel, Channel): self.channel = channel else: self.channel = self._nexus.get_channel(channel, create=True) self.listeners = self.channel.addListeners(self, weak = weak) self.prefixes = None self._init(extra) if self.prefixes is None: self.prefixes = [] for n in dir(self): if n.startswith("_exec_"): n = n.split("_")[2] self.prefixes.append(n) def _handle_ChannelDestroyed (self, event): self.channel.removeListeners(self.listeners) self._destroyed() def _handle_ChannelJoin (self, event): self._join(event, event.con, event.msg) def _handle_ChannelLeave (self, event): self._leave(event.con, len(self.channel._members) == 0) def _handle_MessageReceived (self, event, msg): for prefix in self.prefixes: if prefix in event.msg: cmd = "_exec_%s_%s" % (prefix, str(event.msg[prefix])) if hasattr(self, cmd): getattr(self, cmd)(event) return for prefix in self.prefixes: if prefix in event.msg: cmd = "_exec_" + prefix if hasattr(self, cmd): getattr(self, cmd)(event, msg[prefix]) return self._unhandled(event) def _unhandled (self, event): pass def _join (self, event, connection, msg): pass def _leave (self, connection, empty): pass def _destroyed (self): pass def _init (self, extra): pass def reply (__self, __event, **kw): __event.con.send(reply(__event, **kw)) def send (__self, __msg={}, **kw): m = {} m.update(__msg) m.update(kw) __self.channel.send(m) class DefaultChannelBot (ChannelBot): def _init (self, extra): self._bots = {} def add_bot (self, bot, name = None): assert issubclass(bot, ChannelBot) if name is None: name = bot.__name__ self._bots[name] = bot def 
_exec_newlines_False (self, event): event.con._newlines = False def _exec_newlines_True (self, event): event.con._newlines = True
Apache License 2.0
plos/allofplos
allofplos/corpus/corpus.py
Corpus.iter_filepaths
python
def iter_filepaths(self):
    return (os.path.join(self.directory, fname) for fname in self.iter_files)
Generator of article XML files in corpus directory, including the full path.
https://github.com/plos/allofplos/blob/85a8cdcf783a5cf36f94e7eb2083aa74430beb57/allofplos/corpus/corpus.py#L101-L103
import os from random import Random from collections import OrderedDict from itertools import islice from .. import get_corpus_dir, Article from ..transformations import filename_to_doi, doi_to_path class Corpus: def __init__(self, directory=None, extension='.xml', seed=None): if directory is None: directory = get_corpus_dir() self.directory = directory self.extension = extension self.random = Random(seed) def __repr__(self): out = "Corpus location: {0}\nNumber of files: {1}".format(self.directory, len(self.files)) return out def __len__(self): return len(self.dois) def __iter__(self): return (article for article in self.random_article_generator) def __getitem__(self, key): if isinstance(key, int): return Article(self.dois[key], directory=self.directory) elif isinstance(key, slice): return (Article(doi, directory=self.directory) for doi in self.dois[key]) elif key not in self.dois: path= doi_to_path(key, directory=self.directory) raise IndexError(("You attempted get {doi} from " "the corpus at \n{directory}. \n" "This would point to: {path}. \n" "Is that the file that was intended?" ).format(doi=key, directory=self.directory, path=path ) ) else: return Article(key, directory=self.directory) def __contains__(self, value): is_in = False if isinstance(value, Article): is_in = value.doi in self.dois and value.directory == self.directory elif isinstance(value, str): doi_in = value in self.dois file_in = value in self.files filepath_in = value in self.filepaths is_in = doi_in or file_in or filepath_in return is_in @property def iter_file_doi(self): return ((file_, filename_to_doi(file_)) for file_ in sorted(os.listdir(self.directory)) if file_.endswith(self.extension) and 'DS_Store' not in file_) @property def file_doi(self): return OrderedDict(self.iter_file_doi) @property def iter_files(self): return (x[0] for x in self.iter_file_doi) @property def iter_dois(self): return (x[1] for x in self.iter_file_doi) @property
MIT License
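A short usage sketch of the iter_filepaths property shown above, assuming allofplos is installed; the directory path is a placeholder.

from allofplos.corpus.corpus import Corpus

corpus = Corpus(directory='/path/to/allofplos_xml')
for filepath in corpus.iter_filepaths:
    print(filepath)   # full path to each .xml article in the corpus directory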
rlenglet/openfaucet
src/openfaucet/ofproto.py
OpenflowProtocol.dataReceived
python
def dataReceived(self, data):
    self._buffer.append(data)
    while len(self._buffer) >= OFP_HEADER_LENGTH:
        self._buffer.set_message_boundaries(OFP_HEADER_LENGTH)
        version, msg_type, msg_length, xid = self._buffer.unpack_inplace(
            OFP_HEADER_FORMAT)
        if msg_length > len(self._buffer):
            self._buffer.compact()
            return
        self._buffer.set_message_boundaries(msg_length)
        try:
            self._handle_message()
        except oferror.OpenflowError, e:
            self.logger.error('openflow error while decoding message:',
                              exc_info=True, extra=self.log_extra)
            self.send_error(e, xid=xid)
        except Exception, e:
            self.logger.error('error while decoding message:',
                              exc_info=True, extra=self.log_extra)
        if self._buffer.message_bytes_left > 0:
            self.logger.error(
                'message not completely decoded, %i bytes left',
                self._buffer.message_bytes_left, extra=self.log_extra)
            self._buffer.skip_bytes(self._buffer.message_bytes_left)
Handle bytes received over the connection.

Args:
    data: A string of indeterminate length containing the received bytes.
https://github.com/rlenglet/openfaucet/blob/4ef1783fc74320e66ee7a71576dc91511f238a81/src/openfaucet/ofproto.py#L424-L466
import collections import logging import struct import weakref from twisted.internet import error from twisted.internet import interfaces from twisted.internet import protocol from zope import interface from openfaucet import buffer from openfaucet import ofaction from openfaucet import ofconfig from openfaucet import oferror from openfaucet import ofmatch from openfaucet import ofstats OFP_HEADER_FORMAT = '!BBHL' OFP_HEADER_LENGTH = 8 OFP_VERSION_1_0_0 = 0x01 OFPT_HELLO = 0 OFPT_ERROR = 1 OFPT_ECHO_REQUEST = 2 OFPT_ECHO_REPLY = 3 OFPT_VENDOR = 4 OFPT_FEATURES_REQUEST = 5 OFPT_FEATURES_REPLY = 6 OFPT_GET_CONFIG_REQUEST = 7 OFPT_GET_CONFIG_REPLY = 8 OFPT_SET_CONFIG = 9 OFPT_PACKET_IN = 10 OFPT_FLOW_REMOVED = 11 OFPT_PORT_STATUS = 12 OFPT_PACKET_OUT = 13 OFPT_FLOW_MOD = 14 OFPT_PORT_MOD = 15 OFPT_STATS_REQUEST = 16 OFPT_STATS_REPLY = 17 OFPT_BARRIER_REQUEST = 18 OFPT_BARRIER_REPLY = 19 OFPT_QUEUE_GET_CONFIG_REQUEST = 20 OFPT_QUEUE_GET_CONFIG_REPLY = 21 OFPR_NO_MATCH = 0 OFPR_ACTION = 1 OFPRR_IDLE_TIMEOUT = 0 OFPRR_HARD_TIMEOUT = 1 OFPRR_DELETE = 2 OFPPR_ADD = 0 OFPPR_DELETE = 1 OFPPR_MODIFY = 2 OFPP_MAX = 0xff00 OFPP_IN_PORT = 0xfff8 OFPP_TABLE = 0xfff9 OFPP_NORMAL = 0xfffa OFPP_FLOOD = 0xfffb OFPP_ALL = 0xfffc OFPP_CONTROLLER = 0xfffd OFPP_LOCAL = 0xfffe OFPP_NONE = 0xffff OFPQ_ALL = 0xffffffff OFPFC_ADD = 0 OFPFC_MODIFY = 1 OFPFC_MODIFY_STRICT = 2 OFPFC_DELETE = 3 OFPFC_DELETE_STRICT = 4 OFPFF_SEND_FLOW_REM = 1 << 0 OFPFF_CHECK_OVERLAP = 1 << 1 OFPFF_EMERG = 1 << 2 OFPST_DESC = 0 OFPST_FLOW = 1 OFPST_AGGREGATE = 2 OFPST_TABLE = 3 OFPST_PORT = 4 OFPST_QUEUE = 5 OFPST_VENDOR = 0xffff OFPSF_REPLY_MORE = 1 << 0 class IOpenflowVendorHandler(interface.Interface): vendor_id = interface.Attribute( """The OpenFlow vendor identifier that is handled by this object. This is the unsigned 32-bit integer value that uniquely identifies the vendor. 
""") def connection_made(): def connection_lost(reason): def handle_vendor_message(msg_length, xid, buf): def serialize_vendor_action(action): def deserialize_vendor_action(action_length, buf): def handle_vendor_stats_request(msg_length, xid, buf): def handle_vendor_stats_reply(msg_length, xid, buf, reply_more): class IOpenflowVendorHandlerStub(interface.Interface): def send_vendor(vendor_id, xid=0, data=()): def send_stats_request_vendor(xid, vendor_id, data=()): def send_stats_reply_vendor(xid, vendor_id, data=(), reply_more=False): class OpenflowProtocol(object): interface.implements(interfaces.IProtocol, IOpenflowVendorHandlerStub) def __init__(self): self.error_data_bytes = 64 self.logger = logging.getLogger('') self.log_extra = {} self._vendor_handlers = {} def get_vendor_handlers(self): return self._vendor_handlers.values() def set_vendor_handlers(self, vendor_handlers): self._vendor_handlers = dict((v.vendor_id, v) for v in vendor_handlers) vendor_handlers = property(get_vendor_handlers, set_vendor_handlers) def get_vendor_handler(self, vendor_id): return self._vendor_handlers.get(vendor_id) def makeConnection(self, transport): self.connected = True self.transport = transport self.connectionMade() def connectionMade(self): self.logger.info('connection made', extra=self.log_extra) self.logger.info( 'protocol configuration: error_data_bytes=%r, ' 'vendor IDs and vendor_handlers=%r', self.error_data_bytes, self._vendor_handlers.items(), extra=self.log_extra) self._buffer = buffer.ReceiveBuffer() for v in self._vendor_handlers.itervalues(): v.connection_made() def connectionLost(self, reason): self.logger.info('connection lost with reason %r', reason, extra=self.log_extra) for v in self._vendor_handlers.itervalues(): v.connection_lost(reason) self.connected = False self._buffer = None
Apache License 2.0
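A small sketch of the header handling dataReceived above depends on: the fixed 8-byte OpenFlow header unpacked with OFP_HEADER_FORMAT ('!BBHL') from the context. The example bytes describe a hypothetical OFPT_HELLO message and are not taken from the repository.

import struct

OFP_HEADER_FORMAT = '!BBHL'
OFP_HEADER_LENGTH = 8

# version=0x01, type=OFPT_HELLO (0), length=8 (header only), xid=42
raw = struct.pack(OFP_HEADER_FORMAT, 0x01, 0, 8, 42)
version, msg_type, msg_length, xid = struct.unpack(
    OFP_HEADER_FORMAT, raw[:OFP_HEADER_LENGTH])
print(version, msg_type, msg_length, xid)   # 1 0 8 42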
algorand/py-algorand-sdk
algosdk/v2client/models/account.py
Account.created_assets
python
def created_assets(self, created_assets):
    self._created_assets = created_assets
Sets the created_assets of this Account.

\[apar\] parameters of assets created by this account. Note: the raw
account uses `map[int] -> Asset` for this type.  # noqa: E501

:param created_assets: The created_assets of this Account.  # noqa: E501
:type created_assets: list[Asset]
https://github.com/algorand/py-algorand-sdk/blob/e444328616d99c88b830366dedcbf75e9795dcb3/algosdk/v2client/models/account.py#L289-L298
import pprint class Account(object): openapi_types = { "address": "str", "amount": "int", "amount_without_pending_rewards": "int", "apps_local_state": "list[ApplicationLocalState]", "apps_total_schema": "ApplicationStateSchema", "assets": "list[AssetHolding]", "created_apps": "list[Application]", "created_assets": "list[Asset]", "participation": "AccountParticipation", "pending_rewards": "int", "reward_base": "int", "rewards": "int", "round": "int", "status": "str", "sig_type": "str", "auth_addr": "str", } attribute_map = { "address": "address", "amount": "amount", "amount_without_pending_rewards": "amount-without-pending-rewards", "apps_local_state": "apps-local-state", "apps_total_schema": "apps-total-schema", "assets": "assets", "created_apps": "created-apps", "created_assets": "created-assets", "participation": "participation", "pending_rewards": "pending-rewards", "reward_base": "reward-base", "rewards": "rewards", "round": "round", "status": "status", "sig_type": "sig-type", "auth_addr": "auth-addr", } def __init__( self, address=None, amount=None, amount_without_pending_rewards=None, apps_local_state=None, apps_total_schema=None, assets=None, created_apps=None, created_assets=None, participation=None, pending_rewards=None, reward_base=None, rewards=None, round=None, status=None, sig_type=None, auth_addr=None, ): self._address = None self._amount = None self._amount_without_pending_rewards = None self._apps_local_state = None self._apps_total_schema = None self._assets = None self._created_apps = None self._created_assets = None self._participation = None self._pending_rewards = None self._reward_base = None self._rewards = None self._round = None self._status = None self._sig_type = None self._auth_addr = None self.address = address self.amount = amount self.amount_without_pending_rewards = amount_without_pending_rewards if apps_local_state is not None: self.apps_local_state = apps_local_state if apps_total_schema is not None: self.apps_total_schema = apps_total_schema if assets is not None: self.assets = assets if created_apps is not None: self.created_apps = created_apps if created_assets is not None: self.created_assets = created_assets if participation is not None: self.participation = participation self.pending_rewards = pending_rewards if reward_base is not None: self.reward_base = reward_base self.rewards = rewards self.round = round self.status = status if sig_type is not None: self.sig_type = sig_type if auth_addr is not None: self.auth_addr = auth_addr @property def address(self): return self._address @address.setter def address(self, address): self._address = address @property def amount(self): return self._amount @amount.setter def amount(self, amount): self._amount = amount @property def amount_without_pending_rewards(self): return self._amount_without_pending_rewards @amount_without_pending_rewards.setter def amount_without_pending_rewards(self, amount_without_pending_rewards): self._amount_without_pending_rewards = amount_without_pending_rewards @property def apps_local_state(self): return self._apps_local_state @apps_local_state.setter def apps_local_state(self, apps_local_state): self._apps_local_state = apps_local_state @property def apps_total_schema(self): return self._apps_total_schema @apps_total_schema.setter def apps_total_schema(self, apps_total_schema): self._apps_total_schema = apps_total_schema @property def assets(self): return self._assets @assets.setter def assets(self, assets): self._assets = assets @property def created_apps(self): return 
self._created_apps @created_apps.setter def created_apps(self, created_apps): self._created_apps = created_apps @property def created_assets(self): return self._created_assets @created_assets.setter
MIT License
quantopian/trading_calendars
trading_calendars/calendar_utils.py
TradingCalendarDispatcher.register_calendar_alias
python
def register_calendar_alias(self, alias, real_name, force=False):
    if force:
        self.deregister_calendar(alias)
    if self.has_calendar(alias):
        raise CalendarNameCollision(calendar_name=alias)
    self._aliases[alias] = real_name
    try:
        self.resolve_alias(alias)
    except CyclicCalendarAlias:
        del self._aliases[alias]
        raise
Register an alias for a calendar.

This is useful when multiple exchanges should share a calendar, or when
there are multiple ways to refer to the same exchange.

After calling ``register_alias('alias', 'real_name')``, subsequent calls
to ``get_calendar('alias')`` will return the same result as
``get_calendar('real_name')``.

Parameters
----------
alias : str
    The name to be used to refer to a calendar.
real_name : str
    The canonical name of the registered calendar.
force : bool, optional
    If True, old calendars will be overwritten on a name collision.
    If False, name collisions will raise an exception. Default is False.
https://github.com/quantopian/trading_calendars/blob/19c4b677f13147928d34be5a3da50ba4161be45d/trading_calendars/calendar_utils.py#L290-L326
import itertools from .always_open import AlwaysOpenCalendar from .errors import ( CalendarNameCollision, CyclicCalendarAlias, InvalidCalendarName, ) from .exchange_calendar_asex import ASEXExchangeCalendar from .exchange_calendar_bvmf import BVMFExchangeCalendar from .exchange_calendar_cmes import CMESExchangeCalendar from .exchange_calendar_iepa import IEPAExchangeCalendar from .exchange_calendar_xams import XAMSExchangeCalendar from .exchange_calendar_xasx import XASXExchangeCalendar from .exchange_calendar_xbkk import XBKKExchangeCalendar from .exchange_calendar_xbog import XBOGExchangeCalendar from .exchange_calendar_xbom import XBOMExchangeCalendar from .exchange_calendar_xbru import XBRUExchangeCalendar from .exchange_calendar_xbud import XBUDExchangeCalendar from .exchange_calendar_xbue import XBUEExchangeCalendar from .exchange_calendar_xcbf import XCBFExchangeCalendar from .exchange_calendar_xcse import XCSEExchangeCalendar from .exchange_calendar_xdub import XDUBExchangeCalendar from .exchange_calendar_xfra import XFRAExchangeCalendar from .exchange_calendar_xetr import XETRExchangeCalendar from .exchange_calendar_xhel import XHELExchangeCalendar from .exchange_calendar_xhkg import XHKGExchangeCalendar from .exchange_calendar_xice import XICEExchangeCalendar from .exchange_calendar_xidx import XIDXExchangeCalendar from .exchange_calendar_xist import XISTExchangeCalendar from .exchange_calendar_xjse import XJSEExchangeCalendar from .exchange_calendar_xkar import XKARExchangeCalendar from .exchange_calendar_xkls import XKLSExchangeCalendar from .exchange_calendar_xkrx import XKRXExchangeCalendar from .exchange_calendar_xlim import XLIMExchangeCalendar from .exchange_calendar_xlis import XLISExchangeCalendar from .exchange_calendar_xlon import XLONExchangeCalendar from .exchange_calendar_xmad import XMADExchangeCalendar from .exchange_calendar_xmex import XMEXExchangeCalendar from .exchange_calendar_xmil import XMILExchangeCalendar from .exchange_calendar_xmos import XMOSExchangeCalendar from .exchange_calendar_xnys import XNYSExchangeCalendar from .exchange_calendar_xnze import XNZEExchangeCalendar from .exchange_calendar_xosl import XOSLExchangeCalendar from .exchange_calendar_xpar import XPARExchangeCalendar from .exchange_calendar_xphs import XPHSExchangeCalendar from .exchange_calendar_xpra import XPRAExchangeCalendar from .exchange_calendar_xses import XSESExchangeCalendar from .exchange_calendar_xsgo import XSGOExchangeCalendar from .exchange_calendar_xshg import XSHGExchangeCalendar from .exchange_calendar_xsto import XSTOExchangeCalendar from .exchange_calendar_xswx import XSWXExchangeCalendar from .exchange_calendar_xtae import XTAEExchangeCalendar from .exchange_calendar_xtai import XTAIExchangeCalendar from .exchange_calendar_xtks import XTKSExchangeCalendar from .exchange_calendar_xtse import XTSEExchangeCalendar from .exchange_calendar_xwar import XWARExchangeCalendar from .exchange_calendar_xwbo import XWBOExchangeCalendar from .us_futures_calendar import QuantopianUSFuturesCalendar from .weekday_calendar import WeekdayCalendar _default_calendar_factories = { 'ASEX': ASEXExchangeCalendar, 'BVMF': BVMFExchangeCalendar, 'CMES': CMESExchangeCalendar, 'IEPA': IEPAExchangeCalendar, 'XAMS': XAMSExchangeCalendar, 'XASX': XASXExchangeCalendar, 'XBKK': XBKKExchangeCalendar, 'XBOG': XBOGExchangeCalendar, 'XBOM': XBOMExchangeCalendar, 'XBRU': XBRUExchangeCalendar, 'XBUD': XBUDExchangeCalendar, 'XBUE': XBUEExchangeCalendar, 'XCBF': XCBFExchangeCalendar, 'XCSE': 
XCSEExchangeCalendar, 'XDUB': XDUBExchangeCalendar, 'XFRA': XFRAExchangeCalendar, 'XETR': XETRExchangeCalendar, 'XHEL': XHELExchangeCalendar, 'XHKG': XHKGExchangeCalendar, 'XICE': XICEExchangeCalendar, 'XIDX': XIDXExchangeCalendar, 'XIST': XISTExchangeCalendar, 'XJSE': XJSEExchangeCalendar, 'XKAR': XKARExchangeCalendar, 'XKLS': XKLSExchangeCalendar, 'XKRX': XKRXExchangeCalendar, 'XLIM': XLIMExchangeCalendar, 'XLIS': XLISExchangeCalendar, 'XLON': XLONExchangeCalendar, 'XMAD': XMADExchangeCalendar, 'XMEX': XMEXExchangeCalendar, 'XMIL': XMILExchangeCalendar, 'XMOS': XMOSExchangeCalendar, 'XNYS': XNYSExchangeCalendar, 'XNZE': XNZEExchangeCalendar, 'XOSL': XOSLExchangeCalendar, 'XPAR': XPARExchangeCalendar, 'XPHS': XPHSExchangeCalendar, 'XPRA': XPRAExchangeCalendar, 'XSES': XSESExchangeCalendar, 'XSGO': XSGOExchangeCalendar, 'XSHG': XSHGExchangeCalendar, 'XSTO': XSTOExchangeCalendar, 'XSWX': XSWXExchangeCalendar, 'XTAE': XTAEExchangeCalendar, 'XTAI': XTAIExchangeCalendar, 'XTKS': XTKSExchangeCalendar, 'XTSE': XTSEExchangeCalendar, 'XWAR': XWARExchangeCalendar, 'XWBO': XWBOExchangeCalendar, 'us_futures': QuantopianUSFuturesCalendar, '24/7': AlwaysOpenCalendar, '24/5': WeekdayCalendar, } _default_calendar_aliases = { 'NYSE': 'XNYS', 'NASDAQ': 'XNYS', 'BATS': 'XNYS', 'FWB': 'XFRA', 'LSE': 'XLON', 'TSX': 'XTSE', 'BMF': 'BVMF', 'CME': 'CMES', 'CBOT': 'CMES', 'COMEX': 'CMES', 'NYMEX': 'CMES', 'ICE': 'IEPA', 'ICEUS': 'IEPA', 'NYFE': 'IEPA', 'CFE': 'XCBF', 'JKT': 'XIDX', 'SIX': 'XSWX', 'JPX': 'XTKS', 'ASX': 'XASX', 'HKEX': 'XHKG', 'OSE': 'XOSL', 'BSE': 'XBOM', 'SSE': 'XSHG', 'TASE': 'XTAE', } default_calendar_names = sorted(_default_calendar_factories.keys()) class TradingCalendarDispatcher(object): def __init__(self, calendars, calendar_factories, aliases): self._calendars = calendars self._calendar_factories = dict(calendar_factories) self._aliases = dict(aliases) def get_calendar(self, name): canonical_name = self.resolve_alias(name) try: return self._calendars[canonical_name] except KeyError: pass try: factory = self._calendar_factories[canonical_name] except KeyError: raise InvalidCalendarName(calendar_name=name) calendar = self._calendars[canonical_name] = factory() return calendar def get_calendar_names(self): return list( set( itertools.chain( self._calendars.keys(), self._calendar_factories.keys(), self._aliases.keys() ) ) ) def has_calendar(self, name): return ( name in self._calendars or name in self._calendar_factories or name in self._aliases ) def register_calendar(self, name, calendar, force=False): if force: self.deregister_calendar(name) if self.has_calendar(name): raise CalendarNameCollision(calendar_name=name) self._calendars[name] = calendar def register_calendar_type(self, name, calendar_type, force=False): if force: self.deregister_calendar(name) if self.has_calendar(name): raise CalendarNameCollision(calendar_name=name) self._calendar_factories[name] = calendar_type
Apache License 2.0
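A minimal sketch, using only names visible in the context above, of how a calendar alias resolves after register_calendar_alias; the dispatcher construction mirrors the default factories and aliases shown, and the alias name 'NEW_YORK' is an assumption.

from trading_calendars.calendar_utils import (
    TradingCalendarDispatcher,
    _default_calendar_factories,
    _default_calendar_aliases,
)

dispatcher = TradingCalendarDispatcher(
    calendars={},
    calendar_factories=_default_calendar_factories,
    aliases=_default_calendar_aliases,
)
dispatcher.register_calendar_alias('NEW_YORK', 'XNYS')
# Both names resolve to the same cached XNYS calendar instance.
assert dispatcher.get_calendar('NEW_YORK') is dispatcher.get_calendar('XNYS')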
yelp/detect-secrets
detect_secrets/filters/util.py
get_caller_path
python
def get_caller_path(offset: int = 0) -> str:
    stack = inspect.stack()
    frame_info = stack[1 + offset]
    module_path = frame_info.frame.f_globals['__name__']
    function_name = frame_info.function
    return f'{module_path}.{function_name}'
This is a utility function to get the caller's fully qualified python
import path, so that it can be used to index into the global settings
object.

It is highly recommended to cache the response to this (for performance
reasons), as such:

    >>> @lru_cache(maxsize=1)
    ... def _get_specific_caller_path() -> str:
    ...     return get_caller_path(offset=1)

For a deeper dive into why this performance matters, check out
https://stackoverflow.com/a/17366561/13340678, and estimate how many
secrets you will need to filter out (and thereby, invoke this function for)

:raises: IndexError
https://github.com/yelp/detect-secrets/blob/fefc6703fc08c68a7c1ad5b879a9e994b297f8e9/detect_secrets/filters/util.py#L5-L26
import hashlib
import inspect
Apache License 2.0
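A minimal sketch of the caching pattern the docstring above recommends; the helper name is hypothetical, and offset=1 skips the helper's own frame so the returned path names whichever filter function called it.

from functools import lru_cache

from detect_secrets.filters.util import get_caller_path


@lru_cache(maxsize=1)
def _get_this_filter_path() -> str:
    # offset=1: skip this helper's frame and report the calling filter.
    return get_caller_path(offset=1)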
sdispater/clikit
src/clikit/api/io/io.py
IO.set_formatter
python
def set_formatter(self, formatter):
    self._output.set_formatter(formatter)
    self._error_output.set_formatter(formatter)
Sets the output formatter.
https://github.com/sdispater/clikit/blob/1da34f16c50a2a7d4a22d21250cb6268c7b3eefd/src/clikit/api/io/io.py#L205-L210
from typing import TYPE_CHECKING from typing import Optional from clikit.api.formatter import Formatter from clikit.api.formatter.style import Style if TYPE_CHECKING: from clikit.ui.rectangle import Rectangle from .indent import Indent from .input import Input from .output import Output class IO(Formatter): def __init__( self, input, output, error_output ): self._input = input self._output = output self._error_output = error_output self._terminal_dimensions = None @property def input(self): return self._input @property def output(self): return self._output @property def error_output(self): return self._error_output def read(self, length, default=None): return self._input.read(length, default=default) def read_line( self, length=None, default=None ): return self._input.read_line(length=length, default=default) def write(self, string, flags=None): self._output.write(string, flags=flags) def write_line(self, string, flags=None): self._output.write_line(string, flags=flags) def write_raw(self, string, flags=None): self._output.write_raw(string, flags=flags) def write_line_raw(self, string, flags=None): self._output.write_raw(string, flags=flags) def error(self, string, flags=None): self._error_output.write(string, flags=flags) def error_line(self, string, flags=None): self._error_output.write_line(string, flags=flags) def error_raw(self, string, flags=None): self._error_output.write_raw(string, flags=flags) def error_line_raw(self, string, flags=None): self._error_output.write_raw(string, flags=flags) def flush(self): self._output.flush() self._error_output.flush() def close(self): self._input.close() self._output.close() self._error_output.close() def set_interactive(self, interactive): self._input.set_interactive(interactive) def is_interactive(self): return self._input.is_interactive() def set_verbosity(self, verbosity): self._output.set_verbosity(verbosity) self._error_output.set_verbosity(verbosity) def is_verbose(self): return self._output.is_verbose() def is_very_verbose(self): return self._output.is_very_verbose() def is_debug(self): return self._output.is_debug() @property def verbosity(self): return self._output.verbosity def set_quiet(self, quiet): self._output.set_quiet(quiet) self._error_output.set_quiet(quiet) def is_quiet(self): return self._output.is_quiet() def set_terminal_dimensions(self, dimensions): self._terminal_dimensions = dimensions @property def terminal_dimensions(self): if not self._terminal_dimensions: self._terminal_dimensions = self.get_default_terminal_dimensions() return self._terminal_dimensions def get_default_terminal_dimensions(self): from clikit.ui.rectangle import Rectangle return Rectangle(80, 20)
MIT License
ganeti/ganeti
lib/utils/x509.py
LoadSignedX509Certificate
python
def LoadSignedX509Certificate(cert_pem, key):
    (salt, signature) = _ExtractX509CertificateSignature(cert_pem)
    (cert, sane_pem) = ExtractX509Certificate(cert_pem)
    if not utils_hash.VerifySha1Hmac(key, sane_pem, signature, salt=salt):
        raise errors.GenericError("X509 certificate signature is invalid")
    return (cert, salt)
Verifies a signed X509 certificate.

@type cert_pem: string
@param cert_pem: Certificate in PEM format and with signature header
@type key: string
@param key: Key for HMAC
@rtype: tuple; (OpenSSL.crypto.X509, string)
@return: X509 certificate object and salt
https://github.com/ganeti/ganeti/blob/4d21019c72cba4d746f5d17ca22098f4c7682e9c/lib/utils/x509.py#L232-L251
import calendar import datetime import errno import logging import re import time import OpenSSL from ganeti import errors from ganeti import constants from ganeti import pathutils from ganeti.utils import text as utils_text from ganeti.utils import io as utils_io from ganeti.utils import hash as utils_hash HEX_CHAR_RE = r"[a-zA-Z0-9]" VALID_X509_SIGNATURE_SALT = re.compile("^%s+$" % HEX_CHAR_RE, re.S) X509_SIGNATURE = re.compile(r"^%s:\s*(?P<salt>%s+)/(?P<sign>%s+)$" % (re.escape(constants.X509_CERT_SIGNATURE_HEADER), HEX_CHAR_RE, HEX_CHAR_RE), re.S | re.I) (CERT_WARNING, CERT_ERROR) = range(1, 3) _ASN1_TIME_REGEX = re.compile(r"^(\d+)([-+]\d\d)(\d\d)$") def _ParseAsn1Generalizedtime(value): if value is None: return None if isinstance(value, bytes): value = value.decode("ascii") m = _ASN1_TIME_REGEX.match(value) if m: asn1time = m.group(1) hours = int(m.group(2)) minutes = int(m.group(3)) utcoffset = (60 * hours) + minutes else: if not value.endswith("Z"): raise ValueError("Missing timezone") asn1time = value[:-1] utcoffset = 0 parsed = time.strptime(asn1time, "%Y%m%d%H%M%S") tt = datetime.datetime(*(parsed[:7])) - datetime.timedelta(minutes=utcoffset) return calendar.timegm(tt.utctimetuple()) def GetX509CertValidity(cert): not_before = _ParseAsn1Generalizedtime(cert.get_notBefore()) not_after = _ParseAsn1Generalizedtime(cert.get_notAfter()) return (not_before, not_after) def _VerifyCertificateInner(expired, not_before, not_after, now, warn_days, error_days): if expired: msg = "Certificate is expired" if not_before is not None and not_after is not None: msg += (" (valid from %s to %s)" % (utils_text.FormatTime(not_before), utils_text.FormatTime(not_after))) elif not_before is not None: msg += " (valid from %s)" % utils_text.FormatTime(not_before) elif not_after is not None: msg += " (valid until %s)" % utils_text.FormatTime(not_after) return (CERT_ERROR, msg) elif not_before is not None and not_before > now: return (CERT_WARNING, "Certificate not yet valid (valid from %s)" % utils_text.FormatTime(not_before)) elif not_after is not None: remaining_days = int((not_after - now) / (24 * 3600)) msg = "Certificate expires in about %d days" % remaining_days if error_days is not None and remaining_days <= error_days: return (CERT_ERROR, msg) if warn_days is not None and remaining_days <= warn_days: return (CERT_WARNING, msg) return (None, None) def VerifyX509Certificate(cert, warn_days, error_days): (not_before, not_after) = GetX509CertValidity(cert) now = time.time() + constants.NODE_MAX_CLOCK_SKEW return _VerifyCertificateInner(cert.has_expired(), not_before, not_after, now, warn_days, error_days) def SignX509Certificate(cert, key, salt): if not VALID_X509_SIGNATURE_SALT.match(salt): raise errors.GenericError("Invalid salt: %r" % salt) cert_pem = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert).decode("ascii") return ("%s: %s/%s\n\n%s" % (constants.X509_CERT_SIGNATURE_HEADER, salt, utils_hash.Sha1Hmac(key, cert_pem, salt=salt), cert_pem)) def _ExtractX509CertificateSignature(cert_pem): if isinstance(cert_pem, bytes): cert_pem = cert_pem.decode("ascii") for line in cert_pem.splitlines(): if line.startswith("---"): break m = X509_SIGNATURE.match(line.strip()) if m: return (m.group("salt"), m.group("sign")) raise errors.GenericError("X509 certificate signature is missing")
BSD 2-Clause Simplified License
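A round-trip sketch of SignX509Certificate (from the context above) and LoadSignedX509Certificate, assuming pyOpenSSL and ganeti are importable; the throwaway certificate, HMAC key, and salt are placeholder values for illustration only.

import OpenSSL

from ganeti.utils import x509 as utils_x509

# Build a throwaway self-signed certificate just for the demonstration.
key = OpenSSL.crypto.PKey()
key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
cert = OpenSSL.crypto.X509()
cert.get_subject().CN = "example.invalid"
cert.set_serial_number(1)
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(3600)
cert.set_issuer(cert.get_subject())
cert.set_pubkey(key)
cert.sign(key, "sha256")

hmac_key = "cluster-hmac-key"   # placeholder HMAC key
salt = "0123456789abcdef"       # hex only, matching VALID_X509_SIGNATURE_SALT

signed_pem = utils_x509.SignX509Certificate(cert, hmac_key, salt)
loaded_cert, loaded_salt = utils_x509.LoadSignedX509Certificate(signed_pem, hmac_key)
assert loaded_salt == salt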
ansible/ansible-navigator
tests/unit/configuration_subsystem/conftest.py
fixture_generate_config
python
def fixture_generate_config():
    return _generate_config
generate a config
https://github.com/ansible/ansible-navigator/blob/62fdbd05f25fb2d79133b3ab207f53ac2f2d6d36/tests/unit/configuration_subsystem/conftest.py#L73-L75
import os from copy import deepcopy from typing import Dict from typing import List from typing import NamedTuple import pytest from ansible_navigator.configuration_subsystem.configurator import Configurator from ansible_navigator.configuration_subsystem.definitions import ApplicationConfiguration from ansible_navigator.configuration_subsystem.navigator_configuration import NavigatorConfiguration from ansible_navigator.utils import ExitMessage from ansible_navigator.utils import LogMessage from ansible_navigator._yaml import yaml from ansible_navigator._yaml import Loader from ...defaults import FIXTURES_DIR TEST_FIXTURE_DIR = os.path.join(FIXTURES_DIR, "unit", "configuration_subsystem") class GenerateConfigResponse(NamedTuple): messages: List[LogMessage] exit_messages: List[ExitMessage] application_configuration: ApplicationConfiguration settings_contents: Dict def _generate_config(params=None, setting_file_name=None, initial=True) -> GenerateConfigResponse: if params is None: params = [] if setting_file_name: settings_file_path = os.path.join(TEST_FIXTURE_DIR, setting_file_name) with open(settings_file_path) as file: try: settings_contents = yaml.load(file, Loader=Loader) except yaml.parser.ParserError: settings_contents = {} else: settings_file_path = "" settings_contents = {} application_configuration = deepcopy(NavigatorConfiguration) configurator = Configurator( application_configuration=application_configuration, params=params, settings_file_path=settings_file_path or None, initial=initial, ) messages, exit_messages = configurator.configure() return GenerateConfigResponse( messages=messages, exit_messages=exit_messages, application_configuration=application_configuration, settings_contents=settings_contents, ) @pytest.fixture(name="generate_config")
Apache License 2.0
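A hypothetical test sketch using the generate_config fixture above; calling it with no arguments leaves settings_contents empty, per the _generate_config implementation shown in the context.

def test_generate_config_smoke(generate_config):
    # No CLI params and no settings file: settings_contents stays empty.
    response = generate_config()
    assert response.settings_contents == {}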
rwbfd/opencompetition
src/nlp/classification/pytorch/transformers/tests/test_modeling_common.py
floats_tensor
python
def floats_tensor(shape, scale=1.0, rng=None, name=None):
    if rng is None:
        rng = global_rng
    total_dims = 1
    for dim in shape:
        total_dims *= dim
    values = []
    for _ in range(total_dims):
        values.append(rng.random() * scale)
    return torch.tensor(data=values, dtype=torch.float, device=torch_device).view(shape).contiguous()
Creates a random float32 tensor of the shape within the vocab size.
https://github.com/rwbfd/opencompetition/blob/5262fc5fa7efd7b483c1dc09cb7747dd75e37175/src/nlp/classification/pytorch/transformers/tests/test_modeling_common.py#L610-L623
import copy import logging import os.path import random import tempfile import unittest from transformers import is_torch_available from .utils import require_torch, slow, torch_device if is_torch_available(): import torch import numpy as np from transformers import ( AdaptiveEmbedding, PretrainedConfig, PreTrainedModel, BertModel, BertConfig, BERT_PRETRAINED_MODEL_ARCHIVE_MAP, ) def _config_zero_init(config): configs_no_init = copy.deepcopy(config) for key in configs_no_init.__dict__.keys(): if "_range" in key or "_std" in key or "initializer_factor" in key: setattr(configs_no_init, key, 0.0) return configs_no_init @require_torch class ModelTesterMixin: model_tester = None all_model_classes = () test_torchscript = True test_pruning = True test_resize_embeddings = True test_head_masking = True is_encoder_decoder = False def test_save_load(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: model = model_class(config) model.to(torch_device) model.eval() with torch.no_grad(): outputs = model(**inputs_dict) out_2 = outputs[0].numpy() out_2[np.isnan(out_2)] = 0 with tempfile.TemporaryDirectory() as tmpdirname: model.save_pretrained(tmpdirname) model = model_class.from_pretrained(tmpdirname) model.to(torch_device) with torch.no_grad(): after_outputs = model(**inputs_dict) out_1 = after_outputs[0].cpu().numpy() out_1[np.isnan(out_1)] = 0 max_diff = np.amax(np.abs(out_1 - out_2)) self.assertLessEqual(max_diff, 1e-5) def test_initialization(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() configs_no_init = _config_zero_init(config) for model_class in self.all_model_classes: model = model_class(config=configs_no_init) for name, param in model.named_parameters(): if param.requires_grad: self.assertIn( param.data.mean().item(), [0.0, 1.0], msg="Parameter {} of model {} seems not properly initialized".format(name, model_class), ) def test_determinism(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: model = model_class(config) model.to(torch_device) model.eval() with torch.no_grad(): first = model(**inputs_dict)[0] second = model(**inputs_dict)[0] out_1 = first.cpu().numpy() out_2 = second.cpu().numpy() out_1 = out_1[~np.isnan(out_1)] out_2 = out_2[~np.isnan(out_2)] max_diff = np.amax(np.abs(out_1 - out_2)) self.assertLessEqual(max_diff, 1e-5) def test_attention_outputs(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() seq_len = getattr(self.model_tester, "seq_length", None) decoder_seq_length = getattr(self.model_tester, "decoder_seq_length", seq_len) encoder_seq_length = getattr(self.model_tester, "encoder_seq_length", seq_len) decoder_key_length = getattr(self.model_tester, "key_length", decoder_seq_length) encoder_key_length = getattr(self.model_tester, "key_length", encoder_seq_length) for model_class in self.all_model_classes: config.output_attentions = True config.output_hidden_states = False model = model_class(config) model.to(torch_device) model.eval() with torch.no_grad(): outputs = model(**inputs_dict) attentions = outputs[-1] self.assertEqual(model.config.output_attentions, True) self.assertEqual(model.config.output_hidden_states, False) self.assertEqual(len(attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, encoder_key_length], ) out_len = len(outputs) 
if self.is_encoder_decoder: self.assertEqual(out_len % 2, 0) decoder_attentions = outputs[(out_len // 2) - 1] self.assertEqual(model.config.output_attentions, True) self.assertEqual(model.config.output_hidden_states, False) self.assertEqual(len(decoder_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(decoder_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, decoder_seq_length, decoder_key_length], ) config.output_attentions = True config.output_hidden_states = True model = model_class(config) model.to(torch_device) model.eval() with torch.no_grad(): outputs = model(**inputs_dict) self.assertEqual(out_len + (2 if self.is_encoder_decoder else 1), len(outputs)) self.assertEqual(model.config.output_attentions, True) self.assertEqual(model.config.output_hidden_states, True) self_attentions = outputs[-1] self.assertEqual(len(self_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(self_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, encoder_key_length], ) def test_torchscript(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() self._create_and_check_torchscript(config, inputs_dict) def test_torchscript_output_attentions(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() config.output_attentions = True self._create_and_check_torchscript(config, inputs_dict) def test_torchscript_output_hidden_state(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() config.output_hidden_states = True self._create_and_check_torchscript(config, inputs_dict) def _create_and_check_torchscript(self, config, inputs_dict): if not self.test_torchscript: return configs_no_init = _config_zero_init(config) configs_no_init.torchscript = True for model_class in self.all_model_classes: model = model_class(config=configs_no_init) model.to(torch_device) model.eval() inputs = inputs_dict["input_ids"] try: traced_gpt2 = torch.jit.trace(model, inputs) except RuntimeError: self.fail("Couldn't trace module.") with tempfile.TemporaryDirectory() as tmp_dir_name: pt_file_name = os.path.join(tmp_dir_name, "traced_model.pt") try: torch.jit.save(traced_gpt2, pt_file_name) except Exception: self.fail("Couldn't save module.") try: loaded_model = torch.jit.load(pt_file_name) except Exception: self.fail("Couldn't load module.") model.to(torch_device) model.eval() loaded_model.to(torch_device) loaded_model.eval() model_state_dict = model.state_dict() loaded_model_state_dict = loaded_model.state_dict() self.assertEqual(set(model_state_dict.keys()), set(loaded_model_state_dict.keys())) models_equal = True for layer_name, p1 in model_state_dict.items(): p2 = loaded_model_state_dict[layer_name] if p1.data.ne(p2.data).sum() > 0: models_equal = False self.assertTrue(models_equal) def test_headmasking(self): if not self.test_head_masking: return global_rng.seed(42) config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() global_rng.seed() config.output_attentions = True config.output_hidden_states = True configs_no_init = _config_zero_init(config) for model_class in self.all_model_classes: model = model_class(config=configs_no_init) model.to(torch_device) model.eval() head_mask = torch.ones( self.model_tester.num_hidden_layers, self.model_tester.num_attention_heads, device=torch_device ) head_mask[0, 0] = 0 head_mask[-1, :-1] = 0 head_mask.requires_grad_(requires_grad=True) inputs = inputs_dict.copy() inputs["head_mask"] = 
head_mask outputs = model(**inputs) output = sum(t.sum() for t in outputs[0]) output = output.sum() output.backward() multihead_outputs = head_mask.grad attentions = outputs[-1] for t in attentions: self.assertLess( torch.sum(torch.isnan(t)), t.numel() / 4 ) attentions = [ t.masked_fill(torch.isnan(t), 0.0) for t in attentions ] self.assertIsNotNone(multihead_outputs) self.assertEqual(len(multihead_outputs), self.model_tester.num_hidden_layers) self.assertAlmostEqual(attentions[0][..., 0, :, :].flatten().sum().item(), 0.0) self.assertNotEqual(attentions[0][..., -1, :, :].flatten().sum().item(), 0.0) self.assertNotEqual(attentions[1][..., 0, :, :].flatten().sum().item(), 0.0) self.assertAlmostEqual(attentions[-1][..., -2, :, :].flatten().sum().item(), 0.0) self.assertNotEqual(attentions[-1][..., -1, :, :].flatten().sum().item(), 0.0) def test_head_pruning(self): if not self.test_pruning: return for model_class in self.all_model_classes: config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() if "head_mask" in inputs_dict: del inputs_dict["head_mask"] config.output_attentions = True config.output_hidden_states = False model = model_class(config=config) model.to(torch_device) model.eval() heads_to_prune = {0: list(range(1, self.model_tester.num_attention_heads)), -1: [0]} model.prune_heads(heads_to_prune) with torch.no_grad(): outputs = model(**inputs_dict) attentions = outputs[-1] self.assertEqual(attentions[0].shape[-3], 1) self.assertEqual(attentions[1].shape[-3], self.model_tester.num_attention_heads) self.assertEqual(attentions[-1].shape[-3], self.model_tester.num_attention_heads - 1) def test_head_pruning_save_load_from_pretrained(self): if not self.test_pruning: return for model_class in self.all_model_classes: config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() if "head_mask" in inputs_dict: del inputs_dict["head_mask"] config.output_attentions = True config.output_hidden_states = False model = model_class(config=config) model.to(torch_device) model.eval() heads_to_prune = {0: list(range(1, self.model_tester.num_attention_heads)), -1: [0]} model.prune_heads(heads_to_prune) with tempfile.TemporaryDirectory() as temp_dir_name: model.save_pretrained(temp_dir_name) model = model_class.from_pretrained(temp_dir_name) model.to(torch_device) with torch.no_grad(): outputs = model(**inputs_dict) attentions = outputs[-1] self.assertEqual(attentions[0].shape[-3], 1) self.assertEqual(attentions[1].shape[-3], self.model_tester.num_attention_heads) self.assertEqual(attentions[-1].shape[-3], self.model_tester.num_attention_heads - 1) def test_head_pruning_save_load_from_config_init(self): if not self.test_pruning: return for model_class in self.all_model_classes: config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() if "head_mask" in inputs_dict: del inputs_dict["head_mask"] config.output_attentions = True config.output_hidden_states = False heads_to_prune = {0: list(range(1, self.model_tester.num_attention_heads)), -1: [0]} config.pruned_heads = heads_to_prune model = model_class(config=config) model.to(torch_device) model.eval() with torch.no_grad(): outputs = model(**inputs_dict) attentions = outputs[-1] self.assertEqual(attentions[0].shape[-3], 1) self.assertEqual(attentions[1].shape[-3], self.model_tester.num_attention_heads) self.assertEqual(attentions[-1].shape[-3], self.model_tester.num_attention_heads - 1) def test_head_pruning_integration(self): if not self.test_pruning: return for model_class in 
self.all_model_classes: config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() if "head_mask" in inputs_dict: del inputs_dict["head_mask"] config.output_attentions = True config.output_hidden_states = False heads_to_prune = {0: [0], 1: [1, 2]} config.pruned_heads = heads_to_prune model = model_class(config=config) model.to(torch_device) model.eval() with torch.no_grad(): outputs = model(**inputs_dict) attentions = outputs[-1] self.assertEqual(attentions[0].shape[-3], self.model_tester.num_attention_heads - 1) self.assertEqual(attentions[1].shape[-3], self.model_tester.num_attention_heads - 2) self.assertEqual(attentions[2].shape[-3], self.model_tester.num_attention_heads) self.assertEqual(attentions[3].shape[-3], self.model_tester.num_attention_heads) with tempfile.TemporaryDirectory() as temp_dir_name: model.save_pretrained(temp_dir_name) model = model_class.from_pretrained(temp_dir_name) model.to(torch_device) with torch.no_grad(): outputs = model(**inputs_dict) attentions = outputs[-1] self.assertEqual(attentions[0].shape[-3], self.model_tester.num_attention_heads - 1) self.assertEqual(attentions[1].shape[-3], self.model_tester.num_attention_heads - 2) self.assertEqual(attentions[2].shape[-3], self.model_tester.num_attention_heads) self.assertEqual(attentions[3].shape[-3], self.model_tester.num_attention_heads) heads_to_prune = {0: [0], 2: [1, 2]} model.prune_heads(heads_to_prune) with torch.no_grad(): outputs = model(**inputs_dict) attentions = outputs[-1] self.assertEqual(attentions[0].shape[-3], self.model_tester.num_attention_heads - 1) self.assertEqual(attentions[1].shape[-3], self.model_tester.num_attention_heads - 2) self.assertEqual(attentions[2].shape[-3], self.model_tester.num_attention_heads - 2) self.assertEqual(attentions[3].shape[-3], self.model_tester.num_attention_heads) self.assertDictEqual(model.config.pruned_heads, {0: [0], 1: [1, 2], 2: [1, 2]}) def test_hidden_states_output(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: config.output_hidden_states = True config.output_attentions = False model = model_class(config) model.to(torch_device) model.eval() with torch.no_grad(): outputs = model(**inputs_dict) hidden_states = outputs[-1] self.assertEqual(model.config.output_attentions, False) self.assertEqual(model.config.output_hidden_states, True) self.assertEqual(len(hidden_states), self.model_tester.num_hidden_layers + 1) self.assertListEqual( list(hidden_states[0].shape[-2:]), [ self.model_tester.encoder_seq_length if hasattr(self.model_tester, "encoder_seq_length") else self.model_tester.seq_length, self.model_tester.hidden_size, ], ) def test_resize_tokens_embeddings(self): original_config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() if not self.test_resize_embeddings: return for model_class in self.all_model_classes: config = copy.deepcopy(original_config) model = model_class(config) model_vocab_size = config.vocab_size model_embed = model.resize_token_embeddings(model_vocab_size) cloned_embeddings = model_embed.weight.clone() model_embed = model.resize_token_embeddings(model_vocab_size + 10) self.assertEqual(model.config.vocab_size, model_vocab_size + 10) self.assertEqual(model_embed.weight.shape[0], cloned_embeddings.shape[0] + 10) model(**inputs_dict) model_embed = model.resize_token_embeddings(model_vocab_size - 15) self.assertEqual(model.config.vocab_size, model_vocab_size - 15) self.assertEqual(model_embed.weight.shape[0], 
cloned_embeddings.shape[0] - 15) inputs_dict["input_ids"].clamp_(max=model_vocab_size - 15 - 1) model(**inputs_dict) models_equal = True for p1, p2 in zip(cloned_embeddings, model_embed.weight): if p1.data.ne(p2.data).sum() > 0: models_equal = False self.assertTrue(models_equal) def test_model_common_attributes(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: model = model_class(config) self.assertIsInstance(model.get_input_embeddings(), (torch.nn.Embedding, AdaptiveEmbedding)) model.set_input_embeddings(torch.nn.Embedding(10, 10)) x = model.get_output_embeddings() self.assertTrue(x is None or isinstance(x, torch.nn.Linear)) def test_tie_model_weights(self): if not self.test_torchscript: return config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() def check_same_values(layer_1, layer_2): equal = True for p1, p2 in zip(layer_1.weight, layer_2.weight): if p1.data.ne(p2.data).sum() > 0: equal = False return equal for model_class in self.all_model_classes: config.torchscript = True model_not_tied = model_class(config) if model_not_tied.get_output_embeddings() is None: continue params_not_tied = list(model_not_tied.parameters()) config_tied = copy.deepcopy(config) config_tied.torchscript = False model_tied = model_class(config_tied) params_tied = list(model_tied.parameters()) self.assertGreater(len(params_not_tied), len(params_tied)) model_tied.resize_token_embeddings(config.vocab_size + 10) params_tied_2 = list(model_tied.parameters()) self.assertGreater(len(params_not_tied), len(params_tied)) self.assertEqual(len(params_tied_2), len(params_tied)) def test_inputs_embeds(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() if not self.is_encoder_decoder: input_ids = inputs_dict["input_ids"] del inputs_dict["input_ids"] else: encoder_input_ids = inputs_dict["encoder_input_ids"] decoder_input_ids = inputs_dict["decoder_input_ids"] del inputs_dict["encoder_input_ids"] del inputs_dict["decoder_input_ids"] for model_class in self.all_model_classes: model = model_class(config) model.to(torch_device) model.eval() wte = model.get_input_embeddings() if not self.is_encoder_decoder: inputs_dict["inputs_embeds"] = wte(input_ids) else: inputs_dict["encoder_inputs_embeds"] = wte(encoder_input_ids) inputs_dict["decoder_inputs_embeds"] = wte(decoder_input_ids) with torch.no_grad(): model(**inputs_dict) global_rng = random.Random() def ids_tensor(shape, vocab_size, rng=None, name=None): if rng is None: rng = global_rng total_dims = 1 for dim in shape: total_dims *= dim values = [] for _ in range(total_dims): values.append(rng.randint(0, vocab_size - 1)) return torch.tensor(data=values, dtype=torch.long, device=torch_device).view(shape).contiguous()
Apache License 2.0
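The head-masking test in the context of the record above builds a per-layer, per-head mask tensor. The layout of that mask can be checked in isolation with a few lines of torch; this is only an illustrative sketch of that one step with made-up layer and head counts, not part of the test suite.

import torch

# Illustration of the head-mask layout used in test_headmasking above:
# one row per transformer layer, one column per attention head; 0 masks a head out.
num_hidden_layers, num_attention_heads = 4, 8   # made-up sizes for the sketch
head_mask = torch.ones(num_hidden_layers, num_attention_heads)
head_mask[0, 0] = 0       # mask the first head of the first layer
head_mask[-1, :-1] = 0    # keep only the last head of the last layer
head_mask.requires_grad_(requires_grad=True)
print(head_mask)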
nuagenetworks/vspk-python
vspk/v5_0/nuvirtualfirewallpolicy.py
NUVirtualFirewallPolicy.allow_address_spoof
python
def allow_address_spoof(self): return self._allow_address_spoof
Get allow_address_spoof value. Notes: If enabled, it disables the default anti-spoof rule for this domain, i.e. the rule that normally prevents any VM from sending packets that do not originate from that particular VM. This attribute is named `allowAddressSpoof` in the VSD API.
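A minimal usage sketch for the getter above. It assumes, as is typical for bambou-based vspk objects, that keyword arguments passed to the constructor populate the exposed attributes, so the local value can be read without a VSD session; the names and values are illustrative only.

from vspk.v5_0 import NUVirtualFirewallPolicy

# Assumption: constructor kwargs populate exposed attributes (usual bambou behaviour).
policy = NUVirtualFirewallPolicy(name="demo-policy", allow_address_spoof=True)
print(policy.allow_address_spoof)  # True -> the default anti-spoof rule is disabled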
https://github.com/nuagenetworks/vspk-python/blob/375cce10ae144ad6017104e57fcd3630898cc2a6/vspk/v5_0/nuvirtualfirewallpolicy.py#L317-L327
from .fetchers import NUMetadatasFetcher from .fetchers import NUVirtualFirewallRulesFetcher from .fetchers import NUGlobalMetadatasFetcher from bambou import NURESTObject class NUVirtualFirewallPolicy(NURESTObject): __rest_name__ = "virtualfirewallpolicy" __resource_name__ = "virtualfirewallpolicies" CONST_POLICY_STATE_DRAFT = "DRAFT" CONST_POLICY_STATE_LIVE = "LIVE" CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL" CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE" CONST_PRIORITY_TYPE_TOP = "TOP" CONST_PRIORITY_TYPE_BOTTOM = "BOTTOM" def __init__(self, **kwargs): super(NUVirtualFirewallPolicy, self).__init__() self._name = None self._last_updated_by = None self._active = None self._default_allow_ip = None self._default_allow_non_ip = None self._default_install_acl_implicit_rules = None self._description = None self._allow_address_spoof = None self._entity_scope = None self._policy_state = None self._priority = None self._priority_type = None self._associated_egress_template_id = None self._associated_ingress_template_id = None self._associated_live_entity_id = None self._auto_generate_priority = None self._external_id = None self.expose_attribute(local_name="name", remote_name="name", attribute_type=str, is_required=True, is_unique=False) self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="active", remote_name="active", attribute_type=bool, is_required=False, is_unique=False) self.expose_attribute(local_name="default_allow_ip", remote_name="defaultAllowIP", attribute_type=bool, is_required=False, is_unique=False) self.expose_attribute(local_name="default_allow_non_ip", remote_name="defaultAllowNonIP", attribute_type=bool, is_required=False, is_unique=False) self.expose_attribute(local_name="default_install_acl_implicit_rules", remote_name="defaultInstallACLImplicitRules", attribute_type=bool, is_required=False, is_unique=False) self.expose_attribute(local_name="description", remote_name="description", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="allow_address_spoof", remote_name="allowAddressSpoof", attribute_type=bool, is_required=False, is_unique=False) self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL']) self.expose_attribute(local_name="policy_state", remote_name="policyState", attribute_type=str, is_required=False, is_unique=False, choices=[u'DRAFT', u'LIVE']) self.expose_attribute(local_name="priority", remote_name="priority", attribute_type=int, is_required=False, is_unique=False) self.expose_attribute(local_name="priority_type", remote_name="priorityType", attribute_type=str, is_required=False, is_unique=False, choices=[u'BOTTOM', u'TOP']) self.expose_attribute(local_name="associated_egress_template_id", remote_name="associatedEgressTemplateID", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="associated_ingress_template_id", remote_name="associatedIngressTemplateID", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="associated_live_entity_id", remote_name="associatedLiveEntityID", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="auto_generate_priority", remote_name="autoGeneratePriority", attribute_type=bool, is_required=False, is_unique=False) self.expose_attribute(local_name="external_id", 
remote_name="externalID", attribute_type=str, is_required=False, is_unique=True) self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child") self.virtual_firewall_rules = NUVirtualFirewallRulesFetcher.fetcher_with_object(parent_object=self, relationship="child") self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child") self._compute_args(**kwargs) @property def name(self): return self._name @name.setter def name(self, value): self._name = value @property def last_updated_by(self): return self._last_updated_by @last_updated_by.setter def last_updated_by(self, value): self._last_updated_by = value @property def active(self): return self._active @active.setter def active(self, value): self._active = value @property def default_allow_ip(self): return self._default_allow_ip @default_allow_ip.setter def default_allow_ip(self, value): self._default_allow_ip = value @property def default_allow_non_ip(self): return self._default_allow_non_ip @default_allow_non_ip.setter def default_allow_non_ip(self, value): self._default_allow_non_ip = value @property def default_install_acl_implicit_rules(self): return self._default_install_acl_implicit_rules @default_install_acl_implicit_rules.setter def default_install_acl_implicit_rules(self, value): self._default_install_acl_implicit_rules = value @property def description(self): return self._description @description.setter def description(self, value): self._description = value @property
BSD 3-Clause New or Revised License
azure/autorest.python
test/vanilla/version-tolerant/Expected/AcceptanceTests/ModelFlatteningVersionTolerant/modelflatteningversiontolerant/aio/operations/_operations.py
AutoRestResourceFlatteningTestServiceOperationsMixin.put_simple_product
python
async def put_simple_product(self, simple_body_product: Any = None, **kwargs: Any) -> Any: cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) content_type = kwargs.pop("content_type", "application/json") if simple_body_product is not None: json = simple_body_product else: json = None request = build_put_simple_product_request( content_type=content_type, json=json, template_url=self.put_simple_product.metadata["url"], ) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: deserialized = response.json() else: deserialized = None if cls: return cls(pipeline_response, deserialized, {}) return deserialized
Put Simple Product with client flattening true on the model. :param simple_body_product: Simple body product to put. :type simple_body_product: Any :return: JSON object :rtype: Any :raises: ~azure.core.exceptions.HttpResponseError Example: .. code-block:: python # JSON input template you can fill out and use as your body input. simple_body_product = { "base_product_description": "str", # Optional. Description of product. "base_product_id": "str", # Required. Unique identifier representing a specific product for a given latitude & longitude. For example, uberX in San Francisco will have a different product_id than uberX in Los Angeles. "details": { "max_product_capacity": "Large", # Default value is "Large". Capacity of product. For example, 4 people. Has constant value: "Large". "max_product_display_name": "str", # Required. Display name of product. "max_product_image": { "@odata.value": "str", # Optional. URL value. "generic_value": "str" # Optional. Generic URL value. } } } # response body for status code(s): 200 response.json() == { "base_product_description": "str", # Optional. Description of product. "base_product_id": "str", # Required. Unique identifier representing a specific product for a given latitude & longitude. For example, uberX in San Francisco will have a different product_id than uberX in Los Angeles. "details": { "max_product_capacity": "Large", # Default value is "Large". Capacity of product. For example, 4 people. Has constant value: "Large". "max_product_display_name": "str", # Required. Display name of product. "max_product_image": { "@odata.value": "str", # Optional. URL value. "generic_value": "str" # Optional. Generic URL value. } } }
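A hedged call sketch for the coroutine above. The `client` object stands for an already-constructed service client that exposes this mixin, and only the required fields from the body template are filled in; all values are placeholders.

# Hedged sketch: `client` is an initialized client exposing put_simple_product.
async def put_example(client):
    body = {
        "base_product_id": "123",
        "details": {"max_product_display_name": "max name"},
    }
    result = await client.put_simple_product(simple_body_product=body)
    print(result)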
https://github.com/azure/autorest.python/blob/90d60a965788e3b4c0809e6686bdc3525acac89c/test/vanilla/version-tolerant/Expected/AcceptanceTests/ModelFlatteningVersionTolerant/modelflatteningversiontolerant/aio/operations/_operations.py#L566-L639
import functools from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar import warnings from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error, ) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from ...operations._operations import ( build_get_array_request, build_get_dictionary_request, build_get_resource_collection_request, build_get_wrapped_array_request, build_post_flattened_simple_product_request, build_put_array_request, build_put_dictionary_request, build_put_resource_collection_request, build_put_simple_product_request, build_put_simple_product_with_grouping_request, build_put_wrapped_array_request, ) T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class AutoRestResourceFlatteningTestServiceOperationsMixin: @distributed_trace_async async def put_array(self, resource_array: Optional[List[Any]] = None, **kwargs: Any) -> None: cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) content_type = kwargs.pop("content_type", "application/json") if resource_array is not None: json = resource_array else: json = None request = build_put_array_request( content_type=content_type, json=json, template_url=self.put_array.metadata["url"], ) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if cls: return cls(pipeline_response, None, {}) put_array.metadata = {"url": "/model-flatten/array"} @distributed_trace_async async def get_array(self, **kwargs: Any) -> List[Any]: cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) request = build_get_array_request( template_url=self.get_array.metadata["url"], ) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: deserialized = response.json() else: deserialized = None if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_array.metadata = {"url": "/model-flatten/array"} @distributed_trace_async async def put_wrapped_array(self, resource_array: Optional[List[Any]] = None, **kwargs: Any) -> None: cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) content_type = kwargs.pop("content_type", "application/json") if resource_array is not None: json = resource_array else: json = None request = build_put_wrapped_array_request( content_type=content_type, json=json, template_url=self.put_wrapped_array.metadata["url"], ) request.url = 
self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if cls: return cls(pipeline_response, None, {}) put_wrapped_array.metadata = {"url": "/model-flatten/wrappedarray"} @distributed_trace_async async def get_wrapped_array(self, **kwargs: Any) -> List[Any]: cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) request = build_get_wrapped_array_request( template_url=self.get_wrapped_array.metadata["url"], ) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: deserialized = response.json() else: deserialized = None if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_wrapped_array.metadata = {"url": "/model-flatten/wrappedarray"} @distributed_trace_async async def put_dictionary(self, resource_dictionary: Optional[Dict[str, Any]] = None, **kwargs: Any) -> None: cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) content_type = kwargs.pop("content_type", "application/json") if resource_dictionary is not None: json = resource_dictionary else: json = None request = build_put_dictionary_request( content_type=content_type, json=json, template_url=self.put_dictionary.metadata["url"], ) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if cls: return cls(pipeline_response, None, {}) put_dictionary.metadata = {"url": "/model-flatten/dictionary"} @distributed_trace_async async def get_dictionary(self, **kwargs: Any) -> Dict[str, Any]: cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) request = build_get_dictionary_request( template_url=self.get_dictionary.metadata["url"], ) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: deserialized = response.json() else: deserialized = None if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_dictionary.metadata = {"url": "/model-flatten/dictionary"} @distributed_trace_async async def put_resource_collection(self, resource_complex_object: Any = None, **kwargs: Any) -> None: cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: 
ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) content_type = kwargs.pop("content_type", "application/json") if resource_complex_object is not None: json = resource_complex_object else: json = None request = build_put_resource_collection_request( content_type=content_type, json=json, template_url=self.put_resource_collection.metadata["url"], ) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if cls: return cls(pipeline_response, None, {}) put_resource_collection.metadata = {"url": "/model-flatten/resourcecollection"} @distributed_trace_async async def get_resource_collection(self, **kwargs: Any) -> Any: cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) request = build_get_resource_collection_request( template_url=self.get_resource_collection.metadata["url"], ) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: deserialized = response.json() else: deserialized = None if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_resource_collection.metadata = {"url": "/model-flatten/resourcecollection"} @distributed_trace_async
MIT License
rustychris/stompy
stompy/io/qnc.py
QuickVar.__getitem__
python
def __getitem__(self,k): if isinstance(k,dict): dims=self.dims slices=[ k.get(d,slice(None)) for d in self.dims ] k=tuple(slices) if not isinstance(k,tuple): k=(k,) k=tuple(to_str(ki) for ki in k) if isinstance(k[0],str): return self._transpose_by_names(k) else: myv=self._v k=list(k) for ki,kk in enumerate(k): if kk is Ellipsis: expand_slc=slice(ki,ki+1) expansion=[slice(None)]*(myv.ndim-(len(k)-1)) k[expand_slc] = expansion break else: while len(k)< myv.ndim: k.append( slice(None) ) untranspose=[ self._transpose.index(i) for i in range(myv.ndim) ] k_untransposed=[k[j] for j in untranspose] pulled=self._rich_take(myv,tuple(k_untransposed)) retranspose=[i for i in self._transpose if (isinstance(k_untransposed[i],slice) or isinstance(k_untransposed[i],collections.Iterable))] if len(retranspose): retranspose=np.argsort(np.argsort(retranspose)) return pulled.transpose(retranspose) else: return pulled
The options are similar to indexing a record array, but strings refer to dimensions: a dict selects slices by dimension name, a tuple of dimension names transposes, and plain indices behave like ordinary numpy indexing.
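The re-transposition step in the function above relies on a double argsort to turn the surviving original-axis positions into a rank-order permutation. That one step can be checked on its own with plain numpy; this is not a demonstration of the qnc API itself.

import numpy as np

# The surviving axis positions are arbitrary distinct integers; a double argsort
# converts them into their rank order, i.e. a valid transpose permutation.
surviving = [3, 0, 2]
print(np.argsort(np.argsort(surviving)))  # -> [2 0 1]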
https://github.com/rustychris/stompy/blob/ef04d8b3ee9c9af827c87c72c7b50d365e5e567d/stompy/io/qnc.py#L56-L132
from __future__ import print_function from six import iteritems import netCDF4 import os import uuid from .. import utils import numpy as np import collections from scipy import interpolate from scipy.signal import decimate class QncException(Exception): pass def to_str(s): if not isinstance(s,str) and isinstance(s,bytes): s=s.decode() return s def sanitize_name(s): return to_str(s).replace(' ','_').replace('/','_') def as_tuple(x): if isinstance(x,tuple): return x elif isinstance(x,list): return tuple(x) else: return (x,) def anon_dim_name(size,**kws): return 'd%d'%size class QuickVar(object): _nc=None _v=None _transpose=None def __init__(self,nc,v,transpose=None): self.__dict__['_nc']=nc self.__dict__['_v']=v if transpose is None: transpose=range(len(v.dimensions)) self.__dict__['_transpose']=transpose
MIT License
hippogriff/csgnet
src/utils/reinforce.py
Reinforce.pg_loss_var
python
def pg_loss_var(self, R, samples, probs): batch_size = R.shape[0] R = Variable(torch.from_numpy(R)).cuda() T = len(samples) samples = [s.data.cpu().numpy() for s in samples] Parse_program = Parser() parser = ParseModelOutput(self.unique_draws, T // 2 + 1, T, [64, 64]) samples_ = np.concatenate(samples, 1) expressions = parser.labels2exps(samples_, T) for index, exp in enumerate(expressions): expressions[index] = exp.split("$")[0] len_programs = np.zeros((batch_size), dtype=np.int32) for index, exp in enumerate(expressions): p = Parse_program.parse(exp) if len(p) == T: len_programs[index] = len(p) else: try: len_programs[index] = len(p) + 1 except: print(len(expressions), batch_size, samples_.shape) self.rolling_baseline = self.alpha_baseline * self.rolling_baseline + (1 - self.alpha_baseline) * torch.mean(R) baseline = self.rolling_baseline.view(1, 1).repeat(batch_size, 1) baseline = baseline.detach() advantage = R - baseline temp = [] for i in range(batch_size): neg_log_prob = Variable(torch.zeros(1)).cuda() for j in range(len_programs[i]): neg_log_prob = neg_log_prob + probs[j][i, samples[j][i, 0]] temp.append(neg_log_prob) loss = -torch.cat(temp).view(batch_size, 1) loss = loss.mul(advantage) loss = torch.mean(loss) return loss
REINFORCE loss for the variable-length program setting, where we stop at maximum-length programs or when a stop symbol is encountered. The baseline is a rolling (exponential moving) average of the batch-mean reward. :param R: Rewards for the minibatch :param samples: Sampled actions for the minibatch at every time step :param probs: Probability corresponding to every sampled action. :return loss: REINFORCE loss
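The rolling baseline update used above is an exponential moving average of the batch-mean reward; the arithmetic can be illustrated with plain Python (the reward values below are made up).

# Exponential moving average baseline, as in rolling_baseline above.
alpha_baseline = 0.7
rolling_baseline = 0.0
for mean_reward in [1.0, 0.5, 0.8]:   # made-up batch-mean rewards
    rolling_baseline = alpha_baseline * rolling_baseline + (1 - alpha_baseline) * mean_reward
    print(round(rolling_baseline, 4))  # 0.3, 0.36, 0.492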
https://github.com/hippogriff/csgnet/blob/1ff8a4f78b6024a65084262ccd9f902a95af4f4b/src/utils/reinforce.py#L104-L158
import numpy as np import torch from torch.autograd.variable import Variable from .generators.mixed_len_generator import Parser from ..Models.models import ParseModelOutput, validity from ..utils.train_utils import chamfer class Reinforce: def __init__(self, unique_draws, canvas_shape=[64, 64], rolling_average_const=0.7): self.canvas_shape = canvas_shape self.unique_draws = unique_draws self.max_reward = Variable(torch.zeros(1)).cuda() self.rolling_baseline = Variable(torch.zeros(1)).cuda() self.alpha_baseline = rolling_average_const def generate_rewards(self, samples, data, time_steps, stack_size, reward="chamfer", if_stack_calculated=False, pred_images=None, power=20): if not if_stack_calculated: parser = ParseModelOutput(self.unique_draws, stack_size, time_steps, [64, 64]) samples = torch.cat(samples, 1) expressions = parser.labels2exps(samples, time_steps) for index, exp in enumerate(expressions): expressions[index] = exp.split("$")[0] pred_images = [] for index, exp in enumerate(expressions): program = parser.Parser.parse(exp) if validity(program, len(program), len(program) - 1): stack = parser.expression2stack([exp]) pred_images.append(stack[-1, -1, 0, :, :]) else: pred_images.append(np.zeros(self.canvas_shape)) pred_images = np.stack(pred_images, 0).astype(dtype=np.bool) else: pred_images = pred_images[-1, :, 0, :, :].data.cpu().numpy() target_images = data[-1, :, 0, :, :].astype(dtype=np.bool) image_size = target_images.shape[-1] if reward == "iou": R = np.sum(np.logical_and(target_images, pred_images), (1, 2)) / (np.sum(np.logical_or(target_images, pred_images), (1, 2)) + 1.0) R = R**power elif reward == "chamfer": distance = chamfer(target_images, pred_images) R = (1.0 - distance / image_size / (2**0.5)) R = np.clip(R, a_min=0.0, a_max=1.0) R[R > 1.0] = 0 R = R**power R = np.expand_dims(R, 1).astype(dtype=np.float32) if (reward == "chamfer"): if if_stack_calculated: return R, samples, pred_images, 0, distance else: return R, samples, pred_images, expressions, distance elif reward == "iou": if if_stack_calculated: return R, samples, pred_images, 0 else: return R, samples, pred_images, expressions
MIT License
timkpaine/tributary
tributary/streaming/dd3.py
_DagreD3Mixin._finishdd3g
python
async def _finishdd3g(self): if self._dd3g: self._dd3g.setNode(self._name, tooltip=str(self._last), style="fill: #f00") await asyncio.sleep(_DD3_TRANSITION_DELAY)
Represent a node that has finished its calculation.
https://github.com/timkpaine/tributary/blob/0826f864c9d64761db486b95a1b96dd39d64665f/tributary/streaming/dd3.py#L24-L28
import asyncio _DD3_TRANSITION_DELAY = 0.1 class _DagreD3Mixin(object): async def _startdd3g(self): if self._dd3g: self._dd3g.setNode(self._name, tooltip=str(self._last), style="fill: #0f0") await asyncio.sleep(_DD3_TRANSITION_DELAY) async def _waitdd3g(self): if self._dd3g: self._dd3g.setNode(self._name, tooltip=str(self._last), style="fill: #ff0") await asyncio.sleep(_DD3_TRANSITION_DELAY)
Apache License 2.0
hughsalimbeni/orth_decoupled_var_gps
tests/test_kl.py
ref_hybrid_KL
python
def ref_hybrid_KL(a, m, S, K_alpha, K_alpha_tilde, K_tilde): K_inv = np.linalg.inv(K_tilde) KL = 0.5 * np.sum(m * (K_inv @ m)) KL += np.sum(m * (K_inv @ K_alpha_tilde.T @ a)) KL += 0.5 * np.sum(a * (K_alpha @ a)) KL -= 0.5 * np.linalg.slogdet(K_inv @ S)[1] KL += 0.5 * np.sum(S * K_inv) KL -= 0.5 * K_tilde.shape[0] return KL
Eq. 14 of Cheng and Boots (2017).
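A quick numerical check of ref_hybrid_KL with random, well-conditioned matrices. The shapes are inferred from the function body (M_alpha "alpha" basis points, M_tilde inducing points), and the definition above is assumed to be in scope.

import numpy as np

rng = np.random.RandomState(0)
M_alpha, M_tilde = 5, 3

def rand_psd(n):
    A = rng.randn(n, n)
    return A @ A.T + n * np.eye(n)   # well-conditioned positive-definite matrix

a = rng.randn(M_alpha, 1)
m = rng.randn(M_tilde, 1)
S = rand_psd(M_tilde)
K_alpha = rand_psd(M_alpha)
K_tilde = rand_psd(M_tilde)
K_alpha_tilde = rng.randn(M_alpha, M_tilde)

print(ref_hybrid_KL(a, m, S, K_alpha, K_alpha_tilde, K_tilde))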
https://github.com/hughsalimbeni/orth_decoupled_var_gps/blob/04c86489b733a87d9a1f599c156a397fd9321e68/tests/test_kl.py#L57-L71
import pytest import tensorflow as tf import numpy as np from numpy.testing import assert_allclose from gpflow import settings from gpflow.kernels import Matern52 from gpflow.likelihoods import Gaussian from gpflow.test_util import session_tf from odvgp.gaussian_bases import DecoupledBasis, OrthogonallyDecoupledBasis, HybridDecoupledBasis def ref_decoupled_KL(a, B, K_alpha, K_beta): B_inv = np.linalg.inv(B) I = np.eye(K_beta.shape[0]) KL = 0.5 * np.sum(a * (K_alpha @ a)) KL += 0.5 * np.linalg.slogdet(I + K_beta @ B)[1] KL -= 0.5 * np.sum(K_beta * np.linalg.inv(B_inv + K_beta)) return KL def ref_orthogonally_decoupled_KL(a_gamma, a_beta, S, K_gamma, K_gamma_beta, K_beta): KL = -0.5 * K_beta.shape[0] K_beta_inv = np.linalg.inv(K_beta) KL += 0.5 * np.sum(a_gamma * (K_gamma @ a_gamma)) KL += 0.5 * np.sum(a_beta * (K_beta @ a_beta)) KL -= 0.5 * np.sum(a_gamma * (K_gamma_beta @ K_beta_inv @ K_gamma_beta.T @ a_gamma)) KL += 0.5 * np.linalg.slogdet(K_beta)[1] KL -= 0.5 * np.linalg.slogdet(S)[1] KL += 0.5 * np.sum(S * K_beta_inv) return KL
Apache License 2.0
aboudykreidieh/h-baselines
hbaselines/envs/efficient_hrl/humanoid_maze_env.py
HumanoidMazeEnv.get_ori
python
def get_ori(self): return self.wrapped_env.get_ori()
Return the orientation of the humanoid.
https://github.com/aboudykreidieh/h-baselines/blob/47c858584c6de3867a9981dfe974e341188626ed/hbaselines/envs/efficient_hrl/humanoid_maze_env.py#L285-L287
import random import os import tempfile import xml.etree.ElementTree as ET import math import numpy as np import gym from hbaselines.envs.efficient_hrl import maze_env_utils from hbaselines.envs.efficient_hrl.humanoid import HumanoidEnv SCRIPT_PATH = os.path.abspath(os.path.dirname(__file__)) MODEL_DIR = os.path.join(SCRIPT_PATH, 'assets') class HumanoidMazeEnv(gym.Env): MODEL_CLASS = HumanoidEnv MAZE_HEIGHT = None MAZE_SIZE_SCALING = None def __init__(self, maze_id=None, maze_height=0.5, maze_size_scaling=4, n_bins=0, sensor_range=3., sensor_span=2 * math.pi, observe_blocks=False, put_spin_near_agent=False, top_down_view=False, image_size=64, manual_collision=False, *args, **kwargs): self._maze_id = maze_id self.t = 0 model_cls = self.__class__.MODEL_CLASS if model_cls is None: raise AssertionError("MODEL_CLASS unspecified!") xml_path = os.path.join(MODEL_DIR, model_cls.FILE) tree = ET.parse(xml_path) worldbody = tree.find(".//worldbody") self.MAZE_HEIGHT = height = maze_height self.MAZE_SIZE_SCALING = size_scaling = maze_size_scaling self._n_bins = n_bins self._sensor_range = sensor_range * size_scaling self._sensor_span = sensor_span self._observe_blocks = observe_blocks self._put_spin_near_agent = put_spin_near_agent self._top_down_view = top_down_view self._manual_collision = manual_collision self.MAZE_STRUCTURE = structure = maze_env_utils.construct_maze( maze_id=self._maze_id) self.elevated = any(-1 in row for row in structure) self.blocks = any( any(maze_env_utils.can_move(r) for r in row) for row in structure) torso_x, torso_y = self._find_robot() self._init_torso_x = torso_x self._init_torso_y = torso_y self._init_positions = [ (x - torso_x, y - torso_y) for x, y in self._find_all_robots()] self._xy_to_rowcol = lambda x, y: ( 2 + (y + size_scaling / 2) / size_scaling, 2 + (x + size_scaling / 2) / size_scaling) self.image_size = image_size height_offset = 0. 
if self.elevated: height_offset = height * size_scaling torso = tree.find(".//body[@name='torso']") torso.set('pos', '0 0 %.2f' % (1.4 + height_offset)) if self.blocks: default = tree.find(".//default") default.find('.//geom').set('solimp', '.995 .995 .01') self.movable_blocks = [] for i in range(len(structure)): for j in range(len(structure[0])): struct = structure[i][j] if struct == 'r' and self._put_spin_near_agent: struct = maze_env_utils.Move.SpinXY if self.elevated and struct not in [-1]: ET.SubElement( worldbody, "geom", name="elevated_%d_%d" % (i, j), pos="%f %f %f" % (j * size_scaling - torso_x, i * size_scaling - torso_y, height / 2 * size_scaling), size="%f %f %f" % (0.5 * size_scaling, 0.5 * size_scaling, height / 2 * size_scaling), type="box", material="", contype="1", conaffinity="1", rgba="0.9 0.9 0.9 1", ) if struct == 1: ET.SubElement( worldbody, "geom", name="block_%d_%d" % (i, j), pos="%f %f %f" % (j * size_scaling - torso_x, i * size_scaling - torso_y, height_offset + height / 2 * size_scaling), size="%f %f %f" % (0.5 * size_scaling, 0.5 * size_scaling, height / 2 * size_scaling), type="box", material="", contype="1", conaffinity="1", rgba="{} {} 0.4 1".format(i / len(structure), j / len(structure[0])), ) elif maze_env_utils.can_move(struct): name = "movable_%d_%d" % (i, j) self.movable_blocks.append((name, struct)) falling = maze_env_utils.can_move_z(struct) spinning = maze_env_utils.can_spin(struct) x_offset = 0.25 * size_scaling if spinning else 0.0 y_offset = 0.0 shrink = 0.1 if spinning else 0.99 if falling else 1.0 height_shrink = 0.1 if spinning else 1.0 movable_body = ET.SubElement( worldbody, "body", name=name, pos="%f %f %f" % ( j * size_scaling - torso_x + x_offset, i * size_scaling - torso_y + y_offset, height_offset + height / 2 * size_scaling * height_shrink), ) ET.SubElement( movable_body, "geom", name="block_%d_%d" % (i, j), pos="0 0 0", size="%f %f %f" % ( 0.5 * size_scaling * shrink, 0.5 * size_scaling * shrink, height / 2 * size_scaling * height_shrink), type="box", material="", mass="0.001" if falling else "0.0002", contype="1", conaffinity="1", rgba="0.9 0.1 0.1 1" ) if maze_env_utils.can_move_x(struct): ET.SubElement( movable_body, "joint", armature="0", axis="1 0 0", damping="0.0", limited="true" if falling else "false", range="%f %f" % (-size_scaling, size_scaling), margin="0.01", name="movable_x_%d_%d" % (i, j), pos="0 0 0", type="slide" ) if maze_env_utils.can_move_y(struct): ET.SubElement( movable_body, "joint", armature="0", axis="0 1 0", damping="0.0", limited="true" if falling else "false", range="%f %f" % (-size_scaling, size_scaling), margin="0.01", name="movable_y_%d_%d" % (i, j), pos="0 0 0", type="slide" ) if maze_env_utils.can_move_z(struct): ET.SubElement( movable_body, "joint", armature="0", axis="0 0 1", damping="0.0", limited="true", range="%f 0" % (-height_offset), margin="0.01", name="movable_z_%d_%d" % (i, j), pos="0 0 0", type="slide" ) if maze_env_utils.can_spin(struct): ET.SubElement( movable_body, "joint", armature="0", axis="0 0 1", damping="0.0", limited="false", name="spinable_%d_%d" % (i, j), pos="0 0 0", type="ball" ) torso = tree.find(".//body[@name='torso']") geoms = torso.findall(".//geom") for geom in geoms: if 'name' not in geom.attrib: raise Exception("Every geom of the torso must have a name " "defined") _, file_path = tempfile.mkstemp(text=True, suffix='.xml') tree.write(file_path) try: self.wrapped_env = model_cls(*args, file_path=file_path, **kwargs) except (AssertionError, TypeError): pass
MIT License
chainer/chainer-chemistry
chainer_chemistry/datasets/tox21.py
get_tox21_label_names
python
def get_tox21_label_names(): return _label_names
Returns label names of Tox21 datasets.
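A small usage sketch, assuming chainer-chemistry and its dependencies are installed; the twelve assay names returned are the `_label_names` constant visible in the context below.

from chainer_chemistry.datasets.tox21 import get_tox21_label_names

labels = get_tox21_label_names()
print(len(labels))   # 12 assay names
print(labels[0])     # 'NR-AR'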
https://github.com/chainer/chainer-chemistry/blob/efe323aa21f63a815130d673781e7cca1ccb72d2/chainer_chemistry/datasets/tox21.py#L38-L40
from logging import getLogger import os import shutil import zipfile from chainer.dataset import download import numpy from chainer_chemistry.dataset.parsers.sdf_file_parser import SDFFileParser from chainer_chemistry.dataset.preprocessors.atomic_number_preprocessor import AtomicNumberPreprocessor _config = { 'train': { 'url': 'https://tripod.nih.gov/tox21/challenge/download?' 'id=tox21_10k_data_allsdf', 'filename': 'tox21_10k_data_all.sdf' }, 'val': { 'url': 'https://tripod.nih.gov/tox21/challenge/download?' 'id=tox21_10k_challenge_testsdf', 'filename': 'tox21_10k_challenge_test.sdf' }, 'test': { 'url': 'https://tripod.nih.gov/tox21/challenge/download?' 'id=tox21_10k_challenge_scoresdf', 'filename': 'tox21_10k_challenge_score.sdf' } } _root = 'pfnet/chainer/tox21' _label_names = ['NR-AR', 'NR-AR-LBD', 'NR-AhR', 'NR-Aromatase', 'NR-ER', 'NR-ER-LBD', 'NR-PPAR-gamma', 'SR-ARE', 'SR-ATAD5', 'SR-HSE', 'SR-MMP', 'SR-p53']
MIT License
pybrain2/pybrain2
pybrain/optimization/populationbased/coevolution/competitivecoevolution.py
CompetitiveCoevolution._initPopulation
python
def _initPopulation(self, seeds): if self.parentChildAverage < 1: for s in seeds: s.parent = None if len(seeds) > 1: s1 = seeds[:len(seeds) // 2] s2 = seeds[len(seeds) // 2:] else: s1 = seeds tmp = seeds[0].copy() tmp.randomize() s2 = [tmp] self.pop = self._extendPopulation(s1, self.populationSize) self.parasitePop = self._extendPopulation(s2, self.populationSize)
One half of the seeds is used for each population (a randomized copy is used for the parasite population if only one seed is given).
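The seed split performed above can be illustrated with plain Python; the seed names below are placeholders and no PyBrain objects are needed.

# Plain-Python illustration of the split in _initPopulation above.
seeds = ["s1", "s2", "s3", "s4", "s5"]
host_seeds = seeds[:len(seeds) // 2]       # -> ['s1', 's2'], extends self.pop
parasite_seeds = seeds[len(seeds) // 2:]   # -> ['s3', 's4', 's5'], extends self.parasitePop
print(host_seeds, parasite_seeds)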
https://github.com/pybrain2/pybrain2/blob/33ead60704d126e58c10d458ddd1e5e5fd17b65d/pybrain/optimization/populationbased/coevolution/competitivecoevolution.py#L14-L29
from __future__ import print_function __author__ = 'Tom Schaul, tom@idsia.ch' from pybrain.optimization.coevolution.coevolution import Coevolution class CompetitiveCoevolution(Coevolution): def __str__(self): return 'Competitive' + Coevolution.__str__(self)
BSD 3-Clause New or Revised License
facebookresearch/fvcore
tests/test_layers_squeeze_excitation.py
TestSqueezeExcitation._get_inputs3d
python
def _get_inputs3d(num_channels: int = 8) -> Iterable[torch.Tensor]: shapes = ( (1, num_channels, 5, 7, 7), (2, num_channels, 5, 7, 7), (4, num_channels, 5, 7, 7), (4, num_channels, 5, 7, 7), (4, num_channels, 7, 7, 7), (4, num_channels, 7, 7, 14), (4, num_channels, 7, 14, 7), (4, num_channels, 7, 14, 14), (8, num_channels * 2, 3, 7, 7), (8, num_channels * 4, 5, 7, 7), ) for shape in shapes: yield torch.rand(shape)
Provide tensors of different shapes as test cases. Yields: (torch.Tensor): tensor to use as a test case input.
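A usage sketch for the generator above; it assumes torch is installed and that the TestSqueezeExcitation class above is importable (e.g. from a checkout of the fvcore repository).

# Iterate over the generated 5-D test inputs and sanity-check their shapes.
for x in TestSqueezeExcitation._get_inputs3d(num_channels=8):
    assert x.dim() == 5
    assert x.shape[1] in (8, 16, 32)  # num_channels, num_channels * 2, num_channels * 4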
https://github.com/facebookresearch/fvcore/blob/4525b814c8bb0f70510e37e68247c958010eb285/tests/test_layers_squeeze_excitation.py#L60-L83
import itertools import unittest from typing import Iterable import torch from fvcore.nn.squeeze_excitation import ( ChannelSpatialSqueezeExcitation, SpatialSqueezeExcitation, SqueezeExcitation, ) class TestSqueezeExcitation(unittest.TestCase): def setUp(self) -> None: super().setUp() torch.manual_seed(42) def test_build_se(self) -> None: for layer, num_channels, is_3d in itertools.product( ( SqueezeExcitation, SpatialSqueezeExcitation, ChannelSpatialSqueezeExcitation, ), (16, 32), (True, False), ): model = layer( num_channels=num_channels, is_3d=is_3d, ) for input_tensor in TestSqueezeExcitation._get_inputs( num_channels=num_channels, is_3d=is_3d ): if input_tensor.shape[1] != num_channels: with self.assertRaises(RuntimeError): output_tensor = model(input_tensor) continue else: output_tensor = model(input_tensor) input_shape = input_tensor.shape output_shape = output_tensor.shape self.assertEqual( input_shape, output_shape, "Input shape {} is different from output shape {}".format( input_shape, output_shape ), ) @staticmethod
Apache License 2.0
twitchio/twitchio
twitchio/user.py
PartialUser.get_predictions
python
async def get_predictions(self, token: str, prediction_id: str = None) -> List["Prediction"]: from .models import Prediction data = await self._http.get_predictions(token, broadcaster_id=str(self.id), prediction_id=prediction_id) return [Prediction(self._http, d) for d in data]
|coro| Gets information on a prediction, or the list of predictions if no ID is provided. Parameters ----------- token: :class:`str` An oauth token with the channel:manage:predictions scope prediction_id: :class:`str` ID of the prediction to receive information about. Returns -------- List[:class:`twitchio.Prediction`]
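A hedged usage sketch: it assumes an initialized twitchio client, a PartialUser obtained from it, and a real oauth token with the channel:manage:predictions scope. Nothing runs until the coroutine is awaited inside the client's event loop.

# Hedged sketch; `user` is a twitchio PartialUser and `token` a real oauth token.
async def show_predictions(user, token):
    predictions = await user.get_predictions(token)
    for prediction in predictions:
        print(prediction)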
https://github.com/twitchio/twitchio/blob/c3c1c936d8e9bc4ac1fa1e7e60454ba5e06c16c4/twitchio/user.py#L669-L689
import datetime import time from typing import TYPE_CHECKING, List, Optional, Union from .enums import BroadcasterTypeEnum, UserTypeEnum from .errors import HTTPException, Unauthorized from .rewards import CustomReward if TYPE_CHECKING: from .http import TwitchHTTP from .channel import Channel from .models import BitsLeaderboard, Clip, ExtensionBuilder, Tag, FollowEvent, Prediction __all__ = ( "PartialUser", "BitLeaderboardUser", "UserBan", "SearchUser", "User", ) class PartialUser: __slots__ = "id", "name", "_http", "_cached_rewards" def __init__(self, http: "TwitchHTTP", id: Union[int, str], name: Optional[str]): self.id = int(id) self.name = name self._http = http self._cached_rewards = None def __repr__(self): return f"<PartialUser id={self.id}, name={self.name}>" @property def channel(self) -> Optional["Channel"]: from .channel import Channel if self.name in self._http.client._connection._cache: return Channel(self.name, self._http.client._connection) async def fetch(self, token: str = None, force=False) -> "User": data = await self._http.client.fetch_users(ids=[self.id], force=force, token=token) return data[0] async def edit(self, token: str, description: str) -> None: await self._http.put_update_user(token, description) async def fetch_tags(self): from .models import Tag data = await self._http.get_channel_tags(str(self.id)) return [Tag(x) for x in data] async def replace_tags(self, token: str, tags: List[Union[str, "Tag"]]): tags = [x if isinstance(x, str) else x.id for x in tags] await self._http.put_replace_channel_tags(token, str(self.id), tags) async def get_custom_rewards( self, token: str, *, only_manageable=False, ids: List[int] = None, force=False ) -> List["CustomReward"]: if not force and self._cached_rewards and self._cached_rewards[0] + 300 > time.monotonic(): return self._cached_rewards[1] try: data = await self._http.get_rewards(token, self.id, only_manageable, ids) except Unauthorized as error: raise Unauthorized("The given token is invalid", "", 401) from error except HTTPException as error: status = error.args[2] if status == 403: raise HTTPException( "The custom reward was created by a different application, or channel points are " "not available for the broadcaster (403)", error.args[1], 403, ) from error raise else: values = [CustomReward(self._http, x, self) for x in data] self._cached_rewards = time.monotonic(), values return values async def fetch_bits_leaderboard( self, token: str, period: str = "all", user_id: int = None, started_at: datetime.datetime = None ) -> "BitsLeaderboard": from .models import BitsLeaderboard data = await self._http.get_bits_board(token, period, user_id, started_at) return BitsLeaderboard(self._http, data) async def start_commercial(self, token: str, length: int) -> dict: data = await self._http.post_commercial(token, str(self.id), length) return data[0] async def create_clip(self, token: str, has_delay=False) -> dict: data = await self._http.post_create_clip(token, self.id, has_delay) return data[0] async def fetch_clips(self) -> List["Clip"]: from .models import Clip data = await self._http.get_clips(self.id) return [Clip(self._http, x) for x in data] async def fetch_hypetrain_events(self, id: str = None, token: str = None): from .models import HypeTrainEvent data = await self._http.get_hype_train(self.id, id=id, token=token) return [HypeTrainEvent(self._http, d) for d in data] async def fetch_bans(self, token: str, userids: List[Union[str, int]] = None) -> List["UserBan"]: data = await self._http.get_channel_bans(token, 
str(self.id), user_ids=userids) return [UserBan(self._http, d) for d in data] async def fetch_ban_events(self, token: str, userids: List[int] = None): from .models import BanEvent data = await self._http.get_channel_ban_unban_events(token, str(self.id), userids) return [BanEvent(self._http, x, self) for x in data] async def fetch_moderators(self, token: str, userids: List[int] = None): data = await self._http.get_channel_moderators(token, str(self.id), user_ids=userids) return [PartialUser(self._http, d["user_id"], d["user_name"]) for d in data] async def fetch_mod_events(self, token: str): from .models import ModEvent data = await self._http.get_channel_mod_events(token, str(self.id)) return [ModEvent(self._http, d, self) for d in data] async def automod_check(self, token: str, query: list): from .models import AutomodCheckResponse data = await self._http.post_automod_check(token, str(self.id), *[x._to_dict() for x in query]) return [AutomodCheckResponse(d) for d in data] async def fetch_stream_key(self, token: str): data = await self._http.get_stream_key(token, str(self.id)) return data async def fetch_following(self, token: str = None) -> List["FollowEvent"]: from .models import FollowEvent data = await self._http.get_user_follows(token=token, from_id=str(self.id)) return [FollowEvent(self._http, d, from_=self) for d in data] async def fetch_followers(self, token: str = None): from .models import FollowEvent data = await self._http.get_user_follows(to_id=str(self.id)) return [FollowEvent(self._http, d, to=self) for d in data] async def fetch_follow(self, to_user: "PartialUser", token: str = None): if not isinstance(to_user, PartialUser): raise TypeError(f"to_user must be a PartialUser not {type(to_user)}") from .models import FollowEvent data = await self._http.get_user_follows(from_id=str(self.id), to_id=str(to_user.id)) if not data: raise IndexError(f"{self.name} does not follow {to_user.name}") return FollowEvent(self._http, data[0]) async def follow(self, userid: int, token: str, *, notifications=False): await self._http.post_follow_channel( token, from_id=str(userid), to_id=str(self.id), notifications=notifications ) async def unfollow(self, userid: int, token: str): await self._http.delete_unfollow_channel(token, from_id=str(userid), to_id=str(self.id)) async def fetch_subscriptions(self, token: str, userids: List[int] = None): from .models import SubscriptionEvent data = await self._http.get_channel_subscriptions(token, str(self.id), user_ids=userids) return [SubscriptionEvent(self._http, d, broadcaster=self) for d in data] async def create_marker(self, token: str, description: str = None): from .models import Marker data = await self._http.post_stream_marker(token, user_id=str(self.id), description=description) return Marker(data[0]) async def fetch_markers(self, token: str, video_id: str = None): from .models import VideoMarkers data = await self._http.get_stream_markers(token, user_id=str(self.id), video_id=video_id) if data: return VideoMarkers(data[0]["videos"]) async def fetch_extensions(self, token: str): from .models import Extension data = await self._http.get_channel_extensions(token) return [Extension(d) for d in data] async def fetch_active_extensions(self, token: str = None): from .models import ActiveExtension data = await self._http.get_user_active_extensions(token, str(self.id)) return {typ: {int(n): ActiveExtension(d) for n, d in vals.items()} for typ, vals in data.items()} async def update_extensions(self, token: str, extensions: "ExtensionBuilder"): from 
.models import ActiveExtension data = await self._http.put_user_extensions(token, extensions._to_dict()) return {typ: {int(n): ActiveExtension(d) for n, d in vals.items()} for typ, vals in data.items()} async def fetch_videos(self, period="all", sort="time", type="all", language=None): from .models import Video data = await self._http.get_videos(user_id=str(self.id), period=period, sort=sort, type=type, language=language) return [Video(self._http, x, self) for x in data] async def end_prediction( self, token: str, prediction_id: str, status: str, winning_outcome_id: str = None ) -> "Prediction": from .models import Prediction data = await self._http.patch_prediction( token, broadcaster_id=str(self.id), prediction_id=prediction_id, status=status, winning_outcome_id=winning_outcome_id, ) return Prediction(self._http, data[0])
MIT License
ceph/teuthology
teuthology/misc.py
all_roles
python
def all_roles(cluster): for _, roles_for_host in cluster.remotes.items(): for name in roles_for_host: yield name
Generator of role values. Each iteration yields another role. :param cluster: Cluster extracted from the ctx.
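Because all_roles only iterates the values of cluster.remotes, it can be exercised with a stand-in object; a real teuthology Cluster maps Remote objects to role lists in the same way.

from teuthology.misc import all_roles  # assumes teuthology is importable; otherwise paste the definition above

class FakeCluster:
    # a real Cluster maps Remote objects to role lists; plain strings suffice for the sketch
    remotes = {
        "smithi001": ["mon.a", "mgr.x", "osd.0"],
        "smithi002": ["osd.1", "client.0"],
    }

print(list(all_roles(FakeCluster())))
# -> ['mon.a', 'mgr.x', 'osd.0', 'osd.1', 'client.0']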
https://github.com/ceph/teuthology/blob/b35344e81ac507b6cad7c1cae575ce08b9c766f2/teuthology/misc.py#L406-L414
import argparse import os import logging import configobj import getpass import shutil import socket import subprocess import tarfile import time import yaml import json import re import pprint import datetime from tarfile import ReadError from teuthology.util.compat import urljoin, urlopen, HTTPError from netaddr.strategy.ipv4 import valid_str as _is_ipv4 from netaddr.strategy.ipv6 import valid_str as _is_ipv6 from teuthology import safepath from teuthology.exceptions import (CommandCrashedError, CommandFailedError, ConnectionLostError) from teuthology.orchestra import run from teuthology.config import config from teuthology.contextutil import safe_while from teuthology.orchestra.opsys import DEFAULT_OS_VERSION log = logging.getLogger(__name__) stamp = datetime.datetime.now().strftime("%y%m%d%H%M") is_arm = lambda x: x.startswith('tala') or x.startswith( 'ubuntu@tala') or x.startswith('saya') or x.startswith('ubuntu@saya') hostname_expr_templ = '(?P<user>.*@)?(?P<shortname>.*){lab_domain}' def host_shortname(hostname): if _is_ipv4(hostname) or _is_ipv6(hostname): return hostname else: return hostname.split('.', 1)[0] def canonicalize_hostname(hostname, user='ubuntu'): hostname_expr = hostname_expr_templ.format( lab_domain=config.lab_domain.replace('.', '\.')) match = re.match(hostname_expr, hostname) if _is_ipv4(hostname) or _is_ipv6(hostname): return "%s@%s" % (user, hostname) if match: match_d = match.groupdict() shortname = match_d['shortname'] if user is None: user_ = user else: user_ = match_d.get('user') or user else: shortname = host_shortname(hostname) user_ = user user_at = user_.strip('@') + '@' if user_ else '' domain = config.lab_domain if domain and not shortname.endswith('.'): domain = '.' + domain ret = '{user_at}{short}{domain}'.format( user_at=user_at, short=shortname, domain=domain, ) return ret def decanonicalize_hostname(hostname): lab_domain = '' if config.lab_domain: lab_domain='\.' 
+ config.lab_domain.replace('.', '\.') hostname_expr = hostname_expr_templ.format(lab_domain=lab_domain) match = re.match(hostname_expr, hostname) if match: hostname = match.groupdict()['shortname'] return hostname def config_file(string): config_dict = {} try: with open(string) as f: g = yaml.safe_load_all(f) for new in g: config_dict.update(new) except IOError as e: raise argparse.ArgumentTypeError(str(e)) return config_dict class MergeConfig(argparse.Action): def __call__(self, parser, namespace, values, option_string=None): config_dict = getattr(namespace, self.dest) for new in values: deep_merge(config_dict, new) def merge_configs(config_paths): conf_dict = dict() for conf_path in config_paths: if not os.path.exists(conf_path): log.debug("The config path {0} does not exist, skipping.".format(conf_path)) continue with open(conf_path) as partial_file: partial_dict = yaml.safe_load(partial_file) try: conf_dict = deep_merge(conf_dict, partial_dict) except Exception: pprint.pprint("failed to merge {0} into {1}".format(conf_dict, partial_dict)) raise return conf_dict def get_testdir(ctx=None): if 'test_path' in config: return config['test_path'] return config.get( 'test_path', '/home/%s/cephtest' % get_test_user() ) def get_test_user(ctx=None): return config.get('test_user', 'ubuntu') def get_archive_dir(ctx): test_dir = get_testdir(ctx) return os.path.normpath(os.path.join(test_dir, 'archive')) def get_http_log_path(archive_dir, job_id=None): http_base = config.archive_server if not http_base: return None sep = os.path.sep archive_dir = archive_dir.rstrip(sep) archive_subdir = archive_dir.split(sep)[-1] if archive_subdir.endswith(str(job_id)): archive_subdir = archive_dir.split(sep)[-2] if job_id is None: return os.path.join(http_base, archive_subdir, '') return os.path.join(http_base, archive_subdir, str(job_id), '') def get_results_url(run_name, job_id=None): if not config.results_ui_server: return None base_url = config.results_ui_server if job_id is None: return os.path.join(base_url, run_name, '') return os.path.join(base_url, run_name, str(job_id), '') def get_ceph_binary_url(package=None, branch=None, tag=None, sha1=None, dist=None, flavor=None, format=None, arch=None): BASE = 'http://{host}/{package}-{format}-{dist}-{arch}-{flavor}/'.format( host=config.gitbuilder_host, package=package, flavor=flavor, arch=arch, format=format, dist=dist ) if sha1 is not None: assert branch is None, "cannot set both sha1 and branch" assert tag is None, "cannot set both sha1 and tag" else: if tag is not None: ref = tag assert branch is None, "cannot set both branch and tag" else: if branch is None: branch = 'master' ref = branch sha1_url = urljoin(BASE, 'ref/{ref}/sha1'.format(ref=ref)) log.debug('Translating ref to sha1 using url %s', sha1_url) try: sha1_fp = urlopen(sha1_url) sha1 = sha1_fp.read().rstrip('\n') sha1_fp.close() except HTTPError as e: log.error('Failed to get url %s', sha1_url) raise e log.debug('Using %s %s sha1 %s', package, format, sha1) bindir_url = urljoin(BASE, 'sha1/{sha1}/'.format(sha1=sha1)) return (sha1, bindir_url) def feed_many_stdins(fp, processes): while True: data = fp.read(8192) if not data: break for proc in processes: proc.stdin.write(data) def feed_many_stdins_and_close(fp, processes): feed_many_stdins(fp, processes) for proc in processes: proc.stdin.close() def get_mons(roles, ips, mon_bind_msgr2=False, mon_bind_addrvec=False): mons = {} mon_ports = {} mon_id = 0 is_mon = is_type('mon') for idx, roles in enumerate(roles): for role in roles: if not is_mon(role): 
continue if ips[idx] not in mon_ports: mon_ports[ips[idx]] = 6789 else: mon_ports[ips[idx]] += 1 if mon_bind_msgr2: assert mon_bind_addrvec addr = 'v2:{ip}:{port},v1:{ip}:{port2}'.format( ip=ips[idx], port=mon_ports[ips[idx]], port2=mon_ports[ips[idx]] + 1, ) mon_ports[ips[idx]] += 1 elif mon_bind_addrvec: addr = 'v1:{ip}:{port}'.format( ip=ips[idx], port=mon_ports[ips[idx]], ) else: addr = '{ip}:{port}'.format( ip=ips[idx], port=mon_ports[ips[idx]], ) mon_id += 1 mons[role] = addr assert mons return mons def skeleton_config(ctx, roles, ips, cluster='ceph', mon_bind_msgr2=False, mon_bind_addrvec=False): path = os.path.join(os.path.dirname(__file__), 'ceph.conf.template') conf = configobj.ConfigObj(path, file_error=True) mons = get_mons(roles=roles, ips=ips, mon_bind_msgr2=mon_bind_msgr2, mon_bind_addrvec=mon_bind_addrvec) for role, addr in mons.items(): mon_cluster, _, _ = split_role(role) if mon_cluster != cluster: continue name = ceph_role(role) conf.setdefault(name, {}) conf[name]['mon addr'] = addr is_mds = is_type('mds', cluster) for roles_subset in roles: for role in roles_subset: if is_mds(role): name = ceph_role(role) conf.setdefault(name, {}) if '-s-' in name: standby_mds = name[name.find('-s-') + 3:] conf[name]['mds standby for name'] = standby_mds return conf def ceph_role(role): _, type_, id_ = split_role(role) return type_ + '.' + id_ def split_role(role): cluster = 'ceph' if role.count('.') > 1: cluster, role = role.split('.', 1) type_, id_ = role.split('.', 1) return cluster, type_, id_ def roles_of_type(roles_for_host, type_): for role in cluster_roles_of_type(roles_for_host, type_, None): _, _, id_ = split_role(role) yield id_ def cluster_roles_of_type(roles_for_host, type_, cluster): is_type_in_cluster = is_type(type_, cluster) for role in roles_for_host: if not is_type_in_cluster(role): continue yield role
MIT License
trusted-ai/adversarial-robustness-toolbox
art/estimators/pytorch.py
PyTorchEstimator.device_type
python
def device_type(self) -> str:
    return self._device_type
Return the type of device on which the estimator is run. :return: Type of device on which the estimator is run, either `gpu` or `cpu`.
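A minimal sketch of reading the property, assuming ART's PyTorchClassifier wrapper; the tiny torch model below is only a placeholder to keep the example self-contained.

import torch
from art.estimators.classification import PyTorchClassifier

model = torch.nn.Sequential(torch.nn.Flatten(), torch.nn.Linear(28 * 28, 10))
classifier = PyTorchClassifier(
    model=model,
    loss=torch.nn.CrossEntropyLoss(),
    input_shape=(1, 28, 28),
    nb_classes=10,
    device_type="cpu",  # request CPU even if CUDA is available
)
print(classifier.device_type)  # -> "cpu"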
https://github.com/trusted-ai/adversarial-robustness-toolbox/blob/564f46f99b3cb0406fe3570919b8e71a4c5bba9d/art/estimators/pytorch.py#L87-L93
import logging from typing import TYPE_CHECKING, Any, List, Tuple import numpy as np from art.estimators.estimator import BaseEstimator, LossGradientsMixin, NeuralNetworkMixin if TYPE_CHECKING: import torch logger = logging.getLogger(__name__) class PyTorchEstimator(NeuralNetworkMixin, LossGradientsMixin, BaseEstimator): estimator_params = ( BaseEstimator.estimator_params + NeuralNetworkMixin.estimator_params + [ "device_type", ] ) def __init__(self, device_type: str = "gpu", **kwargs) -> None: import torch preprocessing = kwargs.get("preprocessing") if isinstance(preprocessing, tuple): from art.preprocessing.standardisation_mean_std.pytorch import StandardisationMeanStdPyTorch kwargs["preprocessing"] = StandardisationMeanStdPyTorch( mean=preprocessing[0], std=preprocessing[1], device_type=device_type ) super().__init__(**kwargs) self._device_type = device_type if device_type == "cpu" or not torch.cuda.is_available(): self._device = torch.device("cpu") else: cuda_idx = torch.cuda.current_device() self._device = torch.device("cuda:{}".format(cuda_idx)) PyTorchEstimator._check_params(self) @property
MIT License
tomplus/kubernetes_asyncio
kubernetes_asyncio/client/models/v1_self_subject_access_review.py
V1SelfSubjectAccessReview.metadata
python
def metadata(self, metadata):
    self._metadata = metadata
Sets the metadata of this V1SelfSubjectAccessReview. :param metadata: The metadata of this V1SelfSubjectAccessReview. # noqa: E501 :type: V1ObjectMeta
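The generated model classes are plain data holders, so the setter can be exercised without a cluster connection; a minimal sketch (the object name used is arbitrary):

from kubernetes_asyncio.client import (
    V1ObjectMeta,
    V1SelfSubjectAccessReview,
    V1SelfSubjectAccessReviewSpec,
)

review = V1SelfSubjectAccessReview(spec=V1SelfSubjectAccessReviewSpec())
review.metadata = V1ObjectMeta(name="access-check")  # invokes the setter above
print(review.metadata.name)  # -> "access-check"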
https://github.com/tomplus/kubernetes_asyncio/blob/22bf0f4ec775b920abc9cee86bb38abcfc57506d/kubernetes_asyncio/client/models/v1_self_subject_access_review.py#L131-L139
import pprint import re import six from kubernetes_asyncio.client.configuration import Configuration class V1SelfSubjectAccessReview(object): """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'api_version': 'str', 'kind': 'str', 'metadata': 'V1ObjectMeta', 'spec': 'V1SelfSubjectAccessReviewSpec', 'status': 'V1SubjectAccessReviewStatus' } attribute_map = { 'api_version': 'apiVersion', 'kind': 'kind', 'metadata': 'metadata', 'spec': 'spec', 'status': 'status' } def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None, local_vars_configuration=None): if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._api_version = None self._kind = None self._metadata = None self._spec = None self._status = None self.discriminator = None if api_version is not None: self.api_version = api_version if kind is not None: self.kind = kind if metadata is not None: self.metadata = metadata self.spec = spec if status is not None: self.status = status @property def api_version(self): return self._api_version @api_version.setter def api_version(self, api_version): self._api_version = api_version @property def kind(self): return self._kind @kind.setter def kind(self, kind): self._kind = kind @property def metadata(self): return self._metadata @metadata.setter
Apache License 2.0
deepgram/kur
kur/backend/keras_backend.py
KerasBackend._save_keras
python
def _save_keras(self, keras_model, filename):
    path = os.path.expanduser(os.path.expandvars(filename))
    if os.path.exists(path):
        if not os.path.isdir(path):
            raise ValueError('Target weight exists, but it is not a '
                'directory. Kur expected a directory that it can work '
                'with. Please move or delete the existing path: {}'
                .format(path))
        for dirpath, _, filenames in os.walk(path):
            for this_file in filenames:
                if this_file.endswith('.kur'):
                    os.unlink(os.path.join(dirpath, this_file))
    else:
        os.makedirs(path, exist_ok=True)
    layers = keras_model.flattened_layers \
        if hasattr(keras_model, 'flattened_layers') else keras_model.layers
    for layer in layers:
        layer_name = layer.name
        symbolic_weights = layer.weights
        weight_names, weight_values = \
            self._get_weight_names_and_values_from_symbolic(symbolic_weights)
        for name, val in zip(weight_names, weight_values):
            name = name.replace('/', '_')
            target = os.path.join(
                path, '{}+{}.kur'.format(layer_name, name)
            )
            idx.save(target, val)
Saves a native Keras model.
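Not the Kur implementation itself, only a sketch of the on-disk naming scheme _save_keras uses: one '<layer>+<weight>.kur' file per weight, with '/' in the Keras weight name replaced by '_'. The helper name and the paths below are hypothetical.

import os

def kur_weight_path(target_dir, layer_name, weight_name):
    # Mirrors the replace('/', '_') and '{}+{}.kur' pattern used above.
    safe_name = weight_name.replace('/', '_')
    return os.path.join(target_dir, '{}+{}.kur'.format(layer_name, safe_name))

print(kur_weight_path('weights', 'dense_1', 'dense_1/kernel:0'))
# -> weights/dense_1+dense_1_kernel:0.kur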
https://github.com/deepgram/kur/blob/fd0c120e50815c1e5be64e5dde964dcd47234556/kur/backend/keras_backend.py#L321-L356
import contextlib import io import re import os import sys import tempfile import shutil import logging import functools import warnings from collections import OrderedDict import numpy from . import Backend from .. import __homepage__ from ..utils import can_import, EnvironmentalVariable, redirect_stderr, idx, DisableLogging from ..providers import BatchProvider logger = logging.getLogger(__name__) class KerasBackend(Backend): @classmethod def is_supported(cls): return can_import('keras') and ( can_import('theano') or can_import('tensorflow') ) def __init__(self, backend=None, optimizer=None, theano_flags=None, *args, **kwargs): super().__init__(*args, **kwargs) if backend is not None: logger.info('The %s backend for Keras has been requested.', backend) if 'keras' in sys.modules: import keras.backend as K if K.backend() != backend: logger.warning('Keras was already imported by the time ' 'the Kur backend was instantiated. Kur was asked to ' 'use Keras %s backend, but Keras is already using %s. ' 'We cannot change the Keras backend at this point, so ' 'we will try to work with the currently loaded ' 'backend. In the future, try to let Kur manage ' 'importing Keras.', backend, K.backend()) deps = { 'theano' : ['theano'], 'tensorflow' : ['tensorflow'] }[backend] for dep in deps: if can_import(dep): continue if backend == 'tensorflow': logger.error('Your Kurfile is trying to use TensorFlow.') logger.error('However, we cannot find TensorFlow ' 'installed.') logger.error('At least it is easy to install!') logger.error('To install TensorFlow for CPU: pip install ' 'tensorflow') logger.error('To install TensorFlow for GPU: pip install ' 'tensorflow-gpu') logger.error('See our troubleshooting page for more ' 'information: %s', os.path.join(__homepage__, 'troubleshooting.html')) raise ValueError('Need to install TensorFlow for this ' 'Kurfile to work.') else: logger.warning('The Keras backend was asked to use the %s ' 'backend, but %s does not appear to be installed. You ' 'will likely get an error about this soon.', backend, dep) else: logger.debug('No particular backend for Keras has been requested.') if can_import('theano') and can_import('tensorflow'): logger.trace('Using the system-default Keras backend.') elif can_import('theano'): backend = 'theano' logger.trace('Only the Theano backend for Keras is installed, ' 'so we will try to use it.') elif can_import('tensorflow'): backend = 'tensorflow' logger.trace('Only the TensorFlow backend for Keras is ' 'installed, so we will try to use it.') else: logger.warning('No supported Keras backend seems to be ' 'installed. 
You will probably get an error about this ' 'shortly.') x = io.StringIO() with redirect_stderr(x): env = { 'KERAS_BACKEND' : backend, 'THEANO_FLAGS' : os.environ.get('THEANO_FLAGS') } def replace_theano_flag(key, value): if env['THEANO_FLAGS']: parts = [i for i in env['THEANO_FLAGS'].split(',') if not i.startswith('{}='.format(key))] else: parts = [] parts.append('{}={}'.format(key, value)) env['THEANO_FLAGS'] = ','.join(parts) if optimizer is False: logger.trace('Disabling the Theano optimizer.') replace_theano_flag('optimizer', 'None') if theano_flags is not None: for k, v in theano_flags.items(): logger.trace('Setting Theano flag %s = %s', k, v) replace_theano_flag(k, v) replace_theano_flag('force_device', 'true') if not self.devices: replace_theano_flag('device', 'cpu') env['CUDA_VISIBLE_DEVICES'] = '100' logger.info('Requesting CPU') else: replace_theano_flag('device', 'gpu') env['CUDA_VISIBLE_DEVICES'] = ','.join( str(x) for x in self.devices) logger.info('Requesting GPUs: %s', self.devices) env['TF_CPP_MIN_LOG_LEVEL'] = '1' logger.trace('Overriding environmental variables: %s', env) EnvironmentalVariable(**env).push() import keras import keras.backend as K logger.info('Keras is loaded. The backend is: %s', K.backend()) self.toolchain = K.backend() keras.backend.set_image_dim_ordering('tf') logging.getLogger('py.warnings').addFilter( type('theano_filter', (), { 'filter' : lambda record: not ( record.module == 'topology' and record.levelname == 'WARNING' and record.funcName == 'regularizers' ) }) ) if self.parallel > 1 and self.get_toolchain() == 'theano': logger.warning('Multiple GPUs were requested, but are not ' 'supported with Keras\' Theano backend. Try the PyTorch ' 'backend or Keras\' TensorFlow backend instead. Falling back ' 'to a single device.') self.devices = self.devices[:1] def get_toolchain(self): return self.toolchain @classmethod def get_name(cls): return 'keras' def connect(self, inputs, target, data): if self.keras_version() == 1: if not isinstance(inputs, list): inputs = [inputs] else: if isinstance(inputs, (list, tuple)): if len(inputs) == 1: inputs = inputs[0] pool_2d = None if self.get_toolchain() == 'theano': import theano if theano.__version__ < '0.9': from theano.tensor.signal import pool original_pool = pool.pool_2d def pool_2d(input, ws=None, ignore_border=None, stride=None, pad=(0, 0), mode='max', ds=None, st=None, padding=None): return original_pool( input=input, ds=ds if ds is not None else ws, ignore_border=ignore_border, st=st if st is not None else stride, padding=padding if padding is not None else pad, mode=mode ) logger.trace('Connecting: %s(%s)', target, ', '.join( str(x) for x in ( inputs if isinstance(inputs, (list, tuple)) else [inputs] ) ) ) with warnings.catch_warnings(): warnings.filterwarnings( 'ignore', message='.*tensor.nnet.abstract_conv.conv2d.*', module='.*theano_backend.*' ) if pool_2d is None: return target(inputs) else: from unittest.mock import patch with patch('theano.tensor.signal.pool.pool_2d', pool_2d): return target(inputs) @staticmethod def keras_version(): from keras import __version__ return int(__version__.split('.')[0]) @staticmethod def make_model(inputs, outputs): import keras.models as M if KerasBackend.keras_version() == 1: return M.Model(input=inputs, output=outputs) return M.Model(inputs=inputs, outputs=outputs) def save(self, model, filename): keras_model = self.make_model( inputs=[node.value for node in model.inputs.values()], outputs=[node.value for node in model.outputs.values()] ) self._save_keras(keras_model, 
filename)
Apache License 2.0
datadog/datadog-unix-agent
tasks/test.py
lint_milestone
python
def lint_milestone(ctx):
    pr_url = os.environ.get("CIRCLE_PULL_REQUEST")
    if pr_url:
        import requests
        pr_id = pr_url.rsplit('/')[-1]
        res = requests.get("https://api.github.com/repos/DataDog/datadog-unix-agent/issues/{}".format(pr_id))
        pr = res.json()
        if pr.get("milestone"):
            print("Milestone: %s" % pr["milestone"].get("title", "NO_TITLE"))
            return
        print("PR %s requires a milestone" % pr_url)
        raise Exit(code=1)
    else:
        print("PR not yet created, skipping check for milestone")
Make sure PRs are assigned a milestone
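A standalone sketch of the same check outside the invoke task (the helper name is hypothetical): read the CircleCI pull-request URL from the environment and ask the GitHub API whether that PR carries a milestone.

import os
import requests

def has_milestone(pr_url):
    pr_id = pr_url.rsplit('/')[-1]
    res = requests.get(
        "https://api.github.com/repos/DataDog/datadog-unix-agent/issues/{}".format(pr_id))
    return bool(res.json().get("milestone"))

pr_url = os.environ.get("CIRCLE_PULL_REQUEST")
if pr_url:
    print("milestone set:", has_milestone(pr_url))
else:
    print("PR not yet created, skipping check")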
https://github.com/datadog/datadog-unix-agent/blob/e6be8e77ae1e80d8c279fcb0f0b8dab5a23224e1/tasks/test.py#L131-L151
import os import json import mmap import re from termcolor import colored from json.decoder import JSONDecodeError from pylint import epylint from invoke import task from invoke.exceptions import Exit from .utils import ( get_shell, get_repo_path, get_matching, ) try: basestring except NameError: basestring = str PROFILE_COV = "profile.cov" PYLINT_RC = ".pylintrc" FLAKE8_RC = ".flake8" LINT_SKIP_PATTERNS = [ r".*\/venv.*\/", r".*\/.tox\/", ] UNLICENSED_EXT_PATTERNS = [ r"LICENSE$", r"\..*$", r".*Gemfile(\.lock)?$", r".*Berksfile(\.lock)?$", r".*\.rb$", r".*\.txt$", r".*\.json$", r".*\.patch$", r".*\.yaml.*$", r".*\.md$", r".*\.ini$" ] @task() def test(ctx, targets=None, timeout=120): with ctx.cd(get_repo_path()): if not targets: print("\n--- Running unit tests on agent code:") ctx.run("python -m pytest -v .", pty=True, shell=get_shell()) print("\n--- Running unit tests on bundled checks:") test_wheels(ctx) else: print("\n--- Running unit tests on defined targets:") for target in targets.split(','): ctx.run("python -m pytest -v {}".format(target), pty=True, shell=get_shell()) @task def test_wheels(ctx): wheels = set() matches = get_matching(get_repo_path(), patterns=[r"^checks\/bundled\/.*\/tests\/.*\.py$"]) success = True wheels = set([os.path.dirname(os.path.dirname(match)) for match in matches]) for wheel in wheels: result = ctx.run('python -m pytest -v {}'.format(wheel), warn=True, pty=True, shell=get_shell()) success = (success and True) if result.ok else False @task def lint_py(ctx, targets=None): args = "--rcfile={} --reports=y".format(get_repo_path(PYLINT_RC)) files = get_matching(get_repo_path(), patterns=[r".*\.py$"], exclude_patterns=LINT_SKIP_PATTERNS) stdout, _ = epylint.py_run("{target} {args}".format(target=" ".join(files), args=args), return_std=True) try: msgs = json.load(stdout) for msg in msgs: if msg['type'].lower() == 'error': print(colored(json.dumps(msg, sort_keys=True, indent=4), "red")) else: print(colored(json.dumps(msg, sort_keys=True, indent=4), "green")) else: print(colored("Nice! No lint errors!", "green")) except JSONDecodeError: print(colored("Whoopsie Daisy! There was an issue linting your code!", "red")) @task def flake8(ctx, targets=None, branch=None): success = True if not targets: files = get_matching(get_repo_path(), patterns=[r".*\.py$"], reference=branch) result = ctx.run("flake8 --config={rc_file} {targets}".format( rc_file=get_repo_path(FLAKE8_RC), targets=' '.join(files)), pty=True, shell=get_shell()) success = True if result.ok else False else: for target in targets.split(','): print("Checking {}...".format(target)) result = ctx.run("flake8 --config={rc_file} {target}".format( rc_file=get_repo_path(FLAKE8_RC), target=target), pty=True, shell=get_shell()) success = (success and True) if result.ok else False if success: print(colored("Nice! No flakes errors!", "green")) @task
Apache License 2.0
deepmind/detcon
datasets/dataset_adapter.py
ImageDatasetAdapter._transpose_for_h2d
python
def _transpose_for_h2d(self, batch: TFBatch) -> TFBatch:
    return self._tf_transpose_helper(batch, transpose_order=(1, 2, 3, 0))
Transposes images for a batch of data.
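A sketch of what the (1, 2, 3, 0) transpose order does to an image batch: NHWC becomes HWCN, the layout used for the host-to-device double transpose. The batch size and image shape are arbitrary.

import tensorflow.compat.v2 as tf

batch = {"images": tf.zeros([8, 224, 224, 3])}            # N, H, W, C
transposed = tf.transpose(batch["images"], (1, 2, 3, 0))  # H, W, C, N
print(transposed.shape)  # (224, 224, 3, 8)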
https://github.com/deepmind/detcon/blob/7ead4e1c0e120b497113642b3f9e3d9c57753e00/datasets/dataset_adapter.py#L228-L231
import abc import enum from typing import Any, Generator, Mapping, Sequence from absl import logging import jax import jax.numpy as jnp import numpy as np import tensorflow.compat.v2 as tf import tensorflow_datasets as tfds from detcon.utils import tf_image_ops Batch = Mapping[str, np.ndarray] JaxBatch = Mapping[str, jnp.ndarray] TFBatch = Mapping[str, tf.Tensor] class DatasetMode(enum.Enum): PRETRAIN = 1 LINEAR_TRAIN = 2 EVAL = 3 SEGMENT = 4 class Split(enum.Enum): TRAIN = 1 VALID = 2 TEST = 3 @classmethod def from_string(cls, name: str) -> 'Split': return { 'TRAIN': Split.TRAIN, 'VALID': Split.VALID, 'VALIDATION': Split.VALID, 'TEST': Split.TEST }[name.upper()] class DatasetAdapter(metaclass=abc.ABCMeta): def __init__(self, dataset_directory: str, dataset_name: str, enable_double_transpose: bool, allow_caching: bool, preprocessing_config: Mapping[str, Mapping[str, Any]], use_tfds: bool): self._dataset_directory = dataset_directory self._dataset_name = dataset_name self._preprocessing_config = preprocessing_config self._use_double_transpose = ( enable_double_transpose and jax.local_devices()[0].platform == 'tpu') self._allow_caching = allow_caching self._use_tfds = use_tfds if not self._use_tfds: assert not self._allow_caching self._use_fh = False @abc.abstractmethod def num_examples(self, split: Split) -> int: @abc.abstractproperty def num_classes(self) -> int: @property def num_train_examples(self) -> int: return self.num_examples(Split.TRAIN) def normalize_images(self, images: jnp.ndarray) -> jnp.ndarray: return images @abc.abstractmethod def _transpose_for_h2d(self, batch: TFBatch) -> TFBatch: def load( self, split: Split, *, dataset_mode: DatasetMode, batch_dims: Sequence[int]) -> Generator[Batch, None, None]: if (dataset_mode is DatasetMode.EVAL and self.num_examples(split) % np.prod(batch_dims) != 0): raise ValueError(f'Test/valid must be divisible by {np.prod(batch_dims)}') ds = self._wrap(self._load(split), dataset_mode, batch_dims) logging.info('Constructed dataset:') logging.info(ds) yield from tfds.as_numpy(ds) @abc.abstractmethod def _load(self, split: Split) -> tf.data.Dataset: @abc.abstractmethod def _preprocess_pretrain(self, example: TFBatch) -> TFBatch: pass @abc.abstractmethod def _preprocess_linear_train(self, example: TFBatch) -> TFBatch: pass @abc.abstractmethod def _preprocess_segment(self, example: TFBatch) -> TFBatch: pass @abc.abstractmethod def _preprocess_eval(self, example: TFBatch) -> TFBatch: pass def _wrap( self, ds: tf.data.Dataset, dataset_mode: DatasetMode, batch_dims: Sequence[int]) -> tf.data.Dataset: total_batch_size = np.prod(batch_dims) options = tf.data.Options() options.experimental_threading.private_threadpool_size = 48 options.experimental_threading.max_intra_op_parallelism = 1 ds = ds.with_options(options) if dataset_mode is not DatasetMode.EVAL: options.experimental_deterministic = False if jax.process_count() > 1 and self._allow_caching: ds = ds.cache() ds = ds.repeat() ds = ds.shuffle(buffer_size=10 * total_batch_size, seed=0) if dataset_mode is DatasetMode.PRETRAIN: ds = ds.map( self._preprocess_pretrain, num_parallel_calls=tf.data.experimental.AUTOTUNE) elif dataset_mode is DatasetMode.LINEAR_TRAIN: ds = ds.map( self._preprocess_linear_train, num_parallel_calls=tf.data.experimental.AUTOTUNE) elif dataset_mode is DatasetMode.SEGMENT: ds = ds.map( self._preprocess_segment, num_parallel_calls=tf.data.experimental.AUTOTUNE) else: ds = ds.map( self._preprocess_eval, num_parallel_calls=tf.data.experimental.AUTOTUNE) for i, batch_size in 
enumerate(reversed(batch_dims)): ds = ds.batch(batch_size) if i == 0 and self._use_double_transpose: ds = ds.map(self._transpose_for_h2d) ds = ds.prefetch(tf.data.experimental.AUTOTUNE) return ds @abc.abstractmethod def maybe_transpose_on_device(self, batch: JaxBatch) -> JaxBatch: pass def _tf_transpose_helper( self, batch: TFBatch, transpose_order: Sequence[int]) -> TFBatch: new_batch = dict(batch) if 'images' in batch: new_batch['images'] = tf.transpose(batch['images'], transpose_order) else: new_batch['view1'] = tf.transpose(batch['view1'], transpose_order) new_batch['view2'] = tf.transpose(batch['view2'], transpose_order) return new_batch def _jax_transpose_helper( self, batch: JaxBatch, transpose_order: Sequence[int]) -> JaxBatch: new_batch = dict(batch) if 'images' in batch: new_batch['images'] = jnp.transpose(batch['images'], transpose_order) else: new_batch['view1'] = jnp.transpose(batch['view1'], transpose_order) new_batch['view2'] = jnp.transpose(batch['view2'], transpose_order) return new_batch class ImageDatasetAdapter(DatasetAdapter):
Apache License 2.0
neuraxio/neuraxle
neuraxle/metaopt/trial.py
TrialSplit.fit_trial_split
python
def fit_trial_split(self, train_data_container: DataContainer, context: ExecutionContext) -> 'TrialSplit':
    self.pipeline.set_train(True)
    self.pipeline = self.pipeline.handle_fit(train_data_container, context)

    return self
Fit the trial split pipeline with the training data container. :param train_data_container: training data container :param context: execution context :return: trial split with its fitted pipeline.
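A rough sketch of the call pattern fit_trial_split relies on, using a stand-in object instead of the real Neuraxle pipeline and DataContainer so it runs in isolation: switch the pipeline into train mode, then pass the training data through handle_fit.

class FakePipeline:
    # Stand-in exposing only the two methods fit_trial_split calls.
    def set_train(self, is_train):
        self.is_train = is_train

    def handle_fit(self, data_container, context):
        print("fitting on", data_container)
        return self

pipeline = FakePipeline()
pipeline.set_train(True)
pipeline = pipeline.handle_fit({"data_inputs": [1, 2, 3]}, context=None)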
https://github.com/neuraxio/neuraxle/blob/18479c0adf5ebfd3504a83ef6711219961c2bfdb/neuraxle/metaopt/trial.py#L433-L443
import datetime import hashlib import logging import os import traceback import json from enum import Enum from logging import FileHandler, Logger from typing import Dict, List, Callable, Iterable, Tuple import numpy as np from neuraxle.base import BaseStep, ExecutionContext, LOGGER_FORMAT, DATE_FORMAT from neuraxle.data_container import DataContainer from neuraxle.hyperparams.space import HyperparameterSamples TRIAL_DATETIME_STR_FORMAT = '%m/%d/%Y, %H:%M:%S' class Trial: def __init__( self, trial_number: int, hyperparams: HyperparameterSamples, main_metric_name: str, save_trial_function: Callable, status: 'TRIAL_STATUS' = None, pipeline: BaseStep = None, validation_splits: List['TrialSplit'] = None, cache_folder: str = None, error: str = None, error_traceback: str = None, start_time: datetime.datetime = None, end_time: datetime.datetime = None, logger: Logger = None ): self.trial_number = trial_number self.save_trial_function: Callable = save_trial_function if status is None: status = TRIAL_STATUS.PLANNED if validation_splits is None: validation_splits = [] self.main_metric_name: str = main_metric_name self.status: TRIAL_STATUS = status self.hyperparams: HyperparameterSamples = hyperparams self.pipeline: BaseStep = pipeline self.validation_splits: List['TrialSplit'] = validation_splits self.cache_folder: str = cache_folder self.error_traceback: str = error_traceback self.error: str = error self.start_time: datetime.datetime = start_time self.end_time: datetime.datetime = end_time if logger is None: if self.cache_folder is not None: logger = self._initialize_logger_with_file() else: logger = logging.getLogger() self.logger: Logger = logger def save_trial(self) -> 'Trial': self.save_trial_function(self) return self def new_validation_split(self, pipeline: BaseStep, delete_pipeline_on_completion: bool = True) -> 'TrialSplit': trial_split: TrialSplit = TrialSplit( trial=self, split_number=len(self.validation_splits), main_metric_name=self.main_metric_name, pipeline=pipeline, delete_pipeline_on_completion=delete_pipeline_on_completion ) self.validation_splits.append(trial_split) self.save_trial() return trial_split def save_model(self, label: str): assert self.cache_folder is not None self._save_model(self.pipeline, label) def _save_model(self, pipeline: BaseStep, label: str): hyperparams = self.hyperparams.to_flat_dict() trial_hash = self._get_trial_hash(hyperparams) path = os.path.join(self.cache_folder, label) pipeline.set_name(trial_hash).save(ExecutionContext(path), full_dump=True) def get_model(self, label: str) -> BaseStep: assert self.cache_folder is not None hyperparams = self.hyperparams.to_flat_dict() trial_hash = self._get_trial_hash(hyperparams) path = os.path.join(self.cache_folder, label) return ExecutionContext(path).load(trial_hash) def set_main_metric_name(self, name: str) -> 'Trial': self.main_metric_name = name return self def set_hyperparams(self, hyperparams: HyperparameterSamples) -> 'Trial': self.hyperparams = hyperparams return self def is_higher_score_better(self) -> bool: return self.validation_splits[0].is_higher_score_better() def get_validation_score(self) -> float: scores = [ validation_split.get_best_validation_score() for validation_split in self.validation_splits if validation_split.is_success() ] score = sum(scores) / len(scores) return score def get_n_epoch_to_best_validation_score(self) -> float: n_epochs = [ validation_split.get_n_epochs_to_best_validation_score() for validation_split in self.validation_splits if validation_split.is_success() ] n_epochs = 
sum(n_epochs) / len(n_epochs) return n_epochs def set_success(self) -> 'Trial': self.status = TRIAL_STATUS.SUCCESS self.save_trial() return self def update_final_trial_status(self): success = True for validation_split in self.validation_splits: if not validation_split.is_success(): success = False if success: self.status = TRIAL_STATUS.SUCCESS else: self.status = TRIAL_STATUS.FAILED self.save_trial() def set_failed(self, error: Exception) -> 'Trial': self.status = TRIAL_STATUS.FAILED self.error = str(error) self.error_traceback = traceback.format_exc() self.save_trial() return self def get_trained_pipeline(self, split_number: int = 0): return self.validation_splits[split_number].get_pipeline() def _get_trial_hash(self, hp_dict: Dict): current_hyperparameters_hash = hashlib.md5(str.encode(str(hp_dict))).hexdigest() return current_hyperparameters_hash def to_json(self): return { 'trial_number': self.trial_number, 'status': self.status.value, 'hyperparams': self.hyperparams.to_flat_dict(), 'validation_splits': [v.to_json() for v in self.validation_splits], 'error': self.error, 'error_traceback': self.error_traceback, 'start_time': self.start_time.strftime(TRIAL_DATETIME_STR_FORMAT) if self.start_time is not None else '', 'end_time': self.end_time.strftime(TRIAL_DATETIME_STR_FORMAT) if self.end_time is not None else '', 'main_metric_name': self.main_metric_name } @staticmethod def from_json(update_trial_function: Callable, trial_json: Dict, cache_folder: str = None) -> 'Trial': trial: Trial = Trial( trial_number=trial_json["trial_number"], main_metric_name=trial_json['main_metric_name'], status=TRIAL_STATUS(trial_json['status']), hyperparams=HyperparameterSamples(trial_json['hyperparams']), save_trial_function=update_trial_function, error=trial_json['error'], error_traceback=trial_json['error_traceback'], start_time=datetime.datetime.strptime(trial_json['start_time'], TRIAL_DATETIME_STR_FORMAT), end_time=datetime.datetime.strptime(trial_json['start_time'], TRIAL_DATETIME_STR_FORMAT), cache_folder=cache_folder, logger=None ) trial.validation_splits = [ TrialSplit.from_json( trial=trial, trial_split_json=validation_split_json ) for validation_split_json in trial_json['validation_splits'] ] return trial def __getitem__(self, item) -> 'TrialSplit': return self.validation_splits[item] def _initialize_logger_with_file(self) -> logging.Logger: os.makedirs(self.cache_folder, exist_ok=True) logfile_path = os.path.join(self.cache_folder, f"trial_{self.trial_number}.log") logger_name = f"trial_{self.trial_number}" logger = logging.getLogger(logger_name) formatter = logging.Formatter(fmt=LOGGER_FORMAT, datefmt=DATE_FORMAT) file_handler = logging.FileHandler(filename=logfile_path) file_handler.setFormatter(formatter) logger.addHandler(file_handler) return logger def _free_logger_file(self): for h in self.logger.handlers: if isinstance(h, FileHandler): self.logger.removeHandler(h) def __enter__(self): self.start_time = datetime.datetime.now() self.status = TRIAL_STATUS.STARTED self.logger.info( '\nnew trial: {}'.format( json.dumps(self.hyperparams.to_nested_dict(), sort_keys=True, indent=4))) self.save_trial() return self def __exit__(self, exc_type, exc_val, exc_tb): self.end_time = datetime.datetime.now() del self.pipeline if exc_type is not None: self.set_failed(exc_val) self.save_trial() raise exc_val self.save_trial() self._free_logger_file() return self class TrialSplit: def __init__( self, trial: Trial, split_number: int, main_metric_name: str, status: 'TRIAL_STATUS' = None, error: Exception = None, 
error_traceback: str = None, metrics_results: Dict = None, start_time: datetime.datetime = None, end_time: datetime.datetime = None, pipeline: BaseStep = None, delete_pipeline_on_completion: bool = True ): if status is None: status = TRIAL_STATUS.PLANNED self.trial: Trial = trial self.split_number: int = split_number self.status: TRIAL_STATUS = status self.error: Exception = error self.error_traceback: str = error_traceback if metrics_results is None: metrics_results = {} self.metrics_results: Dict = metrics_results self.end_time: datetime.datetime = end_time self.start_time: datetime.datetime = start_time self.pipeline: BaseStep = pipeline self.main_metric_name: str = main_metric_name self.delete_pipeline_on_completion = delete_pipeline_on_completion def get_metric_names(self) -> List[str]: return list(self.metrics_results.keys()) def save_parent_trial(self) -> 'TrialSplit': self.trial.save_trial() return self def save_model(self, label: str): self.trial._save_model(self.pipeline, label)
Apache License 2.0
bpython/curtsies
curtsies/formatstring.py
FmtStr.new_with_atts_removed
python
def new_with_atts_removed(self, *attributes: str) -> "FmtStr":
    result = FmtStr(
        *(Chunk(bfs.s, bfs.atts.remove(*attributes)) for bfs in self.chunks)
    )
    return result
Returns a new FmtStr with the same content but some attributes removed
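A small sketch assuming curtsies' fmtstr helper (defined later in the same module): build a formatted string, then strip only the background attribute.

from curtsies.formatstring import fmtstr

s = fmtstr("hello", fg="red", bg="blue", bold=True)
plain_bg = s.new_with_atts_removed("bg")
print(repr(plain_bg))  # still red and bold, background attribute removed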
https://github.com/bpython/curtsies/blob/a6019a45eb9a9897f68b1a404245076cbd74492c/curtsies/formatstring.py#L586-L590
import itertools import re import sys from cwcwidth import wcswidth, wcwidth from typing import ( Iterator, Tuple, List, Union, Optional, Any, Mapping, cast, MutableMapping, no_type_check, Type, Callable, Iterable, ) try: from functools import cached_property except ImportError: from backports.cached_property import cached_property from .escseqparse import parse, remove_ansi from .termformatconstants import ( FG_COLORS, BG_COLORS, STYLES, FG_NUMBER_TO_COLOR, BG_NUMBER_TO_COLOR, RESET_ALL, RESET_BG, RESET_FG, seq, ) one_arg_xforms: Mapping[str, Callable[[str], str]] = { "bold": lambda s: seq(STYLES["bold"]) + s + seq(RESET_ALL), "dark": lambda s: seq(STYLES["dark"]) + s + seq(RESET_ALL), "underline": lambda s: seq(STYLES["underline"]) + s + seq(RESET_ALL), "blink": lambda s: seq(STYLES["blink"]) + s + seq(RESET_ALL), "invert": lambda s: seq(STYLES["invert"]) + s + seq(RESET_ALL), } two_arg_xforms: Mapping[str, Callable[[str, int], str]] = { "fg": lambda s, v: "{}{}{}".format(seq(v), s, seq(RESET_FG)), "bg": lambda s, v: seq(v) + s + seq(RESET_BG), } xforms: MutableMapping[str, Union[Callable[[str], str], Callable[[str, int], str]]] = {} xforms.update(one_arg_xforms) xforms.update(two_arg_xforms) class FrozenDict(dict): @no_type_check def __setitem__(self, key, value): raise Exception("Cannot change value.") @no_type_check def update(self, *args, **kwds): raise Exception("Cannot change value.") def extend(self, dictlike: Mapping[str, Union[int, bool]]) -> "FrozenDict": return FrozenDict(itertools.chain(self.items(), dictlike.items())) def remove(self, *keys: str) -> "FrozenDict": return FrozenDict((k, v) for k, v in self.items() if k not in keys) def stable_format_dict(d: Mapping) -> str: inner = ", ".join( "{}: {}".format( repr(k)[1:] if repr(k).startswith("u'") or repr(k).startswith('u"') else repr(k), v, ) for k, v in sorted(d.items()) ) return "{%s}" % inner class Chunk: def __init__( self, string: str, atts: Optional[Mapping[str, Union[int, bool]]] = None ): if not isinstance(string, str): raise ValueError("unicode string required, got %r" % string) self._s = string self._atts = FrozenDict(atts if atts else {}) @property def s(self) -> str: return self._s @property def atts(self) -> Mapping[str, Union[int, bool]]: return self._atts def __len__(self) -> int: return len(self._s) @property def width(self) -> int: width = wcswidth(self._s, None) if len(self._s) > 0 and width < 1: raise ValueError("Can't calculate width of string %r" % self._s) return width @cached_property def color_str(self) -> str: s = self.s for k, v in sorted(self.atts.items()): if k not in xforms: continue elif v is False: continue elif v is True: s = one_arg_xforms[k](s) else: s = two_arg_xforms[k](s, v) return s def __str__(self) -> str: value = self.color_str if isinstance(value, bytes): return value.decode("utf8", "replace") return value def __eq__(self, other: Any) -> bool: if not isinstance(other, Chunk): return NotImplemented return self.s == other.s and self.atts == other.atts def __hash__(self) -> int: return hash((self.s, self.atts)) def __repr__(self) -> str: return "Chunk({s}{sep}{atts})".format( s=repr(self.s), sep=", " if self.atts else "", atts=stable_format_dict(self.atts) if self.atts else "", ) def repr_part(self) -> str: def pp_att(att: str) -> str: if att == "fg": return FG_NUMBER_TO_COLOR[self.atts[att]] elif att == "bg": return "on_" + BG_NUMBER_TO_COLOR[self.atts[att]] else: return att atts_out = {k: v for (k, v) in self.atts.items() if v} return ( "".join(pp_att(att) + "(" for att in 
sorted(atts_out)) + repr(self.s) + ")" * len(atts_out) ) def splitter(self) -> "ChunkSplitter": return ChunkSplitter(self) class ChunkSplitter: def __init__(self, chunk: Chunk) -> None: self.chunk = chunk self.internal_offset = 0 self.internal_width = 0 divides = [0] for c in self.chunk.s: divides.append(divides[-1] + wcwidth(c)) self.divides = divides def reinit(self, chunk: Chunk) -> None: self.__init__(chunk) def request(self, max_width: int) -> Optional[Tuple[int, Chunk]]: if max_width < 1: raise ValueError("requires positive integer max_width") s = self.chunk.s length = len(s) if self.internal_offset == len(s): return None width = 0 start_offset = i = self.internal_offset replacement_char = " " while True: w = wcswidth(s[i], None) if width + w > max_width: self.internal_offset = i self.internal_width += width if width < max_width: assert ( width + 1 == max_width ), "unicode character width of more than 2!?!" assert w == 2, "unicode character of width other than 2?" return ( width + 1, Chunk( s[start_offset : self.internal_offset] + replacement_char, atts=self.chunk.atts, ), ) return ( width, Chunk(s[start_offset : self.internal_offset], atts=self.chunk.atts), ) width += w if i + 1 == length: self.internal_offset = ( i + 1 ) self.internal_width += width return ( width, Chunk(s[start_offset : self.internal_offset], atts=self.chunk.atts), ) i += 1 class FmtStr: def __init__(self, *components: Chunk) -> None: self.chunks = list(components) self._unicode: Optional[str] = None self._len: Optional[int] = None self._s: Optional[str] = None self._width: Optional[int] = None @classmethod def from_str(cls, s: str) -> "FmtStr": if "\x1b[" in s: try: tokens_and_strings = parse(s) except ValueError: return FmtStr(Chunk(remove_ansi(s))) else: chunks = [] cur_fmt = {} for x in tokens_and_strings: if isinstance(x, dict): cur_fmt.update(x) elif isinstance(x, str): atts = parse_args( (), {k: v for k, v in cur_fmt.items() if v is not None} ) chunks.append(Chunk(x, atts=atts)) else: raise TypeError(f"Expected dict or str, not {type(x)}") return FmtStr(*chunks) else: return FmtStr(Chunk(s)) def copy_with_new_str(self, new_str: str) -> "FmtStr": old_atts = { att: value for bfs in self.chunks for (att, value) in bfs.atts.items() } return FmtStr(Chunk(new_str, old_atts)) def setitem(self, startindex: int, fs: Union[str, "FmtStr"]) -> "FmtStr": return self.setslice_with_length(startindex, startindex + 1, fs, len(self)) def setslice_with_length( self, startindex: int, endindex: int, fs: Union[str, "FmtStr"], length: int ) -> "FmtStr": if len(self) < startindex: fs = " " * (startindex - len(self)) + fs if len(self) > endindex: fs = fs + " " * (endindex - startindex - len(fs)) assert len(fs) == endindex - startindex, (len(fs), startindex, endindex) result = self.splice(fs, startindex, endindex) if len(result) > length: raise ValueError( "Your change is resulting in a longer fmtstr than the original length and this is not supported." 
) return result def splice( self, new_str: Union[str, "FmtStr"], start: int, end: Optional[int] = None ) -> "FmtStr": if len(new_str) == 0: return self new_fs = new_str if isinstance(new_str, FmtStr) else fmtstr(new_str) assert len(new_fs.chunks) > 0, (new_fs.chunks, new_fs) new_components = [] inserted = False if end is None: end = start tail = None for bfs, bfs_start, bfs_end in zip( self.chunks, self.divides[:-1], self.divides[1:] ): if end == bfs_start == 0: new_components.extend(new_fs.chunks) new_components.append(bfs) inserted = True elif bfs_start <= start < bfs_end: divide = start - bfs_start head = Chunk(bfs.s[:divide], atts=bfs.atts) tail = Chunk(bfs.s[end - bfs_start :], atts=bfs.atts) new_components.extend([head] + new_fs.chunks) inserted = True if bfs_start < end < bfs_end: tail = Chunk(bfs.s[end - bfs_start :], atts=bfs.atts) new_components.append(tail) elif bfs_start < end < bfs_end: divide = start - bfs_start tail = Chunk(bfs.s[end - bfs_start :], atts=bfs.atts) new_components.append(tail) elif bfs_start >= end or bfs_end <= start: new_components.append(bfs) if not inserted: new_components.extend(new_fs.chunks) inserted = True return FmtStr(*(s for s in new_components if s.s)) def append(self, string: Union[str, "FmtStr"]) -> "FmtStr": return self.splice(string, len(self.s)) def copy_with_new_atts(self, **attributes: Union[bool, int]) -> "FmtStr": result = FmtStr(*(Chunk(bfs.s, bfs.atts.extend(attributes)) for bfs in self.chunks)) return result def join(self, iterable: Iterable[Union[str, "FmtStr"]]) -> "FmtStr": before: List[Chunk] = [] chunks: List[Chunk] = [] for s in iterable: chunks.extend(before) before = self.chunks if isinstance(s, FmtStr): chunks.extend(s.chunks) elif isinstance(s, (bytes, str)): chunks.extend(fmtstr(s).chunks) else: raise TypeError("expected str or FmtStr, %r found" % type(s)) return FmtStr(*chunks) def split( self, sep: Optional[str] = None, maxsplit: Optional[int] = None, regex: bool = False, ) -> List["FmtStr"]: if maxsplit is not None: raise NotImplementedError("no maxsplit yet") s = self.s if sep is None: sep = r"\s+" elif not regex: sep = re.escape(sep) matches = list(re.finditer(sep, s)) return [ self[start:end] for start, end in zip( [0] + [m.end() for m in matches], [m.start() for m in matches] + [len(s)], ) ] def splitlines(self, keepends: bool = False) -> List["FmtStr"]: lines = self.split("\n") return ( [line + "\n" for line in lines] if keepends else (lines if lines[-1] else lines[:-1]) ) def ljust(self, width: int, fillchar: Optional[str] = None) -> "FmtStr": if fillchar is not None: return fmtstr(self.s.ljust(width, fillchar), **self.shared_atts) to_add = " " * (width - len(self.s)) shared = self.shared_atts if "bg" in shared: return self + fmtstr(to_add, bg=shared["bg"]) if to_add else self else: uniform = self.new_with_atts_removed("bg") return uniform + fmtstr(to_add, **self.shared_atts) if to_add else uniform def rjust(self, width: int, fillchar: Optional[str] = None) -> "FmtStr": if fillchar is not None: return fmtstr(self.s.rjust(width, fillchar), **self.shared_atts) to_add = " " * (width - len(self.s)) shared = self.shared_atts if "bg" in shared: return fmtstr(to_add, bg=shared["bg"]) + self if to_add else self else: uniform = self.new_with_atts_removed("bg") return fmtstr(to_add, **self.shared_atts) + uniform if to_add else uniform def __str__(self) -> str: if self._unicode is not None: return self._unicode self._unicode = "".join(str(fs) for fs in self.chunks) return self._unicode def __len__(self) -> int: if self._len is 
not None: return self._len value = sum(len(fs) for fs in self.chunks) self._len = value return value @property def width(self) -> int: if self._width is not None: return self._width value = sum(fs.width for fs in self.chunks) self._width = value return value def width_at_offset(self, n: int) -> int: width = wcswidth(self.s, n) assert width != -1 return width def __repr__(self) -> str: return "+".join(fs.repr_part() for fs in self.chunks) def __eq__(self, other: Any) -> bool: if isinstance(other, (str, bytes, FmtStr)): return str(self) == str(other) return NotImplemented def __hash__(self) -> int: return hash(str(self)) def __add__(self, other: Union["FmtStr", str]) -> "FmtStr": if isinstance(other, FmtStr): return FmtStr(*(self.chunks + other.chunks)) elif isinstance(other, (bytes, str)): return FmtStr(*(self.chunks + [Chunk(other)])) return NotImplemented def __radd__(self, other: Union["FmtStr", str]) -> "FmtStr": if isinstance(other, FmtStr): return FmtStr(*(x for x in (other.chunks + self.chunks))) elif isinstance(other, (bytes, str)): return FmtStr(*(x for x in ([Chunk(other)] + self.chunks))) return NotImplemented def __mul__(self, other: int) -> "FmtStr": if isinstance(other, int): return sum((self for _ in range(other)), FmtStr()) return NotImplemented @property def shared_atts(self) -> Mapping[str, Union[int, bool]]: atts = {} first = self.chunks[0] for att in sorted(first.atts): if all( fs.atts.get(att, "???") == first.atts[att] for fs in self.chunks if len(fs) > 0 ): atts[att] = first.atts[att] return atts
MIT License
qiuqiangkong/torchlibrosa
torchlibrosa/stft.py
ISTFT._get_ifft_window_sum_onnx
python
def _get_ifft_window_sum_onnx(self, frames_num, device):
    ifft_window_sum = librosa.filters.window_sumsquare(window=self.window,
        n_frames=frames_num, win_length=self.win_length, n_fft=self.n_fft,
        hop_length=self.hop_length)

    ifft_window_sum = torch.Tensor(ifft_window_sum)

    if device:
        ifft_window_sum = ifft_window_sum.to(device)

    return ifft_window_sum
r"""Pre-calculate overlap-add window sum for reconstructing signals when using ONNX. Args: frames_num: int device: str | None Returns: ifft_window_sum: (audio_samples,)
https://github.com/qiuqiangkong/torchlibrosa/blob/63c1b4c63fedf91b9158fb86af2a31120f5423e7/torchlibrosa/stft.py#L585-L607
import math import argparse import librosa import numpy as np import torch import torch.nn as nn import torch.nn.functional as F from torch.nn.parameter import Parameter class DFTBase(nn.Module): def __init__(self): super(DFTBase, self).__init__() def dft_matrix(self, n): (x, y) = np.meshgrid(np.arange(n), np.arange(n)) omega = np.exp(-2 * np.pi * 1j / n) W = np.power(omega, x * y) return W def idft_matrix(self, n): (x, y) = np.meshgrid(np.arange(n), np.arange(n)) omega = np.exp(2 * np.pi * 1j / n) W = np.power(omega, x * y) return W class DFT(DFTBase): def __init__(self, n, norm): super(DFT, self).__init__() self.W = self.dft_matrix(n) self.inv_W = self.idft_matrix(n) self.W_real = torch.Tensor(np.real(self.W)) self.W_imag = torch.Tensor(np.imag(self.W)) self.inv_W_real = torch.Tensor(np.real(self.inv_W)) self.inv_W_imag = torch.Tensor(np.imag(self.inv_W)) self.n = n self.norm = norm def dft(self, x_real, x_imag): z_real = torch.matmul(x_real, self.W_real) - torch.matmul(x_imag, self.W_imag) z_imag = torch.matmul(x_imag, self.W_real) + torch.matmul(x_real, self.W_imag) if self.norm is None: pass elif self.norm == 'ortho': z_real /= math.sqrt(self.n) z_imag /= math.sqrt(self.n) return z_real, z_imag def idft(self, x_real, x_imag): z_real = torch.matmul(x_real, self.inv_W_real) - torch.matmul(x_imag, self.inv_W_imag) z_imag = torch.matmul(x_imag, self.inv_W_real) + torch.matmul(x_real, self.inv_W_imag) if self.norm is None: z_real /= self.n elif self.norm == 'ortho': z_real /= math.sqrt(n) z_imag /= math.sqrt(n) return z_real, z_imag def rdft(self, x_real): n_rfft = self.n // 2 + 1 z_real = torch.matmul(x_real, self.W_real[..., 0 : n_rfft]) z_imag = torch.matmul(x_real, self.W_imag[..., 0 : n_rfft]) if self.norm is None: pass elif self.norm == 'ortho': z_real /= math.sqrt(self.n) z_imag /= math.sqrt(self.n) return z_real, z_imag def irdft(self, x_real, x_imag): n_rfft = self.n // 2 + 1 flip_x_real = torch.flip(x_real, dims=(-1,)) flip_x_imag = torch.flip(x_imag, dims=(-1,)) x_real = torch.cat((x_real, flip_x_real[..., 1 : n_rfft - 1]), dim=-1) x_imag = torch.cat((x_imag, -1. 
* flip_x_imag[..., 1 : n_rfft - 1]), dim=-1) z_real = torch.matmul(x_real, self.inv_W_real) - torch.matmul(x_imag, self.inv_W_imag) if self.norm is None: z_real /= self.n elif self.norm == 'ortho': z_real /= math.sqrt(n) return z_real class STFT(DFTBase): def __init__(self, n_fft=2048, hop_length=None, win_length=None, window='hann', center=True, pad_mode='reflect', freeze_parameters=True): super(STFT, self).__init__() assert pad_mode in ['constant', 'reflect'] self.n_fft = n_fft self.hop_length = hop_length self.win_length = win_length self.window = window self.center = center self.pad_mode = pad_mode if self.win_length is None: self.win_length = n_fft if self.hop_length is None: self.hop_length = int(self.win_length // 4) fft_window = librosa.filters.get_window(window, self.win_length, fftbins=True) fft_window = librosa.util.pad_center(fft_window, n_fft) self.W = self.dft_matrix(n_fft) out_channels = n_fft // 2 + 1 self.conv_real = nn.Conv1d(in_channels=1, out_channels=out_channels, kernel_size=n_fft, stride=self.hop_length, padding=0, dilation=1, groups=1, bias=False) self.conv_imag = nn.Conv1d(in_channels=1, out_channels=out_channels, kernel_size=n_fft, stride=self.hop_length, padding=0, dilation=1, groups=1, bias=False) self.conv_real.weight.data = torch.Tensor( np.real(self.W[:, 0 : out_channels] * fft_window[:, None]).T)[:, None, :] self.conv_imag.weight.data = torch.Tensor( np.imag(self.W[:, 0 : out_channels] * fft_window[:, None]).T)[:, None, :] if freeze_parameters: for param in self.parameters(): param.requires_grad = False def forward(self, input): x = input[:, None, :] if self.center: x = F.pad(x, pad=(self.n_fft // 2, self.n_fft // 2), mode=self.pad_mode) real = self.conv_real(x) imag = self.conv_imag(x) real = real[:, None, :, :].transpose(2, 3) imag = imag[:, None, :, :].transpose(2, 3) return real, imag def magphase(real, imag): mag = (real ** 2 + imag ** 2) ** 0.5 cos = real / torch.clamp(mag, 1e-10, np.inf) sin = imag / torch.clamp(mag, 1e-10, np.inf) return mag, cos, sin class ISTFT(DFTBase): def __init__(self, n_fft=2048, hop_length=None, win_length=None, window='hann', center=True, pad_mode='reflect', freeze_parameters=True, onnx=False, frames_num=None, device=None): super(ISTFT, self).__init__() assert pad_mode in ['constant', 'reflect'] if not onnx: assert frames_num is None, "When onnx=False, frames_num must be None!" assert device is None, "When onnx=False, device must be None!" 
self.n_fft = n_fft self.hop_length = hop_length self.win_length = win_length self.window = window self.center = center self.pad_mode = pad_mode self.onnx = onnx if self.win_length is None: self.win_length = self.n_fft if self.hop_length is None: self.hop_length = int(self.win_length // 4) self.init_real_imag_conv() self.init_overlap_add_window() if self.onnx: self.init_onnx_modules(frames_num, device) if freeze_parameters: for param in self.parameters(): param.requires_grad = False def init_real_imag_conv(self): self.W = self.idft_matrix(self.n_fft) / self.n_fft self.conv_real = nn.Conv1d(in_channels=self.n_fft, out_channels=self.n_fft, kernel_size=1, stride=1, padding=0, dilation=1, groups=1, bias=False) self.conv_imag = nn.Conv1d(in_channels=self.n_fft, out_channels=self.n_fft, kernel_size=1, stride=1, padding=0, dilation=1, groups=1, bias=False) ifft_window = librosa.filters.get_window(self.window, self.win_length, fftbins=True) ifft_window = librosa.util.pad_center(ifft_window, self.n_fft) self.conv_real.weight.data = torch.Tensor( np.real(self.W * ifft_window[None, :]).T)[:, :, None] self.conv_imag.weight.data = torch.Tensor( np.imag(self.W * ifft_window[None, :]).T)[:, :, None] def init_overlap_add_window(self): ola_window = librosa.filters.get_window(self.window, self.win_length, fftbins=True) ola_window = librosa.util.normalize(ola_window, norm=None) ** 2 ola_window = librosa.util.pad_center(ola_window, self.n_fft) ola_window = torch.Tensor(ola_window) self.register_buffer('ola_window', ola_window) def init_onnx_modules(self, frames_num, device): self.reverse = nn.Conv1d(in_channels=self.n_fft // 2 + 1, out_channels=self.n_fft // 2 - 1, kernel_size=1, bias=False) tmp = np.zeros((self.n_fft // 2 - 1, self.n_fft // 2 + 1, 1)) tmp[:, 1 : -1, 0] = np.array(np.eye(self.n_fft // 2 - 1)[::-1]) self.reverse.weight.data = torch.Tensor(tmp) self.overlap_add = nn.ConvTranspose2d(in_channels=self.n_fft, out_channels=1, kernel_size=(self.n_fft, 1), stride=(self.hop_length, 1), bias=False) self.overlap_add.weight.data = torch.Tensor(np.eye(self.n_fft)[:, None, :, None]) if frames_num: self.ifft_window_sum = self._get_ifft_window_sum_onnx(frames_num, device) else: self.ifft_window_sum = [] def forward(self, real_stft, imag_stft, length): assert real_stft.ndimension() == 4 and imag_stft.ndimension() == 4 batch_size, _, frames_num, _ = real_stft.shape real_stft = real_stft[:, 0, :, :].transpose(1, 2) imag_stft = imag_stft[:, 0, :, :].transpose(1, 2) if self.onnx: full_real_stft, full_imag_stft = self._get_full_stft_onnx(real_stft, imag_stft) else: full_real_stft, full_imag_stft = self._get_full_stft(real_stft, imag_stft) s_real = self.conv_real(full_real_stft) - self.conv_imag(full_imag_stft) if self.onnx: y = self._overlap_add_divide_window_sum_onnx(s_real, frames_num) else: y = self._overlap_add_divide_window_sum(s_real, frames_num) y = self._trim_edges(y, length) return y def _get_full_stft(self, real_stft, imag_stft): full_real_stft = torch.cat((real_stft, torch.flip(real_stft[:, 1 : -1, :], dims=[1])), dim=1) full_imag_stft = torch.cat((imag_stft, - torch.flip(imag_stft[:, 1 : -1, :], dims=[1])), dim=1) return full_real_stft, full_imag_stft def _get_full_stft_onnx(self, real_stft, imag_stft): full_real_stft = torch.cat((real_stft, self.reverse(real_stft)), dim=1) full_imag_stft = torch.cat((imag_stft, - self.reverse(imag_stft)), dim=1) return full_real_stft, full_imag_stft def _overlap_add_divide_window_sum(self, s_real, frames_num): output_samples = (s_real.shape[-1] - 1) * self.hop_length + 
self.win_length y = torch.nn.functional.fold(input=s_real, output_size=(1, output_samples), kernel_size=(1, self.win_length), stride=(1, self.hop_length)) y = y[:, 0, 0, :] ifft_window_sum = self._get_ifft_window(frames_num) ifft_window_sum = torch.clamp(ifft_window_sum, 1e-11, np.inf) y = y / ifft_window_sum[None, :] return y def _get_ifft_window(self, frames_num): output_samples = (frames_num - 1) * self.hop_length + self.win_length window_matrix = self.ola_window[None, :, None].repeat(1, 1, frames_num) ifft_window_sum = F.fold(input=window_matrix, output_size=(1, output_samples), kernel_size=(1, self.win_length), stride=(1, self.hop_length)) ifft_window_sum = ifft_window_sum.squeeze() return ifft_window_sum def _overlap_add_divide_window_sum_onnx(self, s_real, frames_num): s_real = s_real[..., None] y = self.overlap_add(s_real)[:, 0, :, 0] if len(self.ifft_window_sum) != y.shape[1]: device = s_real.device self.ifft_window_sum = self._get_ifft_window_sum_onnx(frames_num, device) ifft_window_sum = torch.clamp(self.ifft_window_sum, 1e-11, np.inf) y = y / ifft_window_sum[None, :] return y
MIT License
sepandhaghighi/qpage
source/setup.py
sample_handler
python
def sample_handler():
    response = input(
        "Press [S] to enter sample site material running or other keys to continue with your data")
    print_line(70)
    if response.upper() == "S":
        sample_site_download(is_sample_downloaded())
Ask whether to run the sample website. :return: None. Calls: sample_site_download, is_sample_downloaded
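A standalone sketch of the same prompt pattern; input() is replaced by an injectable callable (a hypothetical tweak, not how qpage structures it) so the example runs non-interactively.

def ask_for_sample_site(ask=input):
    response = ask("Press [S] for the sample site or any other key to continue: ")
    return response.upper() == "S"

print(ask_for_sample_site(ask=lambda _: "s"))  # True -> sample site would be downloaded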
https://github.com/sepandhaghighi/qpage/blob/79501b458cd8ab24e421f01b884133c946d8b36a/source/setup.py#L98-L111
from qpage import * import sys import gc import doctest def error_handler(): close_files() vector_2 = error_finder() error_vector = vector_2[0] pass_vector = vector_2[1] print(str(len(error_vector)) + " Error") print("Please Check Following :\n") show_items(error_vector) for i, item in enumerate(pass_vector): print(str(i + len(error_vector) + 1) + "-" + item) enter_to_exit() main_handler() def file_handler(): for i in ACTUAL_NAME: html_init(i) menu_writer() for i in ACTUAL_NAME: contain(i) html_end(i) css_creator() icon_creator() robot_maker() close_files() def main_handler_2(time_1=0): file_handler() total_perf_time = generation_time(time_1) print("HOMEPAGE is ready,generated in " + str(total_perf_time) + " sec") print("Upload output folder contains directly to your host") print("Please Don't Change HTML Files Name") address_print() print_warning() file_size() logger(True, perf_time=total_perf_time) if internet(): server() browse = int(input("Preview HOMEPAGE?[1] or Not[2]")) if browse == 1: preview() close_files() gc.collect() def response_handler(response): if response: print( "At least one of the folders create for the first time ,\n" " please put your data in proper order and run program again\n Program Reboot Automatically in 3 Sec") wait_func(3) main_handler(False) sys.exit()
MIT License
microsoft/qdk-python
azure-quantum/azure/quantum/aio/storage.py
set_blob_metadata
python
async def set_blob_metadata(blob_url: str, metadata: Dict[str, str]):
    blob_client = BlobClient.from_blob_url(blob_url)
    logger.info(
        f"Setting blob properties '{blob_client.blob_name}'"
        + f"from container '{blob_client.container_name}' on account:"
        + f"'{blob_client.account_name}'"
    )
    return await blob_client.set_blob_metadata(metadata=metadata)
Sets the provided dictionary as the metadata on the Azure blob
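A minimal usage sketch (not from the repository): it assumes a blob URL that already carries a SAS token with write permission; the URL and metadata values below are placeholders.

import asyncio
from typing import Dict

from azure.storage.blob.aio import BlobClient


async def tag_blob(blob_url: str, metadata: Dict[str, str]) -> None:
    # Same pattern as set_blob_metadata above: build an async client
    # straight from the SAS-signed blob URL and push the metadata.
    blob_client = BlobClient.from_blob_url(blob_url)
    try:
        await blob_client.set_blob_metadata(metadata=metadata)
    finally:
        await blob_client.close()


# Placeholder URL; a real one would include a valid SAS token.
asyncio.run(tag_blob(
    "https://myaccount.blob.core.windows.net/jobs/output.json?sv=...",
    {"jobId": "1234", "status": "done"},
))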
https://github.com/microsoft/qdk-python/blob/d0a87fda57dc360c96d9ce9772b71406d9b29ebe/azure-quantum/azure/quantum/aio/storage.py#L223-L231
import logging from typing import Any, Dict from azure.core import exceptions from azure.storage.blob.aio import ( BlobServiceClient, ContainerClient, BlobClient, BlobType, ) from azure.storage.blob import ( BlobSasPermissions, ContentSettings, generate_blob_sas, generate_container_sas, ) from datetime import datetime, timedelta from enum import Enum logger = logging.getLogger(__name__) async def create_container( connection_string: str, container_name: str ) -> ContainerClient: blob_service_client = BlobServiceClient.from_connection_string( connection_string ) logger.info( f'{"Initializing storage client for account:"}' + f"{blob_service_client.account_name}" ) container_client = blob_service_client.get_container_client(container_name) await create_container_using_client(container_client) return container_client async def create_container_using_client(container_client: ContainerClient): container_exists = await container_client.exists() if not container_exists: logger.debug( f'{" - uploading to **new** container:"}' f"{container_client.container_name}" ) await container_client.create_container() async def get_container_uri(connection_string: str, container_name: str) -> str: container = await create_container(connection_string, container_name) logger.info( f'{"Creating SAS token for container"}' + f"'{container_name}' on account: '{container.account_name}'" ) sas_token = generate_container_sas( container.account_name, container.container_name, account_key=container.credential.account_key, permission=BlobSasPermissions( read=True, add=True, write=True, create=True ), expiry=datetime.utcnow() + timedelta(days=14), ) uri = container.url + "?" + sas_token logger.debug(f" - container url: '{uri}'.") return uri async def upload_blob( container: ContainerClient, blob_name: str, content_type: str, content_encoding: str, data: Any, return_sas_token: bool = True, ) -> str: await create_container_using_client(container) logger.info( f"Uploading blob '{blob_name}'" + f"to container '{container.container_name}'" + f"on account: '{container.account_name}'" ) content_settings = ContentSettings( content_type=content_type, content_encoding=content_encoding ) blob = container.get_blob_client(blob_name) await blob.upload_blob(data, content_settings=content_settings) logger.debug(f" - blob '{blob_name}' uploaded. generating sas token.") if return_sas_token: uri = get_blob_uri_with_sas_token(blob) else: uri = remove_sas_token(blob.url) logger.debug(f" - blob access url: '{uri}'.") return uri async def append_blob( container: ContainerClient, blob_name: str, content_type: str, content_encoding: str, data: Any, return_sas_token: bool = True, metadata: Dict[str, str] = None, ) -> str: await create_container_using_client(container) logger.info( f"Appending data to blob '{blob_name}'" + f"in container '{container.container_name}'" + f"on account: '{container.account_name}'" ) content_settings = ContentSettings( content_type=content_type, content_encoding=content_encoding ) blob = container.get_blob_client(blob_name) try: props = await blob.get_blob_properties() if props.blob_type != BlobType.AppendBlob: raise Exception("blob must be an append blob") except exceptions.ResourceNotFoundError: props = await blob.create_append_blob( content_settings=content_settings, metadata=metadata ) await blob.append_block(data, len(data)) logger.debug(f" - blob '{blob_name}' appended. 
generating sas token.") if return_sas_token: uri = get_blob_uri_with_sas_token(blob) else: uri = remove_sas_token(blob.url) logger.debug(f" - blob access url: '{uri}'.") return uri def get_blob_uri_with_sas_token(blob: BlobClient): sas_token = generate_blob_sas( blob.account_name, blob.container_name, blob.blob_name, account_key=blob.credential.account_key, permission=BlobSasPermissions(read=True), expiry=datetime.utcnow() + timedelta(days=14), ) return blob.url + "?" + sas_token async def download_blob(blob_url: str) -> Any: blob_client = BlobClient.from_blob_url(blob_url) logger.info( f"Downloading blob '{blob_client.blob_name}'" + f"from container '{blob_client.container_name}'" + f"on account: '{blob_client.account_name}'" ) response = await (await blob_client.download_blob()).readall() logger.debug(response) await blob_client.close() return response async def download_blob_properties(blob_url: str) -> Dict[str, str]: blob_client = BlobClient.from_blob_url(blob_url) logger.info( f"Downloading blob properties '{blob_client.blob_name}'" + f"from container '{blob_client.container_name}'" + f"on account: '{blob_client.account_name}'" ) response = await blob_client.get_blob_properties() logger.debug(response) return response async def download_blob_metadata(blob_url: str) -> Dict[str, str]: return (await download_blob_properties(blob_url)).metadata
MIT License
huangsam/ultimate-python
ultimatepython/advanced/thread.py
multiply_by_two
python
def multiply_by_two(item):
    time.sleep(_MULTIPLY_DELAY)
    return item * 2
This multiplication has a small delay.
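Given the module's ThreadPoolExecutor imports, a small self-contained sketch of how such a delayed worker can be fanned out (the function is inlined here for self-containment and the pool size is chosen arbitrarily):

import time
from concurrent.futures import ThreadPoolExecutor, as_completed

_MULTIPLY_DELAY = 0.01


def multiply_by_two(item):
    time.sleep(_MULTIPLY_DELAY)
    return item * 2


# Submit ten items and collect results as each thread finishes.
with ThreadPoolExecutor(max_workers=4) as executor:
    futures = [executor.submit(multiply_by_two, n) for n in range(10)]
    results = sorted(f.result() for f in as_completed(futures))

print(results)  # [0, 2, 4, 6, 8, 10, 12, 14, 16, 18]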
https://github.com/huangsam/ultimate-python/blob/136ad161adb0f4f831162d361bb322fa17feb289/ultimatepython/advanced/thread.py#L15-L18
import time from concurrent.futures import ThreadPoolExecutor, as_completed from datetime import datetime _MULTIPLY_DELAY = 0.01
MIT License
facebookresearch/reagent
reagent/test/workflow/test_oss_workflows.py
mock_cartpole_normalization
python
def mock_cartpole_normalization() -> Dict[int, NormalizationParameters]:
    with open(CARTPOLE_NORMALIZATION_JSON, "r") as f:
        norm = json.load(f)

    norm_params_dict = {}
    for k, v in norm.items():
        norm_params_dict[k] = NormalizationParameters(**json.loads(v))
    return norm_params_dict
Get mock normalization from our local file.
https://github.com/facebookresearch/reagent/blob/57b58a8b3a6b74bb87a197b73a6cd108ddad895e/reagent/test/workflow/test_oss_workflows.py#L60-L68
import json import os import unittest import zipfile from typing import Dict from unittest.mock import patch import reagent import reagent.workflow.cli as cli import torch from click.testing import CliRunner from reagent.core.parameters import NormalizationParameters from reagent.test.base.horizon_test_base import HorizonTestBase from reagent.workflow.types import Dataset from ruamel.yaml import YAML base_dir = os.path.abspath(os.path.dirname(reagent.__file__)) curr_dir = os.path.abspath(os.path.dirname(__file__)) CARTPOLE_NORMALIZATION_JSON = os.path.join( curr_dir, "test_data/discrete_action/cartpole_norm.json" ) DQN_WORKFLOW_PARQUET_ZIP = os.path.join( curr_dir, "test_data/discrete_action/dqn_workflow.zip" ) DQN_WORKFLOW_PARQUET_REL_PATH = "dqn_workflow" DQN_WORKFLOW_YAML = os.path.join( base_dir, "workflow/sample_configs/discrete_dqn_cartpole_offline.yaml" ) NEW_CONFIG_NAME = "config.yaml" DISCRETE_DQN_BASE = "reagent.model_managers.discrete_dqn_base" def get_test_workflow_config(path_to_config: str, use_gpu: bool): yaml = YAML(typ="safe") with open(path_to_config, "r") as f: config = yaml.load(f) config["use_gpu"] = use_gpu config["num_train_epochs"] = 1 config["num_eval_episodes"] = 1 config["passing_score_bar"] = -0.0001 config["input_table_spec"]["table_sample"] = 50.0 config["input_table_spec"]["eval_table_sample"] = 50.0 return config
BSD 3-Clause New or Revised License
bomquote/transistor
setup.py
UploadCommand.status
python
def status(s):
    print('\033[1m{0}\033[0m'.format(s))
Prints things in bold.
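The helper simply wraps its argument in ANSI bold escape codes; a standalone equivalent outside the UploadCommand class:

def status(s):
    """Print `s` in bold on ANSI-capable terminals."""
    print('\033[1m{0}\033[0m'.format(s))


status('Building distribution...')  # rendered in bold in most terminals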
https://github.com/bomquote/transistor/blob/4bc5eaa1beac334cd05f2149a1dd584e0d803921/setup.py#L91-L93
import io import os import sys from shutil import rmtree from setuptools import find_packages, setup, Command here = os.path.abspath(os.path.dirname(__file__)) with io.open(os.path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = '\n' + f.read() about = {} with open(os.path.join(here, 'transistor', '__version__.py')) as f: exec(f.read(), about) NAME = about['__title__'] DESCRIPTION = about['__description__'] URL = about['__url__'] EMAIL = about['__author_email__'] AUTHOR = about['__author__'] REQUIRES_PYTHON = '>=3.6.0' REQUIRED = [ 'mechanicalsoup>=0.11.0,<0.13.0', 'requests>=2.20.1,<3.0', 'urllib3>=1.24.1,<2.0', 'keyring>=17.0.0,<22.0', 'kombu>=4.2.1', 'lxml>=4.2.5,<5.0', 'lz4>=2.1.2,<4.0', 'openpyxl>=2.5.0,<4.0', 'pyexcel>=0.5.15,<0.7.0', 'pyexcel-io>=0.5.19,<0.6.0', 'pyexcel-ods3>=0.5.3,<0.6.0', 'pyexcel-webio>=0.1.4,<0.2.0', 'pyexcel-xls>=0.5.8,<0.6.0', 'pyexcel-xlsx>=0.5.7,<0.6.0', 'cookiecutter>=1.6.0,<2.0', 'cssselect>=1.0.3,<2.0', 'w3lib>=1.19.0,<2.0', 'pycryptodome>=3.7.2,<4.0', 'gevent>=1.3.7,<21.0', ] test_requirements = [ 'pytest>=4.0.1,<7.0', 'pytest-cov==2.6.0,<3.0', 'coverage==4.5.2,<6.0', 'mock==2.0.0,<5.0' ] EXTRAS = { 'newt.db': [ 'RelStorage[postgresql]==2.1.1', 'newt.db>=0.9.0', 'zodbpickle>=1.0.2', 'persistent>=4.4.3', 'zodb>=5.5.1' ], 'redis':[ 'redis>=3.0.1' ] } class UploadCommand(Command): description = 'Build and publish the package.' user_options = [] @staticmethod
MIT License
rmarkello/abagen
abagen/io.py
read_microarray
python
def read_microarray(fname, copy=False, parquet=True):
    try:
        if use_parq and parquet:
            data = _make_parquet(fname, convert_only=False)
            data = data.set_index('0')
        else:
            data = pd.read_csv(fname, header=None, index_col=0)
        data.index.name = 'probe_id'
        data.columns = pd.Series(range(1, len(data.columns) + 1),
                                 name='sample_id')
    except (AttributeError, ValueError, TypeError):
        if not isinstance(fname, pd.DataFrame):
            raise TypeError('Provided fname must be filepath to Microarray'
                            'Expression.csv file from Allen Human Brain '
                            'Atlas.')
        data = fname.copy() if copy else fname

    return data
Loads MicroarrayExpression.csv file found at `fname`

Microarray files contain raw expression data for all the tissue samples
taken from a single donor across all genetic probes.

Parameters
----------
fname : str
    Path to MicroarrayExpression.csv file
copy : bool, optional
    Whether to return a copy if `fname` is a pre-loaded pandas.Dataframe.
    Default: False
parquet : bool, optional
    Whether to load data from parquet file instead of CSV. If a parquet
    file does not already exist then one will be created for faster
    loading in the future. Only available if ``fastparquet`` and
    ``python-snappy`` module are installed. Default: True

Returns
-------
microarray : (P, S) pandas.DataFrame
    Dataframe containing microarray expression data, where `P` is probes
    and `S` is samples. The row index is the unique probe ID assigned
    during processing, which can be used to match data to the information
    obtained with :func:`read_probes`. The column index is the unique
    sample ID (integer, beginning at 0) which can be used to match data
    to the information obtained with :func:`read_annotation`.
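A hedged usage sketch: the donor directory below is a hypothetical path from an Allen Human Brain Atlas download, and it assumes read_microarray is importable from abagen.io as defined in this file.

from abagen import io

# Hypothetical path to one donor's file from the AHBA download.
fname = 'allenbrain/normalized_microarray_donor9861/MicroarrayExpression.csv'

expression = io.read_microarray(fname, parquet=False)
print(expression.shape)        # (n_probes, n_samples)
print(expression.index.name)   # 'probe_id'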
https://github.com/rmarkello/abagen/blob/2aeab5bd0f147fa76b488645e148a1c18095378d/abagen/io.py#L59-L106
import os.path as op import pandas as pd try: eng = pd.io.parquet.get_engine('fastparquet') assert 'SNAPPY' in eng.api.compression.compressions use_parq = True except (AttributeError, ImportError, AssertionError): use_parq = False def _make_parquet(fname, convert_only=False): parqname = fname.rpartition('.csv')[0] + '.parq' if op.exists(parqname): if convert_only: return data = pd.read_parquet(parqname, engine='fastparquet') else: data = pd.read_csv(fname, header=None) data.columns = data.columns.astype(str) data.to_parquet(parqname, engine='fastparquet') if convert_only: return return data
BSD 3-Clause New or Revised License
pelioniot/mbed-cloud-sdk-python
src/mbed_cloud/_backends/iam/models/account_creation_resp.py
AccountCreationResp.state
python
def state(self, state):
    self._state = state
Sets the state of this AccountCreationResp.
The state part of the postal address, not longer than 100 characters.

:param state: The state of this AccountCreationResp.
:type: str
https://github.com/pelioniot/mbed-cloud-sdk-python/blob/71dc67fc2a8d1aff31e35ec781fb328e6a60639c/src/mbed_cloud/_backends/iam/models/account_creation_resp.py#L613-L622
from pprint import pformat from six import iteritems import re class AccountCreationResp(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'address_line1': 'str', 'address_line2': 'str', 'admin_email': 'str', 'admin_full_name': 'str', 'admin_id': 'str', 'admin_key': 'str', 'admin_name': 'str', 'admin_password': 'str', 'aliases': 'list[str]', 'city': 'str', 'company': 'str', 'contact': 'str', 'contract_number': 'str', 'country': 'str', 'customer_number': 'str', 'display_name': 'str', 'email': 'str', 'end_market': 'str', 'id': 'str', 'phone_number': 'str', 'postal_code': 'str', 'state': 'str' } attribute_map = { 'address_line1': 'address_line1', 'address_line2': 'address_line2', 'admin_email': 'admin_email', 'admin_full_name': 'admin_full_name', 'admin_id': 'admin_id', 'admin_key': 'admin_key', 'admin_name': 'admin_name', 'admin_password': 'admin_password', 'aliases': 'aliases', 'city': 'city', 'company': 'company', 'contact': 'contact', 'contract_number': 'contract_number', 'country': 'country', 'customer_number': 'customer_number', 'display_name': 'display_name', 'email': 'email', 'end_market': 'end_market', 'id': 'id', 'phone_number': 'phone_number', 'postal_code': 'postal_code', 'state': 'state' } def __init__(self, address_line1=None, address_line2=None, admin_email=None, admin_full_name=None, admin_id=None, admin_key=None, admin_name=None, admin_password=None, aliases=None, city=None, company=None, contact=None, contract_number=None, country=None, customer_number=None, display_name=None, email=None, end_market=None, id=None, phone_number=None, postal_code=None, state=None): self._address_line1 = address_line1 self._address_line2 = address_line2 self._admin_email = admin_email self._admin_full_name = admin_full_name self._admin_id = admin_id self._admin_key = admin_key self._admin_name = admin_name self._admin_password = admin_password self._aliases = aliases self._city = city self._company = company self._contact = contact self._contract_number = contract_number self._country = country self._customer_number = customer_number self._display_name = display_name self._email = email self._end_market = end_market self._id = id self._phone_number = phone_number self._postal_code = postal_code self._state = state self.discriminator = None @property def address_line1(self): return self._address_line1 @address_line1.setter def address_line1(self, address_line1): self._address_line1 = address_line1 @property def address_line2(self): return self._address_line2 @address_line2.setter def address_line2(self, address_line2): self._address_line2 = address_line2 @property def admin_email(self): return self._admin_email @admin_email.setter def admin_email(self, admin_email): self._admin_email = admin_email @property def admin_full_name(self): return self._admin_full_name @admin_full_name.setter def admin_full_name(self, admin_full_name): self._admin_full_name = admin_full_name @property def admin_id(self): return self._admin_id @admin_id.setter def admin_id(self, admin_id): if admin_id is None: raise ValueError("Invalid value for `admin_id`, must not be `None`") self._admin_id = admin_id @property def admin_key(self): return self._admin_key @admin_key.setter def admin_key(self, admin_key): self._admin_key = admin_key @property def admin_name(self): return self._admin_name @admin_name.setter def admin_name(self, admin_name): self._admin_name 
= admin_name @property def admin_password(self): return self._admin_password @admin_password.setter def admin_password(self, admin_password): self._admin_password = admin_password @property def aliases(self): return self._aliases @aliases.setter def aliases(self, aliases): self._aliases = aliases @property def city(self): return self._city @city.setter def city(self, city): self._city = city @property def company(self): return self._company @company.setter def company(self, company): self._company = company @property def contact(self): return self._contact @contact.setter def contact(self, contact): self._contact = contact @property def contract_number(self): return self._contract_number @contract_number.setter def contract_number(self, contract_number): self._contract_number = contract_number @property def country(self): return self._country @country.setter def country(self, country): self._country = country @property def customer_number(self): return self._customer_number @customer_number.setter def customer_number(self, customer_number): self._customer_number = customer_number @property def display_name(self): return self._display_name @display_name.setter def display_name(self, display_name): self._display_name = display_name @property def email(self): return self._email @email.setter def email(self, email): self._email = email @property def end_market(self): return self._end_market @end_market.setter def end_market(self, end_market): if end_market is None: raise ValueError("Invalid value for `end_market`, must not be `None`") self._end_market = end_market @property def id(self): return self._id @id.setter def id(self, id): if id is None: raise ValueError("Invalid value for `id`, must not be `None`") self._id = id @property def phone_number(self): return self._phone_number @phone_number.setter def phone_number(self, phone_number): self._phone_number = phone_number @property def postal_code(self): return self._postal_code @postal_code.setter def postal_code(self, postal_code): self._postal_code = postal_code @property def state(self): return self._state @state.setter
Apache License 2.0
pokemongof/pokemongo-bot-desktop
build/pywin/Lib/re.py
split
python
def split(pattern, string, maxsplit=0, flags=0):
    return _compile(pattern, flags).split(string, maxsplit)
Split the source string by the occurrences of the pattern, returning a list containing the resulting substrings.
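A quick illustration of the documented behaviour, including the maxsplit cap:

import re

print(re.split(r'\W+', 'Words, words, words.'))
# ['Words', 'words', 'words', '']

print(re.split(r'\W+', 'Words, words, words.', 1))
# ['Words', 'words, words.']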
https://github.com/pokemongof/pokemongo-bot-desktop/blob/4bfa94f0183406c6a86f93645eff7abd3ad4ced8/build/pywin/Lib/re.py#L168-L171
import sys import sre_compile import sre_parse try: import _locale except ImportError: _locale = None __all__ = [ "match", "search", "sub", "subn", "split", "findall", "compile", "purge", "template", "escape", "I", "L", "M", "S", "X", "U", "IGNORECASE", "LOCALE", "MULTILINE", "DOTALL", "VERBOSE", "UNICODE", "error" ] __version__ = "2.2.1" I = IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE L = LOCALE = sre_compile.SRE_FLAG_LOCALE U = UNICODE = sre_compile.SRE_FLAG_UNICODE M = MULTILINE = sre_compile.SRE_FLAG_MULTILINE S = DOTALL = sre_compile.SRE_FLAG_DOTALL X = VERBOSE = sre_compile.SRE_FLAG_VERBOSE T = TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE DEBUG = sre_compile.SRE_FLAG_DEBUG error = sre_compile.error def match(pattern, string, flags=0): return _compile(pattern, flags).match(string) def search(pattern, string, flags=0): return _compile(pattern, flags).search(string) def sub(pattern, repl, string, count=0, flags=0): return _compile(pattern, flags).sub(repl, string, count) def subn(pattern, repl, string, count=0, flags=0): return _compile(pattern, flags).subn(repl, string, count)
MIT License
ebellocchia/bip_utils
bip_utils/addr/eth_addr.py
EthAddrUtils.ChecksumEncode
python
def ChecksumEncode(addr: str) -> str:
    addr_hex_digest = ConvUtils.BytesToHexString(CryptoUtils.Kekkak256(addr))
    enc_addr = [c.upper() if (int(addr_hex_digest[i], 16) >= 8) else c.lower()
                for i, c in enumerate(addr)]
    return "".join(enc_addr)
Checksum encode the specified address.

Args:
    addr (str): Address string

Returns:
    str: Checksum encoded address
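A hedged usage sketch of the static helper (the import path mirrors this file and the address body is a placeholder; in normal use the address string comes from the library's own address encoder rather than being typed by hand):

from bip_utils.addr.eth_addr import EthAddrUtils

# 40-character hex address body (no '0x' prefix); value is only illustrative.
raw_addr = 'fb6916095ca1df60bb79ce92ce3ea74c37c5d359'

checksummed = EthAddrUtils.ChecksumEncode(raw_addr)
print('0x' + checksummed)  # mixed-case (EIP-55 style) form of the address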
https://github.com/ebellocchia/bip_utils/blob/b04f9ef493a5b57983412c0ce460a9ca05ee1f50/bip_utils/addr/eth_addr.py#L43-L59
from typing import Any, Union from bip_utils.addr.iaddr_encoder import IAddrEncoder from bip_utils.addr.utils import AddrUtils from bip_utils.coin_conf import CoinsConf from bip_utils.ecc import IPublicKey from bip_utils.utils.misc import ConvUtils, CryptoUtils class EthAddrConst: START_BYTE: int = 24 class EthAddrUtils: @staticmethod
MIT License
hexrd/hexrd
hexrd/fitting/fitpeak.py
estimate_pk_parms_1d
python
def estimate_pk_parms_1d(x, f, pktype='pvoigt'):
    npts = len(x)
    assert len(f) == npts, "ordinate and data must be same length!"

    bkg = snip1d(np.atleast_2d(f), w=int(2*npts/3.)).flatten()
    bp, _ = optimize.curve_fit(lin_fit_obj, x, bkg, jac=lin_fit_jac)
    bg0 = bp[-1]
    bg1 = bp[0]

    pint = f - lin_fit_obj(x, *bp)
    cen_index = np.argmax(pint)
    A = pint[cen_index]
    x0 = x[cen_index]

    if cen_index > 0 and cen_index < npts - 1:
        left_hm = np.argmin(abs(pint[:cen_index] - 0.5*A))
        right_hm = np.argmin(abs(pint[cen_index:] - 0.5*A))
    elif cen_index == 0:
        right_hm = np.argmin(abs(pint[cen_index:] - 0.5*A))
        left_hm = right_hm
    elif cen_index == npts - 1:
        left_hm = np.argmin(abs(pint[:cen_index] - 0.5*A))
        right_hm = left_hm

    try:
        FWHM = x[cen_index + right_hm] - x[left_hm]
    except(IndexError):
        FWHM = 0

    if FWHM <= 0 or FWHM > 0.75*npts:
        FWHM = 0.25*(x[-1] - x[0])

    if pktype in ['gaussian', 'lorentzian']:
        p = [A, x0, FWHM, bg0, bg1]
    elif pktype == 'pvoigt':
        p = [A, x0, FWHM, 0.5, bg0, bg1]
    elif pktype == 'split_pvoigt':
        p = [A, x0, FWHM, FWHM, 0.5, 0.5, bg0, bg1]
    else:
        raise RuntimeError("pktype '%s' not understood" % pktype)

    return np.r_[p]
Gives initial guess of parameters for analytic fit of one dimensional peak
data.

Required Arguments:
x -- (n) ndarray of coordinate positions
f -- (n) ndarray of intensity measurements at coordinate positions x
pktype -- string, type of analytic function that will be used to fit the
          data, current options are "gaussian", "lorentzian", "pvoigt"
          (pseudo voigt), and "split_pvoigt" (split pseudo voigt)

Outputs:
p -- (m) ndarray containing initial guesses for parameters for the input
     peaktype (see peak function help for what each parameter corresponds to)
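An illustrative call (not taken from hexrd's docs, and it assumes hexrd is installed with this module path): build a synthetic Gaussian peak on a sloped background with numpy and ask for starting parameters; with pktype='gaussian' the result unpacks as [A, x0, FWHM, bg0, bg1].

import numpy as np
from hexrd.fitting.fitpeak import estimate_pk_parms_1d

# Synthetic Gaussian peak centred near x = 4.2 on a sloped background.
x = np.linspace(0., 10., 501)
f = 5.0 * np.exp(-0.5 * ((x - 4.2) / 0.3) ** 2) + 0.1 * x + 1.0

p = estimate_pk_parms_1d(x, f, pktype='gaussian')
print(p)  # [A, x0, FWHM, bg0, bg1]; x0 should land close to 4.2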
https://github.com/hexrd/hexrd/blob/90e9b26e5e5091dd5ecf460b3227072e6d90bcd5/hexrd/fitting/fitpeak.py#L65-L130
import numpy as np from scipy import integrate from scipy import ndimage as imgproc from scipy import optimize from hexrd import constants from hexrd.imageutil import snip1d from hexrd.fitting import peakfunctions as pkfuncs import matplotlib.pyplot as plt ftol = constants.sqrt_epsf xtol = constants.sqrt_epsf inf = np.inf minf = -inf def lin_fit_obj(x, m, b): return m*np.asarray(x) + b def lin_fit_jac(x, m, b): return np.vstack([x, np.ones_like(x)]).T
BSD 3-Clause New or Revised License
brython-dev/brython
www/src/Lib/threading.py
Semaphore.release
python
def release(self, n=1):
    if n < 1:
        raise ValueError('n must be one or more')
    with self._cond:
        self._value += n
        for i in range(n):
            self._cond.notify()
Release a semaphore, incrementing the internal counter by one or more. When the counter is zero on entry and another thread is waiting for it to become larger than zero again, wake up that thread.
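For context, the usual acquire/release pairing with this class, written with the context-manager form (release is called automatically on exit):

import threading

pool = threading.Semaphore(value=2)    # at most two workers in the section


def worker(n):
    with pool:                          # acquire on entry, release on exit
        print('worker %d inside' % n)


threads = [threading.Thread(target=worker, args=(i,)) for i in range(5)]
for t in threads:
    t.start()
for t in threads:
    t.join()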
https://github.com/brython-dev/brython/blob/33aeaab551f1b73209326c5a0aecf98642d4c126/www/src/Lib/threading.py#L468-L480
import os as _os import sys as _sys import _thread import functools from time import monotonic as _time from _weakrefset import WeakSet from itertools import islice as _islice, count as _count try: from _collections import deque as _deque except ImportError: from collections import deque as _deque __all__ = ['get_ident', 'active_count', 'Condition', 'current_thread', 'enumerate', 'main_thread', 'TIMEOUT_MAX', 'Event', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread', 'Barrier', 'BrokenBarrierError', 'Timer', 'ThreadError', 'setprofile', 'settrace', 'local', 'stack_size', 'excepthook', 'ExceptHookArgs', 'gettrace', 'getprofile'] _start_new_thread = _thread.start_new_thread _allocate_lock = _thread.allocate_lock _set_sentinel = _thread._set_sentinel get_ident = _thread.get_ident try: get_native_id = _thread.get_native_id _HAVE_THREAD_NATIVE_ID = True __all__.append('get_native_id') except AttributeError: _HAVE_THREAD_NATIVE_ID = False ThreadError = _thread.error try: _CRLock = _thread.RLock except AttributeError: _CRLock = None TIMEOUT_MAX = _thread.TIMEOUT_MAX del _thread _profile_hook = None _trace_hook = None def setprofile(func): global _profile_hook _profile_hook = func def getprofile(): return _profile_hook def settrace(func): global _trace_hook _trace_hook = func def gettrace(): return _trace_hook Lock = _allocate_lock def RLock(*args, **kwargs): if _CRLock is None: return _PyRLock(*args, **kwargs) return _CRLock(*args, **kwargs) class _RLock: def __init__(self): self._block = _allocate_lock() self._owner = None self._count = 0 def __repr__(self): owner = self._owner try: owner = _active[owner].name except KeyError: pass return "<%s %s.%s object owner=%r count=%d at %s>" % ( "locked" if self._block.locked() else "unlocked", self.__class__.__module__, self.__class__.__qualname__, owner, self._count, hex(id(self)) ) def _at_fork_reinit(self): self._block._at_fork_reinit() self._owner = None self._count = 0 def acquire(self, blocking=True, timeout=-1): me = get_ident() if self._owner == me: self._count += 1 return 1 rc = self._block.acquire(blocking, timeout) if rc: self._owner = me self._count = 1 return rc __enter__ = acquire def release(self): if self._owner != get_ident(): raise RuntimeError("cannot release un-acquired lock") self._count = count = self._count - 1 if not count: self._owner = None self._block.release() def __exit__(self, t, v, tb): self.release() def _acquire_restore(self, state): self._block.acquire() self._count, self._owner = state def _release_save(self): if self._count == 0: raise RuntimeError("cannot release un-acquired lock") count = self._count self._count = 0 owner = self._owner self._owner = None self._block.release() return (count, owner) def _is_owned(self): return self._owner == get_ident() _PyRLock = _RLock class Condition: def __init__(self, lock=None): if lock is None: lock = RLock() self._lock = lock self.acquire = lock.acquire self.release = lock.release try: self._release_save = lock._release_save except AttributeError: pass try: self._acquire_restore = lock._acquire_restore except AttributeError: pass try: self._is_owned = lock._is_owned except AttributeError: pass self._waiters = _deque() def _at_fork_reinit(self): self._lock._at_fork_reinit() self._waiters.clear() def __enter__(self): return self._lock.__enter__() def __exit__(self, *args): return self._lock.__exit__(*args) def __repr__(self): return "<Condition(%s, %d)>" % (self._lock, len(self._waiters)) def _release_save(self): self._lock.release() def _acquire_restore(self, x): 
self._lock.acquire() def _is_owned(self): if self._lock.acquire(False): self._lock.release() return False else: return True def wait(self, timeout=None): if not self._is_owned(): raise RuntimeError("cannot wait on un-acquired lock") waiter = _allocate_lock() waiter.acquire() self._waiters.append(waiter) saved_state = self._release_save() gotit = False try: if timeout is None: waiter.acquire() gotit = True else: if timeout > 0: gotit = waiter.acquire(True, timeout) else: gotit = waiter.acquire(False) return gotit finally: self._acquire_restore(saved_state) if not gotit: try: self._waiters.remove(waiter) except ValueError: pass def wait_for(self, predicate, timeout=None): endtime = None waittime = timeout result = predicate() while not result: if waittime is not None: if endtime is None: endtime = _time() + waittime else: waittime = endtime - _time() if waittime <= 0: break self.wait(waittime) result = predicate() return result def notify(self, n=1): if not self._is_owned(): raise RuntimeError("cannot notify on un-acquired lock") all_waiters = self._waiters waiters_to_notify = _deque(_islice(all_waiters, n)) if not waiters_to_notify: return for waiter in waiters_to_notify: waiter.release() try: all_waiters.remove(waiter) except ValueError: pass def notify_all(self): self.notify(len(self._waiters)) def notifyAll(self): import warnings warnings.warn('notifyAll() is deprecated, use notify_all() instead', DeprecationWarning, stacklevel=2) self.notify_all() class Semaphore: def __init__(self, value=1): if value < 0: raise ValueError("semaphore initial value must be >= 0") self._cond = Condition(Lock()) self._value = value def acquire(self, blocking=True, timeout=None): if not blocking and timeout is not None: raise ValueError("can't specify timeout for non-blocking acquire") rc = False endtime = None with self._cond: while self._value == 0: if not blocking: break if timeout is not None: if endtime is None: endtime = _time() + timeout else: timeout = endtime - _time() if timeout <= 0: break self._cond.wait(timeout) else: self._value -= 1 rc = True return rc __enter__ = acquire
BSD 3-Clause New or Revised License
enkidulan/slidelint
src/slidelint/checkers/language_tool_checker.py
LanguagetoolServer.grammar_checker
python
def grammar_checker(self, text, language="en-US"):
    data = dict(language=language, text=text)
    try:
        content = requests.post(self.url, data=data, timeout=15)
    except requests.exceptions.Timeout:
        os.kill(self.pid, 9)
        self.port, self.pid = start_languagetool_server(self.lt_path,
                                                        self.config_file)
        self.url = 'http://127.0.0.1:%s' % self.port
        content = requests.post(self.url, data=data, timeout=15)
    try:
        root = etree.fromstring(content.text.encode('utf-8'))
    except etree.XMLSyntaxError, e:
        tb = StringIO.StringIO()
        traceback.print_exc(file=tb)
        e.message += """ -- %s -- """ % (content.text.encode('utf-8'))
        raise
    return root.findall('error')
Sends text to the LanguageTool server and returns its check results.
https://github.com/enkidulan/slidelint/blob/ddc99831ce2129254deba580665842b0cdb05a17/src/slidelint/checkers/language_tool_checker.py#L629-L655
from slidelint.utils import help_wrapper from slidelint.utils import SubprocessTimeoutHelper from slidelint.pdf_utils import convert_pdf_to_text import cStringIO as StringIO import os import requests import socket import subprocess import traceback from appdirs import user_data_dir from lxml import etree PACKAGE_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) LT_PATH = os.path.join(PACKAGE_ROOT, 'LanguageTool') MESSAGES = ( {'id': 'C2000', 'msg_name': 'language-tool', 'msg': 'Language tool', 'help': "Language tool found error"}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2001', 'msg': 'Language tool', 'msg_name': 'COMMA_PARENTHESIS_WHITESPACE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2002', 'msg': 'Language tool', 'msg_name': 'UPPERCASE_SENTENCE_START'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2003', 'msg': 'Language tool', 'msg_name': 'WHITESPACE_PUNCTUATION'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2004', 'msg': 'Language tool', 'msg_name': 'WHITESPACE_RULE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2005', 'msg': 'Language tool', 'msg_name': 'MORFOLOGIK_RULE_EN_US'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2006', 'msg': 'Language tool', 'msg_name': 'BRITISH_SIMPLE_REPLACE_RULE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2007', 'msg': 'Language tool', 'msg_name': 'MORFOLOGIK_RULE_EN_AU'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2008', 'msg': 'Language tool', 'msg_name': 'MORFOLOGIK_RULE_EN_CA'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2009', 'msg': 'Language tool', 'msg_name': 'MORFOLOGIK_RULE_EN_NZ'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2010', 'msg': 'Language tool', 'msg_name': 'A_WAS'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2011', 'msg': 'Language tool', 'msg_name': 'CONFUSION_OF_OUR_OUT'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2012', 'msg': 'Language tool', 'msg_name': 'YOUR_SHOULD'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2013', 'msg': 'Language tool', 'msg_name': 'THE_SOME_DAY'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2014', 'msg': 'Language tool', 'msg_name': 'MAKE_US_OF'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2015', 'msg': 'Language tool', 'msg_name': 'ON_OF_THE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2016', 'msg': 'Language tool', 'msg_name': 'ASK_WETHER'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2017', 'msg': 'Language tool', 'msg_name': 'UP_TO_DATA'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2018', 'msg': 'Language tool', 'msg_name': 'FEEL_TREE_TO'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2019', 'msg': 'Language tool', 'msg_name': 'EASIEST_WAS_TO'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2020', 'msg': 'Language tool', 'msg_name': 'ARE_STILL_THE_SOME'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2021', 'msg': 'Language tool', 'msg_name': 'IS_EVEN_WORST'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2022', 'msg': 'Language tool', 'msg_name': 'DE_JURO'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2023', 'msg': 'Language tool', 'msg_name': 'MASSAGE_MESSAGE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2024', 'msg': 'Language tool', 'msg_name': 'I_THIN'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2025', 'msg': 'Language tool', 'msg_name': 'SUPPOSE_TO'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2026', 'msg': 'Language tool', 'msg_name': 'ALL_BE_IT'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2027', 
'msg': 'Language tool', 'msg_name': 'ALL_FOR_NOT'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2028', 'msg': 'Language tool', 'msg_name': 'ALL_OVER_THE_WORD'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2029', 'msg': 'Language tool', 'msg_name': 'ANOTHER_WORDS'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2030', 'msg': 'Language tool', 'msg_name': 'BACK_AND_FOURTH'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2031', 'msg': 'Language tool', 'msg_name': 'BACK_IN_FORTH'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2032', 'msg': 'Language tool', 'msg_name': 'BOB_WIRE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2033', 'msg': 'Language tool', 'msg_name': 'BYE_THE_WAY'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2034', 'msg': 'Language tool', 'msg_name': 'CHALK_FULL'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2035', 'msg': 'Language tool', 'msg_name': 'EGG_YOKE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2036', 'msg': 'Language tool', 'msg_name': 'ET_ALL'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2037', 'msg': 'Language tool', 'msg_name': 'EYE_BROW'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2038', 'msg': 'Language tool', 'msg_name': 'FOR_SELL'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2039', 'msg': 'Language tool', 'msg_name': 'THERE_EXITS'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2040', 'msg': 'Language tool', 'msg_name': 'HE_THE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2041', 'msg': 'Language tool', 'msg_name': 'INSURE_THAT'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2042', 'msg': 'Language tool', 'msg_name': 'IN_MASSE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2043', 'msg': 'Language tool', 'msg_name': 'IN_PARENTHESIS'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2044', 'msg': 'Language tool', 'msg_name': 'IN_STEAD_OF'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2045', 'msg': 'Language tool', 'msg_name': 'IN_TACT'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2046', 'msg': 'Language tool', 'msg_name': 'IN_VEIN'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2047', 'msg': 'Language tool', 'msg_name': 'IT_SELF'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2048', 'msg': 'Language tool', 'msg_name': 'VE_GO_TO'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2049', 'msg': 'Language tool', 'msg_name': 'FOR_ALONG_TIME'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2050', 'msg': 'Language tool', 'msg_name': 'FOR_ALL_INTENSIVE_PURPOSES'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2051', 'msg': 'Language tool', 'msg_name': 'AWAY_FRO'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2052', 'msg': 'Language tool', 'msg_name': 'ONE_IN_THE_SAME'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2053', 'msg': 'Language tool', 'msg_name': 'PER_SE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2054', 'msg': 'Language tool', 'msg_name': 'SNEAK_PEAK'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2055', 'msg': 'Language tool', 'msg_name': 'SOME_WHAT_JJ'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2056', 'msg': 'Language tool', 'msg_name': 'STAND_ALONE_NN'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2057', 'msg': 'Language tool', 'msg_name': 'TEEM_TEAM'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2058', 'msg': 'Language tool', 'msg_name': 'UNDER_WEAR'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2059', 'msg': 'Language tool', 'msg_name': 'WHERE_AS'}, {'help': 
'http://wiki.languagetool.org/', 'id': 'C2060', 'msg': 'Language tool', 'msg_name': 'WITCH_HAUNT'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2061', 'msg': 'Language tool', 'msg_name': 'YOUR_S'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2062', 'msg': 'Language tool', 'msg_name': 'YOURS_APOSTROPHE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2063', 'msg': 'Language tool', 'msg_name': 'HEAR_HERE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2064', 'msg': 'Language tool', 'msg_name': 'TOT_HE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2065', 'msg': 'Language tool', 'msg_name': 'WITH_OUT'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2066', 'msg': 'Language tool', 'msg_name': 'ALLOT_OF'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2067', 'msg': 'Language tool', 'msg_name': 'I_HERD'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2068', 'msg': 'Language tool', 'msg_name': 'ADVICE_ADVISE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2069', 'msg': 'Language tool', 'msg_name': 'ALL_MOST'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2070', 'msg': 'Language tool', 'msg_name': 'ANALYSIS_IF'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2071', 'msg': 'Language tool', 'msg_name': 'BED_ENGLISH'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2072', 'msg': 'Language tool', 'msg_name': 'PIGEON_ENGLISH'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2073', 'msg': 'Language tool', 'msg_name': 'TELEPHONE_POLL'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2074', 'msg': 'Language tool', 'msg_name': 'OPINION_POLE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2075', 'msg': 'Language tool', 'msg_name': 'BOTTLE_NECK'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2076', 'msg': 'Language tool', 'msg_name': 'FIRE_ARM'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2077', 'msg': 'Language tool', 'msg_name': 'NEWS_PAPER'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2078', 'msg': 'Language tool', 'msg_name': 'AN_OTHER'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2079', 'msg': 'Language tool', 'msg_name': 'IN_THE_PASSED'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2080', 'msg': 'Language tool', 'msg_name': 'SENT_START_THEM'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2081', 'msg': 'Language tool', 'msg_name': 'TOO_TO'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2082', 'msg': 'Language tool', 'msg_name': 'THINK_YOU_A'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2083', 'msg': 'Language tool', 'msg_name': 'IS_WERE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2084', 'msg': 'Language tool', 'msg_name': 'ONE_ORE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2085', 'msg': 'Language tool', 'msg_name': 'THE_ONLY_ON'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2086', 'msg': 'Language tool', 'msg_name': 'THEIR_IS'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2087', 'msg': 'Language tool', 'msg_name': 'I_A'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2088', 'msg': 'Language tool', 'msg_name': 'I_NEW'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2089', 'msg': 'Language tool', 'msg_name': 'PLEASE_NOT_THAT'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2090', 'msg': 'Language tool', 'msg_name': 'NUT_NOT'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2091', 'msg': 'Language tool', 'msg_name': 'AND_SO_ONE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2092', 'msg': 'Language tool', 'msg_name': 'THROUGH_AWAY'}, {'help': 
'http://wiki.languagetool.org/', 'id': 'C2093', 'msg': 'Language tool', 'msg_name': 'OR_WAY_IT'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2094', 'msg': 'Language tool', 'msg_name': 'DT_RESPONDS'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2095', 'msg': 'Language tool', 'msg_name': 'THINK_OFF'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2096', 'msg': 'Language tool', 'msg_name': 'YOU_THING'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2097', 'msg': 'Language tool', 'msg_name': 'VBZ_VBD'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2098', 'msg': 'Language tool', 'msg_name': 'FORE_DPS'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2099', 'msg': 'Language tool', 'msg_name': 'LESS_MORE_THEN'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2100', 'msg': 'Language tool', 'msg_name': 'COMMA_THAN'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2101', 'msg': 'Language tool', 'msg_name': 'FROM_THAN_ON'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2102', 'msg': 'Language tool', 'msg_name': 'AND_THAN'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2103', 'msg': 'Language tool', 'msg_name': 'THAN_INTERJ'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2104', 'msg': 'Language tool', 'msg_name': 'WHO_THAN'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2105', 'msg': 'Language tool', 'msg_name': 'OF_CAUSE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2106', 'msg': 'Language tool', 'msg_name': 'LOOK_ATE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2107', 'msg': 'Language tool', 'msg_name': 'A_KNOW_BUG'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2108', 'msg': 'Language tool', 'msg_name': 'MY_BE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2109', 'msg': 'Language tool', 'msg_name': 'IS_SHOULD'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2110', 'msg': 'Language tool', 'msg_name': 'THE_FLEW'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2111', 'msg': 'Language tool', 'msg_name': 'CAN_NOT'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2112', 'msg': 'Language tool', 'msg_name': 'CAN_BEEN'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2113', 'msg': 'Language tool', 'msg_name': 'CANT'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2114', 'msg': 'Language tool', 'msg_name': 'TURNED_OFF'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2115', 'msg': 'Language tool', 'msg_name': 'FEMALE_ACTOR'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2116', 'msg': 'Language tool', 'msg_name': 'FEMALE_WAITER'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2117', 'msg': 'Language tool', 'msg_name': 'FIRST_WOMAN_NOUN'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2118', 'msg': 'Language tool', 'msg_name': 'FIRST_MAN_NOUN'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2119', 'msg': 'Language tool', 'msg_name': 'LITTLE_BIT'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2120', 'msg': 'Language tool', 'msg_name': 'MANGER_MANAGER'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2121', 'msg': 'Language tool', 'msg_name': 'HAD_OF'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2122', 'msg': 'Language tool', 'msg_name': 'ONES'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2123', 'msg': 'Language tool', 'msg_name': 'SPARKING_WINE'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2124', 'msg': 'Language tool', 'msg_name': 'VERY_MATCH'}, {'help': 'http://wiki.languagetool.org/', 'id': 'C2125', 'msg': 'Language tool', 'msg_name': 'VARY_MUCH'}, {'help': 
'http://wiki.languagetool.org/', 'id': 'C2126', 'msg': 'Language tool', 'msg_name': 'ZERO-SUM_GAIN'} ) MESSAGES_BY_RULES = {m['msg_name']: m for m in MESSAGES} def get_java(): cmd = ["update-alternatives", "--query", "java"] try: output = SubprocessTimeoutHelper(cmd)() except (OSError, IOError): return 'java' for line in output: if not line.startswith("Alternative: "): continue if "java-7" in line: return line[len("Alternative: "):].strip(" \n") return 'java' def get_free_port(): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) sock.bind(('', 0)) sock.listen(socket.SOMAXCONN) _, port = sock.getsockname() sock.close() return str(port) class LanguagetoolSubprocessHandler(SubprocessTimeoutHelper): def subprocess_handler(self): self.process = subprocess.Popen( self.cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True,) output = [] while True: output.append(self.process.stdout.readline()) if 'Server started' in output[-1]: break retcode = self.process.poll() if retcode is not None: output.extend(self.process.stdout.readlines()) output.insert( 0, "languagetool-server died with exit code %s!\n" % retcode ) output.insert(1, " ".join(self.cmd) + "\n") output.append("\nLanguageTool requires Java 7 or later." " Please check and update java version." " For more details look at " "http://help.ubuntu.com/community/Java\n") raise IOError("".join(output)) def start_languagetool_server(lt_path, config_file): port = get_free_port() java = get_java() cmd = [java, '-cp', os.path.join(lt_path, 'languagetool-server.jar'), 'org.languagetool.server.HTTPServer', '--port', port] lt_server = LanguagetoolSubprocessHandler(cmd, 30) lt_server() pid = lt_server.process.pid with open(config_file, 'w') as out_file: out_file.write("%s,%s" % (port, pid)) return port, pid def get_languagetool_port_and_pid(lt_path, config_file): if os.path.isfile(config_file): port, pid = open(config_file, 'r').read().strip(' \n').split(',') if os.path.exists("/proc/%s" % pid): return port, int(pid) return start_languagetool_server(lt_path, config_file) class LanguagetoolServer(object): def __init__(self, lt_path, keep_alive=False): self.keep_alive = keep_alive config_dir = user_data_dir('slidelint') if not os.path.exists(config_dir): os.makedirs(config_dir) self.config_file = os.path.join(config_dir, 'run') self.lt_path = lt_path self.port, self.pid = get_languagetool_port_and_pid(self.lt_path, self.config_file) self.url = 'http://127.0.0.1:%s' % self.port
Apache License 2.0
lmas/opensimplex
opensimplex/opensimplex.py
OpenSimplex.__init__
python
def __init__(self, seed=DEFAULT_SEED):
    perm = self._perm = [0] * 256
    perm_grad_index_3D = self._perm_grad_index_3D = [0] * 256
    source = [i for i in range(0, 256)]
    seed = overflow(seed * 6364136223846793005 + 1442695040888963407)
    seed = overflow(seed * 6364136223846793005 + 1442695040888963407)
    seed = overflow(seed * 6364136223846793005 + 1442695040888963407)
    for i in range(255, -1, -1):
        seed = overflow(seed * 6364136223846793005 + 1442695040888963407)
        r = int((seed + 31) % (i + 1))
        if r < 0:
            r += i + 1
        perm[i] = source[r]
        perm_grad_index_3D[i] = int((perm[i] % (len(GRADIENTS_3D) / 3)) * 3)
        source[r] = source[i]
Initiate the class using a permutation array generated from a 64-bit seed number.
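A hedged usage sketch; the noise2d method name is assumed from this version of the opensimplex API and is not shown in the excerpt above.

from opensimplex import OpenSimplex

gen = OpenSimplex(seed=1234)       # deterministic permutation table

# Assumed API for this version; values fall roughly in [-1, 1].
height = gen.noise2d(x=10.5, y=25.25)
print(height)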
https://github.com/lmas/opensimplex/blob/fd57fd9fd5609724c06e06c55c235cf9c692b1b8/opensimplex/opensimplex.py#L89-L108
import sys from ctypes import c_int64 from math import floor as _floor if sys.version_info[0] < 3: def floor(num): return int(_floor(num)) else: floor = _floor STRETCH_CONSTANT_2D = -0.211324865405187 SQUISH_CONSTANT_2D = 0.366025403784439 STRETCH_CONSTANT_3D = -1.0 / 6 SQUISH_CONSTANT_3D = 1.0 / 3 STRETCH_CONSTANT_4D = -0.138196601125011 SQUISH_CONSTANT_4D = 0.309016994374947 NORM_CONSTANT_2D = 47 NORM_CONSTANT_3D = 103 NORM_CONSTANT_4D = 30 DEFAULT_SEED = 0 GRADIENTS_2D = ( 5, 2, 2, 5, -5, 2, -2, 5, 5, -2, 2, -5, -5, -2, -2, -5, ) GRADIENTS_3D = ( -11, 4, 4, -4, 11, 4, -4, 4, 11, 11, 4, 4, 4, 11, 4, 4, 4, 11, -11, -4, 4, -4, -11, 4, -4, -4, 11, 11, -4, 4, 4, -11, 4, 4, -4, 11, -11, 4, -4, -4, 11, -4, -4, 4, -11, 11, 4, -4, 4, 11, -4, 4, 4, -11, -11, -4, -4, -4, -11, -4, -4, -4, -11, 11, -4, -4, 4, -11, -4, 4, -4, -11, ) GRADIENTS_4D = ( 3, 1, 1, 1, 1, 3, 1, 1, 1, 1, 3, 1, 1, 1, 1, 3, -3, 1, 1, 1, -1, 3, 1, 1, -1, 1, 3, 1, -1, 1, 1, 3, 3, -1, 1, 1, 1, -3, 1, 1, 1, -1, 3, 1, 1, -1, 1, 3, -3, -1, 1, 1, -1, -3, 1, 1, -1, -1, 3, 1, -1, -1, 1, 3, 3, 1, -1, 1, 1, 3, -1, 1, 1, 1, -3, 1, 1, 1, -1, 3, -3, 1, -1, 1, -1, 3, -1, 1, -1, 1, -3, 1, -1, 1, -1, 3, 3, -1, -1, 1, 1, -3, -1, 1, 1, -1, -3, 1, 1, -1, -1, 3, -3, -1, -1, 1, -1, -3, -1, 1, -1, -1, -3, 1, -1, -1, -1, 3, 3, 1, 1, -1, 1, 3, 1, -1, 1, 1, 3, -1, 1, 1, 1, -3, -3, 1, 1, -1, -1, 3, 1, -1, -1, 1, 3, -1, -1, 1, 1, -3, 3, -1, 1, -1, 1, -3, 1, -1, 1, -1, 3, -1, 1, -1, 1, -3, -3, -1, 1, -1, -1, -3, 1, -1, -1, -1, 3, -1, -1, -1, 1, -3, 3, 1, -1, -1, 1, 3, -1, -1, 1, 1, -3, -1, 1, 1, -1, -3, -3, 1, -1, -1, -1, 3, -1, -1, -1, 1, -3, -1, -1, 1, -1, -3, 3, -1, -1, -1, 1, -3, -1, -1, 1, -1, -3, -1, 1, -1, -1, -3, -3, -1, -1, -1, -1, -3, -1, -1, -1, -1, -3, -1, -1, -1, -1, -3, ) def overflow(x): return c_int64(x).value class OpenSimplex(object):
MIT License
mattvonrocketstein/smash
smashlib/ipy3x/qt/console/rich_ipython_widget.py
RichIPythonWidget._pre_image_append
python
def _pre_image_append(self, msg, prompt_number):
    self.log.debug("execute_result: %s", msg.get('content', ''))
    self._append_plain_text(self.output_sep, True)
    self._append_html(self._make_out_prompt(prompt_number), True)
    self._append_plain_text('\n', True)
Append the Out[] prompt and make the output nicer. Shared code for some of the following if statements.
https://github.com/mattvonrocketstein/smash/blob/98acdc27ab72ca80d9a7f63a54c0d52f126a8009/smashlib/ipy3x/qt/console/rich_ipython_widget.py#L97-L105
from base64 import decodestring import os import re from IPython.external.qt import QtCore, QtGui from IPython.lib.latextools import latex_to_png from IPython.utils.path import ensure_dir_exists from IPython.utils.traitlets import Bool from IPython.qt.svg import save_svg, svg_to_clipboard, svg_to_image from .ipython_widget import IPythonWidget class RichIPythonWidget(IPythonWidget): _payload_source_plot = 'IPython.kernel.zmq.pylab.backend_payload.add_plot_payload' _jpg_supported = Bool(False) _svg_warning_displayed = False def __init__(self, *args, **kw): kw['kind'] = 'rich' super(RichIPythonWidget, self).__init__(*args, **kw) self._html_exporter.image_tag = self._get_image_tag self._name_to_svg_map = {} _supported_format = map( str, QtGui.QImageReader.supportedImageFormats()) self._jpg_supported = 'jpeg' in _supported_format def export_html(self): self._svg_warning_displayed = False super(RichIPythonWidget, self).export_html() def _context_menu_make(self, pos): format = self._control.cursorForPosition(pos).charFormat() name = format.stringProperty(QtGui.QTextFormat.ImageName) if name: menu = QtGui.QMenu() menu.addAction('Copy Image', lambda: self._copy_image(name)) menu.addAction('Save Image As...', lambda: self._save_image(name)) menu.addSeparator() svg = self._name_to_svg_map.get(name, None) if svg is not None: menu.addSeparator() menu.addAction('Copy SVG', lambda: svg_to_clipboard(svg)) menu.addAction('Save SVG As...', lambda: save_svg(svg, self._control)) else: menu = super(RichIPythonWidget, self)._context_menu_make(pos) return menu
MIT License
erdewit/ib_insync
ib_insync/ib.py
IB.qualifyContracts
python
def qualifyContracts(self, *contracts: Contract) -> List[Contract]:
    return self._run(self.qualifyContractsAsync(*contracts))
Fully qualify the given contracts in-place. This will fill in the missing
fields in the contract, especially the conId.

Returns a list of contracts that have been successfully qualified.

This method is blocking.

Args:
    contracts: Contracts to qualify.
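A typical calling pattern, sketched under the assumption of a running TWS or IB Gateway on the default paper-trading port (host, port, clientId and symbol are placeholders):

from ib_insync import IB, Stock

ib = IB()
ib.connect('127.0.0.1', 7497, clientId=1)    # requires a running TWS / Gateway

contract = Stock('AAPL', 'SMART', 'USD')
qualified = ib.qualifyContracts(contract)    # fills conId etc. in-place
print(contract.conId, qualified)

ib.disconnect()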
https://github.com/erdewit/ib_insync/blob/e09183927d8a024a44c7849c2cda43f5a9c63184/ib_insync/ib.py#L541-L553
import asyncio import copy import datetime import logging import time from typing import Awaitable, Dict, Iterator, List, Optional, Union from eventkit import Event import ib_insync.util as util from ib_insync.client import Client from ib_insync.contract import Contract, ContractDescription, ContractDetails from ib_insync.objects import ( AccountValue, BarDataList, DepthMktDataDescription, Execution, ExecutionFilter, Fill, HistogramData, HistoricalNews, NewsArticle, NewsBulletin, NewsProvider, NewsTick, OptionChain, OptionComputation, PnL, PnLSingle, PortfolioItem, Position, PriceIncrement, RealTimeBarList, ScanDataList, ScannerSubscription, TagValue, TradeLogEntry) from ib_insync.order import ( BracketOrder, LimitOrder, Order, OrderState, OrderStatus, StopOrder, Trade) from ib_insync.ticker import Ticker from ib_insync.wrapper import Wrapper __all__ = ['IB'] class IB: events = ( 'connectedEvent', 'disconnectedEvent', 'updateEvent', 'pendingTickersEvent', 'barUpdateEvent', 'newOrderEvent', 'orderModifyEvent', 'cancelOrderEvent', 'openOrderEvent', 'orderStatusEvent', 'execDetailsEvent', 'commissionReportEvent', 'updatePortfolioEvent', 'positionEvent', 'accountValueEvent', 'accountSummaryEvent', 'pnlEvent', 'pnlSingleEvent', 'scannerDataEvent', 'tickNewsEvent', 'newsBulletinEvent', 'errorEvent', 'timeoutEvent') RequestTimeout: float = 0 RaiseRequestErrors: bool = False MaxSyncedSubAccounts: int = 50 TimezoneTWS = None def __init__(self): self._createEvents() self.wrapper = Wrapper(self) self.client = Client(self.wrapper) self.errorEvent += self._onError self.client.apiEnd += self.disconnectedEvent self._logger = logging.getLogger('ib_insync.ib') def _createEvents(self): self.connectedEvent = Event('connectedEvent') self.disconnectedEvent = Event('disconnectedEvent') self.updateEvent = Event('updateEvent') self.pendingTickersEvent = Event('pendingTickersEvent') self.barUpdateEvent = Event('barUpdateEvent') self.newOrderEvent = Event('newOrderEvent') self.orderModifyEvent = Event('orderModifyEvent') self.cancelOrderEvent = Event('cancelOrderEvent') self.openOrderEvent = Event('openOrderEvent') self.orderStatusEvent = Event('orderStatusEvent') self.execDetailsEvent = Event('execDetailsEvent') self.commissionReportEvent = Event('commissionReportEvent') self.updatePortfolioEvent = Event('updatePortfolioEvent') self.positionEvent = Event('positionEvent') self.accountValueEvent = Event('accountValueEvent') self.accountSummaryEvent = Event('accountSummaryEvent') self.pnlEvent = Event('pnlEvent') self.pnlSingleEvent = Event('pnlSingleEvent') self.scannerDataEvent = Event('scannerDataEvent') self.tickNewsEvent = Event('tickNewsEvent') self.newsBulletinEvent = Event('newsBulletinEvent') self.errorEvent = Event('errorEvent') self.timeoutEvent = Event('timeoutEvent') def __del__(self): self.disconnect() def __enter__(self): return self def __exit__(self, *_exc): self.disconnect() def __repr__(self): conn = (f'connected to {self.client.host}:' f'{self.client.port} clientId={self.client.clientId}' if self.client.isConnected() else 'not connected') return f'<{self.__class__.__qualname__} {conn}>' def connect( self, host: str = '127.0.0.1', port: int = 7497, clientId: int = 1, timeout: float = 4, readonly: bool = False, account: str = ''): return self._run(self.connectAsync( host, port, clientId, timeout, readonly, account)) def disconnect(self): if not self.client.isConnected(): return stats = self.client.connectionStats() self._logger.info( f'Disconnecting from {self.client.host}:{self.client.port}, ' 
f'{util.formatSI(stats.numBytesSent)}B sent ' f'in {stats.numMsgSent} messages, ' f'{util.formatSI(stats.numBytesRecv)}B received ' f'in {stats.numMsgRecv} messages, ' f'session time {util.formatSI(stats.duration)}s.') self.client.disconnect() self.disconnectedEvent.emit() def isConnected(self) -> bool: return self.client.isReady() def _onError(self, reqId, errorCode, errorString, contract): if errorCode == 1102: asyncio.ensure_future(self.reqAccountSummaryAsync()) run = staticmethod(util.run) schedule = staticmethod(util.schedule) sleep = staticmethod(util.sleep) timeRange = staticmethod(util.timeRange) timeRangeAsync = staticmethod(util.timeRangeAsync) waitUntil = staticmethod(util.waitUntil) def _run(self, *awaitables: Awaitable): return util.run(*awaitables, timeout=self.RequestTimeout) def waitOnUpdate(self, timeout: float = 0) -> bool: if timeout: try: util.run(asyncio.wait_for(self.updateEvent, timeout)) except asyncio.TimeoutError: return False else: util.run(self.updateEvent) return True def loopUntil( self, condition=None, timeout: float = 0) -> Iterator[object]: endTime = time.time() + timeout while True: test = condition and condition() if test: yield test return elif timeout and time.time() > endTime: yield False return else: yield test self.waitOnUpdate(endTime - time.time() if timeout else 0) def setTimeout(self, timeout: float = 60): self.wrapper.setTimeout(timeout) def managedAccounts(self) -> List[str]: return list(self.wrapper.accounts) def accountValues(self, account: str = '') -> List[AccountValue]: if account: return [v for v in self.wrapper.accountValues.values() if v.account == account] else: return list(self.wrapper.accountValues.values()) def accountSummary(self, account: str = '') -> List[AccountValue]: return self._run(self.accountSummaryAsync(account)) def portfolio(self) -> List[PortfolioItem]: account = self.wrapper.accounts[0] return [v for v in self.wrapper.portfolio[account].values()] def positions(self, account: str = '') -> List[Position]: if account: return list(self.wrapper.positions[account].values()) else: return [v for d in self.wrapper.positions.values() for v in d.values()] def pnl(self, account='', modelCode='') -> List[PnL]: return [v for v in self.wrapper.reqId2PnL.values() if (not account or v.account == account) and (not modelCode or v.modelCode == modelCode)] def pnlSingle( self, account: str = '', modelCode: str = '', conId: int = 0) -> List[PnLSingle]: return [v for v in self.wrapper.reqId2PnlSingle.values() if (not account or v.account == account) and (not modelCode or v.modelCode == modelCode) and (not conId or v.conId == conId)] def trades(self) -> List[Trade]: return list(self.wrapper.trades.values()) def openTrades(self) -> List[Trade]: return [v for v in self.wrapper.trades.values() if v.orderStatus.status not in OrderStatus.DoneStates] def orders(self) -> List[Order]: return list( trade.order for trade in self.wrapper.trades.values()) def openOrders(self) -> List[Order]: return [trade.order for trade in self.wrapper.trades.values() if trade.orderStatus.status not in OrderStatus.DoneStates] def fills(self) -> List[Fill]: return list(self.wrapper.fills.values()) def executions(self) -> List[Execution]: return list(fill.execution for fill in self.wrapper.fills.values()) def ticker(self, contract: Contract) -> Ticker: return self.wrapper.tickers.get(id(contract)) def tickers(self) -> List[Ticker]: return list(self.wrapper.tickers.values()) def pendingTickers(self) -> List[Ticker]: return list(self.wrapper.pendingTickers) def 
realtimeBars(self) -> List[Union[BarDataList, RealTimeBarList]]: return list(self.wrapper.reqId2Subscriber.values()) def newsTicks(self) -> List[NewsTick]: return self.wrapper.newsTicks def newsBulletins(self) -> List[NewsBulletin]: return list(self.wrapper.msgId2NewsBulletin.values()) def reqTickers( self, *contracts: Contract, regulatorySnapshot: bool = False) -> List[Ticker]: return self._run( self.reqTickersAsync( *contracts, regulatorySnapshot=regulatorySnapshot))
BSD 2-Clause Simplified License
exopy/exopy
exopy/measurement/monitors/text_monitor/plugin.py
TextMonitorPlugin.build_rule
python
def build_rule(self, name_or_config):
    if not isinstance(name_or_config, dict):
        if name_or_config in self._user_rules:
            config = self._user_rules[name_or_config].copy()
            config['id'] = name_or_config
        elif name_or_config in self._rule_configs.contributions:
            rule_config = self._rule_configs.contributions[name_or_config]
            config = rule_config.config.copy()
            config['class_id'] = rule_config.rule_type
            config['description'] = rule_config.description
            config['id'] = name_or_config
        else:
            msg = 'Requested rule not found : {}'.format(name_or_config)
            logger.warning(msg)
            return
    else:
        config = name_or_config.copy()

    class_id = config.pop('class_id')
    rule_infos = self._rule_types.contributions.get(class_id)
    if rule_infos is not None:
        rule = rule_infos.cls()
        rule.update_members_from_preferences(config)
        return rule
    else:
        msg = 'Requested rule class not found : {}'.format(class_id)
        logger.warning(msg)
Build rule from a dict.

Parameters
----------
name_or_config : unicode|dict
    Name of the rule to build or dict containing the infos to build the
    rule from scratch.

Returns
-------
rule : BaseRule | None
    New rule properly initialized.
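A minimal usage sketch for the function above. The plugin instance and the class id are assumptions: a real ``class_id`` has to match a rule type declared through the rule type extension point.

# Hedged sketch: 'plugin' is assumed to be a started TextMonitorPlugin instance and
# 'my_package.MyRule' is a placeholder class_id for an actual rule type declaration.
config = {'class_id': 'my_package.MyRule', 'id': 'my_rule',
          'description': 'Illustrative rule'}
rule = plugin.build_rule(config)
if rule is None:
    print('No rule built; check the log for the warning emitted above.')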
https://github.com/exopy/exopy/blob/aeda9bcfad2d2f76903c7ad2800ea2110ff689b2/exopy/measurement/monitors/text_monitor/plugin.py#L91-L133
import logging from atom.api import List, Dict, Typed from ....utils.plugin_tools import (DeclaratorsCollector, ExtensionsCollector, make_extension_validator, HasPreferencesPlugin) from ..base_monitor import Monitor from .monitor import TextMonitor from .rules.base import RuleType, Rules, RuleConfig RULE_TYPE_POINT = 'exopy.measurement.monitors.text_monitor.rules.type' RULE_CONFIG_POINT = 'exopy.measurement.monitors.text_monitor.rules.config' logger = logging.getLogger(__name__) class TextMonitorPlugin(HasPreferencesPlugin): default_rules = List(default=['Measurement entries', 'Loop progress', 'Instrument ids']).tag(pref=True) rule_types = List() rules = List() def start(self): super(TextMonitorPlugin, self).start() self._rule_types = DeclaratorsCollector(workbench=self.workbench, point=RULE_TYPE_POINT, ext_class=(Rules, RuleType)) self._rule_types.start() validator = make_extension_validator(RuleConfig, attributes=('id', 'description', 'rule_type', 'config') ) self._rule_configs = ExtensionsCollector(workbench=self.workbench, point=RULE_CONFIG_POINT, ext_class=RuleConfig, validate_ext=validator) self._rule_configs.start() self._update_rule_types(None) self._update_rules(None) defaults = [r for r in self.default_rules if r in self.rules] if defaults != self.default_rules: msg = ('The following rules for the TextMonitor are not defined, ' 'and have been removed from the defaults : %s') removed = set(self.default_rules) - set(defaults) logger.warning(msg, removed) self.default_rules = defaults self._bind_observers() def stop(self): self._unbind_observers() self.rule_types = [] self.rules = [] self._rule_types.stop() self._rule_configs.stop()
BSD 3-Clause New or Revised License
yashaka/selene
selene/core/entity.py
WaitingEntity.perform
python
def perform(self, command: Command[E]) -> E:
    self.wait.for_(command)
    return self
Useful to call external commands.

Commands might be predefined in Selene:
    element.perform(command.js.scroll_into_view)
or some custom defined by selene user:
    element.perform(my_action.triple_click)

You might think that it will be useful to use these methods also in Selene
internally in order to define built in commands e.g. in Element class, like:

    def click(self):
        return self.perform(Command('click', lambda element: element().click()))

instead of:

    def click(self):
        self.wait.for_(Command('click', lambda element: element().click()))
        return self

But so far, we use the latter version - though, less concise, but more
explicit, making it more obvious that waiting is built in;)
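A short usage sketch based on the examples in the docstring. The import paths are assumptions and may differ between selene versions; only the perform call itself mirrors the documented example.

# Assumed imports for the shared-browser style of selene; treat them as a sketch.
from selene.support.shared import browser
from selene.core import command

browser.open('https://example.org/')
browser.element('#submit').perform(command.js.scroll_into_view)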
https://github.com/yashaka/selene/blob/e316f833651daefe7634fff29891b4813d24b198/selene/core/entity.py#L87-L112
from __future__ import annotations import re import warnings from abc import abstractmethod, ABC from typing import TypeVar, Union, List, Dict, Any, Callable, Tuple from selenium.common.exceptions import ( ElementNotVisibleException, ElementNotInteractableException, ) from selenium.webdriver import ActionChains from selenium.webdriver.android.webdriver import WebDriver from selenium.webdriver.common.by import By from selenium.webdriver.common.keys import Keys from selenium.webdriver.remote.switch_to import SwitchTo from selenium.webdriver.remote.webelement import WebElement from selene.common.fp import pipe from selene.core.configuration import Config from selene.core.wait import Wait, Command, Query from selene.core.condition import Condition, not_ from selene.core.locator import Locator from selene.common.helpers import to_by, flatten, is_absolute_url from selene.core.exceptions import TimeoutException, _SeleneError E = TypeVar('E', bound='Assertable') R = TypeVar('R') class Assertable(ABC): @abstractmethod def should(self, condition: Condition[E]) -> E: pass class Matchable(Assertable): @abstractmethod def wait_until(self, condition: Condition[E]) -> bool: pass @abstractmethod def matching(self, condition: Condition[E]) -> bool: pass class Configured(ABC): @property @abstractmethod def config(self) -> Config: pass class WaitingEntity(Matchable, Configured): def __init__(self, config: Config): self._config = config @property def wait(self) -> Wait[E]: return self.config.wait(self)
MIT License
biomedia-mira/istn
pymira/img/datasets.py
ImageSegRegDataset.__init__
python
def __init__(self, csv_file_img, csv_file_seg, csv_file_msk=None, normalizer_img=None,
             resampler_img=None, normalizer_seg=None, resampler_seg=None):
    self.img_data = pd.read_csv(csv_file_img)
    self.seg_data = pd.read_csv(csv_file_seg)
    if csv_file_msk:
        self.msk_data = pd.read_csv(csv_file_msk)

    self.samples = []

    for idx in range(len(self.img_data)):
        src_path = self.img_data.iloc[idx, 0]
        trg_path = self.img_data.iloc[idx, 1]
        src_seg_path = self.seg_data.iloc[idx, 0]
        trg_seg_path = self.seg_data.iloc[idx, 1]

        print('Reading source image ' + src_path)
        source = sitk.ReadImage(src_path, sitk.sitkFloat32)
        print('Reading target image ' + trg_path)
        target = sitk.ReadImage(trg_path, sitk.sitkFloat32)

        print('Reading source segmentation ' + src_seg_path)
        source_seg = sitk.ReadImage(src_seg_path, sitk.sitkFloat32)
        print('Reading target segmentation ' + trg_seg_path)
        target_seg = sitk.ReadImage(trg_seg_path, sitk.sitkFloat32)

        source_msk = sitk.GetImageFromArray(np.ones(source.GetSize()[::-1]))
        target_msk = sitk.GetImageFromArray(np.ones(target.GetSize()[::-1]))
        if csv_file_msk:
            src_msk_path = self.msk_data.iloc[idx, 0]
            trg_msk_path = self.msk_data.iloc[idx, 1]
            print('Reading source mask ' + src_msk_path)
            source_msk = sitk.ReadImage(src_msk_path, sitk.sitkFloat32)
            source_msk.CopyInformation(source)
            print('Reading target mask ' + trg_msk_path)
            target_msk = sitk.ReadImage(trg_msk_path, sitk.sitkFloat32)
            target_msk.CopyInformation(target)

        if normalizer_img:
            source = normalizer_img(source, source_msk)
            target = normalizer_img(target, target_msk)

        if resampler_img:
            source = resampler_img(source)
            target = resampler_img(target)
            source_msk = resampler_img(source_msk)
            target_msk = resampler_img(target_msk)

        if normalizer_seg:
            source_seg = normalizer_seg(source_seg)
            target_seg = normalizer_seg(target_seg)

        if resampler_seg:
            source_seg = resampler_seg(source_seg)
            target_seg = resampler_seg(target_seg)

        if len(source.GetSize()) == 3:
            source.SetDirection((1, 0, 0, 0, 1, 0, 0, 0, 1))
            target.SetDirection((1, 0, 0, 0, 1, 0, 0, 0, 1))
        else:
            source.SetDirection((1, 0, 0, 1))
            target.SetDirection((1, 0, 0, 1))
        source.SetOrigin(np.zeros(len(source.GetOrigin())))
        target.SetOrigin(np.zeros(len(target.GetOrigin())))

        source_seg.CopyInformation(source)
        target_seg.CopyInformation(target)
        source_msk.CopyInformation(source)
        target_msk.CopyInformation(target)

        sample = {'source': source, 'target': target,
                  'source_seg': source_seg, 'target_seg': target_seg,
                  'source_msk': source_msk, 'target_msk': target_msk}

        self.samples.append(sample)
Args:
    :param csv_file_img (string): Path to csv file with image filenames.
    :param csv_file_seg (string): Path to csv file with segmentation filenames.
    :param csv_file_msk (string): Path to csv file with mask filenames.
    :param normalizer_img (callable, optional): Optional transform to be applied on each image.
    :param resampler_img (callable, optional): Optional transform to be applied on each image.
    :param normalizer_seg (callable, optional): Optional transform to be applied on each segmentation.
    :param resampler_seg (callable, optional): Optional transform to be applied on each segmentation.
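A minimal construction sketch. The CSV file names are placeholders; each file is expected to hold source/target filename pairs per row, as read by pandas in the constructor above.

# Hypothetical file names; every sample dict is built eagerly in __init__.
dataset = ImageSegRegDataset('image_pairs.csv', 'segmentation_pairs.csv')
first = dataset.samples[0]
print(first['source'].GetSize(), first['target_seg'].GetSize())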
https://github.com/biomedia-mira/istn/blob/6bbf6ebff35643917a33a15e8f964371b24077ad/pymira/img/datasets.py#L96-L182
import torch import numpy as np import pandas as pd import SimpleITK as sitk from torch.utils.data import Dataset, DataLoader class ImageRegistrationDataset(Dataset): def __init__(self, csv_file_img, csv_file_msk=None, normalizer=None, resampler=None): self.data = pd.read_csv(csv_file_img) if csv_file_msk: self.msk_data = pd.read_csv(csv_file_msk) self.samples = [] for idx in range(len(self.data)): src_path = self.data.iloc[idx, 0] trg_path = self.data.iloc[idx, 1] print('Reading source image ' + src_path) source = sitk.ReadImage(src_path, sitk.sitkFloat32) print('Reading target image ' + trg_path) target = sitk.ReadImage(trg_path, sitk.sitkFloat32) source_msk = sitk.GetImageFromArray(np.ones(source.GetSize()[::-1])) target_msk = sitk.GetImageFromArray(np.ones(target.GetSize()[::-1])) if csv_file_msk: src_msk_path = self.msk_data.iloc[idx, 0] trg_msk_path = self.msk_data.iloc[idx, 1] print('Reading source mask ' + src_msk_path) source_msk = sitk.ReadImage(src_msk_path, sitk.sitkFloat32) source_msk.CopyInformation(source) print('Reading target mask ' + trg_msk_path) target_msk = sitk.ReadImage(trg_msk_path, sitk.sitkFloat32) target_msk.CopyInformation(target) if normalizer: source = normalizer(source, source_msk) target = normalizer(target, target_msk) if resampler: source = resampler(source) target = resampler(target) source_msk = resampler(source_msk) target_msk = resampler(target_msk) if len(source.GetSize()) == 3: source.SetDirection((1, 0, 0, 0, 1, 0, 0, 0, 1)) target.SetDirection((1, 0, 0, 0, 1, 0, 0, 0, 1)) else: source.SetDirection((1, 0, 0, 1)) target.SetDirection((1, 0, 0, 1)) source.SetOrigin(np.zeros(len(source.GetOrigin()))) target.SetOrigin(np.zeros(len(target.GetOrigin()))) source_msk.CopyInformation(source) target_msk.CopyInformation(target) sample = {'source': source, 'target': target, 'source_msk': source_msk, 'target_msk': target_msk} self.samples.append(sample) def __len__(self): return len(self.data) def __getitem__(self, item): sample = self.samples[item] source = torch.from_numpy(sitk.GetArrayFromImage(sample['source'])).unsqueeze(0) target = torch.from_numpy(sitk.GetArrayFromImage(sample['target'])).unsqueeze(0) source_msk = torch.from_numpy(sitk.GetArrayFromImage(sample['source_msk'])).unsqueeze(0) target_msk = torch.from_numpy(sitk.GetArrayFromImage(sample['target_msk'])).unsqueeze(0) return {'source': source, 'target': target, 'source_msk': source_msk, 'target_msk': target_msk} def get_sample(self, item): return self.samples[item] class ImageSegRegDataset(Dataset):
Apache License 2.0
llnl/maestrowf
maestrowf/datastructures/environment/script.py
Script._verify
python
def _verify(self):
    valid_param_pattern = re.compile(r"\w+")
    return bool(re.search(valid_param_pattern, self.source))
Verify the Script object's contents.

:returns: True if the Script object is valid, False otherwise.
https://github.com/llnl/maestrowf/blob/b172457b6d20c01cbfa29dfc691ff20ddaadfe09/maestrowf/datastructures/environment/script.py#L63-L70
import logging
import re

from maestrowf.abstracts import Source

logger = logging.getLogger(__name__)


class Script(Source):
    def __init__(self, source):
        self.source = source
        self._verification("Script initialized without complete settings. Set"
                           " source before calling methods.")

    def apply(self, cmds):
        return [self.source] + list(cmds)
MIT License
sibirrer/lenstronomy
lenstronomy/ImSim/image_model.py
ImageModel.image
python
def image(self, kwargs_lens=None, kwargs_source=None, kwargs_lens_light=None, kwargs_ps=None,
          kwargs_extinction=None, kwargs_special=None, unconvolved=False, source_add=True,
          lens_light_add=True, point_source_add=True):
    model = np.zeros((self.Data.num_pixel_axes))
    if source_add is True:
        model += self.source_surface_brightness(kwargs_source, kwargs_lens,
                                                kwargs_extinction=kwargs_extinction,
                                                kwargs_special=kwargs_special,
                                                unconvolved=unconvolved)
    if lens_light_add is True:
        model += self.lens_surface_brightness(kwargs_lens_light, unconvolved=unconvolved)
    if point_source_add is True:
        model += self.point_source(kwargs_ps, kwargs_lens, kwargs_special=kwargs_special,
                                   unconvolved=unconvolved)
    return model
make an image with a realisation of linear parameter values "param"

:param kwargs_lens: list of keyword arguments corresponding to the superposition of different lens profiles
:param kwargs_source: list of keyword arguments corresponding to the superposition of different source light profiles
:param kwargs_lens_light: list of keyword arguments corresponding to different lens light surface brightness profiles
:param kwargs_ps: keyword arguments corresponding to "other" parameters, such as external shear and point source image positions
:param unconvolved: if True: returns the unconvolved light distribution (perfect seeing)
:param source_add: if True, compute source, otherwise without
:param lens_light_add: if True, compute lens light, otherwise without
:param point_source_add: if True, add point sources, otherwise without
:return: 2d array of surface brightness pixels of the simulation
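An illustrative call. A fully configured ImageModel named image_model is assumed to exist, and the keyword dictionaries depend entirely on the lens/source profiles it was built with, so the keys below are placeholders rather than a prescription.

# Placeholder kwargs for whatever lens and source profiles were configured.
kwargs_lens = [{'theta_E': 1.0, 'e1': 0.0, 'e2': 0.0, 'center_x': 0.0, 'center_y': 0.0}]
kwargs_source = [{'amp': 10.0, 'sigma': 0.1, 'center_x': 0.0, 'center_y': 0.0}]
model = image_model.image(kwargs_lens=kwargs_lens, kwargs_source=kwargs_source,
                          lens_light_add=False, point_source_add=False)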
https://github.com/sibirrer/lenstronomy/blob/e6d0e179a98ecb0c4db25cdf7cfb73e83c6aeded/lenstronomy/ImSim/image_model.py#L241-L265
__author__ = 'sibirrer' from lenstronomy.ImSim.Numerics.numerics_subframe import NumericsSubFrame from lenstronomy.ImSim.image2source_mapping import Image2SourceMapping from lenstronomy.LensModel.lens_model import LensModel from lenstronomy.LightModel.light_model import LightModel from lenstronomy.PointSource.point_source import PointSource from lenstronomy.ImSim.differential_extinction import DifferentialExtinction from lenstronomy.Util import util import numpy as np __all__ = ['ImageModel'] class ImageModel(object): def __init__(self, data_class, psf_class, lens_model_class=None, source_model_class=None, lens_light_model_class=None, point_source_class=None, extinction_class=None, kwargs_numerics=None, kwargs_pixelbased=None): self.type = 'single-band' self.num_bands = 1 self.PSF = psf_class self.Data = data_class self.PSF.set_pixel_size(self.Data.pixel_width) if kwargs_numerics is None: kwargs_numerics = {} self.ImageNumerics = NumericsSubFrame(pixel_grid=self.Data, psf=self.PSF, **kwargs_numerics) if lens_model_class is None: lens_model_class = LensModel(lens_model_list=[]) self.LensModel = lens_model_class if point_source_class is None: point_source_class = PointSource(point_source_type_list=[]) self.PointSource = point_source_class self.PointSource.update_lens_model(lens_model_class=lens_model_class) x_center, y_center = self.Data.center self.PointSource.update_search_window(search_window=np.max(self.Data.width), x_center=x_center, y_center=y_center, min_distance=self.Data.pixel_width, only_from_unspecified=True) self._psf_error_map = self.PSF.psf_error_map_bool if source_model_class is None: source_model_class = LightModel(light_model_list=[]) self.SourceModel = source_model_class if lens_light_model_class is None: lens_light_model_class = LightModel(light_model_list=[]) self.LensLightModel = lens_light_model_class self._kwargs_numerics = kwargs_numerics if extinction_class is None: extinction_class = DifferentialExtinction(optical_depth_model=[]) self._extinction = extinction_class if kwargs_pixelbased is None: kwargs_pixelbased = {} else: kwargs_pixelbased = kwargs_pixelbased.copy() self._pixelbased_bool = self._detect_pixelbased_models() if self._pixelbased_bool is True: from slitronomy.Util.class_util import create_solver_class self.SourceNumerics = self._setup_pixelbased_source_numerics(kwargs_numerics, kwargs_pixelbased) self.PixelSolver = create_solver_class(self.Data, self.PSF, self.ImageNumerics, self.SourceNumerics, self.LensModel, self.SourceModel, self.LensLightModel, self.PointSource, self._extinction, kwargs_pixelbased) self.source_mapping = None else: self.source_mapping = Image2SourceMapping(lensModel=lens_model_class, sourceModel=source_model_class) def reset_point_source_cache(self, bool=True): self.PointSource.delete_lens_model_cache() self.PointSource.set_save_cache(bool) def update_psf(self, psf_class): self.PSF = psf_class self.PSF.set_pixel_size(self.Data.pixel_width) self.ImageNumerics = NumericsSubFrame(pixel_grid=self.Data, psf=self.PSF, **self._kwargs_numerics) def source_surface_brightness(self, kwargs_source, kwargs_lens=None, kwargs_extinction=None, kwargs_special=None, unconvolved=False, de_lensed=False, k=None, update_pixelbased_mapping=True): if len(self.SourceModel.profile_type_list) == 0: return np.zeros((self.Data.num_pixel_axes)) if self._pixelbased_bool is True: return self._source_surface_brightness_pixelbased(kwargs_source, kwargs_lens=kwargs_lens, kwargs_extinction=kwargs_extinction, kwargs_special=kwargs_special, unconvolved=unconvolved, 
de_lensed=de_lensed, k=k, update_mapping=update_pixelbased_mapping) else: return self._source_surface_brightness_analytical(kwargs_source, kwargs_lens=kwargs_lens, kwargs_extinction=kwargs_extinction, kwargs_special=kwargs_special, unconvolved=unconvolved, de_lensed=de_lensed, k=k) def _source_surface_brightness_analytical(self, kwargs_source, kwargs_lens=None, kwargs_extinction=None, kwargs_special=None, unconvolved=False, de_lensed=False, k=None): ra_grid, dec_grid = self.ImageNumerics.coordinates_evaluate if de_lensed is True: source_light = self.SourceModel.surface_brightness(ra_grid, dec_grid, kwargs_source, k=k) else: source_light = self.source_mapping.image_flux_joint(ra_grid, dec_grid, kwargs_lens, kwargs_source, k=k) source_light *= self._extinction.extinction(ra_grid, dec_grid, kwargs_extinction=kwargs_extinction, kwargs_special=kwargs_special) source_light_final = self.ImageNumerics.re_size_convolve(source_light, unconvolved=unconvolved) return source_light_final def _source_surface_brightness_pixelbased(self, kwargs_source, kwargs_lens=None, kwargs_extinction=None, kwargs_special=None, unconvolved=False, de_lensed=False, k=None, update_mapping=True): ra_grid, dec_grid = self.SourceNumerics.coordinates_evaluate source_light = self.SourceModel.surface_brightness(ra_grid, dec_grid, kwargs_source, k=k) if de_lensed is True: source_light = self.SourceNumerics.re_size_convolve(source_light, unconvolved=unconvolved) else: source_mapping = self.PixelSolver.lensingOperator source_light = source_mapping.source2image(source_light, kwargs_lens=kwargs_lens, kwargs_special=kwargs_special, update_mapping=update_mapping, original_source_grid=True) source_light = self.ImageNumerics.re_size_convolve(source_light, unconvolved=unconvolved) source_light_final = source_light / self.Data.pixel_width**2 return source_light_final def lens_surface_brightness(self, kwargs_lens_light, unconvolved=False, k=None): if self._pixelbased_bool is True: if unconvolved is True: raise ValueError("Lens light pixel-based modelling does not perform deconvolution") return self._lens_surface_brightness_pixelbased(kwargs_lens_light, k=k) else: return self._lens_surface_brightness_analytical(kwargs_lens_light, unconvolved=unconvolved, k=k) def _lens_surface_brightness_analytical(self, kwargs_lens_light, unconvolved=False, k=None): ra_grid, dec_grid = self.ImageNumerics.coordinates_evaluate lens_light = self.LensLightModel.surface_brightness(ra_grid, dec_grid, kwargs_lens_light, k=k) lens_light_final = self.ImageNumerics.re_size_convolve(lens_light, unconvolved=unconvolved) return lens_light_final def _lens_surface_brightness_pixelbased(self, kwargs_lens_light, k=None): ra_grid, dec_grid = self.ImageNumerics.coordinates_evaluate lens_light = self.LensLightModel.surface_brightness(ra_grid, dec_grid, kwargs_lens_light, k=k) lens_light_final = util.array2image(lens_light) return lens_light_final def point_source(self, kwargs_ps, kwargs_lens=None, kwargs_special=None, unconvolved=False, k=None): point_source_image = np.zeros((self.Data.num_pixel_axes)) if unconvolved or self.PointSource is None: return point_source_image ra_pos, dec_pos, amp = self.PointSource.point_source_list(kwargs_ps, kwargs_lens=kwargs_lens, k=k) ra_pos, dec_pos = self._displace_astrometry(ra_pos, dec_pos, kwargs_special=kwargs_special) point_source_image += self.ImageNumerics.point_source_rendering(ra_pos, dec_pos, amp) return point_source_image
MIT License
deepmind/acme
acme/agents/tf/mpo/agent_test.py
make_networks
python
def make_networks(
    action_spec,
    policy_layer_sizes=(10, 10),
    critic_layer_sizes=(10, 10),
):
    num_dimensions = np.prod(action_spec.shape, dtype=int)
    critic_layer_sizes = list(critic_layer_sizes) + [1]

    policy_network = snt.Sequential([
        networks.LayerNormMLP(policy_layer_sizes),
        networks.MultivariateNormalDiagHead(num_dimensions)
    ])
    critic_network = networks.CriticMultiplexer(
        critic_network=networks.LayerNormMLP(critic_layer_sizes))

    return {
        'policy': policy_network,
        'critic': critic_network,
    }
Creates networks used by the agent.
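A small sketch of how the helper might be called in a test; the action spec shape and layer sizes are arbitrary, and the imports come from the surrounding test module (specs, np).

# Continuous 2-dimensional action spec purely for illustration.
action_spec = specs.BoundedArray(shape=(2,), dtype=np.float32, minimum=-1.0, maximum=1.0)
agent_networks = make_networks(action_spec, policy_layer_sizes=(8, 8))
policy_network = agent_networks['policy']
critic_network = agent_networks['critic']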
https://github.com/deepmind/acme/blob/39232315e1761219bcc98e7a4ecdd308a42b00e4/acme/agents/tf/mpo/agent_test.py#L28-L48
from absl.testing import absltest

import acme
from acme import specs
from acme.agents.tf import mpo
from acme.testing import fakes
from acme.tf import networks
import numpy as np
import sonnet as snt
Apache License 2.0
theiviaxx/python-perforce
perforce/models.py
split_ls
python
def split_ls(func):
    @wraps(func)
    def wrapper(self, files, silent=True, exclude_deleted=False):
        if not isinstance(files, (tuple, list)):
            files = [files]

        counter = 0
        index = 0
        results = []
        while files:
            if index >= len(files):
                results += func(self, files, silent, exclude_deleted)
                break

            length = len(str(files[index]))
            if length + counter > CHAR_LIMIT:
                runfiles = files[:index]
                files = files[index:]
                counter = 0
                index = 0
                results += func(self, runfiles, silent, exclude_deleted)
                runfiles = None
                del runfiles
            else:
                index += 1
                counter += length

        return results

    return wrapper
Decorator to split files into manageable chunks so as not to exceed the Windows cmd limit.

:param func: Function to call for each chunk
:type func: :py:class:Function
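An illustrative stand-in showing the chunking behaviour; FakeConnection is hypothetical, whereas in this module the decorator wraps methods that shell out to the p4 command line.

class FakeConnection:
    @split_ls
    def ls(self, files, silent=True, exclude_deleted=False):
        # Each invocation only ever sees a chunk whose combined string length
        # stays under CHAR_LIMIT; results from all chunks are concatenated.
        return [str(f) for f in files]

paths = ['//depot/project/file_{}.txt'.format(i) for i in range(5000)]
assert len(FakeConnection().ls(paths)) == len(paths)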
https://github.com/theiviaxx/python-perforce/blob/249b73b54ed8f49f6e0be60f53c738212ff55aaa/perforce/models.py#L69-L105
import subprocess import datetime import traceback import os import marshal import logging import re from collections import namedtuple from functools import wraps import path import six from perforce import errors LOGGER = logging.getLogger(__name__) CHAR_LIMIT = 8000 DATE_FORMAT = "%Y/%m/%d %H:%M:%S" FORMAT = """Change: {change} Client: {client} User: {user} Status: {status} Description: \t{description} Files: {files} """ NEW_FORMAT = """Change: new Client: {client} Status: new Description: \t{description} """ ErrorLevel = namedtuple('ErrorLevel', 'EMPTY, INFO, WARN, FAILED, FATAL')(*range(5)) ConnectionStatus = namedtuple('ConnectionStatus', 'OK, OFFLINE, NO_AUTH, INVALID_CLIENT')(*range(4)) FileSpec = namedtuple('FileSpec', 'depot,client') RE_FILESPEC = re.compile('^"?(//[\w\d\_\/\.\s]+)"?\s')
MIT License
twidi/mixt
src/mixt/exceptions.py
ParserStateError.__init__
python
def __init__(self, state: int, message: str = "") -> None:
    self.state = state
    super().__init__(f"[State={State.state_name(state)}] {message}")
Init the exception.

Parameters
----------
state : int
    One of the states defined in ``State``

For the other parameters, see ``GeneralParserError``.
https://github.com/twidi/mixt/blob/adeff652784f0d814835fd16a8cacab09f426922/src/mixt/exceptions.py#L269-L282
from typing import Any, List, Tuple from mixt.codec.state import State class MixtException(Exception): def __init__(self, message: str = "") -> None: self.message = message super().__init__(message) class ElementError(MixtException): def __init__(self, tag_name: str, message: str = "") -> None: if message and not message.startswith(".") and not message.startswith(" "): message = " " + message self.tag_name = tag_name super().__init__(f"<{tag_name}>{message}") class PropError(ElementError): def __init__(self, tag_name: str, prop_name: str, message: str = "") -> None: self.prop_name = prop_name super().__init__(tag_name, f".{prop_name}: {message}") class PropTypeError(PropError): class PropTypeChoicesError(PropTypeError): class PropTypeRequiredError(PropTypeError): class InvalidPropNameError(PropError, AttributeError): def __init__(self, tag_name: str, prop_name: str) -> None: super().__init__(tag_name, prop_name, "is not an allowed prop") class InvalidPropValueError(PropError, TypeError): def __init__( self, tag_name: str, prop_name: str, value: Any, expected_type: Any ) -> None: self.value = value self.expected_type = expected_type super().__init__( tag_name, prop_name, f"`{value}` is not a valid value for this prop " f"(type: {type(value)}, expected: {expected_type})", ) class InvalidPropChoiceError(InvalidPropValueError): def __init__( self, tag_name: str, prop_name: str, value: Any, choices: List[Any] ) -> None: self.value = value self.choices = choices super( InvalidPropValueError, self ).__init__( tag_name, prop_name, f"`{value}` is not a valid choice for this prop (must be in {choices})", ) class InvalidPropBoolError(InvalidPropValueError): def __init__(self, tag_name: str, prop_name: str, value: Any) -> None: self.value = value super( InvalidPropValueError, self ).__init__( tag_name, prop_name, f"`{value}` is not a valid choice for this boolean prop " f"(must be in [True, False, 'true', 'false', '', '{prop_name}'])", ) class RequiredPropError(PropError, TypeError): def __init__(self, tag_name: str, prop_name: str) -> None: super().__init__(tag_name, prop_name, "is a required prop but is not set") class UnsetPropError(PropError, AttributeError): def __init__(self, tag_name: str, prop_name: str) -> None: super().__init__(tag_name, prop_name, "prop is not set") class InvalidChildrenError(ElementError): class GeneralParserError(Exception): def __init__(self, message: str = "") -> None: self.message = message super().__init__(f"<mixt parser> {message}") class ParserStateError(GeneralParserError):
MIT License
google/flax
examples/ogbg_molpcba/input_pipeline.py
get_graphs_tuple_size
python
def get_graphs_tuple_size(graph: jraph.GraphsTuple):
    return GraphsTupleSize(
        n_node=np.sum(graph.n_node),
        n_edge=np.sum(graph.n_edge),
        n_graph=np.shape(graph.n_node)[0])
Returns the number of nodes, edges and graphs in a GraphsTuple.
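A quick check with a hand-built single graph; the feature shapes are arbitrary and only the node/edge counts matter here.

# Builds one graph with 3 nodes and 2 edges purely for illustration.
toy_graph = jraph.GraphsTuple(
    n_node=np.array([3]), n_edge=np.array([2]),
    nodes=np.zeros((3, 4)), edges=np.zeros((2, 4)),
    senders=np.array([0, 1]), receivers=np.array([1, 2]),
    globals=np.zeros((1, 8)))
print(get_graphs_tuple_size(toy_graph))  # GraphsTupleSize(n_node=3, n_edge=2, n_graph=1)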
https://github.com/google/flax/blob/48b34ab87c7d20afc567f6e0fe5d67e423cf08bc/examples/ogbg_molpcba/input_pipeline.py#L227-L232
import functools from typing import Dict, NamedTuple import jraph import numpy as np import tensorflow as tf import tensorflow_datasets as tfds class GraphsTupleSize(NamedTuple): n_node: int n_edge: int n_graph: int def get_raw_datasets() -> Dict[str, tf.data.Dataset]: ds_builder = tfds.builder('ogbg_molpcba') ds_builder.download_and_prepare() ds_splits = ['train', 'validation', 'test'] datasets = { split: ds_builder.as_dataset(split=split) for split in ds_splits } return datasets def get_datasets(batch_size: int, add_virtual_node: bool = True, add_undirected_edges: bool = True, add_self_loops: bool = True) -> Dict[str, tf.data.Dataset]: if batch_size <= 1: raise ValueError('Batch size must be > 1 to account for padding graphs.') datasets = get_raw_datasets() convert_to_graphs_tuple_fn = functools.partial( convert_to_graphs_tuple, add_virtual_node=add_self_loops, add_undirected_edges=add_undirected_edges, add_self_loops=add_virtual_node, ) for split_name in datasets: datasets[split_name] = datasets[split_name].map( convert_to_graphs_tuple_fn, num_parallel_calls=tf.data.AUTOTUNE, deterministic=True) budget = estimate_padding_budget_for_batch_size(datasets['train'], batch_size, num_estimation_graphs=100) example_graph = next(datasets['train'].as_numpy_iterator()) example_padded_graph = jraph.pad_with_graphs(example_graph, *budget) padded_graphs_spec = specs_from_graphs_tuple(example_padded_graph) for split_name, dataset_split in datasets.items(): if split_name == 'train': dataset_split = dataset_split.shuffle(100, reshuffle_each_iteration=True) dataset_split = dataset_split.repeat() batching_fn = functools.partial( jraph.dynamically_batch, graphs_tuple_iterator=iter(dataset_split), n_node=budget.n_node, n_edge=budget.n_edge, n_graph=budget.n_graph) dataset_split = tf.data.Dataset.from_generator( batching_fn, output_signature=padded_graphs_spec) if split_name in ['validation', 'test']: dataset_split = dataset_split.cache() datasets[split_name] = dataset_split return datasets def convert_to_graphs_tuple(graph: Dict[str, tf.Tensor], add_virtual_node: bool, add_undirected_edges: bool, add_self_loops: bool) -> jraph.GraphsTuple: num_nodes = tf.squeeze(graph['num_nodes']) num_edges = tf.squeeze(graph['num_edges']) nodes = graph['node_feat'] edges = graph['edge_feat'] edge_feature_dim = edges.shape[-1] labels = graph['labels'] senders = graph['edge_index'][:, 0] receivers = graph['edge_index'][:, 1] if add_virtual_node: nodes = tf.concat( [nodes, tf.zeros_like(nodes[0, None])], axis=0) senders = tf.concat( [senders, tf.range(num_nodes)], axis=0) receivers = tf.concat( [receivers, tf.fill((num_nodes,), num_nodes + 1)], axis=0) edges = tf.concat( [edges, tf.zeros((num_nodes, edge_feature_dim))], axis=0) num_edges += num_nodes num_nodes += 1 if add_undirected_edges: new_senders = tf.concat([senders, receivers], axis=0) new_receivers = tf.concat([receivers, senders], axis=0) edges = tf.concat([edges, edges], axis=0) senders, receivers = new_senders, new_receivers num_edges *= 2 if add_self_loops: senders = tf.concat([senders, tf.range(num_nodes)], axis=0) receivers = tf.concat([receivers, tf.range(num_nodes)], axis=0) edges = tf.concat([edges, tf.zeros((num_nodes, edge_feature_dim))], axis=0) num_edges += num_nodes return jraph.GraphsTuple( n_node=tf.expand_dims(num_nodes, 0), n_edge=tf.expand_dims(num_edges, 0), nodes=nodes, edges=edges, senders=senders, receivers=receivers, globals=tf.expand_dims(labels, axis=0), ) def estimate_padding_budget_for_batch_size( dataset: tf.data.Dataset, batch_size: int, 
num_estimation_graphs: int) -> GraphsTupleSize: def next_multiple_of_64(val: float): return 64 * (1 + int(val // 64)) if batch_size <= 1: raise ValueError('Batch size must be > 1 to account for padding graphs.') total_num_nodes = 0 total_num_edges = 0 for graph in dataset.take(num_estimation_graphs).as_numpy_iterator(): graph_size = get_graphs_tuple_size(graph) if graph_size.n_graph != 1: raise ValueError('Dataset contains batched GraphTuples.') total_num_nodes += graph_size.n_node total_num_edges += graph_size.n_edge num_nodes_per_graph_estimate = total_num_nodes / num_estimation_graphs num_edges_per_graph_estimate = total_num_edges / num_estimation_graphs padding_budget = GraphsTupleSize( n_node=next_multiple_of_64(num_nodes_per_graph_estimate * batch_size), n_edge=next_multiple_of_64(num_edges_per_graph_estimate * batch_size), n_graph=batch_size) return padding_budget def specs_from_graphs_tuple(graph: jraph.GraphsTuple): def get_tensor_spec(array: np.ndarray): shape = list(array.shape) dtype = array.dtype return tf.TensorSpec(shape=shape, dtype=dtype) specs = {} for field in [ 'nodes', 'edges', 'senders', 'receivers', 'globals', 'n_node', 'n_edge' ]: field_sample = getattr(graph, field) specs[field] = get_tensor_spec(field_sample) return jraph.GraphsTuple(**specs)
Apache License 2.0
ses4j/ts
ts.py
format_time
python
def format_time(t):
    if t is None:
        return ""
    ampm = "a"
    if t.hour > 12:
        ampm = "p"
        hour = t.hour - 12
    elif t.hour == 12:
        ampm = "p"
        hour = 12
    elif t.hour == 0:
        hour = 12
    else:
        hour = t.hour

    if t.minute == 0:
        s = "%d%s" % (hour, ampm)
    else:
        s = "%d:%02d%s" % (hour, t.minute, ampm)
    return s
Print out succinct time.

>>> format_time(datetime(2015, 1, 1, 5, 15, 0))
'5:15a'
>>> format_time(datetime(2015, 1, 1, 12, 0, 0))
'12p'
>>> format_time(datetime(2015, 1, 1, 0, 1, 0))
'12:01a'
https://github.com/ses4j/ts/blob/b41d7477638abc72777dca9465dba92045d8a561/ts.py#L312-L340
import logging, re, os, shutil, sys from datetime import date, datetime from dataclasses import dataclass from collections import defaultdict from os.path import expanduser from typing import List, Optional from dateutil.parser import parse as dateutil_parse from modgrammar import * import yaml from invoice import Invoice def get_default_settings(): settings = { 'billcode': True, 'billcodes': {}, 'billrate': 1000., 'footer': [], 'prefix': '', 'invoice_on': 'marker', 'invoice_marker': '====', 'summary_on': 'marker', 'summary_marker': '----', 'verbose': 0, 'weekly_summary_template': '---------- {hours_this_week} ({hours_since_invoice} uninvoiced)', 'invoice_template': '========== {hours_this_week} ({hours_since_invoice} since invoice)', 'invoice_filename_template': 'invoice-{invoice_code}.pdf', 'address': [] } return settings def samefile(f1, f2): try: return os.path.samefile(f1, f2) except AttributeError: f1 = os.path.abspath(f1).lower() f2 = os.path.abspath(f2).lower() return f1 == f2 @dataclass class TimesheetLineItem: date: date prefix: Optional[str] = None suffix: Optional[str] = None billcode: Optional[str] = None hours: Optional[int] = None ranges: Optional[List[int]] = None logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) grammar_whitespace_mode = 'explicit' class MyDate(Grammar): grammar = (WORD('2', "-0-9", fullmatch=True, grammar_name='date') | WORD('0-9', "-0-9/", grammar_name='date')) grammar_tags = ['date'] class BillCode(Grammar): grammar = (WORD("A-Z", grammar_name='bill_code')) class Hours(Grammar): grammar = (WORD(".0-9", grammar_name='hours'), OPTIONAL("h")) class Hour(Grammar): grammar = WORD("0-9", min=1, max=2, grammar_name='hour') class Minute(Grammar): grammar = WORD("0-9", min=1, max=2, grammar_name='minute') class AMPM(Grammar): grammar = L("A") | L("P") | L("a") | L("p") class MyTime(Grammar): grammar = (G(Hour, OPTIONAL(":", Minute))), OPTIONAL(AMPM) class Range(Grammar): grammar = G(MyTime, OPTIONAL(WHITESPACE), '-', OPTIONAL(WHITESPACE), OPTIONAL(MyTime), OPTIONAL('(', Hours, ')'), grammar_name='range') class RangeList(Grammar): grammar = LIST_OF(G(Range | Hours), sep=G(",", OPTIONAL(WHITESPACE)), grammar_name="ranges") class Prefix(Grammar): grammar = (ZERO_OR_MORE(L('*') | WHITESPACE), ) class Suffix(Grammar): grammar = (OPTIONAL(WHITESPACE), OPTIONAL(L('#'), REST_OF_LINE), EOF) class MyGrammar (Grammar): grammar = ( G(Prefix, MyDate, WHITESPACE, Hours, WHITESPACE, RangeList, Suffix, grammar_name="3args") | G(Prefix, MyDate, WHITESPACE, RangeList, Suffix, grammar_name="2argrange") | G(Prefix, MyDate, WHITESPACE, Hours, Suffix, grammar_name="2arghours") | G(Prefix, MyDate, WHITESPACE, BillCode, WHITESPACE, Hours, WHITESPACE, RangeList, Suffix, grammar_name="3args") | G(Prefix, MyDate, WHITESPACE, BillCode, WHITESPACE, RangeList, Suffix, grammar_name="2argrange") | G(Prefix, MyDate, WHITESPACE, BillCode, WHITESPACE, Hours, Suffix, grammar_name="2arghours") | G(Prefix, MyDate, Suffix, grammar_name="justdate") ) myparser = MyGrammar.parser() time_regex = re.compile(r'(\d{1,2})(:\d+)?([aApP])?') def parse_time(cur_date, time_str, after=None): m = time_regex.match(time_str) if not m: return None g = m.groups() hour = int(g[0]) minute = 0 if g[1] is not None: minute = int(g[1][1:]) if g[2] is not None: if hour != 12 and g[2] in ('p','P'): hour += 12 elif hour == 12 and g[2] in ('a','A'): hour -= 12 else: time_as_am_guess = datetime(cur_date.year, cur_date.month, cur_date.day, hour=hour, minute=minute) if after is not None: if after > 
time_as_am_guess: hour += 12 else: if hour < 7: logger.warning("Assuming time {} is PM".format(time_str)) hour += 12 return datetime(cur_date.year, cur_date.month, cur_date.day, hour=hour, minute=minute) class TimesheetParseError(Exception): pass def parse(line, settings=None, prefix=None) -> Optional[TimesheetLineItem]: if settings is None: settings = get_default_settings() if prefix is None: prefix = settings.get('prefix','* ') if not line.strip(): return None line = line.rstrip() origresult = myparser.parse_text(line, reset=True, eof=True) result = origresult.elements[0] date_g = result.get(MyDate) if date_g is None: return None cur_date = dateutil_parse(str(date_g)).date() ret = TimesheetLineItem(date=cur_date) ret.prefix = result.get(Prefix) ret.suffix = result.get(Suffix) ret.billcode = result.get(BillCode) hours_g = result.get(Hours) if hours_g is not None: ret.hours = float(str(hours_g)) ranges = result.get(RangeList) if ranges is not None: ret.ranges = [] for r in ranges.elements[0].elements: if r.grammar_name == 'Hours': duration = float(str(r)) ret.ranges.append( {'duration': duration} ) elif r.grammar_name == 'Range': times = r.find_all(MyTime) if len(times)==1: start = str(times[0]) end = None elif len(times)==2: start = str(times[0]) end = str(times[1]) else: raise Exception() try: parsed_start = parse_time(cur_date, start) except (ValueError, ): parsed_start = None parsed_end = None if end is not None: try: parsed_end = parse_time(cur_date, end, after=parsed_start) except (ValueError, AttributeError): pass if parsed_end is not None: if parsed_end < parsed_start: raise TimesheetParseError("{} < {} in {}".format(parsed_end, parsed_start, line)) duration = (parsed_end-parsed_start).seconds/60./60. else: duration = None ret.ranges.append( {'s': parsed_start, 'e': parsed_end, 'duration': duration} ) else: pass if ret.ranges is not None: total_duration = sum([r['duration'] for r in ret.ranges if r['duration'] is not None]) if ret.hours is not None and format_hours(total_duration) != format_hours(ret.hours): logger.warning('Changing total hours from %s to %s\n Original: %s' % (ret.hours, total_duration, line)) ret.hours = total_duration if len(ret.ranges) == 1 and 's' not in ret.ranges[0]: del ret.ranges if ret.hours is not None and ret.hours > 9: logger.warning('Calculated duration={}, which is above normal\n Original: {}'.format(ret.hours, line)) if settings['verbose'] >= 2: print('= parsed={}'.format(ret)) return ret def format_hours(h): if h is None: return '-' if int(h) == h: return str(int(h)) return ("%.2f" % h).lstrip('0')
MIT License
missionpinball/mpf
mpf/modes/credits/code/credits.py
Credits.__init__
python
def __init__(self, *args, **kwargs):
    self.data_manager = None
    self.earnings = None

    self.credit_units_per_game = None
    self.credit_unit = None
    self.pricing_table = {}
    self.pricing_tiers_wrap_around = 1
    self.credit_units_for_pricing_tiers = None
    self.reset_pricing_tier_count_this_game = None
    self.credits_config = None
    self._switch_handlers = []
    super().__init__(*args, **kwargs)
Initialise credits mode.
https://github.com/missionpinball/mpf/blob/1eda6ba6892b8f7cc6dedf6cb6472ff92293b8ef/mpf/modes/credits/code/credits.py#L19-L32
from math import floor

from mpf.core.placeholder_manager import NativeTypeTemplate
from mpf.core.settings_controller import SettingEntry
from mpf.core.mode import Mode


class Credits(Mode):

    __slots__ = ["data_manager", "earnings", "credit_units_per_game", "credit_unit",
                 "pricing_table", "credit_units_for_pricing_tiers",
                 "reset_pricing_tier_count_this_game", "credits_config",
                 "pricing_tiers_wrap_around", "_switch_handlers"]
MIT License
wikimedia/pywikibot
pywikibot/pagegenerators.py
GeneratorFactory._handle_unwatched
python
def _handle_unwatched(self, value: str) -> HANDLER_RETURN_TYPE:
    return self.site.unwatchedpage(total=_int_none(value))
Handle `-unwatched` argument.
https://github.com/wikimedia/pywikibot/blob/5097f5b9a7ef9d39f35f17edd11faf3086a01d1d/pywikibot/pagegenerators.py#L888-L890
import calendar import codecs import datetime import io import itertools import json import re import sys from collections import abc, namedtuple from datetime import timedelta from functools import partial from http import HTTPStatus from itertools import zip_longest from typing import Any, Optional, Union from urllib.parse import urlparse from requests.exceptions import ReadTimeout import pywikibot from pywikibot import config, date, i18n, xmlreader from pywikibot.backports import ( Callable, Dict, FrozenSet, Iterable, Iterator, List, Pattern, Set, Sequence, Tuple, Type, ) from pywikibot.bot import ShowingListOption from pywikibot.comms import http from pywikibot.data import api from pywikibot.exceptions import ( NoPageError, ServerError, UnknownExtensionError, ) from pywikibot.proofreadpage import ProofreadPage from pywikibot.tools import ( DequeGenerator, deprecated, filter_unique, intersect_generators, itergroup, ) _logger = 'pagegenerators' parameterHelp = """\ GENERATOR OPTIONS ================= -cat Work on all pages which are in a specific category. Argument can also be given as "-cat:categoryname" or as "-cat:categoryname|fromtitle" (using # instead of | is also allowed in this one and the following) -catr Like -cat, but also recursively includes pages in subcategories, sub-subcategories etc. of the given category. Argument can also be given as "-catr:categoryname" or as "-catr:categoryname|fromtitle". -subcats Work on all subcategories of a specific category. Argument can also be given as "-subcats:categoryname" or as "-subcats:categoryname|fromtitle". -subcatsr Like -subcats, but also includes sub-subcategories etc. of the given category. Argument can also be given as "-subcatsr:categoryname" or as "-subcatsr:categoryname|fromtitle". -uncat Work on all pages which are not categorised. -uncatcat Work on all categories which are not categorised. -uncatfiles Work on all files which are not categorised. -file Read a list of pages to treat from the named text file. Page titles in the file may be either enclosed with [[brackets]], or be separated by new lines. Argument can also be given as "-file:filename". -filelinks Work on all pages that use a certain image/media file. Argument can also be given as "-filelinks:filename". -search Work on all pages that are found in a MediaWiki search across all namespaces. -logevents Work on articles that were on a specified Special:Log. The value may be a comma separated list of these values: logevent,username,start,end or for backward compatibility: logevent,username,total Note: 'start' is the most recent date and log events are iterated from present to past. If 'start'' is not provided, it means 'now'; if 'end' is not provided, it means 'since the beginning'. To use the default value, use an empty string. You have options for every type of logs given by the log event parameter which could be one of the following: spamblacklist, titleblacklist, gblblock, renameuser, globalauth, gblrights, gblrename, abusefilter, massmessage, thanks, usermerge, block, protect, rights, delete, upload, move, import, patrol, merge, suppress, tag, managetags, contentmodel, review, stable, timedmediahandler, newusers It uses the default number of pages 10. 
Examples: -logevents:move gives pages from move log (usually redirects) -logevents:delete,,20 gives 20 pages from deletion log -logevents:protect,Usr gives pages from protect log by user Usr -logevents:patrol,Usr,20 gives 20 patrolled pages by Usr -logevents:upload,,20121231,20100101 gives upload pages in the 2010s, 2011s, and 2012s -logevents:review,,20121231 gives review pages since the beginning till the 31 Dec 2012 -logevents:review,Usr,20121231 gives review pages by user Usr since the beginning till the 31 Dec 2012 In some cases it must be given as -logevents:"move,Usr,20" -interwiki Work on the given page and all equivalent pages in other languages. This can, for example, be used to fight multi-site spamming. Attention: this will cause the bot to modify pages on several wiki sites, this is not well tested, so check your edits! -links Work on all pages that are linked from a certain page. Argument can also be given as "-links:linkingpagetitle". -liverecentchanges Work on pages from the live recent changes feed. If used as -liverecentchanges:x, work on x recent changes. -imagesused Work on all images that contained on a certain page. Can also be given as "-imagesused:linkingpagetitle". -newimages Work on the most recent new images. If given as -newimages:x, will work on x newest images. -newpages Work on the most recent new pages. If given as -newpages:x, will work on x newest pages. -recentchanges Work on the pages with the most recent changes. If given as -recentchanges:x, will work on the x most recently changed pages. If given as -recentchanges:offset,duration it will work on pages changed from 'offset' minutes with 'duration' minutes of timespan. rctags are supported too. The rctag must be the very first parameter part. Examples: -recentchanges:20 gives the 20 most recently changed pages -recentchanges:120,70 will give pages with 120 offset minutes and 70 minutes of timespan -recentchanges:visualeditor,10 gives the 10 most recently changed pages marked with 'visualeditor' -recentchanges:"mobile edit,60,35" will retrieve pages marked with 'mobile edit' for the given offset and timespan -unconnectedpages Work on the most recent unconnected pages to the Wikibase repository. Given as -unconnectedpages:x, will work on the x most recent unconnected pages. -ref Work on all pages that link to a certain page. Argument can also be given as "-ref:referredpagetitle". -start Specifies that the robot should go alphabetically through all pages on the home wiki, starting at the named page. Argument can also be given as "-start:pagetitle". You can also include a namespace. For example, "-start:Template:!" will make the bot work on all pages in the template namespace. default value is start:! -prefixindex Work on pages commencing with a common prefix. -transcludes Work on all pages that use a certain template. Argument can also be given as "-transcludes:Title". -unusedfiles Work on all description pages of images/media files that are not used anywhere. Argument can be given as "-unusedfiles:n" where n is the maximum number of articles to work on. -lonelypages Work on all articles that are not linked from any other article. Argument can be given as "-lonelypages:n" where n is the maximum number of articles to work on. -unwatched Work on all articles that are not watched by anyone. Argument can be given as "-unwatched:n" where n is the maximum number of articles to work on. -property:name Work on all pages with a given property name from Special:PagesWithProp. 
-usercontribs Work on all articles that were edited by a certain user. (Example : -usercontribs:DumZiBoT) -weblink Work on all articles that contain an external link to a given URL; may be given as "-weblink:url" -withoutinterwiki Work on all pages that don't have interlanguage links. Argument can be given as "-withoutinterwiki:n" where n is the total to fetch. -mysqlquery Takes a MySQL query string like "SELECT page_namespace, page_title FROM page WHERE page_namespace = 0" and treats the resulting pages. See https://www.mediawiki.org/wiki/Manual:Pywikibot/MySQL for more details. -sparql Takes a SPARQL SELECT query string including ?item and works on the resulting pages. -sparqlendpoint Specify SPARQL endpoint URL (optional). (Example : -sparqlendpoint:http://myserver.com/sparql) -searchitem Takes a search string and works on Wikibase pages that contain it. Argument can be given as "-searchitem:text", where text is the string to look for, or "-searchitem:lang:text", where lang is the language to search items in. -wantedpages Work on pages that are linked, but do not exist; may be given as "-wantedpages:n" where n is the maximum number of articles to work on. -wantedcategories Work on categories that are used, but do not exist; may be given as "-wantedcategories:n" where n is the maximum number of categories to work on. -wantedfiles Work on files that are used, but do not exist; may be given as "-wantedfiles:n" where n is the maximum number of files to work on. -wantedtemplates Work on templates that are used, but do not exist; may be given as "-wantedtemplates:n" where n is the maximum number of templates to work on. -random Work on random pages returned by [[Special:Random]]. Can also be given as "-random:n" where n is the number of pages to be returned. -randomredirect Work on random redirect pages returned by [[Special:RandomRedirect]]. Can also be given as "-randomredirect:n" where n is the number of pages to be returned. -google Work on all pages that are found in a Google search. You need a Google Web API license key. Note that Google doesn't give out license keys anymore. See google_key in config.py for instructions. Argument can also be given as "-google:searchstring". -page Work on a single page. Argument can also be given as "-page:pagetitle", and supplied multiple times for multiple pages. -pageid Work on a single pageid. Argument can also be given as "-pageid:pageid1,pageid2,." or "-pageid:'pageid1|pageid2|..'" and supplied multiple times for multiple pages. -linter Work on pages that contain lint errors. Extension Linter must be available on the site. -linter select all categories. -linter:high, -linter:medium or -linter:low select all categories for that prio. Single categories can be selected with commas as in -linter:cat1,cat2,cat3 Adding '/int' identifies Lint ID to start querying from: e.g. -linter:high/10000 -linter:show just shows available categories. -querypage:name Work on pages provided by a QueryPage-based special page, see https://www.mediawiki.org/wiki/API:Querypage. (tip: use -limit:n to fetch only n pages). -querypage shows special pages available. -url Read a list of pages to treat from the provided URL. The URL must return text in the same format as expected for the -file argument, e.g. page titles separated by newlines or enclosed in brackets. FILTER OPTIONS ============== -catfilter Filter the page generator to only yield pages in the specified category. See -cat generator for argument format. 
-grep A regular expression that needs to match the article otherwise the page won't be returned. Multiple -grep:regexpr can be provided and the page will be returned if content is matched by any of the regexpr provided. Case insensitive regular expressions will be used and dot matches any character, including a newline. -grepnot Like -grep, but return the page only if the regular expression does not match. -intersect Work on the intersection of all the provided generators. -limit When used with any other argument -limit:n specifies a set of pages, work on no more than n pages in total. -namespaces Filter the page generator to only yield pages in the -namespace specified namespaces. Separate multiple namespace -ns numbers or names with commas. Examples: -ns:0,2,4 -ns:Help,MediaWiki You may use a preleading "not" to exclude the namespace. Examples: -ns:not:2,3 -ns:not:Help,File If used with -newpages/-random/-randomredirect/linter generators, -namespace/ns must be provided before -newpages/-random/-randomredirect/linter. If used with -recentchanges generator, efficiency is improved if -namespace is provided before -recentchanges. If used with -start generator, -namespace/ns shall contain only one value. -onlyif A claim the page needs to contain, otherwise the item won't be returned. The format is property=value,qualifier=value. Multiple (or none) qualifiers can be passed, separated by commas. Examples: P1=Q2 (property P1 must contain value Q2), P3=Q4,P5=Q6,P6=Q7 (property P3 with value Q4 and qualifiers: P5 with value Q6 and P6 with value Q7). Value can be page ID, coordinate in format: latitude,longitude[,precision] (all values are in decimal degrees), year, or plain string. The argument can be provided multiple times and the item page will be returned only if all claims are present. Argument can be also given as "-onlyif:expression". -onlyifnot A claim the page must not contain, otherwise the item won't be returned. For usage and examples, see -onlyif above. -ql Filter pages based on page quality. This is only applicable if contentmodel equals 'proofread-page', otherwise has no effects. Valid values are in range 0-4. Multiple values can be comma-separated. -subpage -subpage:n filters pages to only those that have depth n i.e. a depth of 0 filters out all pages that are subpages, and a depth of 1 filters out all pages that are subpages of subpages. -titleregex A regular expression that needs to match the article title otherwise the page won't be returned. Multiple -titleregex:regexpr can be provided and the page will be returned if title is matched by any of the regexpr provided. Case insensitive regular expressions will be used and dot matches any character. -titleregexnot Like -titleregex, but return the page only if the regular expression does not match. 
""" docuReplacements = {'&params;': parameterHelp} HANDLER_RETURN_TYPE = Union[None, bool, Iterable['pywikibot.page.BasePage']] PRELOAD_SITE_TYPE = Dict[pywikibot.site.BaseSite, List[pywikibot.page.Page]] GEN_FACTORY_NAMESPACE_TYPE = Union[List[str], FrozenSet['pywikibot.site.Namespace']] GEN_FACTORY_CLAIM_TYPE = List[Tuple[str, str, Dict[str, str], bool]] OPT_SITE_TYPE = Optional['pywikibot.site.BaseSite'] OPT_TIMESTAMP_TYPE = Optional['pywikibot.Timestamp'] OPT_GENERATOR_TYPE = Optional[Iterable['pywikibot.page.Page']] NAMESPACE_OR_INT_TYPE = Union[int, 'pywikibot.site.Namespace'] NAMESPACE_OR_STR_TYPE = Union[str, 'pywikibot.site.Namespace'] ITEM_CLAIM_FILTER_CLASS = Type['ItemClaimFilter'] REGEX_FILTER_CLASS = Type['RegexFilter'] PATTERN_STR_OR_SEQ_TYPE = Union[str, Pattern[str], Sequence[str], Sequence[Pattern[str]]] __doc__ = __doc__.replace('&params;', parameterHelp) _filter_unique_pages = partial( filter_unique, key=lambda page: '{}:{}:{}'.format(*page._cmpkey())) def _output_if(predicate: bool, msg: str) -> None: if predicate: pywikibot.output(msg) class GeneratorFactory: def __init__(self, site: OPT_SITE_TYPE = None, positional_arg_name: Optional[str] = None, enabled_options: Optional[Iterable[str]] = None, disabled_options: Optional[Iterable[str]] = None) -> None: self.gens = [] self._namespaces = [] self.limit = None self.qualityfilter_list = [] self.articlefilter_list = [] self.articlenotfilter_list = [] self.titlefilter_list = [] self.titlenotfilter_list = [] self.claimfilter_list = [] self.catfilter_list = [] self.intersect = False self.subpage_max_depth = None self._site = site self._positional_arg_name = positional_arg_name self._sparql = None self.nopreload = False self._validate_options(enabled_options, disabled_options) def _validate_options(self, enable: Optional[Iterable[str]], disable: Optional[Iterable[str]]) -> None: msg = '{!r} is not a valid pagegenerators option to be ' enable = enable or [] disable = disable or [] self.enabled_options = set(enable) self.disabled_options = set(disable) for opt in enable: if not hasattr(self, '_handle_' + opt): pywikibot.warning((msg + 'enabled').format(opt)) self.enabled_options.remove(opt) for opt in disable: if not hasattr(self, '_handle_' + opt): pywikibot.warning((msg + 'disabled').format(opt)) self.disabled_options.remove(opt) if self.enabled_options and self.disabled_options: pywikibot.warning('Ignoring disabled option because enabled ' 'options are set.') self.disabled_options = set() @property def site(self) -> 'pywikibot.site.BaseSite': if self._site is None: self._site = pywikibot.Site() return self._site @property def namespaces(self) -> FrozenSet['pywikibot.site.Namespace']: if isinstance(self._namespaces, list): self._namespaces = frozenset( self.site.namespaces.resolve(self._namespaces)) return self._namespaces def getCombinedGenerator(self, gen: OPT_GENERATOR_TYPE = None, preload: bool = False) -> OPT_GENERATOR_TYPE: if gen: self.gens.insert(0, gen) for i, gen_item in enumerate(self.gens): if self.namespaces: if (isinstance(gen_item, api.QueryGenerator) and gen_item.support_namespace()): gen_item.set_namespace(self.namespaces) else: self.gens[i] = NamespaceFilterPageGenerator( gen_item, self.namespaces, self.site) if self.limit: try: gen_item.set_maximum_items(self.limit) except AttributeError: self.gens[i] = itertools.islice(gen_item, self.limit) if not self.gens: if any((self.titlefilter_list, self.titlenotfilter_list, self.articlefilter_list, self.articlenotfilter_list, self.claimfilter_list, 
self.catfilter_list, self.qualityfilter_list, self.subpage_max_depth is not None)): pywikibot.warning('filter(s) specified but no generators.') return None if len(self.gens) == 1: dupfiltergen = self.gens[0] if hasattr(self, '_single_gen_filter_unique'): dupfiltergen = _filter_unique_pages(dupfiltergen) if self.intersect: pywikibot.warning( '"-intersect" ignored as only one generator is specified.') elif self.intersect: dupfiltergen = intersect_generators(*self.gens) else: dupfiltergen = _filter_unique_pages(itertools.chain(*self.gens)) if self.subpage_max_depth is not None: dupfiltergen = SubpageFilterGenerator( dupfiltergen, self.subpage_max_depth) if self.claimfilter_list: for claim in self.claimfilter_list: dupfiltergen = ItemClaimFilterPageGenerator(dupfiltergen, claim[0], claim[1], claim[2], claim[3]) if self.qualityfilter_list: dupfiltergen = QualityFilterPageGenerator( dupfiltergen, self.qualityfilter_list) if self.titlefilter_list: dupfiltergen = RegexFilterPageGenerator( dupfiltergen, self.titlefilter_list) if self.titlenotfilter_list: dupfiltergen = RegexFilterPageGenerator( dupfiltergen, self.titlenotfilter_list, 'none') if self.catfilter_list: dupfiltergen = CategoryFilterPageGenerator( dupfiltergen, self.catfilter_list) if (preload or self.articlefilter_list) and not self.nopreload: if isinstance(dupfiltergen, DequeGenerator): dupfiltergen = DequePreloadingGenerator(dupfiltergen) else: dupfiltergen = PreloadingGenerator(dupfiltergen) if self.articlefilter_list: dupfiltergen = RegexBodyFilterPageGenerator( dupfiltergen, self.articlefilter_list) if self.articlenotfilter_list: dupfiltergen = RegexBodyFilterPageGenerator( dupfiltergen, self.articlenotfilter_list, 'none') return dupfiltergen def getCategory(self, category: str ) -> Tuple['pywikibot.Category', Optional[str]]: if not category: category = i18n.input( 'pywikibot-enter-category-name', fallback_prompt='Please enter the category name:') category = category.replace('#', '|') startfrom = None category, _, startfrom = category.partition('|') if not startfrom: startfrom = None prefix = category.split(':', 1)[0] if prefix not in self.site.namespaces[14]: category = '{}:{}'.format( self.site.namespace(14), category) cat = pywikibot.Category(pywikibot.Link(category, source=self.site, default_namespace=14)) return cat, startfrom def getCategoryGen(self, category: str, recurse: Union[int, bool] = False, content: bool = False, gen_func: Optional[Callable] = None) -> Any: if gen_func is None: raise ValueError('getCategoryGen requires a gen_func argument') cat, startfrom = self.getCategory(category) return gen_func(cat, start=startfrom, recurse=recurse, content=content) @staticmethod def _parse_log_events(logtype: str, user: Optional[str] = None, start: Optional[str] = None, end: Optional[str] = None ) -> Optional[Iterator['pywikibot.page.Page']]: def parse_start(start: Optional[str] ) -> Tuple[Optional[str], Optional[int]]: if start is None: return None, None if len(start) >= 8: return pywikibot.Timestamp.fromtimestampformat(start), None return None, int(start) start = start or None try: start, total = parse_start(start) assert total is None or total > 0 except ValueError as err: pywikibot.error( '{}. Start parameter has wrong format!'.format(err)) return None except AssertionError: pywikibot.error('Total number of log ({}) events must be a ' 'positive int.'.format(start)) return None try: end = pywikibot.Timestamp.fromtimestampformat(end) except ValueError as err: pywikibot.error( '{}. 
End parameter has wrong format!'.format(err)) return None except TypeError: pass if start or end: pywikibot.output('Fetching log events in range: {} - {}.' .format(end or 'beginning of time', start or 'now')) return LogeventsPageGenerator(logtype, user or None, total=total, start=start, end=end) def _handle_filelinks(self, value: str) -> HANDLER_RETURN_TYPE: if not value: value = i18n.input( 'pywikibot-enter-file-links-processing', fallback_prompt='Links to which file page should be ' 'processed?') if not value.startswith(self.site.namespace(6) + ':'): value = 'Image:' + value file_page = pywikibot.FilePage(self.site, value) return file_page.usingPages() def _handle_linter(self, value: str) -> HANDLER_RETURN_TYPE: if not self.site.has_extension('Linter'): raise UnknownExtensionError( '-linter needs a site with Linter extension.') cats = self.site.siteinfo.get('linter') valid_cats = [c for _list in cats.values() for c in _list] value = value or '' lint_from = None cat, _, lint_from = value.partition('/') lint_from = lint_from or None def show_available_categories(cats: Dict[ str, Sequence['pywikibot.Category']] ) -> None: _i = ' ' * 4 _2i = 2 * _i txt = 'Available categories of lint errors:\n' for prio, _list in cats.items(): txt += '{indent}{prio}\n'.format(indent=_i, prio=prio) txt += ''.join( '{indent}{cat}\n'.format(indent=_2i, cat=c) for c in _list) pywikibot.output(txt) if cat == 'show': show_available_categories(cats) sys.exit(0) if not cat: lint_cats = valid_cats elif cat in ['low', 'medium', 'high']: lint_cats = cats[cat] else: lint_cats = cat.split(',') assert set(lint_cats) <= set(valid_cats), 'Invalid category of lint errors: {}'.format(cat) return self.site.linter_pages( lint_categories='|'.join(lint_cats), namespaces=self.namespaces, lint_from=lint_from) def _handle_querypage(self, value: str) -> HANDLER_RETURN_TYPE: if value is None: pages = self.site._paraminfo.parameter('query+querypage', 'page') pages = sorted(pages['type']) limit = self.site._paraminfo.parameter('query+querypage', 'limit') max_w = max(len(p) for p in pages[::2]) + 4 txt = 'Available special pages:\n' for a, b in zip_longest(pages[::2], pages[1::2], fillvalue=''): txt += ' {a:<{max_w}}{b}\n'.format(a=a, b=b, max_w=max_w) txt += ('\nMaximum number of pages to return is {max} ' '({highmax} for bots).\n'.format_map(limit)) pywikibot.output(txt) sys.exit(0) return self.site.querypage(value) def _handle_url(self, value: str) -> HANDLER_RETURN_TYPE: if not value: value = pywikibot.input('Please enter the URL:') return TextIOPageGenerator(value, site=self.site) def _handle_unusedfiles(self, value: str) -> HANDLER_RETURN_TYPE: return self.site.unusedfiles(total=_int_none(value)) def _handle_lonelypages(self, value: str) -> HANDLER_RETURN_TYPE: return self.site.lonelypages(total=_int_none(value))
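A minimal usage sketch for the GeneratorFactory shown above, assuming a configured pywikibot install; the seed page title 'Example' is a placeholder. getCombinedGenerator applies the collected namespace, limit and regex filters and, with preload=True, wraps the result in a preloading generator.

import pywikibot
from pywikibot import pagegenerators

factory = pagegenerators.GeneratorFactory(site=pywikibot.Site())
seed = [pywikibot.Page(factory.site, 'Example')]  # placeholder seed pages
gen = factory.getCombinedGenerator(gen=seed, preload=True)
if gen is not None:
    for page in gen:
        print(page.title())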
MIT License
tektoncd/experimental
sdk/python/tekton_pipeline/models/v1beta1_pipeline_resource_binding.py
V1beta1PipelineResourceBinding.resource_spec
python
def resource_spec(self): return self._resource_spec
Gets the resource_spec of this V1beta1PipelineResourceBinding. # noqa: E501 :return: The resource_spec of this V1beta1PipelineResourceBinding. # noqa: E501 :rtype: V1alpha1PipelineResourceSpec
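A minimal sketch of reading this property, based only on the constructor and accessors shown in the surrounding context; the binding name is a placeholder.

from tekton_pipeline.models.v1beta1_pipeline_resource_binding import V1beta1PipelineResourceBinding

binding = V1beta1PipelineResourceBinding(name='source-repo')
print(binding.name)           # 'source-repo'
print(binding.resource_spec)  # None until a V1alpha1PipelineResourceSpec is assigned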
https://github.com/tektoncd/experimental/blob/0ba4e7a2b9d45ed4accaecbb34dac006d665796a/sdk/python/tekton_pipeline/models/v1beta1_pipeline_resource_binding.py#L124-L131
import pprint import re import six from tekton_pipeline.configuration import Configuration class V1beta1PipelineResourceBinding(object): """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'name': 'str', 'resource_ref': 'V1beta1PipelineResourceRef', 'resource_spec': 'V1alpha1PipelineResourceSpec' } attribute_map = { 'name': 'name', 'resource_ref': 'resourceRef', 'resource_spec': 'resourceSpec' } def __init__(self, name=None, resource_ref=None, resource_spec=None, local_vars_configuration=None): if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._name = None self._resource_ref = None self._resource_spec = None self.discriminator = None if name is not None: self.name = name if resource_ref is not None: self.resource_ref = resource_ref if resource_spec is not None: self.resource_spec = resource_spec @property def name(self): return self._name @name.setter def name(self, name): self._name = name @property def resource_ref(self): return self._resource_ref @resource_ref.setter def resource_ref(self, resource_ref): self._resource_ref = resource_ref @property
Apache License 2.0
voxel-fox-ltd/novus
discord/guild.py
Guild.channels
python
def channels(self) -> List[GuildChannel]: return list(self._channels.values())
List[:class:`abc.GuildChannel`]: A list of channels that belong to this guild.
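A hedged usage sketch; client is assumed to be a connected discord Client and the guild id is a placeholder.

guild = client.get_guild(123456789012345678)  # placeholder id
if guild is not None:
    for channel in guild.channels:
        print(channel.id, channel.name)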
https://github.com/voxel-fox-ltd/novus/blob/4b3a3f918b6212ef2167002c4dbfe910727c04b0/discord/guild.py#L488-L490
from __future__ import annotations import copy import unicodedata from typing import ( Any, ClassVar, Dict, List, NamedTuple, Sequence, Set, Literal, Optional, TYPE_CHECKING, Tuple, Union, overload, ) from . import utils, abc from .role import Role from .member import Member, VoiceState from .emoji import Emoji from .errors import InvalidData from .permissions import PermissionOverwrite from .colour import Colour from .errors import InvalidArgument, ClientException from .channel import * from .channel import _guild_channel_factory from .channel import _threaded_guild_channel_factory from .enums import ( AuditLogAction, VideoQualityMode, VoiceRegion, ChannelType, try_enum, VerificationLevel, ContentFilter, NotificationLevel, NSFWLevel, ) from .mixins import Hashable from .user import User from .invite import Invite from .iterators import AuditLogIterator, MemberIterator from .widget import Widget from .asset import Asset from .flags import SystemChannelFlags from .integrations import Integration, _integration_factory from .stage_instance import StageInstance from .threads import Thread, ThreadMember from .sticker import GuildSticker from .file import File from .welcome_screen import WelcomeScreen, WelcomeChannel __all__ = ( 'Guild', ) MISSING = utils.MISSING if TYPE_CHECKING: from .abc import Snowflake, SnowflakeTime from .types.guild import Ban as BanPayload, Guild as GuildPayload, MFALevel, GuildFeature from .types.threads import ( Thread as ThreadPayload, ) from .types.voice import GuildVoiceState from .permissions import Permissions from .channel import VoiceChannel, StageChannel, TextChannel, CategoryChannel, StoreChannel from .template import Template from .webhook import Webhook from .state import ConnectionState from .voice_client import VoiceProtocol import datetime VocalGuildChannel = Union[VoiceChannel, StageChannel] GuildChannel = Union[VoiceChannel, StageChannel, TextChannel, CategoryChannel, StoreChannel] ByCategoryItem = Tuple[Optional[CategoryChannel], List[GuildChannel]] class BanEntry(NamedTuple): reason: Optional[str] user: User class _GuildLimit(NamedTuple): emoji: int stickers: int bitrate: float filesize: int class Guild(Hashable): __slots__ = ( 'afk_timeout', 'afk_channel', 'name', 'id', 'unavailable', 'region', 'owner_id', 'mfa_level', 'emojis', 'stickers', 'features', 'verification_level', 'explicit_content_filter', 'default_notifications', 'description', 'max_presences', 'max_members', 'max_video_channel_users', 'premium_tier', 'premium_subscription_count', 'preferred_locale', 'nsfw_level', '_members', '_channels', '_icon', '_banner', '_state', '_roles', '_member_count', '_large', '_splash', '_voice_states', '_system_channel_id', '_system_channel_flags', '_discovery_splash', '_rules_channel_id', '_public_updates_channel_id', '_stage_instances', '_threads', ) _PREMIUM_GUILD_LIMITS: ClassVar[Dict[Optional[int], _GuildLimit]] = { None: _GuildLimit(emoji=50, stickers=0, bitrate=96e3, filesize=8388608), 0: _GuildLimit(emoji=50, stickers=0, bitrate=96e3, filesize=8388608), 1: _GuildLimit(emoji=100, stickers=15, bitrate=128e3, filesize=8388608), 2: _GuildLimit(emoji=150, stickers=30, bitrate=256e3, filesize=52428800), 3: _GuildLimit(emoji=250, stickers=60, bitrate=384e3, filesize=104857600), } def __init__(self, *, data: GuildPayload, state: ConnectionState): self._channels: Dict[int, GuildChannel] = {} self._members: Dict[int, Member] = {} self._voice_states: Dict[int, VoiceState] = {} self._threads: Dict[int, Thread] = {} self._state: ConnectionState = state 
self._from_data(data) def _add_channel(self, channel: GuildChannel, /) -> None: self._channels[channel.id] = channel def _remove_channel(self, channel: Snowflake, /) -> None: self._channels.pop(channel.id, None) def _voice_state_for(self, user_id: int, /) -> Optional[VoiceState]: return self._voice_states.get(user_id) def _add_member(self, member: Member, /) -> None: self._members[member.id] = member def _store_thread(self, payload: ThreadPayload, /) -> Thread: thread = Thread(guild=self, state=self._state, data=payload) self._threads[thread.id] = thread return thread def _remove_member(self, member: Snowflake, /) -> None: self._members.pop(member.id, None) def _add_thread(self, thread: Thread, /) -> None: self._threads[thread.id] = thread def _remove_thread(self, thread: Snowflake, /) -> None: self._threads.pop(thread.id, None) def _clear_threads(self) -> None: self._threads.clear() def _remove_threads_by_channel(self, channel_id: int) -> None: to_remove = [k for k, t in self._threads.items() if t.parent_id == channel_id] for k in to_remove: del self._threads[k] def _filter_threads(self, channel_ids: Set[int]) -> Dict[int, Thread]: to_remove: Dict[int, Thread] = {k: t for k, t in self._threads.items() if t.parent_id in channel_ids} for k in to_remove: del self._threads[k] return to_remove def __str__(self) -> str: return self.name or '' def __repr__(self) -> str: attrs = ( ('id', self.id), ('name', self.name), ('shard_id', self.shard_id), ('chunked', self.chunked), ('member_count', getattr(self, '_member_count', None)), ) inner = ' '.join('%s=%r' % t for t in attrs) return f'<Guild {inner}>' def _update_voice_state(self, data: GuildVoiceState, channel_id: int) -> Tuple[Optional[Member], VoiceState, VoiceState]: user_id = int(data['user_id']) channel = self.get_channel(channel_id) try: if channel is None: after = self._voice_states.pop(user_id) else: after = self._voice_states[user_id] before = copy.copy(after) after._update(data, channel) except KeyError: after = VoiceState(data=data, channel=channel) before = VoiceState(data=data, channel=None) self._voice_states[user_id] = after member = self.get_member(user_id) if member is None: try: member = Member(data=data['member'], state=self._state, guild=self) except KeyError: member = None return member, before, after def _add_role(self, role: Role, /) -> None: for r in self._roles.values(): r.position += not r.is_default() self._roles[role.id] = role def _remove_role(self, role_id: int, /) -> Role: role = self._roles.pop(role_id) for r in self._roles.values(): r.position -= r.position > role.position return role def _from_data(self, guild: GuildPayload) -> None: member_count = guild.get('member_count', None) if member_count is not None: self._member_count: int = member_count self.name: str = guild.get('name') self.region: VoiceRegion = try_enum(VoiceRegion, guild.get('region')) self.verification_level: VerificationLevel = try_enum(VerificationLevel, guild.get('verification_level')) self.default_notifications: NotificationLevel = try_enum( NotificationLevel, guild.get('default_message_notifications') ) self.explicit_content_filter: ContentFilter = try_enum(ContentFilter, guild.get('explicit_content_filter', 0)) self.afk_timeout: int = guild.get('afk_timeout') self._icon: Optional[str] = guild.get('icon') self._banner: Optional[str] = guild.get('banner') self.unavailable: bool = guild.get('unavailable', False) self.id: int = int(guild['id']) self._roles: Dict[int, Role] = {} state = self._state for r in guild.get('roles', []): role = 
Role(guild=self, data=r, state=state) self._roles[role.id] = role self.mfa_level: MFALevel = guild.get('mfa_level') self.emojis: Tuple[Emoji, ...] = tuple(map(lambda d: state.store_emoji(self, d), guild.get('emojis', []))) self.stickers: Tuple[GuildSticker, ...] = tuple( map(lambda d: state.store_sticker(self, d), guild.get('stickers', [])) ) self.features: List[GuildFeature] = guild.get('features', []) self._splash: Optional[str] = guild.get('splash') self._system_channel_id: Optional[int] = utils._get_as_snowflake(guild, 'system_channel_id') self.description: Optional[str] = guild.get('description') self.max_presences: Optional[int] = guild.get('max_presences') self.max_members: Optional[int] = guild.get('max_members') self.max_video_channel_users: Optional[int] = guild.get('max_video_channel_users') self.premium_tier: int = guild.get('premium_tier', 0) self.premium_subscription_count: int = guild.get('premium_subscription_count') or 0 self._system_channel_flags: int = guild.get('system_channel_flags', 0) self.preferred_locale: Optional[str] = guild.get('preferred_locale') self._discovery_splash: Optional[str] = guild.get('discovery_splash') self._rules_channel_id: Optional[int] = utils._get_as_snowflake(guild, 'rules_channel_id') self._public_updates_channel_id: Optional[int] = utils._get_as_snowflake(guild, 'public_updates_channel_id') self.nsfw_level: NSFWLevel = try_enum(NSFWLevel, guild.get('nsfw_level', 0)) self._stage_instances: Dict[int, StageInstance] = {} for s in guild.get('stage_instances', []): stage_instance = StageInstance(guild=self, data=s, state=state) self._stage_instances[stage_instance.id] = stage_instance cache_joined = self._state.member_cache_flags.joined self_id = self._state.self_id for mdata in guild.get('members', []): member = Member(data=mdata, guild=self, state=state) if cache_joined or member.id == self_id: self._add_member(member) self._sync(guild) self._large: Optional[bool] = None if member_count is None else self._member_count >= 250 self.owner_id: Optional[int] = utils._get_as_snowflake(guild, 'owner_id') self.afk_channel: Optional[VocalGuildChannel] = self.get_channel(utils._get_as_snowflake(guild, 'afk_channel_id')) for obj in guild.get('voice_states', []): self._update_voice_state(obj, int(obj['channel_id'])) def _sync(self, data: GuildPayload) -> None: try: self._large = data['large'] except KeyError: pass empty_tuple = tuple() for presence in data.get('presences', []): user_id = int(presence['user']['id']) member = self.get_member(user_id) if member is not None: member._presence_update(presence, empty_tuple) if 'channels' in data: channels = data['channels'] for c in channels: factory, ch_type = _guild_channel_factory(c['type']) if factory: self._add_channel(factory(guild=self, data=c, state=self._state)) if 'threads' in data: threads = data['threads'] for thread in threads: self._add_thread(Thread(guild=self, state=self._state, data=thread)) @property
MIT License
opendilab/di-star
ctools/utils/log_helper.py
AverageMeter.__init__
python
def __init__(self, length=0): assert (length > 0) self.length = length self.reset()
Overview: initialize the AverageMeter with a fixed window length. Arguments: - length (:obj:`int`): set the default length of iters to average
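A sketch of how the meter is driven, inferred from the VariableRecord code in the surrounding context (update(), val and avg); the exact update signature is an assumption.

meter = AverageMeter(length=10)
for loss in (0.9, 0.8, 0.7):
    meter.update(loss)
print(meter.val, meter.avg)  # latest value and windowed average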
https://github.com/opendilab/di-star/blob/f12d79403488e7df0498d7b116fc23a67506112b/ctools/utils/log_helper.py#L459-L468
import json import logging import numbers import os import sys import cv2 import numpy as np import yaml from tabulate import tabulate from tensorboardX import SummaryWriter import torch def build_logger(cfg, name=None, rank=0): path = cfg.common.save_path if rank == 0: logger = TextLogger(path, name=name) tb_logger = TensorBoardLogger(path, name=name) var_record_type = cfg.learner.get("var_record_type", None) if var_record_type is None: variable_record = VariableRecord(cfg.learner.log_freq) else: raise NotImplementedError("not support var_record_type: {}".format(var_record_type)) return logger, tb_logger, variable_record else: logger = TextLogger(path, name=name) return logger, None, None def build_logger_naive(path, name, level=logging.INFO, print_freq=1): logger = TextLogger(path, name, level) variable_record = VariableRecord(print_freq) return logger, variable_record def get_default_logger(name=None): if name is None: name = 'default_logger' return logging.getLogger(name) class TextLogger(object): def __init__(self, path, name=None, level=logging.INFO): if name is None: name = 'default_logger' try: os.makedirs(path) except FileExistsError: pass self.logger = self._create_logger(name, os.path.join(path, name + '.txt'), level=level) def _create_logger(self, name, path, level=logging.INFO): logger = logging.getLogger(name) logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='[%(asctime)s][%(filename)15s][line:%(lineno)4d][%(levelname)8s] %(message)s') if not logger.handlers: formatter = logging.Formatter('[%(asctime)s][%(filename)15s][line:%(lineno)4d][%(levelname)8s] %(message)s') fh = logging.FileHandler(path, 'a') fh.setFormatter(formatter) logger.setLevel(level) logger.addHandler(fh) return logger def info(self, s): self.logger.info(s) def bug(self, s): self.logger.debug(s) def error(self, s): self.logger.error(s) class TensorBoardLogger(object): def __init__(self, path, name=None): if name is None: name = 'default_tb_logger' self.logger = SummaryWriter(os.path.join(path, name)) self._var_names = { 'scalar': [], 'text': [], 'scalars': [], 'histogram': [], 'figure': [], 'image': [], } def add_scalar(self, name, *args, **kwargs): assert (name in self._var_names['scalar']) self.logger.add_scalar(name, *args, **kwargs) def add_text(self, name, *args, **kwargs): assert (name in self._var_names['text']) self.logger.add_text(name, *args, **kwargs) def add_scalars(self, name, *args, **kwargs): assert (name in self._var_names['scalars']) self.logger.add_scalars(name, *args, **kwargs) def add_histogram(self, name, *args, **kwargs): assert (name in self._var_names['histogram']) self.logger.add_histogram(name, *args, **kwargs) def add_figure(self, name, *args, **kwargs): assert (name in self._var_names['figure']) self.logger.add_figure(name, *args, **kwargs) def add_image(self, name, *args, **kwargs): assert (name in self._var_names['image']) self.logger.add_image(name, *args, **kwargs) def add_val_list(self, val_list, viz_type): assert (viz_type in ['scalar', 'scalars', 'histogram']) func_dict = { 'scalar': self.add_scalar, 'scalars': self.add_scalars, 'histogram': self.add_histogram, } for n, v, s in val_list: func_dict[viz_type](n, v, s) def _no_contain_name(self, name): for k, v in self._var_names.items(): if name in v: return False return True def register_var(self, name, var_type='scalar'): assert (var_type in self._var_names.keys()) self._var_names[var_type].append(name) @property def scalar_var_names(self): return self._var_names['scalar'] class VariableRecord(object): def 
__init__(self, length): self.var_dict = {'scalar': {}} self.length = max(length, 10) def register_var(self, name, length=None, var_type='scalar'): assert (var_type in ['scalar']) lens = self.length if length is None else length self.var_dict[var_type][name] = AverageMeter(lens) def update_var(self, info): assert isinstance(info, dict) for k, v in info.items(): var_type = self._get_var_type(k) self.var_dict[var_type][k].update(v) def _get_var_type(self, k): for var_type, var_type_dict in self.var_dict.items(): if k in var_type_dict.keys(): return var_type raise KeyError("invalid key({}) in variable record".format(k)) def get_var_names(self, var_type='scalar'): return self.var_dict[var_type].keys() def get_var_text(self, name, var_type='scalar'): assert (var_type in ['scalar']) if var_type == 'scalar': handle_var = self.var_dict[var_type][name] return '{}: val({:.6f})|avg({:.6f})'.format(name, handle_var.val, handle_var.avg) else: raise NotImplementedError def get_vars_tb_format(self, keys, cur_step, var_type='scalar', **kwargs): assert (var_type in ['scalar']) if var_type == 'scalar': ret = [] var_keys = self.get_var_names(var_type) for k in keys: if k in var_keys: v = self.var_dict[var_type][k] if k == 'grad': ret.append([k, v.val, cur_step]) else: ret.append([k, v.avg, cur_step]) return ret else: raise NotImplementedError def get_vars_text(self): headers = ["Name", "Value", "Avg"] data = [] for k in self.get_var_names('scalar'): handle_var = self.var_dict['scalar'][k] data.append([k, "{:.6f}".format(handle_var.val), "{:.6f}".format(handle_var.avg)]) s = "\n" + tabulate(data, headers=headers, tablefmt='grid') return s def get_star_text(self): headers = ["name", "val", "name", "reward", "value", "td_loss", "pg_loss", "at", "delay", "queued", "su", "tu", "tl"] data = [] k0 = ['cur_lr', 'data_time', 'train_time', 'forward_time', 'backward_time', 'total_loss', 'grad'] k1 = ['winloss', 'bo', 'bu', 'effect', 'upgrade', 'battle', 'upgo', 'kl', 'entropy'] k2 = ['reward', 'value', 'td', 'total', 'at', 'delay', 'queued', 'su', 'tu', 'tl', ] all_vars = self.get_var_names('scalar') for i in range(max(len(k1), len(k0))): d = [] if i < len(k0): if k0[i] == 'grad': d += [k0[i], "{:.6f}".format(self.var_dict['scalar'][k0[i]].val)] else: d += [k0[i], "{:.6f}".format(self.var_dict['scalar'][k0[i]].avg)] else: d += ['', ''] if i < len(k1): d += [k1[i]] vals = [] for k in k2: var_key = k1[i] + '_' + k if var_key in all_vars: vals.append("{:.6f}".format(self.var_dict['scalar'][var_key].avg)) else: vals.append('') d += vals data.append(d) s = "\n" + tabulate(data, headers=headers, tablefmt='grid') return s class AverageMeter(object):
Apache License 2.0
amwa-tv/nmos-testing
nmostesting/suites/IS1001Test.py
IS1001Test._make_auth_request
python
def _make_auth_request(self, method, url_path, data=None, auth=None, params=None): if auth == "user": username = CONFIG.AUTH_USERNAME password = CONFIG.AUTH_PASSWORD elif auth == "client" and self.client_data: username = self.client_data["client_id"] password = self.client_data["client_secret"] else: username = password = None return self.do_request(method=method, url=self.url + url_path, data=data, auth=(username, password), params=params)
Utility function for making requests with Basic Authorization
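A hypothetical call from inside a test method of this suite; the 'token' path and form fields are placeholders for whatever the Authorization API under test expects, and the result has whatever shape do_request returns.

response = self._make_auth_request(
    method='POST',
    url_path='token',
    data={'grant_type': 'password', 'scope': 'is-04'},
    auth='user',
)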
https://github.com/amwa-tv/nmos-testing/blob/e6cb83f5c1625ac21ddf7aaf7ee8531e66a0592b/nmostesting/suites/IS1001Test.py#L80-L91
import json import socket import requests from time import sleep from urllib.parse import parse_qs from OpenSSL import crypto from ..GenericTest import GenericTest, NMOSTestException, NMOSInitException from .. import Config as CONFIG from zeroconf_monkey import ServiceBrowser, Zeroconf from ..MdnsListener import MdnsListener AUTH_API_KEY = 'auth' GRANT_SCOPES = ['is-04', 'is-05'] class IS1001Test(GenericTest): def __init__(self, apis): super(IS1001Test, self).__init__(apis) if not CONFIG.ENABLE_HTTPS: raise NMOSInitException("IS-10 can only be tested when ENABLE_HTTPS is set to True in UserConfig.py") self.authorization = False self.url = self.apis[AUTH_API_KEY]["url"] self.bearer_tokens = [] self.client_data = {} self.auth_codes = [] self.clients = [] self.zc = Zeroconf() self.zc_listener = MdnsListener(self.zc) def set_up_tests(self): print(""" Ensure a User is already set-up on the Authorization Server that corresponds to the 'AUTH_USERNAME' and 'AUTH_PASSWORD' config options. They are currently: AUTH_USERNAME: '{}' AUTH_PASSWORD: '{}' """.format(CONFIG.AUTH_USERNAME, CONFIG.AUTH_PASSWORD)) def tear_down_tests(self): print("Remember to delete the registered client with username: {}".format(CONFIG.AUTH_USERNAME))
Apache License 2.0
googlecloudplatform/perfkitbenchmarker
perfkitbenchmarker/providers/aws/aws_elasticached_memcached.py
ElastiCacheMemcached._IsReady
python
def _IsReady(self): cluster_info = self._DescribeInstance() if cluster_info.get('CacheClusterStatus', '') == 'available': self.version = cluster_info.get('EngineVersion') return True return False
Returns True if the cluster is ready and False otherwise.
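The PKB resource machinery normally polls this readiness check; a plain-loop sketch with an assumed sleep interval, where cluster is an ElastiCacheMemcached instance:

import time

while not cluster._IsReady():
    time.sleep(30)  # assumed polling interval
print('memcached engine version:', cluster.version)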
https://github.com/googlecloudplatform/perfkitbenchmarker/blob/c14a122016d414351d41167029c79c9a19709384/perfkitbenchmarker/providers/aws/aws_elasticached_memcached.py#L112-L118
import json import logging from absl import flags from perfkitbenchmarker import errors from perfkitbenchmarker import managed_memory_store from perfkitbenchmarker import vm_util from perfkitbenchmarker.providers import aws from perfkitbenchmarker.providers.aws import util MEMCACHED_VERSIONS = ['1.5.10', '1.5.16', '1.6.6'] FLAGS = flags.FLAGS class ElastiCacheMemcached(managed_memory_store.BaseManagedMemoryStore): CLOUD = aws.CLOUD MEMORY_STORE = managed_memory_store.MEMCACHED def __init__(self, spec): super(ElastiCacheMemcached, self).__init__(spec) self.subnet_group_name = 'subnet-%s' % self.name self.zone = self.spec.vms[0].zone self.region = util.GetRegionFromZone(self.zone) self.node_type = FLAGS.cache_node_type self.version = FLAGS.managed_memory_store_version @staticmethod def CheckPrerequisites(benchmark_config): if (FLAGS.managed_memory_store_version and FLAGS.managed_memory_store_version not in MEMCACHED_VERSIONS): raise errors.Config.InvalidValue('Invalid Memcached version.') def GetResourceMetadata(self): result = { 'cloud_memcached_version': self.version, 'cloud_memcached_node_type': self.node_type, } return result def _CreateDependencies(self): subnet_id = self.spec.vms[0].network.subnet.id cmd = ['aws', 'elasticache', 'create-cache-subnet-group', '--region', self.region, '--cache-subnet-group-name', self.subnet_group_name, '--cache-subnet-group-description', '"memcached benchmark subnet"', '--subnet-ids', subnet_id] vm_util.IssueCommand(cmd) def _DeleteDependencies(self): cmd = ['aws', 'elasticache', 'delete-cache-subnet-group', '--region', self.region, '--cache-subnet-group-name', self.subnet_group_name] vm_util.IssueCommand(cmd, raise_on_failure=False) def _Create(self): cmd = ['aws', 'elasticache', 'create-cache-cluster', '--engine', 'memcached', '--region', self.region, '--cache-cluster-id', self.name, '--preferred-availability-zone', self.zone, '--num-cache-nodes', str(managed_memory_store.MEMCACHED_NODE_COUNT), '--cache-node-type', self.node_type, '--cache-subnet-group-name', self.subnet_group_name] if self.version: cmd += ['--engine-version', self.version] cmd += ['--tags'] cmd += util.MakeFormattedDefaultTags() vm_util.IssueCommand(cmd) def _Delete(self): cmd = ['aws', 'elasticache', 'delete-cache-cluster', '--region', self.region, '--cache-cluster-id', self.name] vm_util.IssueCommand(cmd, raise_on_failure=False) def _IsDeleting(self): cluster_info = self._DescribeInstance() return cluster_info.get('CacheClusterStatus', '') == 'deleting'
Apache License 2.0
googlecloudplatform/appstart
appstart/validator/contract.py
ContractClause.__init__
python
def __init__(self, sandbox): super(ContractClause, self).__init__('run_test') self.__sandbox = sandbox
Initializer for ContractClause. Args: sandbox: (sandbox.container_sandbox.ContainerSandbox) A sandbox that manages the container to be tested.
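A sketch of a concrete clause, using only the attributes the metaclass in the surrounding context requires (lifecycle_point, title, description) plus the run_test hook named in __init__; the clause body itself is a placeholder.

class RootRouteServes(ContractClause):
    lifecycle_point = POST_START
    error_level = FATAL
    title = 'Root route serves'
    description = 'The application responds on / after startup.'

    def run_test(self):
        pass  # placeholder: a real clause would assert against the sandboxed container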
https://github.com/googlecloudplatform/appstart/blob/f08d4867cd115c458b151b1414d9833fadc63bf1/appstart/validator/contract.py#L582-L593
import copy import inspect import logging import os import subprocess import tempfile import time import unittest import yaml from ..sandbox import container_sandbox from .. import utils import errors import color_logging FATAL = 30 WARNING = 20 UNUSED = 10 LEVEL_NUMBERS_TO_NAMES = {FATAL: 'FATAL', WARNING: 'WARNING', UNUSED: 'UNUSED'} LEVEL_NAMES_TO_NUMBERS = {name: val for val, name in LEVEL_NUMBERS_TO_NAMES.iteritems()} POST_STOP = 50 STOP = 40 POST_START = 30 START = 20 PRE_START = 10 _TIMELINE = [PRE_START, START, POST_START, STOP, POST_STOP] _SINGULAR_POINTS = [START, STOP] _TIMELINE_NUMBERS_TO_NAMES = {POST_STOP: 'Post Stop', STOP: 'Stop', POST_START: 'Post Start', START: 'Start', PRE_START: 'Pre Start'} _TIMELINE_NAMES_TO_NUMBERS = {name.upper().replace(' ', '_'): val for val, name in _TIMELINE_NUMBERS_TO_NAMES.iteritems()} _HOOK_CONF_EXTENSION = '.conf.yaml' HOOK_DIRECTORY = 'validator_tests' _REQUIRED_ATTRS = ['lifecycle_point', 'title', 'description'] _DEFAULT_ATTRS = {'dependencies': set(), 'dependents': set(), 'before': set(), 'after': set(), 'tags': set(), 'error_level': UNUSED, '_unresolved_before': set(), '_unresolved_after': set(), '_unresolved_dependents': set(), '_unresolved_dependencies': set(), '_conf_file': None} _REQUIRED_YAML_ATTRS = ['name'] + _REQUIRED_ATTRS _DEFAULT_YAML_ATTRS = {'dependencies': [], 'dependents': [], 'before': [], 'after': [], 'tags': [], 'error_level': 'UNUSED'} class ContractTestResult(unittest.TextTestResult): ERROR = 3 FAIL = 2 SKIP = 1 PASS = 0 def __init__(self, success_set, threshold, *result_args, **result_kwargs): super(ContractTestResult, self).__init__(*result_args, **result_kwargs) self.success = True self.__threshold = threshold self.__success_set = success_set self.success_list = [] self.error_stats = {} def addSuccess(self, test): unittest.TestResult.addSuccess(self, test) self.__success_set.add(test.__class__) self.success_list.append(test) message = self.__make_message(test, self.PASS) self.stream.writeln(message) def __update_error_stats(self, test): self.error_stats.setdefault(test.error_level, 0) self.error_stats[test.error_level] += 1 if test.error_level >= self.__threshold: self.success = False def addSkip(self, test, reason): unittest.TestResult.addSkip(self, test, reason) message = self.__make_message(test, self.SKIP) self.stream.writeln(message, lvl=logging.DEBUG) def addError(self, test, err): unittest.TestResult.addError(self, test, err) self.__update_error_stats(test) message = self.__make_message(test, self.ERROR) self.stream.writeln(message) def addFailure(self, test, err): unittest.TestResult.addFailure(self, test, err) self.__update_error_stats(test) test.failure_message = str(err[1]) message = self.__make_message(test, self.FAIL) self.stream.writeln(message) def getDescription(self, test): return test.shortDescription() def __make_message(self, test, outcome, short=True): if outcome == self.PASS: color = 'green' outcome_type = 'PASSED' elif outcome == self.SKIP: color = None outcome_type = 'SKIP' elif outcome == self.FAIL: outcome_type = 'FAILED' if test.error_level == UNUSED: color = None outcome_type = 'UNUSED' elif test.error_level >= self.__threshold: color = 'red' else: color = 'warn' elif outcome == self.ERROR: color = 'red' outcome_type = 'ERROR' if short: prefix = '[{0: >6}]'.format(outcome_type) else: prefix = '[{0} ({1})]'.format( outcome_type, LEVEL_NUMBERS_TO_NAMES.get(test.error_level)) if color: prefix = '%({0})s{1}%(end)s'.format(color, prefix) return '{0} {1}'.format(prefix, 
test.shortDescription()) def print_errors(self): if not (self.failures or self.errors): return self.stream.writeln( ' %(bold)s Failure Details %(end)s '.center(100, '-'), lvl=logging.DEBUG if self.success else logging.INFO) for test, _ in self.failures: lvl = logging.DEBUG if test.error_level >= self.__threshold: lvl = logging.INFO message = self.__make_message(test, self.FAIL, short=False) self.stream.writeln(message, lvl=lvl) self.stream.writeln(test.failure_message.replace('%', '%%'), lvl=lvl) self.stream.writeln(lvl=lvl) for test, err in self.errors: message = self.__make_message(test, self.ERROR, short=False) self.stream.writeln(message) self.stream.writeln(err.replace('%', '%%')) def print_skips(self): if self.skipped: self.stream.writeln( ' %(bold)s Skip Details %(end)s '.center(100, '-'), lvl=logging.DEBUG) for test, reason in self.skipped: message = self.__make_message(test, self.SKIP, short=False) self.stream.writeln(message, lvl=logging.DEBUG) self.stream.writeln('Reason: {0}'.format(reason), lvl=logging.DEBUG) self.stream.writeln(lvl=logging.DEBUG) class ContractTestRunner(unittest.TextTestRunner): def __init__(self, success_set, threshold, logfile, verbose_printing): super(ContractTestRunner, self).__init__() self.__threshold = threshold self.stream = color_logging.LoggingStream(logfile, verbose_printing) self.__success_set = success_set def _makeResult(self): return ContractTestResult(self.__success_set, self.__threshold, self.stream, self.descriptions, self.verbosity) def run(self, tests, point): self.stream.writeln() self.stream.writeln('%(bold)sRunning tests: {0} %(end)s'.format(point)) result = self._makeResult() unittest.signals.registerResult(result) result.failfast = self.failfast result.buffer = self.buffer start_time = time.time() start_test_run = getattr(result, 'startTestRun', None) if start_test_run is not None: start_test_run() try: tests(result) finally: stop_test_run = getattr(result, 'stopTestRun', None) if stop_test_run is not None: stop_test_run() stop_time = time.time() time_taken = stop_time - start_time self.stream.writeln() result.print_errors() result.print_skips() run = result.testsRun infos = [] if result.success_list: infos.append('PASSED=%d' % len(result.success_list)) for level in result.error_stats.keys(): infos.append('%s=%i' % (LEVEL_NUMBERS_TO_NAMES[level], result.error_stats[level])) if result.skipped: infos.append('SKIPPED=%d' % len(result.skipped)) time_str = ('Ran %d test%s in %.3fs' % (run, '' if run == 1 else 's', time_taken)) self.stream.writeln('%s (%s)' % (time_str, ', '.join(infos),)) self.stream.writeln('=' * 100) return result class ContractClause(unittest.TestCase): class __metaclass__(type): def __init__(cls, name, bases, dct): type.__init__(cls, name, bases, dct) identifier = getattr(cls, '_conf_file', name) def set_default_attr(obj, attr, value): if not hasattr(obj, attr): setattr(obj, attr, copy.copy(value)) def ensure_proper_type(obj, attr, attr_type): if type(getattr(obj, attr)) != attr_type: raise errors.ContractAttributeError( '{0}: {1} should be of type ' '"{2}"'.format(identifier, attr, attr_type)) def assert_attrs_exist(cls, attrs): for attr in attrs: if not hasattr(cls, attr): raise errors.ContractAttributeError( '{0} must have attribute: {1}'.format(identifier, attr)) if name != 'ContractClause': for attr, val in _DEFAULT_ATTRS.iteritems(): set_default_attr(cls, attr, val) if val is not None: ensure_proper_type(cls, attr, type(val)) assert_attrs_exist(cls, _REQUIRED_ATTRS) if cls.lifecycle_point not in _TIMELINE: raise 
errors.ContractAttributeError( '{0} does not have a valid lifecycle ' 'point'.format(identifier)) if cls.error_level not in LEVEL_NUMBERS_TO_NAMES.keys(): raise errors.ContractAttributeError( '{0} does not have a valid error ' 'level'.format(identifier)) cls.tags.add(name)
Apache License 2.0
kriaga/health-checker
HealthChecker/venv/Lib/site-packages/selenium/webdriver/remote/webelement.py
WebElement._execute
python
def _execute(self, command, params=None): if not params: params = {} params['id'] = self._id return self._parent.execute(command, params)
Executes a command against the underlying HTML element. Args: command: The name of the command to _execute as a string. params: A dictionary of named parameters to send with the command. Returns: The command's JSON response loaded into a dictionary object.
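This mirrors how the public helpers in the same class call the method (e.g. the tag_name property and click() shown in the context); element stands for any WebElement instance.

name = element._execute(Command.GET_ELEMENT_TAG_NAME)['value']
element._execute(Command.CLICK_ELEMENT)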
https://github.com/kriaga/health-checker/blob/3d9ce933f131bcbb897103b0f509cc45393cae4a/HealthChecker/venv/Lib/site-packages/selenium/webdriver/remote/webelement.py#L620-L633
import base64 import hashlib import os import pkgutil import warnings import zipfile from selenium.common.exceptions import WebDriverException from selenium.webdriver.common.by import By from selenium.webdriver.common.utils import keys_to_typing from .command import Command try: str = basestring except NameError: pass try: from StringIO import StringIO as IOStream except ImportError: from io import BytesIO as IOStream _pkg = '.'.join(__name__.split('.')[:-1]) getAttribute_js = pkgutil.get_data(_pkg, 'getAttribute.js').decode('utf8') isDisplayed_js = pkgutil.get_data(_pkg, 'isDisplayed.js').decode('utf8') class WebElement(object): def __init__(self, parent, id_, w3c=False): self._parent = parent self._id = id_ self._w3c = w3c def __repr__(self): return '<{0.__module__}.{0.__name__} (session="{1}", element="{2}")>'.format( type(self), self._parent.session_id, self._id) @property def tag_name(self): return self._execute(Command.GET_ELEMENT_TAG_NAME)['value'] @property def text(self): return self._execute(Command.GET_ELEMENT_TEXT)['value'] def click(self): self._execute(Command.CLICK_ELEMENT) def submit(self): if self._w3c: form = self.find_element(By.XPATH, "./ancestor-or-self::form") self._parent.execute_script( "var e = arguments[0].ownerDocument.createEvent('Event');" "e.initEvent('submit', true, true);" "if (arguments[0].dispatchEvent(e)) { arguments[0].submit() }", form) else: self._execute(Command.SUBMIT_ELEMENT) def clear(self): self._execute(Command.CLEAR_ELEMENT) def get_property(self, name): try: return self._execute(Command.GET_ELEMENT_PROPERTY, {"name": name})["value"] except WebDriverException: return self.parent.execute_script('return arguments[0][arguments[1]]', self, name) def get_attribute(self, name): attributeValue = '' if self._w3c: attributeValue = self.parent.execute_script( "return (%s).apply(null, arguments);" % getAttribute_js, self, name) else: resp = self._execute(Command.GET_ELEMENT_ATTRIBUTE, {'name': name}) attributeValue = resp.get('value') if attributeValue is not None: if name != 'value' and attributeValue.lower() in ('true', 'false'): attributeValue = attributeValue.lower() return attributeValue def is_selected(self): return self._execute(Command.IS_ELEMENT_SELECTED)['value'] def is_enabled(self): return self._execute(Command.IS_ELEMENT_ENABLED)['value'] def find_element_by_id(self, id_): return self.find_element(by=By.ID, value=id_) def find_elements_by_id(self, id_): return self.find_elements(by=By.ID, value=id_) def find_element_by_name(self, name): return self.find_element(by=By.NAME, value=name) def find_elements_by_name(self, name): return self.find_elements(by=By.NAME, value=name) def find_element_by_link_text(self, link_text): return self.find_element(by=By.LINK_TEXT, value=link_text) def find_elements_by_link_text(self, link_text): return self.find_elements(by=By.LINK_TEXT, value=link_text) def find_element_by_partial_link_text(self, link_text): return self.find_element(by=By.PARTIAL_LINK_TEXT, value=link_text) def find_elements_by_partial_link_text(self, link_text): return self.find_elements(by=By.PARTIAL_LINK_TEXT, value=link_text) def find_element_by_tag_name(self, name): return self.find_element(by=By.TAG_NAME, value=name) def find_elements_by_tag_name(self, name): return self.find_elements(by=By.TAG_NAME, value=name) def find_element_by_xpath(self, xpath): return self.find_element(by=By.XPATH, value=xpath) def find_elements_by_xpath(self, xpath): return self.find_elements(by=By.XPATH, value=xpath) def find_element_by_class_name(self, name): 
return self.find_element(by=By.CLASS_NAME, value=name) def find_elements_by_class_name(self, name): return self.find_elements(by=By.CLASS_NAME, value=name) def find_element_by_css_selector(self, css_selector): return self.find_element(by=By.CSS_SELECTOR, value=css_selector) def find_elements_by_css_selector(self, css_selector): return self.find_elements(by=By.CSS_SELECTOR, value=css_selector) def send_keys(self, *value): if self.parent._is_remote: local_file = self.parent.file_detector.is_local_file(*value) if local_file is not None: value = self._upload(local_file) self._execute(Command.SEND_KEYS_TO_ELEMENT, {'text': "".join(keys_to_typing(value)), 'value': keys_to_typing(value)}) def is_displayed(self): if self._w3c: return self.parent.execute_script( "return (%s).apply(null, arguments);" % isDisplayed_js, self) else: return self._execute(Command.IS_ELEMENT_DISPLAYED)['value'] @property def location_once_scrolled_into_view(self): if self._w3c: old_loc = self._execute(Command.W3C_EXECUTE_SCRIPT, { 'script': "arguments[0].scrollIntoView(true); return arguments[0].getBoundingClientRect()", 'args': [self]})['value'] return {"x": round(old_loc['x']), "y": round(old_loc['y'])} else: return self._execute(Command.GET_ELEMENT_LOCATION_ONCE_SCROLLED_INTO_VIEW)['value'] @property def size(self): size = {} if self._w3c: size = self._execute(Command.GET_ELEMENT_RECT)['value'] else: size = self._execute(Command.GET_ELEMENT_SIZE)['value'] new_size = {"height": size["height"], "width": size["width"]} return new_size def value_of_css_property(self, property_name): return self._execute(Command.GET_ELEMENT_VALUE_OF_CSS_PROPERTY, { 'propertyName': property_name})['value'] @property def location(self): if self._w3c: old_loc = self._execute(Command.GET_ELEMENT_RECT)['value'] else: old_loc = self._execute(Command.GET_ELEMENT_LOCATION)['value'] new_loc = {"x": round(old_loc['x']), "y": round(old_loc['y'])} return new_loc @property def rect(self): if self._w3c: return self._execute(Command.GET_ELEMENT_RECT)['value'] else: rect = self.size.copy() rect.update(self.location) return rect @property def screenshot_as_base64(self): return self._execute(Command.ELEMENT_SCREENSHOT)['value'] @property def screenshot_as_png(self): return base64.b64decode(self.screenshot_as_base64.encode('ascii')) def screenshot(self, filename): if not filename.lower().endswith('.png'): warnings.warn("name used for saved screenshot does not match file " "type. It should end with a `.png` extension", UserWarning) png = self.screenshot_as_png try: with open(filename, 'wb') as f: f.write(png) except IOError: return False finally: del png return True @property def parent(self): return self._parent @property def id(self): return self._id def __eq__(self, element): return hasattr(element, 'id') and self._id == element.id def __ne__(self, element): return not self.__eq__(element)
MIT License
botify-labs/python-simple-workflow
swf/models/base.py
BaseModel.exists
python
def exists(self): raise NotImplementedError
Checks whether the connected SWF object exists on the Amazon side
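A sketch of how a concrete model might satisfy this hook; the describe call and exception handling here are hypothetical, not the library's actual implementation.

class Domain(BaseModel):
    def exists(self):
        try:
            self.connection.describe_domain(self.name)  # hypothetical boto call
        except Exception:
            return False
        return True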
https://github.com/botify-labs/python-simple-workflow/blob/f8da5d3c2a249dba852d77f438b0cc01f3a1754e/swf/models/base.py#L77-L79
from collections import namedtuple, OrderedDict from swf.core import ConnectedSWFObject from swf.exceptions import DoesNotExistError Difference = namedtuple('Difference', ('attr', 'local', 'upstream')) class ModelDiff(object): def __init__(self, *input): self.container = self._process_input(input) def __contains__(self, attr): return attr in self.container def __len__(self): return len(self.container) def __getitem__(self, index): attr, (local, upstream) = self.container.items()[index] return Difference(attr, local, upstream) def _process_input(self, input): return OrderedDict((attr, (local, upstream)) for attr, local, upstream in input if local != upstream) def add_input(self, *input): self.container.update(self._process_input(input)) def merge(self, model_diff): self.container.update(model_diff.container) def differing_fields(self): return self.container.keys() def as_list(self): return [ Difference(k, v[0], v[1]) for k, v in self.container.iteritems() ] class BaseModel(ConnectedSWFObject): __slots__ = [ 'exists', 'is_synced', 'changes', ] def _diff(self): raise NotImplementedError @property
MIT License
geoffxy/habitat
analyzer/habitat/utils.py
name_all_arguments
python
def name_all_arguments(all_parameters, args, kwargs): merged = {**kwargs} for arg_name, arg in zip(all_parameters, args): merged[arg_name] = arg return merged
This function merges positional and keyword arguments into one dictionary based on the declared names of the function's parameters.
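A worked example of the merge, with placeholder parameter names and values:

params = ['input', 'scale', 'bias']
merged = name_all_arguments(params, args=(3, 2.0), kwargs={'bias': 0.5})
# merged == {'bias': 0.5, 'input': 3, 'scale': 2.0}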
https://github.com/geoffxy/habitat/blob/decc70d18c4a1db7bb109fd59b2b60567bf74375/analyzer/habitat/utils.py#L54-L63
import logging import os import yaml def set_up_logging(): logging.basicConfig( level=logging.INFO, format='%(asctime)s %(levelname)-8s %(message)s', datefmt='%Y-%m-%d %H:%M', ) def add_common_cmd_args(parser): parser.add_argument('model_path', help='The serialized model to analyze') parser.add_argument( 'model_config_path', help='The configuration file for the model', ) parser.add_argument( '--device-config', type=str, default='devices.yml', help='The config file containing GPU device properties.', ) parser.add_argument( '--origin-device', type=str, required=True, help='The GPU on which the analysis is being performed.', ) parser.add_argument( '--kernel-lut', type=str, default=os.path.join('lutfiles', 'kernels.sqlite'), help='The path to the kernel metadata look up table.', ) parser.add_argument( '--operation-lut', type=str, default=os.path.join('lutfiles', 'operations.sqlite'), help='The path to the operation run time look up table.', ) def ns_to_ms(ns): return ns / 1e6 def ms_to_ns(ms): return ms * 1e6
Apache License 2.0
swissdatasciencecenter/renku-python
renku/core/commands/version.py
_check_version
python
def _check_version(): from ..management.config import APP_NAME if VersionCache.load(APP_NAME).is_fresh: return from pkg_resources import parse_version from renku.version import __version__ version = parse_version(__version__) allow_prereleases = version.is_prerelease latest_version = find_latest_version("renku", allow_prereleases=allow_prereleases) if version < latest_version: click.secho( "You are using renku version {version}, however version " "{latest_version} is available.\n" "You should consider upgrading ...".format(version=__version__, latest_version=latest_version), fg="yellow", bold=True, ) VersionCache(pypi_version=str(latest_version)).dump(APP_NAME)
Check renku version.
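find_latest_version from the surrounding context can also be exercised directly; the comparison version below is a placeholder.

from pkg_resources import parse_version

latest = find_latest_version('renku', allow_prereleases=False)
if latest is not None and parse_version('1.0.0') < latest:
    print('A newer renku release is available:', latest)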
https://github.com/swissdatasciencecenter/renku-python/blob/5e43e2eff67cdf20fc2805799fe2822e23bc503d/renku/core/commands/version.py#L127-L152
import datetime import json import sys import attr import click import lockfile import requests def print_version(ctx, param, value): if not value or ctx.resilient_parsing: return from renku.version import __version__ click.echo(__version__) ctx.exit() def find_latest_version(name, allow_prereleases=False): response = requests.get("https://pypi.org/pypi/{name}/json".format(name=name)) if response.status_code != 200: return description = response.json() from pkg_resources import parse_version return max( version for version in (parse_version(version) for version in description["releases"].keys()) if allow_prereleases or not version.is_prerelease ) @attr.s class VersionCache: STATE_NAME = "selfcheck.json" DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" MAX_AGE = 24 * 60 * 60 last_check = attr.ib() pypi_version = attr.ib(default=None) @last_check.default def default_last_check(self): return datetime.datetime.utcnow().strftime(self.DATE_FMT) @property def is_fresh(self): if self.last_check and self.pypi_version: current_time = datetime.datetime.utcnow() last_check = datetime.datetime.strptime(self.last_check, self.DATE_FMT) return (current_time - last_check).total_seconds() < self.MAX_AGE @classmethod def _cache(cls, app_name): from pathlib import Path from appdirs import user_cache_dir cache_dir = Path(user_cache_dir(app_name, None)) cache_dir.mkdir(parents=True, exist_ok=True) return cache_dir / cls.STATE_NAME @classmethod def load(cls, app_name): cache = cls._cache(app_name) try: with cache.open() as fp: return cls(**json.load(fp)[sys.prefix]) except (IOError, ValueError, KeyError): return cls() def dump(self, app_name): cache = self._cache(app_name) with lockfile.LockFile(str(cache)): if cache.exists(): with cache.open() as fp: state = json.load(fp) else: state = {} state[sys.prefix] = attr.asdict(self) with cache.open("w") as fp: json.dump(state, fp, sort_keys=True)
Apache License 2.0
fangjinhuawang/patchmatchnet
eval.py
check_geometric_consistency
python
def check_geometric_consistency( depth_ref: np.ndarray, intrinsics_ref: np.ndarray, extrinsics_ref: np.ndarray, depth_src: np.ndarray, intrinsics_src: np.ndarray, extrinsics_src: np.ndarray, geo_pixel_thres: float, geo_depth_thres: float, ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: width, height = depth_ref.shape[1], depth_ref.shape[0] x_ref, y_ref = np.meshgrid(np.arange(0, width), np.arange(0, height)) depth_reprojected, x2d_reprojected, y2d_reprojected, x2d_src, y2d_src = reproject_with_depth( depth_ref, intrinsics_ref, extrinsics_ref, depth_src, intrinsics_src, extrinsics_src) dist = np.sqrt((x2d_reprojected - x_ref) ** 2 + (y2d_reprojected - y_ref) ** 2) depth_diff = np.abs(depth_reprojected - depth_ref) relative_depth_diff = depth_diff / depth_ref mask = np.logical_and(dist < geo_pixel_thres, relative_depth_diff < geo_depth_thres) depth_reprojected[~mask] = 0 return mask, depth_reprojected, x2d_src, y2d_src
Check geometric consistency and return valid points Args: depth_ref: depths of points in the reference view, of shape (H, W) intrinsics_ref: camera intrinsic of the reference view, of shape (3, 3) extrinsics_ref: camera extrinsic of the reference view, of shape (4, 4) depth_src: depths of points in the source view, of shape (H, W) intrinsics_src: camera intrinsic of the source view, of shape (3, 3) extrinsics_src: camera extrinsic of the source view, of shape (4, 4) geo_pixel_thres: geometric pixel threshold geo_depth_thres: geometric depth threshold Returns: Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: mask: mask for points with geometric consistency, of shape (H, W) depth_reprojected: reprojected depths of points in the reference view, of shape (H, W) x2d_src: x coordinates of points in the source view, of shape (H, W) y2d_src: y coordinates of points in the source view, of shape (H, W)
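A self-contained sketch with synthetic inputs: a constant 2 m depth map, a simple pinhole intrinsic and identity extrinsics for both views, so every pixel is expected to pass both thresholds.

import numpy as np

h, w = 4, 5
depth_ref = np.full((h, w), 2.0, dtype=np.float32)
depth_src = np.full((h, w), 2.0, dtype=np.float32)
K = np.array([[500.0, 0.0, w / 2.0], [0.0, 500.0, h / 2.0], [0.0, 0.0, 1.0]])
E = np.eye(4)

mask, depth_reproj, x_src, y_src = check_geometric_consistency(
    depth_ref, K, E, depth_src, K, E, geo_pixel_thres=1.0, geo_depth_thres=0.01)
print(mask.all())  # expected True for this synthetic case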
https://github.com/fangjinhuawang/patchmatchnet/blob/bce47b971e6df3854f1b35b06da70580e08216f3/eval.py#L178-L224
import argparse import os import torch.nn as nn import torch.nn.parallel import torch.backends.cudnn as cudnn from torch.utils.data import DataLoader import numpy as np import time from datasets import find_dataset_def from models import * from utils import * import sys from datasets.data_io import read_cam_file, read_pair_file, read_image, read_map, save_image, save_map import cv2 from plyfile import PlyData, PlyElement from typing import Tuple cudnn.benchmark = True parser = argparse.ArgumentParser(description='Predict depth, filter, and fuse') parser.add_argument('--model', default='PatchmatchNet', help='select model') parser.add_argument('--dataset', default='dtu_yao_eval', help='select dataset') parser.add_argument('--testpath', help='testing data path') parser.add_argument('--testlist', help='testing scan list') parser.add_argument('--batch_size', type=int, default=1, help='testing batch size') parser.add_argument('--n_views', type=int, default=5, help='num of view') parser.add_argument('--loadckpt', default=None, help='load a specific checkpoint') parser.add_argument('--outdir', default='./outputs', help='output dir') parser.add_argument('--display', action='store_true', help='display depth images and masks') parser.add_argument('--patchmatch_iteration', nargs='+', type=int, default=[1, 2, 2], help='num of iteration of patchmatch on stages 1,2,3') parser.add_argument('--patchmatch_num_sample', nargs='+', type=int, default=[8, 8, 16], help='num of generated samples in local perturbation on stages 1,2,3') parser.add_argument('--patchmatch_interval_scale', nargs='+', type=float, default=[0.005, 0.0125, 0.025], help='normalized interval in inverse depth range to generate samples in local perturbation') parser.add_argument('--patchmatch_range', nargs='+', type=int, default=[6, 4, 2], help='fixed offset of sampling points for propogation of patchmatch on stages 1,2,3') parser.add_argument('--propagate_neighbors', nargs='+', type=int, default=[0, 8, 16], help='num of neighbors for adaptive propagation on stages 1,2,3') parser.add_argument('--evaluate_neighbors', nargs='+', type=int, default=[9, 9, 9], help='num of neighbors for adaptive matching cost aggregation of adaptive evaluation on stages 1,2,3') parser.add_argument('--geo_pixel_thres', type=float, default=1, help='pixel threshold for geometric consistency filtering') parser.add_argument('--geo_depth_thres', type=float, default=0.01, help='depth threshold for geometric consistency filtering') parser.add_argument('--photo_thres', type=float, default=0.8, help='threshold for photometric consistency filtering') args = parser.parse_args() print("argv:", sys.argv[1:]) print_args(args) def save_depth(): mvs_dataset = find_dataset_def(args.dataset) test_dataset = mvs_dataset(args.testpath, args.testlist, "test", args.n_views) image_loader = DataLoader(test_dataset, args.batch_size, shuffle=False, num_workers=4, drop_last=False) model = PatchmatchNet( patchmatch_interval_scale=args.patchmatch_interval_scale, propagation_range=args.patchmatch_range, patchmatch_iteration=args.patchmatch_iteration, patchmatch_num_sample=args.patchmatch_num_sample, propagate_neighbors=args.propagate_neighbors, evaluate_neighbors=args.evaluate_neighbors ) model = nn.DataParallel(model) model.cuda() print("loading model {}".format(args.loadckpt)) state_dict = torch.load(args.loadckpt) model.load_state_dict(state_dict['model'], strict=False) model.eval() with torch.no_grad(): for batch_idx, sample in enumerate(image_loader): start_time = time.time() sample_cuda = 
tocuda(sample) refined_depth, confidence, _ = model(sample_cuda["imgs"], sample_cuda["proj_matrices"], sample_cuda["depth_min"], sample_cuda["depth_max"]) refined_depth = tensor2numpy(refined_depth) confidence = tensor2numpy(confidence) del sample_cuda print('Iter {}/{}, time = {:.3f}'.format(batch_idx, len(image_loader), time.time() - start_time)) filenames = sample["filename"] for filename, depth_est, photometric_confidence in zip(filenames, refined_depth, confidence): depth_filename = os.path.join(args.outdir, filename.format('depth_est', '.pfm')) confidence_filename = os.path.join(args.outdir, filename.format('confidence', '.pfm')) os.makedirs(depth_filename.rsplit('/', 1)[0], exist_ok=True) os.makedirs(confidence_filename.rsplit('/', 1)[0], exist_ok=True) depth_est = np.squeeze(depth_est, 0) save_map(depth_filename, depth_est) save_map(confidence_filename, photometric_confidence) def reproject_with_depth( depth_ref: np.ndarray, intrinsics_ref: np.ndarray, extrinsics_ref: np.ndarray, depth_src: np.ndarray, intrinsics_src: np.ndarray, extrinsics_src: np.ndarray, ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]: width, height = depth_ref.shape[1], depth_ref.shape[0] x_ref, y_ref = np.meshgrid(np.arange(0, width), np.arange(0, height)) x_ref, y_ref = x_ref.reshape([-1]), y_ref.reshape([-1]) xyz_ref = np.matmul(np.linalg.inv(intrinsics_ref), np.vstack((x_ref, y_ref, np.ones_like(x_ref))) * depth_ref.reshape([-1])) xyz_src = np.matmul(np.matmul(extrinsics_src, np.linalg.inv(extrinsics_ref)), np.vstack((xyz_ref, np.ones_like(x_ref))))[:3] k_xyz_src = np.matmul(intrinsics_src, xyz_src) xy_src = k_xyz_src[:2] / k_xyz_src[2:3] x_src = xy_src[0].reshape([height, width]).astype(np.float32) y_src = xy_src[1].reshape([height, width]).astype(np.float32) sampled_depth_src = cv2.remap(depth_src, x_src, y_src, interpolation=cv2.INTER_LINEAR) xyz_src = np.matmul(np.linalg.inv(intrinsics_src), np.vstack((xy_src, np.ones_like(x_ref))) * sampled_depth_src.reshape([-1])) xyz_reprojected = np.matmul(np.matmul(extrinsics_ref, np.linalg.inv(extrinsics_src)), np.vstack((xyz_src, np.ones_like(x_ref))))[:3] depth_reprojected = xyz_reprojected[2].reshape([height, width]).astype(np.float32) k_xyz_reprojected = np.matmul(intrinsics_ref, xyz_reprojected) xy_reprojected = k_xyz_reprojected[:2] / k_xyz_reprojected[2:3] x_reprojected = xy_reprojected[0].reshape([height, width]).astype(np.float32) y_reprojected = xy_reprojected[1].reshape([height, width]).astype(np.float32) return depth_reprojected, x_reprojected, y_reprojected, x_src, y_src
MIT License
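The reproject_with_depth helper above is normally followed by a geometric consistency check that compares the round-trip reprojection against the original reference depth map. Below is a minimal sketch of such a filter; it assumes reproject_with_depth from the snippet above is in scope, and the pixel/relative-depth thresholds mirror the --geo_pixel_thres / --geo_depth_thres CLI defaults. It is illustrative only, not necessarily the exact filtering used in the repository.

import numpy as np

def check_geometric_consistency(depth_ref, intrinsics_ref, extrinsics_ref,
                                depth_src, intrinsics_src, extrinsics_src,
                                pixel_thres=1.0, depth_thres=0.01):
    # Round-trip the reference depth through the source view.
    width, height = depth_ref.shape[1], depth_ref.shape[0]
    x_ref, y_ref = np.meshgrid(np.arange(width), np.arange(height))
    depth_reprojected, x_reprojected, y_reprojected, _, _ = reproject_with_depth(
        depth_ref, intrinsics_ref, extrinsics_ref,
        depth_src, intrinsics_src, extrinsics_src)

    # Pixel reprojection error and relative depth error.
    dist = np.sqrt((x_reprojected - x_ref) ** 2 + (y_reprojected - y_ref) ** 2)
    depth_diff = np.abs(depth_reprojected - depth_ref)
    relative_depth_diff = depth_diff / np.maximum(depth_ref, 1e-8)

    # Keep only pixels that are consistent in both image space and depth.
    mask = np.logical_and(dist < pixel_thres, relative_depth_diff < depth_thres)
    depth_reprojected[~mask] = 0
    return mask, depth_reprojected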
geopandas/geopandas
geopandas/plotting.py
_plot_point_collection
python
def _plot_point_collection( ax, geoms, values=None, color=None, cmap=None, vmin=None, vmax=None, marker="o", markersize=None, **kwargs, ): if values is not None and color is not None: raise ValueError("Can only specify one of 'values' and 'color' kwargs") geoms, multiindex = _flatten_multi_geoms(geoms) x = [p.x if not p.is_empty else None for p in geoms] y = [p.y if not p.is_empty else None for p in geoms] if values is not None: kwargs["c"] = values if markersize is not None: kwargs["s"] = markersize if color is not None: kwargs["color"] = color if marker is not None: kwargs["marker"] = marker _expand_kwargs(kwargs, multiindex) if "norm" not in kwargs: collection = ax.scatter(x, y, vmin=vmin, vmax=vmax, cmap=cmap, **kwargs) else: collection = ax.scatter(x, y, cmap=cmap, **kwargs) return collection
Plots a collection of Point and MultiPoint geometries to `ax` Parameters ---------- ax : matplotlib.axes.Axes where shapes will be plotted geoms : sequence of `N` Points or MultiPoints values : a sequence of `N` values, optional Values mapped to colors using vmin, vmax, and cmap. Cannot be specified together with `color`. markersize : scalar or array-like, optional Size of the markers. Note that under the hood ``scatter`` is used, so the specified value will be proportional to the area of the marker (size in points^2). Returns ------- collection : matplotlib.collections.Collection that was plotted
https://github.com/geopandas/geopandas/blob/04d377f321972801888381356cb6259766eb63b6/geopandas/plotting.py#L250-L310
import warnings import numpy as np import pandas as pd from pandas.plotting import PlotAccessor import geopandas from distutils.version import LooseVersion from ._decorator import doc def deprecated(new): def old(*args, **kwargs): warnings.warn( "{} is intended for internal ".format(new.__name__[1:]) + "use only, and will be deprecated.", DeprecationWarning, stacklevel=2, ) new(*args, **kwargs) return old def _flatten_multi_geoms(geoms, prefix="Multi"): components, component_index = [], [] if not geoms.geom_type.str.startswith(prefix).any(): return geoms, np.arange(len(geoms)) for ix, geom in enumerate(geoms): if geom is not None and geom.type.startswith(prefix) and not geom.is_empty: for poly in geom.geoms: components.append(poly) component_index.append(ix) else: components.append(geom) component_index.append(ix) return components, np.array(component_index) def _expand_kwargs(kwargs, multiindex): import matplotlib from matplotlib.colors import is_color_like from typing import Iterable mpl = matplotlib.__version__ if mpl >= LooseVersion("3.4") or (mpl > LooseVersion("3.3.2") and "+" in mpl): scalar_kwargs = ["marker", "path_effects"] else: scalar_kwargs = ["marker", "alpha", "path_effects"] for att, value in kwargs.items(): if "color" in att: if is_color_like(value): continue elif "linestyle" in att: if ( isinstance(value, tuple) and len(value) == 2 and isinstance(value[1], Iterable) ): continue elif att in scalar_kwargs: continue if pd.api.types.is_list_like(value): kwargs[att] = np.take(value, multiindex, axis=0) def _PolygonPatch(polygon, **kwargs): from matplotlib.patches import PathPatch from matplotlib.path import Path path = Path.make_compound_path( Path(np.asarray(polygon.exterior.coords)[:, :2]), *[Path(np.asarray(ring.coords)[:, :2]) for ring in polygon.interiors], ) return PathPatch(path, **kwargs) def _plot_polygon_collection( ax, geoms, values=None, color=None, cmap=None, vmin=None, vmax=None, **kwargs ): from matplotlib.collections import PatchCollection geoms, multiindex = _flatten_multi_geoms(geoms) if values is not None: values = np.take(values, multiindex, axis=0) kwargs = { att: value for att, value in kwargs.items() if att not in ["markersize", "marker"] } if color is not None: kwargs["color"] = color _expand_kwargs(kwargs, multiindex) collection = PatchCollection( [_PolygonPatch(poly) for poly in geoms if not poly.is_empty], **kwargs ) if values is not None: collection.set_array(np.asarray(values)) collection.set_cmap(cmap) if "norm" not in kwargs: collection.set_clim(vmin, vmax) ax.add_collection(collection, autolim=True) ax.autoscale_view() return collection plot_polygon_collection = deprecated(_plot_polygon_collection) def _plot_linestring_collection( ax, geoms, values=None, color=None, cmap=None, vmin=None, vmax=None, **kwargs ): from matplotlib.collections import LineCollection geoms, multiindex = _flatten_multi_geoms(geoms) if values is not None: values = np.take(values, multiindex, axis=0) kwargs = { att: value for att, value in kwargs.items() if att not in ["markersize", "marker"] } if color is not None: kwargs["color"] = color _expand_kwargs(kwargs, multiindex) segments = [np.array(linestring.coords)[:, :2] for linestring in geoms] collection = LineCollection(segments, **kwargs) if values is not None: collection.set_array(np.asarray(values)) collection.set_cmap(cmap) if "norm" not in kwargs: collection.set_clim(vmin, vmax) ax.add_collection(collection, autolim=True) ax.autoscale_view() return collection plot_linestring_collection = 
deprecated(_plot_linestring_collection)
BSD 3-Clause New or Revised License
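For readers of the record above: calling the private plotting helper directly is only for illustration (end users would normally go through GeoDataFrame.plot), but a small sketch makes the markersize semantics concrete. It assumes _plot_point_collection is importable from geopandas.plotting; the sample points, colormap and markersize value are arbitrary. Note that markersize is forwarded to Axes.scatter as s, i.e. marker area in points^2, so doubling the visual diameter needs roughly a 4x larger value.

import numpy as np
import matplotlib.pyplot as plt
from shapely.geometry import Point
import geopandas
from geopandas.plotting import _plot_point_collection  # private helper shown above

gdf = geopandas.GeoDataFrame(geometry=[Point(0, 0), Point(1, 1), Point(2, 0.5)])
fig, ax = plt.subplots()
# Color by value via `values`; size goes through scatter's `s` (area in points^2).
_plot_point_collection(ax, gdf.geometry, values=np.array([0.0, 0.5, 1.0]),
                       cmap="viridis", markersize=50)
plt.show()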
mitmproxy/mitmproxy
mitmproxy/addons/core.py
Core.kill
python
def kill(self, flows: typing.Sequence[flow.Flow]) -> None: updated = [] for f in flows: if f.killable: f.kill() updated.append(f) ctx.log.alert("Killed %s flows." % len(updated)) ctx.master.addons.trigger(hooks.UpdateHook(updated))
Kill running flows.
https://github.com/mitmproxy/mitmproxy/blob/667d4e04749a4bc2212f58fa2b8c31cd1d91fc7b/mitmproxy/addons/core.py#L107-L117
import typing import os from mitmproxy.utils import emoji from mitmproxy import ctx, hooks from mitmproxy import exceptions from mitmproxy import command from mitmproxy import flow from mitmproxy import optmanager from mitmproxy import platform from mitmproxy.net import server_spec from mitmproxy.net.http import status_codes import mitmproxy.types CONF_DIR = "~/.mitmproxy" LISTEN_PORT = 8080 class Core: def configure(self, updated): opts = ctx.options if opts.add_upstream_certs_to_client_chain and not opts.upstream_cert: raise exceptions.OptionsError( "add_upstream_certs_to_client_chain requires the upstream_cert option to be enabled." ) if "mode" in updated: mode = opts.mode if mode.startswith("reverse:") or mode.startswith("upstream:"): try: server_spec.parse_with_mode(mode) except ValueError as e: raise exceptions.OptionsError(str(e)) from e elif mode == "transparent": if not platform.original_addr: raise exceptions.OptionsError( "Transparent mode not supported on this platform." ) elif mode not in ["regular", "socks5"]: raise exceptions.OptionsError( "Invalid mode specification: %s" % mode ) if "client_certs" in updated: if opts.client_certs: client_certs = os.path.expanduser(opts.client_certs) if not os.path.exists(client_certs): raise exceptions.OptionsError( f"Client certificate path does not exist: {opts.client_certs}" ) @command.command("set") def set(self, option: str, value: str = "") -> None: strspec = f"{option}={value}" try: ctx.options.set(strspec) except exceptions.OptionsError as e: raise exceptions.CommandError(e) from e @command.command("flow.resume") def resume(self, flows: typing.Sequence[flow.Flow]) -> None: intercepted = [i for i in flows if i.intercepted] for f in intercepted: f.resume() ctx.master.addons.trigger(hooks.UpdateHook(intercepted)) @command.command("flow.mark") def mark(self, flows: typing.Sequence[flow.Flow], marker: mitmproxy.types.Marker) -> None: updated = [] if marker not in emoji.emoji: raise exceptions.CommandError(f"invalid marker value") for i in flows: i.marked = marker updated.append(i) ctx.master.addons.trigger(hooks.UpdateHook(updated)) @command.command("flow.mark.toggle") def mark_toggle(self, flows: typing.Sequence[flow.Flow]) -> None: for i in flows: if i.marked: i.marked = "" else: i.marked = ":default:" ctx.master.addons.trigger(hooks.UpdateHook(flows)) @command.command("flow.kill")
MIT License
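The same killable/kill() pair used by the flow.kill command above is also available to user addons. A toy sketch follows; the blocked hostname is made up and the addon is illustrative only, not part of mitmproxy.

from mitmproxy import ctx, http

class KillHost:
    # Kill any request to an assumed "blocked" host before it reaches the upstream server.
    def request(self, flow: http.HTTPFlow) -> None:
        if flow.request.pretty_host == "blocked.example.com" and flow.killable:
            flow.kill()
            ctx.log.info("Killed flow to %s" % flow.request.pretty_host)

addons = [KillHost()]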
fishpi/fishpi-pocv---command---control
fishpi/vehicle/drive_controller.py
PyJuiceDriveController.set_servo_pulse
python
def set_servo_pulse(self, channel, pulse): actual_pulse_value = int(pulse * 1000) self.i2c_bus.write16(channel, actual_pulse_value)
A value of 1000 is a ~1 ms pulse, so a standard servo would be in the range 1000 <- 1500 -> 2000
https://github.com/fishpi/fishpi-pocv---command---control/blob/6df8e9db29c1b4769ddedb3a89a21fadae260709/fishpi/vehicle/drive_controller.py#L109-L113
import logging from Adafruit_I2C import Adafruit_I2C class DriveController(object): FULL_LEFT_SERVO = -1.570796 FULL_RIGHT_SERVO = 1.570796 FULL_LEFT_ALLOWED = -0.785398 FULL_RIGHT_ALLOWED = 0.785398 throttle_level = 0.0 steering_angle = 0.0 def set_throttle(self, throttle_level): logging.debug("DRIVE:\tThrottle set to: %s" % throttle_level) if throttle_level > 1.0 or throttle_level < -1.0: raise ValueError("throttle_level %f must be between -1.0 and 1.0." % throttle_level) pulse_time = (throttle_level/2.0)+1.5 if (self.debug): logging.debug("DRIVE:\tSetting pulse length to: %f for throttle level %f", pulse_time, throttle_level) self.set_servo_pulse(self.prop_channel, pulse_time) self.throttle_level = throttle_level def set_steering(self, angle): logging.debug("DRIVE:\tSteering set to: %s" % angle) if angle > self.FULL_RIGHT_ALLOWED: angle = self.FULL_RIGHT_ALLOWED elif angle < self.FULL_LEFT_ALLOWED: angle = self.FULL_LEFT_ALLOWED full_range = self.FULL_RIGHT_SERVO - self.FULL_LEFT_SERVO pulse_time = (angle/full_range)+1.5 if (self.debug): logging.debug("DRIVE:\tSetting pulse length to :%f for steering angle %f", pulse_time, angle) self.set_servo_pulse(self.servo_channel, pulse_time) self.steering_angle = angle def halt(self): self.set_throttle(0.0) self.set_steering(0.0) def set_servo_pulse(self, channel, pulse): pass class PyJuiceDriveController(DriveController): def __init__(self, i2c_addr=0x32, interface="", hw_interface="-1", prop_channel=2, servo_channel=1, debug=False): self.debug = debug self.prop_channel = prop_channel self.servo_channel = servo_channel if hw_interface == "-1": self.i2c_bus = Adafruit_I2C(i2c_addr, debug=debug) else: self.i2c_bus = Adafruit_I2C(i2c_addr, busnum=int(hw_interface), debug=debug) self.set_servo_pulse(self.prop_channel, 1.5) self.set_servo_pulse(self.servo_channel, 1.5)
BSD 2-Clause Simplified License
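The docstring above is easiest to see alongside the throttle mapping in set_throttle: pulse_time = throttle/2 + 1.5 (milliseconds), which set_servo_pulse then scales by 1000 into the 1000..2000 integer range. A quick standalone numeric check (not repository code):

def throttle_to_pulse_ms(throttle_level):
    # Same formula as DriveController.set_throttle above.
    return (throttle_level / 2.0) + 1.5

assert throttle_to_pulse_ms(-1.0) == 1.0   # full reverse -> 1.0 ms pulse -> 1000
assert throttle_to_pulse_ms(0.0) == 1.5    # neutral      -> 1.5 ms pulse -> 1500
assert throttle_to_pulse_ms(1.0) == 2.0    # full ahead   -> 2.0 ms pulse -> 2000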
openstack/keystone
keystone/credential/core.py
Manager.delete_credential
python
def delete_credential(self, credential_id, initiator=None): cred = self.get_credential(credential_id) self.driver.delete_credential(credential_id) self._get_credential.invalidate(self, credential_id) self._list_credentials_for_user.invalidate(self, cred['user_id'], cred['type']) self._list_credentials_for_user.invalidate(self, cred['user_id'], None) notifications.Audit.deleted( self._CRED, credential_id, initiator)
Delete a credential.
https://github.com/openstack/keystone/blob/1e7ecca881a51144d61ae8026e1a77d6669997e2/keystone/credential/core.py#L193-L206
import json from keystone.common import cache from keystone.common import driver_hints from keystone.common import manager from keystone.common import provider_api import keystone.conf from keystone import exception from keystone import notifications CONF = keystone.conf.CONF MEMOIZE = cache.get_memoization_decorator(group='credential') PROVIDERS = provider_api.ProviderAPIs class Manager(manager.Manager): driver_namespace = 'keystone.credential' _provides_api = 'credential_api' _CRED = 'credential' def __init__(self): super(Manager, self).__init__(CONF.credential.driver) def _decrypt_credential(self, credential): if credential['type'] == 'ec2': decrypted_blob = json.loads( PROVIDERS.credential_provider_api.decrypt( credential['encrypted_blob'], ) ) else: decrypted_blob = PROVIDERS.credential_provider_api.decrypt( credential['encrypted_blob'] ) credential['blob'] = decrypted_blob credential.pop('key_hash', None) credential.pop('encrypted_blob', None) return credential def _encrypt_credential(self, credential): credential_copy = credential.copy() if credential.get('type', None) == 'ec2': encrypted_blob, key_hash = ( PROVIDERS.credential_provider_api.encrypt( json.dumps(credential['blob']) ) ) else: encrypted_blob, key_hash = ( PROVIDERS.credential_provider_api.encrypt( credential['blob'] ) ) credential_copy['encrypted_blob'] = encrypted_blob credential_copy['key_hash'] = key_hash credential_copy.pop('blob', None) return credential_copy def _assert_limit_not_exceeded(self, user_id): user_limit = CONF.credential.user_limit if user_limit >= 0: cred_count = len(self.list_credentials_for_user(user_id)) if cred_count >= user_limit: raise exception.CredentialLimitExceeded( limit=user_limit) @manager.response_truncated def list_credentials(self, hints=None): credentials = self.driver.list_credentials( hints or driver_hints.Hints() ) for credential in credentials: credential = self._decrypt_credential(credential) return credentials def list_credentials_for_user(self, user_id, type=None): credentials = self._list_credentials_for_user(user_id, type) for credential in credentials: credential = self._decrypt_credential(credential) return credentials @MEMOIZE def _list_credentials_for_user(self, user_id, type): return self.driver.list_credentials_for_user(user_id, type) def get_credential(self, credential_id): credential = self._get_credential(credential_id) return self._decrypt_credential(credential) @MEMOIZE def _get_credential(self, credential_id): return self.driver.get_credential(credential_id) def create_credential(self, credential_id, credential, initiator=None): credential_copy = self._encrypt_credential(credential) user_id = credential_copy['user_id'] self._assert_limit_not_exceeded(user_id) ref = self.driver.create_credential(credential_id, credential_copy) if MEMOIZE.should_cache(ref): self._get_credential.set(ref, credential_copy, credential_id) self._list_credentials_for_user.invalidate(self, ref['user_id'], ref['type']) self._list_credentials_for_user.invalidate(self, ref['user_id'], None) ref.pop('key_hash', None) ref.pop('encrypted_blob', None) ref['blob'] = credential['blob'] notifications.Audit.created( self._CRED, credential_id, initiator) return ref def _validate_credential_update(self, credential_id, credential): if (credential.get('type', '').lower() == 'ec2' and not credential.get('project_id')): existing_cred = self.get_credential(credential_id) if not existing_cred['project_id']: raise exception.ValidationError(attribute='project_id', target='credential') def 
update_credential(self, credential_id, credential): self._validate_credential_update(credential_id, credential) if 'blob' in credential: credential_copy = self._encrypt_credential(credential) else: credential_copy = credential.copy() existing_credential = self.get_credential(credential_id) existing_blob = existing_credential['blob'] ref = self.driver.update_credential(credential_id, credential_copy) if MEMOIZE.should_cache(ref): self._get_credential.set(ref, self, credential_id) self._list_credentials_for_user.invalidate(self, ref['user_id'], ref['type']) self._list_credentials_for_user.invalidate(self, ref['user_id'], None) ref.pop('key_hash', None) ref.pop('encrypted_blob', None) if credential.get('blob'): ref['blob'] = credential['blob'] else: ref['blob'] = existing_blob return ref
Apache License 2.0
hvandenb/splunk-elasticsearch
search-elasticsearch/bin/splunklib/searchcommands/decorators.py
Option.is_set
python
def is_set(self): return self._is_set
Indicates whether an option value was provided as an argument.
https://github.com/hvandenb/splunk-elasticsearch/blob/6c3e6d0d48a9e20e2d421d8d490ac28e979de045/search-elasticsearch/bin/splunklib/searchcommands/decorators.py#L237-L241
from inspect import getmembers, isclass, isfunction from types import FunctionType, MethodType from json import JSONEncoder try: from collections import OrderedDict except ImportError: from ordereddict import OrderedDict from .search_command_internals import ConfigurationSettingsType from .validators import OptionName class Configuration(object): def __init__(self, **kwargs): self.settings = kwargs def __call__(self, o): if isfunction(o): o._settings = self.settings elif isclass(o): name = o.__name__ if name.endswith('Command'): name = name[:-len('Command')] o.name = name.lower() if self.settings is not None: o.ConfigurationSettings = ConfigurationSettingsType( module='.'.join((o.__module__, o.__name__)), name='ConfigurationSettings', bases=(o.ConfigurationSettings,), settings=self.settings) o.ConfigurationSettings.fix_up(o) Option.fix_up(o) else: raise TypeError( 'Incorrect usage: Configuration decorator applied to %s' % (type(o), o.__name__)) return o class Option(property): def __init__(self, fget=None, fset=None, fdel=None, doc=None, name=None, default=None, require=None, validate=None): super(Option, self).__init__(fget, fset, fdel, doc) self.name = None if name is None else OptionName()(name) self.default = default self.require = bool(require) self.validate = validate def __call__(self, function): return self.getter(function) @classmethod def fix_up(cls, command): is_option = lambda attribute: isinstance(attribute, Option) command.option_definitions = getmembers(command, is_option) member_number = 0 for member_name, option in command.option_definitions: if option.name is None: option.name = member_name if option.fget is None and option.fset is None: field_name = '_' + member_name def new_getter(name): def getter(self): return getattr(self, name, None) return getter fget = new_getter(field_name) fget = FunctionType( fget.func_code, fget.func_globals, member_name, None, fget.func_closure) fget = MethodType(fget, None, command) option = option.getter(fget) def new_setter(name): def setter(self, value): setattr(self, name, value) return setter fset = new_setter(field_name) fset = FunctionType( fset.func_code, fset.func_globals, member_name, None, fset.func_closure) fset = MethodType(fset, None, command) option = option.setter(fset) setattr(command, member_name, option) command.option_definitions[member_number] = member_name, option member_number += 1 return def deleter(self, function): deleter = super(Option, self).deleter(function) return self._reset(deleter, function) def getter(self, function): getter = super(Option, self).getter(function) return self._reset(getter) def setter(self, function): f = lambda s, v: function(s, self.validate(v) if self.validate else v) setter = super(Option, self).setter(f) return self._reset(setter) def _reset(self, other): other.name = self.name other.default = self.default other.require = self.require other.validate = self.validate return other class Encoder(JSONEncoder): def __init__(self, item): super(Option.Encoder, self).__init__() self.item = item def default(self, o): validator = self.item.validator if validator is None: return str(o) return validator.format(o) class Item(object): def __init__(self, command, option): self._command = command self._option = option self._is_set = False def __repr__(self): return str(self) def __str__(self): value = self.validator.format(self.value) if self.validator is not None else str(self.value) encoder = Option.Encoder(self) text = '='.join([self.name, encoder.encode(value)]) return text @property def 
is_required(self): return bool(self._option.require) @property
Apache License 2.0
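The Option descriptor above is meant to be declared on custom search command classes; is_set then records whether the user actually supplied the option on the command line. A hypothetical declaration in the usual splunklib style follows — the command name, field names and validators are assumptions, not taken from this repository, and the import works equally against the vendored copy shipped here.

from splunklib.searchcommands import Configuration, Option, StreamingCommand, validators

@Configuration()
class CountMatchesCommand(StreamingCommand):
    # Declarative options; require/validate behave as in the decorator module above.
    fieldname = Option(require=True, validate=validators.Fieldname())
    pattern = Option(require=True)

    def stream(self, records):
        for record in records:
            yield record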
argoproj-labs/argo-client-python
argo/workflows/client/models/v1alpha1_semaphore_ref.py
V1alpha1SemaphoreRef.config_map_key_ref
python
def config_map_key_ref(self, config_map_key_ref): self._config_map_key_ref = config_map_key_ref
Sets the config_map_key_ref of this V1alpha1SemaphoreRef. :param config_map_key_ref: The config_map_key_ref of this V1alpha1SemaphoreRef. # noqa: E501 :type: V1ConfigMapKeySelector
https://github.com/argoproj-labs/argo-client-python/blob/993d684cab39a834770b296e028519cec035c7b5/argo/workflows/client/models/v1alpha1_semaphore_ref.py#L66-L74
import pprint import re import six from argo.workflows.client.configuration import Configuration class V1alpha1SemaphoreRef(object): """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'config_map_key_ref': 'V1ConfigMapKeySelector' } attribute_map = { 'config_map_key_ref': 'configMapKeyRef' } def __init__(self, config_map_key_ref=None, local_vars_configuration=None): if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._config_map_key_ref = None self.discriminator = None if config_map_key_ref is not None: self.config_map_key_ref = config_map_key_ref @property def config_map_key_ref(self): return self._config_map_key_ref @config_map_key_ref.setter
Apache License 2.0
rbarrois/mpdlcd
mpdlcd/lcdrunner.py
MpdRunner.add_pseudo_fields
python
def add_pseudo_fields(self): fields = [] if self.backlight_on != enums.BACKLIGHT_ON_NEVER: fields.append( display_fields.BacklightPseudoField(ref='0', backlight_rule=self.backlight_on) ) fields.append( display_fields.PriorityPseudoField( ref='0', priority_playing=self.priority_playing, priority_not_playing=self.priority_not_playing, ) ) self.pattern.add_pseudo_fields(fields, self.screen)
Add 'pseudo' fields (i.e. non-displayed fields) to the display.
https://github.com/rbarrois/mpdlcd/blob/c39cce8eafd4308bf255a9fcb2f92db4cfcab586/mpdlcd/lcdrunner.py#L63-L79
import logging import time from .vendor.lcdproc import server from . import display_fields from . import enums from . import utils logger = logging.getLogger(__name__) class LcdProcServer(server.Server): def __init__(self, hostname, port, **kwargs): super(LcdProcServer, self).__init__(hostname, port, **kwargs) class MpdRunner(utils.AutoRetryCandidate): def __init__( self, client, lcd, lcdproc_screen, refresh_rate, backlight_on, priority_playing, priority_not_playing, *args, **kwargs): super(MpdRunner, self).__init__(logger=logger, *args, **kwargs) self.lcd = lcd self.lcdproc_screen = lcdproc_screen self.backlight_on = backlight_on self.priority_playing = priority_playing self.priority_not_playing = priority_not_playing self.refresh_rate = refresh_rate self._connect_lcd() self.pattern = None self.screen = self.setup_screen(self.lcdproc_screen) self.hooks = {} self.subhooks = {} self.client = client @utils.auto_retry def _connect_lcd(self): self.lcd.start_session() def setup_screen(self, screen_name): logger.debug('Adding lcdproc screen %s', screen_name) screen = self.lcd.add_screen(screen_name) screen.set_heartbeat('off') screen.set_priority(self.priority_playing) width = self.lcd.server_info['screen_width'] height = self.lcd.server_info['screen_height'] logger.info('LCD screen is %dx%d', width, height) screen.set_width(width) screen.set_height(height) logger.info('%s screen added to lcdproc.', screen_name) return screen
MIT License
scidash/sciunit
sciunit/scores/collections.py
ScoreArray.get_by_name
python
def get_by_name(self, name: str) -> Union[Model, Test]: item = None for test_or_model in self.index: if test_or_model.name == name: item = self.__getitem__(test_or_model) if item is None: raise KeyError("No model or test with name '%s'" % name) return item
Get a test or a model by `name`. Args: name (str): The name of the model or test. Raises: KeyError: No model or test with name `name`. Returns: Union[Model, Test]: The model or test found.
https://github.com/scidash/sciunit/blob/68401d88b8e47d29807f8b4f9d265a23174143d9/sciunit/scores/collections.py#L69-L87
import warnings from datetime import datetime from typing import List, Tuple, Union import bs4 import numpy as np import pandas as pd from IPython.display import Javascript, display from sciunit.base import SciUnit, TestWeighted, config from sciunit.models import Model from sciunit.scores import FloatScore, NoneScore, Score from sciunit.tests import Test class ScoreArray(pd.Series, SciUnit, TestWeighted): def __init__(self, tests_or_models, scores=None, weights=None, name=None): if scores is None: scores = [NoneScore for tom in tests_or_models] tests_or_models = self.check_tests_and_models(tests_or_models) self.weights_ = [] if not weights else list(weights) name = (name or self.__class__.__name__) self._name = name super(ScoreArray, self).__init__(data=scores, index=tests_or_models, name=name) self.index_type = "tests" if isinstance(tests_or_models[0], Test) else "models" setattr(self, self.index_type, tests_or_models) state_hide = ['related_data', 'scores', 'norm_scores', 'style', 'plot', 'iat', 'at', 'iloc', 'loc', 'T'] def check_tests_and_models( self, tests_or_models: Union[Test, Model] ) -> Union[Test, Model]: assert all([isinstance(tom, Test) for tom in tests_or_models]) or all( [isinstance(tom, Model) for tom in tests_or_models] ), "A ScoreArray may be indexed by only test or models" return tests_or_models def __getitem__(self, item): if isinstance(item, str): result = self.get_by_name(item) else: result = super(ScoreArray, self).__getitem__(item) return result
MIT License
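A brief usage sketch of name-based lookup on a ScoreArray. It assumes the base Model class can be instantiated directly with just a name and that FloatScore wraps a plain float, consistent with the imports above; real usage would index by Test/Model objects produced by an actual test suite.

from sciunit.models import Model
from sciunit.scores import FloatScore
from sciunit.scores.collections import ScoreArray

models = [Model(name="model_a"), Model(name="model_b")]
scores = ScoreArray(models, scores=[FloatScore(0.9), FloatScore(0.3)])

print(scores[models[0]])     # index by the Model object itself
print(scores["model_b"])     # or by name, which goes through get_by_name()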
numan/sunspear
sunspear/backends/database/db.py
DatabaseBackend.obj_get
python
def obj_get(self, obj, **kwargs): if not obj: return obj obj_ids = [self._extract_id(o) for o in obj] s = self._get_select_multiple_objects_query(obj_ids) results = self.engine.execute(s).fetchall() results = map(self._db_schema_to_obj_dict, results) return results
Given a list of object ids, returns a list of objects
https://github.com/numan/sunspear/blob/536efd36786bcba1f2e24909e58015f3088cd40d/sunspear/backends/database/db.py#L156-L169
from __future__ import absolute_import, unicode_literals import calendar import copy import datetime import json import uuid import six from dateutil import tz from dateutil.parser import parse from sqlalchemy import and_, create_engine, desc, not_, or_, sql from sqlalchemy.engine.result import RowProxy from sqlalchemy.pool import QueuePool from sunspear.activitystreams.models import (SUB_ACTIVITY_VERBS_MAP, Activity, Model, Object) from sunspear.backends.base import SUB_ACTIVITY_MAP, BaseBackend from sunspear.exceptions import (SunspearDuplicateEntryException, SunspearOperationNotSupportedException, SunspearValidationException) from . import schema DB_OBJ_FIELD_MAPPING = { 'id': 'id', 'objectType': 'object_type', 'displayName': 'display_name', 'content': 'content', 'published': 'published', 'image': 'image', } DB_ACTIVITY_FIELD_MAPPING = { 'id': 'id', 'verb': 'verb', 'actor': 'actor', 'object': 'object', 'target': 'target', 'author': 'author', 'generator': 'generator', 'provider': 'provider', 'content': 'content', 'published': 'published', 'updated': 'updated', 'icon': 'icon', } DICT_FIELDS = Activity._media_fields + Object._media_fields + Activity._object_fields + ['other_data',] class DatabaseBackend(BaseBackend): def __init__(self, db_connection_string=None, verbose=False, poolsize=10, max_overflow=5, **kwargs): self._engine = create_engine(db_connection_string, echo=verbose, poolclass=QueuePool, pool_size=poolsize, max_overflow=max_overflow, convert_unicode=True) @property def engine(self): return self._engine @property def activities_table(self): return schema.tables['activities'] @property def objects_table(self): return schema.tables['objects'] @property def likes_table(self): return schema.tables['likes'] @property def replies_table(self): return schema.tables['replies'] @property def to_table(self): return schema.tables['to'] @property def bto_table(self): return schema.tables['bto'] @property def cc_table(self): return schema.tables['cc'] @property def bcc_table(self): return schema.tables['bcc'] def _get_connection(self): return self.engine.connect() def create_tables(self): schema.metadata.create_all(self.engine) def drop_tables(self): schema.metadata.drop_all(self.engine) def clear_all(self): self.drop_tables() self.create_tables() def clear_all_objects(self): raise SunspearOperationNotSupportedException() def clear_all_activities(self): self.engine.execute(self.activities_table.delete()) def obj_create(self, obj, **kwargs): obj_dict = self._get_parsed_and_validated_obj_dict(obj) obj_db_schema_dict = self._obj_dict_to_db_schema(obj_dict) self.engine.execute(self.objects_table.insert(), [obj_db_schema_dict]) return obj_dict def obj_exists(self, obj, **kwargs): obj_id = self._extract_id(obj) objs_db_table = self.objects_table return self.engine.execute(sql.select([sql.exists().where(objs_db_table.c.id == obj_id)])).scalar() def audience_targeting_exists(self, targeting_type, activity_id, object_id): audience_table = self._get_audience_targeting_table(targeting_type) return self.engine.execute(sql.select([sql.exists().where((audience_table.c.activity == activity_id) & (audience_table.c.object == object_id))])).scalar() def obj_update(self, obj, **kwargs): obj_dict = self._get_parsed_and_validated_obj_dict(obj) obj_id = self._extract_id(obj_dict) obj_db_schema_dict = self._obj_dict_to_db_schema(obj_dict) self.engine.execute( self.objects_table.update().where(self.objects_table.c.id == obj_id).values(**obj_db_schema_dict))
Apache License 2.0
ramonhagenaars/typish
typish/classes/_something.py
Something.signature
python
def signature(mcs) -> Dict[str, type]: result = OrderedDict() args = mcs.__args__ if isinstance(mcs.__args__, slice): args = (mcs.__args__,) arg_keys = sorted(args) if isinstance(mcs.__args__, dict): for key in arg_keys: result[key] = mcs.__args__[key] else: for slice_ in arg_keys: result[slice_.start] = slice_.stop return result
Return the signature of this ``Something`` as a dict. :return: a dict with attribute names as keys and types as values.
https://github.com/ramonhagenaars/typish/blob/7875850f55e2df8a9e2426e2d484ab618e347c7f/typish/classes/_something.py#L83-L100
import types from collections import OrderedDict from typing import Any, Dict, Callable, Tuple from typish.classes._subscriptable_type import SubscriptableType class _SomethingMeta(SubscriptableType): def __instancecheck__(self, instance: object) -> bool: from typish.functions._instance_of import instance_of sig = self.signature() for key in sig: attr = getattr(instance, key, None) if not attr or not instance_of(attr, sig[key]): return False return True def __subclasscheck__(self, subclass: type) -> bool: from typish.functions._subclass_of import subclass_of from typish.functions._get_type_hints_of_callable import get_args_and_return_type self_sig = self.signature() other_sig = Something.like(subclass).signature() for attr in self_sig: if attr in other_sig: attr_sig = other_sig[attr] if (not isinstance(subclass.__dict__[attr], staticmethod) and not isinstance(subclass.__dict__[attr], classmethod) and subclass_of(attr_sig, Callable)): args, rt = get_args_and_return_type(attr_sig) attr_sig = Callable[list(args[1:]), rt] if not subclass_of(attr_sig, self_sig[attr]): return False return True def __eq__(self, other: 'Something') -> bool: return (isinstance(other, _SomethingMeta) and self.signature() == other.signature()) def __repr__(self): sig = self.signature() sig_ = ', '.join(["'{}': {}".format(k, self._type_repr(sig[k])) for k in sig]) return 'typish.Something[{}]'.format(sig_) def __hash__(self): return super.__hash__(self) def _type_repr(self, obj): if isinstance(obj, type) and not issubclass(obj, Callable): return obj.__qualname__ if obj is ...: return '...' if isinstance(obj, types.FunctionType): return obj.__name__ return repr(obj) class Something(type, metaclass=_SomethingMeta): @classmethod
MIT License
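A minimal sketch of how signature() resolves a slice-style subscript. Only a single attribute is used to keep the example unambiguous; the attribute name and type are arbitrary. Per the __instancecheck__ shown above, isinstance checks against such a Something are structural (duck-typing style) rather than nominal.

from typing import Callable
from typish import Something

Duck = Something['quack': Callable[[], str]]
# The slice subscript flattens into an attribute -> type mapping:
print(Duck.signature())   # OrderedDict([('quack', typing.Callable[[], str])])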
microsoft/quilla
src/quilla/steps/validations/xpath.py
XPathValidation._find_all
python
def _find_all(self) -> List[WebElement]: return self.driver.find_elements(By.XPATH, self.target)
Proxy method to find all elements specified by the _target attribute Returns: A list of all the elements found for that specific target, searched by XPath Raises: NoDriverException: If the driver is not currently bound to this step
https://github.com/microsoft/quilla/blob/79eb50aeff3a01a20488b2de17db1e8465d67014/src/quilla/steps/validations/xpath.py#L71-L81
import re from typing import ( Optional, Dict, Callable, List ) from selenium.webdriver.remote.webdriver import WebDriver from selenium.webdriver.remote.webelement import WebElement from selenium.webdriver.common.by import By from quilla.ctx import Context from quilla.common.enums import ( XPathValidationStates, ValidationStates, ValidationTypes, ) from quilla.reports import ( ValidationReport, ) from quilla.steps.base_steps import BaseValidation from quilla.steps.validations.visual_parity import VisualParityState class XPathValidation(BaseValidation): def __init__( self, ctx: Context, target: str, state: XPathValidationStates, parameters: Optional[Dict], driver: Optional[WebDriver] = None, ) -> None: selector: Dict[ValidationStates, Callable[[], ValidationReport]] = { XPathValidationStates.EXISTS: self._check_exists, XPathValidationStates.NOT_EXISTS: self._check_not_exists, XPathValidationStates.VISIBLE: self._check_visible, XPathValidationStates.NOT_VISIBLE: self._check_not_visible, XPathValidationStates.TEXT_MATCHES: self._check_text_matches, XPathValidationStates.NOT_TEXT_MATCHES: self._check_not_text_matches, XPathValidationStates.HAS_PROPERTY: self._check_has_property, XPathValidationStates.NOT_HAS_PROPERTY: self._check_not_has_property, XPathValidationStates.PROPERTY_HAS_VALUE: self._check_property_has_value, XPathValidationStates.NOT_PROPERTY_HAS_VALUE: self._check_not_property_has_value, XPathValidationStates.HAS_ATTRIBUTE: self._check_has_attribute, XPathValidationStates.NOT_HAS_ATTRIBUTE: self._check_not_has_attribute, XPathValidationStates.ATTRIBUTE_HAS_VALUE: self._check_attribute_has_value, XPathValidationStates.NOT_ATTRIBUTE_HAS_VALUE: self._check_not_attribute_has_value, XPathValidationStates.VISUAL_PARITY: self._check_visual_parity, } super().__init__( ctx, ValidationTypes.XPATH, target, state, selector, parameters=parameters, driver=driver )
MIT License
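The helper above is a thin wrapper around Selenium's find_elements with an XPath locator. For readers unfamiliar with that API, an equivalent plain-Selenium call looks roughly like the following; the URL, XPath expression and Chrome driver choice are placeholders.

from selenium import webdriver
from selenium.webdriver.common.by import By

driver = webdriver.Chrome()                 # assumes a local chromedriver is available
driver.get("https://example.com")
links = driver.find_elements(By.XPATH, "//a[@href]")
print(len(links))
driver.quit()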
autopi-io/autopi-core
src/salt/base/ext/_utils/iw_parse.py
matching_line
python
def matching_line(lines, keyword): for line in lines: matching = match(line, keyword) if matching != None: return matching return None
Returns the first matching line in a list of lines. @see match()
https://github.com/autopi-io/autopi-core/blob/db50b2d8073af2d0f50a7c83b7cb370491749e2d/src/salt/base/ext/_utils/iw_parse.py#L114-L122
import re import subprocess def get_name(cell): return matching_line(cell, "ESSID:")[1:-1] def get_quality(cell): quality = matching_line(cell, "Quality=").split()[0].split("/") return int(round(float(quality[0]) / float(quality[1]) * 100)) def get_signal_level(cell): signal = matching_line(cell, "Signal level=").split("=")[1] return int(signal.split()[0]) def get_channel(cell): channel = matching_line(cell, "Channel:") if channel: return int(channel) frequency = matching_line(cell, "Frequency:") channel = re.sub(r".*\(Channel\s(\d{1,2})\).*", r"\1", frequency) return int(channel) def get_encryption(cell): enc = "" if matching_line(cell, "Encryption key:") == "off": enc = "Open" else: for line in cell: matching = match(line,"IE:") if matching != None: wpa = match(matching,"WPA Version ") if wpa != None: enc = "WPA v." + wpa if enc == "": enc = "WEP" return enc def get_address(cell): return matching_line(cell, "Address: ") def get_bit_rates(cell): return matching_line(cell, "Bit Rates:") def sort_cells(cells): sortby = "quality" reverse = True cells.sort(None, lambda el:el[sortby], reverse)
Apache License 2.0
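A small usage sketch with a fake iwlist-style cell. The exact return values depend on match() (not shown in this snippet), which is assumed to return the text following the keyword on the matched line.

cell = [
    'Cell 01 - Address: AA:BB:CC:DD:EE:FF',
    'ESSID:"HomeWifi"',
    'Quality=60/70  Signal level=-50 dBm',
    'Encryption key:on',
]

print(matching_line(cell, "ESSID:"))   # expected: '"HomeWifi"'
print(get_name(cell))                  # expected: 'HomeWifi'
print(get_quality(cell))               # expected: 86, i.e. round(60/70 * 100)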
swissdatasciencecenter/renku-python
renku/cli/service.py
service
python
def service(ctx, env): try: import redis import rq from dotenv import load_dotenv from renku.service.cache.base import BaseCache BaseCache.cache.ping() load_dotenv(dotenv_path=env) except ImportError: click.echo( ERROR + "Dependency not found! " "Please install `pip install renku[service]` to enable service component control." ) ctx.exit(1) except redis.exceptions.ConnectionError: click.echo(ERROR + "Cannot connect to Redis") ctx.exit(1)
Manage service components.
https://github.com/swissdatasciencecenter/renku-python/blob/5e43e2eff67cdf20fc2805799fe2822e23bc503d/renku/cli/service.py#L179-L206
import os import signal import subprocess import sys import tempfile import time from datetime import datetime from pathlib import Path import click import psutil from renku.core.commands.echo import ERROR from renku.core.models.tabulate import tabulate from renku.core.utils.contexts import chdir RENKU_DAEMON_LOG_FILE = "renku.log" RENKU_DAEMON_ERR_FILE = "renku.err" SERVICE_COMPONENT_TAGS = ["api", "scheduler", "worker"] def run_api(addr="0.0.0.0", port=8080, timeout=600, is_debug=False): from gunicorn.app.wsgiapp import run svc_num_workers = os.getenv("RENKU_SVC_NUM_WORKERS", "1") svc_num_threads = os.getenv("RENKU_SVC_NUM_THREADS", "2") loading_opt = "--preload" if is_debug: loading_opt = "--reload" svc_num_workers = "1" sys.argv = [ "gunicorn", "renku.service.entrypoint:app", loading_opt, "-b", f"{addr}:{port}", "--timeout", f"{timeout}", "--workers", svc_num_workers, "--worker-class", "gthread", "--threads", svc_num_threads, "--log-level", "debug", ] sys.exit(run()) def run_worker(queues): from renku.service.jobs.queues import QUEUES from renku.service.worker import start_worker if not queues: queues = os.getenv("RENKU_SVC_WORKER_QUEUES", "") queues = [queue_name.strip() for queue_name in queues.strip().split(",") if queue_name.strip()] if not queues: queues = QUEUES start_worker(queues) def check_cmdline(cmdline, include=None): include = include or [] service_components = include + SERVICE_COMPONENT_TAGS for cmd in service_components: if cmd in cmdline: return True return False def is_renku_process(process, include): process_name = process.name().lower() if process_name == "renku" and check_cmdline(process.cmdline(), include): return True elif "python" not in process_name: return False try: command_line = process.cmdline() if not check_cmdline(command_line, include): return False for line in command_line: if line.endswith("renku"): return True except (psutil.AccessDenied, psutil.NoSuchProcess): pass return False def list_renku_processes(include=None): include = include or [] renku_processes_all = [] for pid in sorted(psutil.pids()): try: proc = psutil.Process(pid) if is_renku_process(proc, include) and proc.status() != "zombie": renku_processes_all.append(proc) except (psutil.NoSuchProcess, psutil.ZombieProcess): continue renku_proc_info = sorted( [ { "create_time": datetime.fromtimestamp(proc.create_time()).strftime("%d.%b %H:%M"), "pid": proc.pid, "cmdline": f"renku {' '.join(proc.cmdline()[2:])}", "status": proc.status(), "mem_perct": proc.memory_percent(), "cpu_perct": proc.cpu_percent(), "num_threads": proc.num_threads(), } for proc in renku_processes_all ], key=lambda k: k["cmdline"], ) return renku_proc_info def read_logs(log_file, follow=True, output_all=False): if follow and not output_all: log_file.seek(0, os.SEEK_END) while True: line = log_file.readline() if not line and follow: time.sleep(0.1) continue if not line and not follow: return yield line @click.group() @click.option("-e", "--env", default=None, type=click.Path(exists=True, dir_okay=False), help="Path to the .env file.") @click.pass_context
Apache License 2.0
stlehmann/pyads
pyads/filetimes.py
UTC.dst
python
def dst(self, dt): return ZERO
Return the daylight saving time (DST) offset.
https://github.com/stlehmann/pyads/blob/3c505092dafb2cd3f85c77ab6c700b99976cf5da/pyads/filetimes.py#L60-L63
from typing import Optional from datetime import datetime, timedelta, tzinfo from calendar import timegm EPOCH_AS_FILETIME = 116444736000000000 HUNDREDS_OF_NANOSECONDS = 10000000 ZERO = timedelta(0) HOUR = timedelta(hours=1) class UTC(tzinfo): def utcoffset(self, dt): return ZERO def tzname(self, dt): return "UTC"
MIT License
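The UTC tzinfo above lives in a FILETIME-conversion module; the constants at the top encode the 1601-to-1970 epoch shift in 100-nanosecond ticks. A small conversion sketch using those constants follows — filetime_to_dt is a hypothetical helper name here, not necessarily the function the module actually exports.

from datetime import datetime

def filetime_to_dt(ft: int) -> datetime:
    # FILETIME counts 100-ns intervals since 1601-01-01; shift to the Unix epoch first.
    return datetime.utcfromtimestamp((ft - EPOCH_AS_FILETIME) / HUNDREDS_OF_NANOSECONDS)

print(filetime_to_dt(EPOCH_AS_FILETIME))               # 1970-01-01 00:00:00
print(filetime_to_dt(EPOCH_AS_FILETIME + 10_000_000))  # 1970-01-01 00:00:01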
woudt/bunq2ifttt
app/event.py
trigger_oauth_expires
python
def trigger_oauth_expires(): try: data = request.get_json() print("[trigger_oauthexp] input: {}".format(json.dumps(data))) if "triggerFields" not in data or "hours" not in data["triggerFields"]: print("[trigger_oauthexp] ERROR: hours field missing!") return json.dumps({"errors": [{"message": "Invalid data"}]}), 400 hours = data["triggerFields"]["hours"] if "trigger_identity" not in data: print("[trigger_oauthexp] ERROR: trigger_identity field missing!") return json.dumps({"errors": [{"message": "Invalid data"}]}), 400 limit = 50 if "limit" in data: limit = data["limit"] if hours == "9876543210": return trigger_oauth_expires_test(limit) timezone = "UTC" if "user" in data and "timezone" in data["user"]: timezone = data["user"]["timezone"] transactions = [] value = storage.get_value("bunq2IFTTT", "bunq_oauth") if value is not None: timestamp = value["timestamp"] + 3600 * (90*24 - int(hours)) if timestamp <= time.time(): transactions = [{ "created_at": arrow.get(timestamp) .to(timezone).isoformat(), "expires_at": arrow.get(value["timestamp"] + 90*24*3600) .to(timezone).isoformat(), "meta": { "id": str(timestamp), "timestamp": str(timestamp), } }] print("[trigger_oauthexp] Found {} transactions" .format(len(transactions))) return json.dumps({"data": transactions[:limit]}) except Exception: traceback.print_exc() print("[trigger_oauthexp] ERROR: cannot retrieve oauth expiry data") return json.dumps({"errors": [{"message": "Cannot retrieve oauth expiry data"}]}), 400
Callback for IFTTT trigger bunq_oauth_expires
https://github.com/woudt/bunq2ifttt/blob/de53ca03743b705c4f5149c756e0fd90d55231ee/app/event.py#L749-L799
import json import time import traceback import uuid import arrow import requests from flask import request import storage import util def bunq_callback_request(): try: data = request.get_json() print("[bunqcb_request] input: {}".format(json.dumps(data))) if data["NotificationUrl"]["event_type"] != "REQUEST_RESPONSE_CREATED": print("[bunqcb_request] ignoring {} event" .format(data["NotificationUrl"]["event_type"])) return 200 obj = data["NotificationUrl"]["object"]["RequestResponse"] metaid = obj["id"] if storage.seen("seen_request", metaid): print("[bunqcb_request] duplicate transaction") return 200 iban = obj["alias"]["iban"] valid, accname = util.check_valid_bunq_account(iban, "Request") if not valid: print("[bunqcb_request] trigger not enabled for this account") return 200 item = { "created_at": obj["created"], "date": arrow.get(obj["created"]).format("YYYY-MM-DD"), "amount": obj["amount_inquired"]["value"], "account": iban, "account_name": accname, "counterparty_account": counterparty_account(obj), "counterparty_name": obj["counterparty_alias"]["display_name"], "description": obj["description"], "request_id": metaid, "meta": { "id": metaid, "timestamp": arrow.get(obj["created"]).timestamp } } print("[bunqcb_request] translated: {}".format(json.dumps(item))) triggerids = [] for account in ["ANY", iban]: for trigger in storage.query("trigger_request", "account", "=", account): ident = trigger["identity"] if check_fields("request", ident, item, trigger["fields"]): triggerids.append(ident) storage.insert_value_maxsize("trigger_request", ident+"_t", item, 50) print("[bunqcb_request] Matched triggers:", json.dumps(triggerids)) if triggerids: data = {"data": []} for triggerid in triggerids: data["data"].append({"trigger_identity": triggerid}) headers = { "IFTTT-Channel-Key": util.get_ifttt_service_key(), "IFTTT-Service-Key": util.get_ifttt_service_key(), "X-Request-ID": uuid.uuid4().hex, "Content-Type": "application/json" } print("[bunqcb_request] to ifttt: {}".format(json.dumps(data))) res = requests.post("https://realtime.ifttt.com/v1/notifications", headers=headers, data=json.dumps(data)) print("[bunqcb_request] result: {} {}" .format(res.status_code, res.text)) except Exception: traceback.print_exc() print("[bunqcb_request] ERROR during handling bunq callback") return 500 return 200 def bunq_callback_mutation(): try: data = request.get_json() print("[bunqcb_mutation] input: {}".format(json.dumps(data))) payment = data["NotificationUrl"]["object"]["Payment"] metaid = payment["id"] if storage.seen("seen_mutation", metaid): print("[bunqcb_mutation] duplicate transaction") return 200 iban = payment["alias"]["iban"] valid, accname = util.check_valid_bunq_account(iban, "Mutation") if not valid: print("[bunqcb_mutation] trigger not enabled for this account") return 200 item = { "created_at": payment["created"], "date": arrow.get(payment["created"]).format("YYYY-MM-DD"), "type": mutation_type(payment), "amount": payment["amount"]["value"], "balance": payment["balance_after_mutation"]["value"], "account": iban, "account_name": accname, "counterparty_account": counterparty_account(payment), "counterparty_name": payment["counterparty_alias"]["display_name"], "description": payment["description"], "payment_id": metaid, "meta": { "id": metaid, "timestamp": arrow.get(payment["created"]).timestamp } } print("[bunqcb_mutation] translated: {}".format(json.dumps(item))) triggerids_1 = [] triggerids_2 = [] for account in ["ANY", iban]: for trigger in storage.query("trigger_mutation", "account", "=", 
account): ident = trigger["identity"] if check_fields("mutation", ident, item, trigger["fields"]): triggerids_1.append(ident) storage.insert_value_maxsize("trigger_mutation", ident+"_t", item, 50) for trigger in storage.query("trigger_balance", "account", "=", account): ident = trigger["identity"] if check_fields("balance", ident, item, trigger["fields"]): if not trigger["last"]: triggerids_2.append(ident) storage.insert_value_maxsize("trigger_balance", ident+"_t", item, 50) trigger["last"] = True storage.store("trigger_balance", ident, trigger) elif trigger["last"]: trigger["last"] = False storage.store("trigger_balance", ident, trigger) print("Matched mutation triggers:", json.dumps(triggerids_1)) print("Matched balance triggers:", json.dumps(triggerids_2)) data = {"data": []} for triggerid in triggerids_1 + triggerids_2: data["data"].append({"trigger_identity": triggerid}) if data["data"]: headers = { "IFTTT-Channel-Key": util.get_ifttt_service_key(), "IFTTT-Service-Key": util.get_ifttt_service_key(), "X-Request-ID": uuid.uuid4().hex, "Content-Type": "application/json" } print("[bunqcb_mutation] to ifttt: {}".format( json.dumps(data))) res = requests.post("https://realtime.ifttt.com/v1/notifications", headers=headers, data=json.dumps(data)) print("[bunqcb_mutation] result: {} {}" .format(res.status_code, res.text)) except Exception: traceback.print_exc() print("[bunqcb_mutation] ERROR during handling bunq callback") return 500 return 200 def mutation_type(payment): muttype = "TRANSFER_OTHER" if payment["type"] == "MASTERCARD": muttype = "CARD_" + payment["sub_type"] elif payment["type"] == "IDEAL" or payment["type"] == "BUNQME": muttype = "ONLINE_IDEAL" elif payment["type"] == "SOFORT": muttype = "ONLINE_SOFORT" elif payment["type"] == "EBA_SCT": muttype = "TRANSFER_REGULAR" elif payment["type"] == "SAVINGS": muttype = "TRANSFER_SAVINGS" elif payment["type"] == "INTEREST": muttype = "BUNQ_INTEREST" elif payment["type"] == "BUNQ": if payment["sub_type"] in ["BILLING", "REWARD"]: muttype = "BUNQ_"+payment["sub_type"] elif payment["sub_type"] == "REQUEST": muttype = "TRANSFER_REQUEST" elif payment["sub_type"] == "PAYMENT": if "scheduled_id" in payment and payment["scheduled_id"] is not None: muttype = "TRANSFER_SCHEDULED" else: muttype = "TRANSFER_REGULAR" return muttype def counterparty_account(payment): if "counterparty_alias" in payment and "iban" in payment["counterparty_alias"]: ctp_account = payment["counterparty_alias"]["iban"] elif payment["type"] == "MASTERCARD": ctp_account = "Card" elif payment["type"] == "IDEAL": ctp_account = "iDEAL" elif payment["type"] == "SOFORT": ctp_account = "SOFORT" else: ctp_account = "Other" return ctp_account def check_fields(triggertype, triggerid, item, fields): try: return check_types(item, fields) and check_comparators(item, fields) except Exception: print("Error in {} trigger {}".format(triggertype, triggerid)) traceback.print_exc() def check_types(item, fields): result = True if "type" in fields and fields["type"] != "ANY": result = item["type"].startswith(fields["type"]) if "type_2" in fields and fields["type_2"] != "---": result |= item["type"].startswith(fields["type_2"]) if "type_3" in fields and fields["type_3"] != "---": result |= item["type"].startswith(fields["type_3"]) if "type_4" in fields and fields["type_4"] != "---": result |= item["type"].startswith(fields["type_4"]) return result def check_comparators(item, fields): result = True if "amount_comparator" in fields: result &= check_field_num(item["amount"], 
fields["amount_comparator"], fields["amount_value"]) if "amount_comparator_2" in fields: result &= check_field_num(item["amount"], fields["amount_comparator_2"], fields["amount_value_2"]) if "balance_comparator" in fields: result &= check_field_num(item["balance"], fields["balance_comparator"], fields["balance_value"]) if "balance_comparator_2" in fields: result &= check_field_num(item["balance"], fields["balance_comparator_2"], fields["balance_value_2"]) if "counterparty_name_comparator" in fields: result &= check_field_str(item["counterparty_name"], fields["counterparty_name_comparator"], fields["counterparty_name_value"]) if "counterparty_name_comparator_2" in fields: result &= check_field_str(item["counterparty_name"], fields["counterparty_name_comparator_2"], fields["counterparty_name_value_2"]) if "counterparty_account_comparator" in fields: result &= check_field_str(item["counterparty_account"], fields["counterparty_account_comparator"], fields["counterparty_account_value"]) if "counterparty_account_comparator_2" in fields: result &= check_field_str(item["counterparty_account"], fields["counterparty_account_comparator_2"], fields["counterparty_account_value_2"]) if "description_comparator" in fields: result &= check_field_str(item["description"], fields["description_comparator"], fields["description_value"]) if "description_comparator_2" in fields: result &= check_field_str(item["description"], fields["description_comparator_2"], fields["description_value_2"]) return result def check_field_num(orig, comparator, target): result = False if comparator == "ignore": result = True elif comparator == "equal" and float(orig) == float(target): result = True elif comparator == "not_equal" and float(orig) != float(target): result = True elif comparator == "above" and float(orig) > float(target): result = True elif comparator == "above_equal" and float(orig) >= float(target): result = True elif comparator == "below" and float(orig) < float(target): result = True elif comparator == "below_equal" and float(orig) <= float(target): result = True elif comparator == "in" and orig in json.loads(target): result = True elif comparator == "not_in" and orig not in json.loads(target): result = True return result def check_field_str(orig, comparator, target): result = False if comparator in ["equal_nc", "not_equal_nc", "cont_nc", "not_cont_nc", "in_nc", "not_in_nc"]: orig = orig.casefold() target = target.casefold() if comparator == "ignore": result = True elif comparator in ["equal", "equal_nc"] and orig == target: result = True elif comparator in ["not_equal", "not_equal_nc"] and orig != target: result = True elif comparator in ["cont", "cont_nc"] and orig.find(target) > -1: result = True elif comparator in ["not_cont", "not_cont_nc"] and orig.find(target) == -1: result = True elif comparator in ["in", "in_nc"] and orig in json.loads(target): result = True elif comparator in ["not_in", "not_in_nc"] and orig not in json.loads(target): result = True return result def trigger_mutation(): try: data = request.get_json() print("[trigger_mutation] input: {}".format(json.dumps(data))) if "triggerFields" not in data or "account" not in data["triggerFields"]: print("[trigger_mutation] ERROR: account field missing!") return json.dumps({"errors": [{"message": "Invalid data"}]}), 400 account = data["triggerFields"]["account"] fields = data["triggerFields"] fieldsstr = json.dumps(fields) if "trigger_identity" not in data: print("[trigger_mutation] ERROR: trigger_identity field missing!") return json.dumps({"errors": 
[{"message": "Invalid data"}]}), 400 identity = data["trigger_identity"] limit = 50 if "limit" in data: limit = data["limit"] if account == "NL42BUNQ0123456789": return trigger_mutation_test(limit) timezone = "UTC" if "user" in data and "timezone" in data["user"]: timezone = data["user"]["timezone"] entity = storage.retrieve("trigger_mutation", identity) if entity is not None: if entity["account"] != account or json.dumps(entity["fields"]) != fieldsstr: storage.store("trigger_mutation", identity, { "account": account, "identity": identity, "fields": fields }) print("[trigger_mutation] updating trigger {} {}" .format(account, fieldsstr)) else: storage.store("trigger_mutation", identity, { "account": account, "identity": identity, "fields": fields }) print("[trigger_mutation] storing new trigger {} {}" .format(account, fieldsstr)) transactions = storage.get_value("trigger_mutation", identity+"_t") if transactions is None: transactions = [] for trans in transactions: trans["created_at"] = arrow.get(trans["created_at"]) .to(timezone).isoformat() print("[trigger_mutation] Found {} transactions" .format(len(transactions))) return json.dumps({"data": transactions[:limit]}) except Exception: traceback.print_exc() print("[trigger_mutation] ERROR: cannot retrieve transactions") return json.dumps({"errors": [{"message": "Cannot retrieve transactions"}]}), 400 def trigger_mutation_test(limit): result = [{ "created_at": "2018-01-05T11:25:15+00:00", "date": "2018-01-05", "type": "MANUAL", "amount": "1.01", "balance": "15.15", "account": "NL42BUNQ0123456789", "account_name": "Test account", "counterparty_account": "NL11BANK1111111111", "counterparty_name": "John Doe", "description": "Here you are", "payment_id": "123e4567-e89b-12d3-a456-426655440001", "meta": { "id": "1", "timestamp": "1515151515" } }, { "created_at": "2014-10-24T09:03:34+00:00", "date": "2014-10-24", "type": "MANUAL", "amount": "2.02", "balance": "14.14", "account": "NL42BUNQ0123456789", "account_name": "Test account", "counterparty_account": "NL22BANK2222222222", "counterparty_name": "Jane Doe", "description": "What I owe you", "payment_id": "123e4567-e89b-12d3-a456-426655440002", "meta": { "id": "2", "timestamp": "1414141414" } }, { "created_at": "2008-05-30T04:20:12+00:00", "date": "2008-05-30", "type": "MANUAL", "amount": "-3.03", "balance": "12.12", "account": "NL42BUNQ0123456789", "account_name": "Test account", "counterparty_account": "", "counterparty_name": "ACME Store Inc.", "description": "POS transaction 1234567890", "payment_id": "123e4567-e89b-12d3-a456-426655440003", "meta": { "id": "3", "timestamp": "1212121212" } }] return json.dumps({"data": result[:limit]}) def trigger_mutation_delete(identity): try: for index in storage.query_indexes("mutation_"+identity): storage.remove("mutation_"+identity, index) storage.remove("trigger_mutation", identity) return "" except Exception: traceback.print_exc() print("[trigger_mutation_delete] ERROR: cannot delete trigger") return json.dumps({"errors": [{"message": "Cannot delete trigger"}]}), 400 def trigger_balance(): try: data = request.get_json() print("[trigger_balance] input: {}".format(json.dumps(data))) if "triggerFields" not in data or "account" not in data["triggerFields"]: print("[trigger_balance] ERROR: account field missing!") return json.dumps({"errors": [{"message": "Invalid data"}]}), 400 account = data["triggerFields"]["account"] fields = data["triggerFields"] fieldsstr = json.dumps(fields) if "trigger_identity" not in data: print("[trigger_balance] ERROR: 
trigger_identity field missing!") return json.dumps({"errors": [{"message": "Invalid data"}]}), 400 identity = data["trigger_identity"] limit = 50 if "limit" in data: limit = data["limit"] if account == "NL42BUNQ0123456789": return trigger_balance_test(limit) timezone = "UTC" if "user" in data and "timezone" in data["user"]: timezone = data["user"]["timezone"] entity = storage.retrieve("trigger_balance", identity) if entity is not None: if entity["account"] != account or json.dumps(entity["fields"]) != fieldsstr or "last" not in entity: storage.store("trigger_balance", identity, { "account": account, "identity": identity, "fields": fields, "last": False }) print("[trigger_balance] updating trigger {} {}" .format(account, fieldsstr)) else: storage.store("trigger_balance", identity, { "account": account, "identity": identity, "fields": fields, "last": False }) print("[trigger_balance] storing new trigger {} {}" .format(account, fieldsstr)) transactions = storage.get_value("trigger_balance", identity+"_t") if transactions is None: transactions = [] for trans in transactions: trans["created_at"] = arrow.get(trans["created_at"]) .to(timezone).isoformat() print("[trigger_balance] Found {} transactions" .format(len(transactions))) return json.dumps({"data": transactions[:limit]}) except Exception: traceback.print_exc() print("[trigger_balance] ERROR: cannot retrieve balances") return json.dumps({"errors": [{"message": "Cannot retrieve balances"}]}), 400 def trigger_balance_test(limit): result = [{ "created_at": "2018-01-05T11:25:15+00:00", "account": "NL42BUNQ0123456789", "account_name": "Test account", "balance": "15.15", "meta": { "id": "1", "timestamp": "1515151515" } }, { "created_at": "2014-10-24T09:03:34+00:00", "account": "NL42BUNQ0123456789", "account_name": "Test account", "balance": "14.14", "meta": { "id": "2", "timestamp": "1414141414" } }, { "created_at": "2008-05-30T04:20:12+00:00", "account": "NL42BUNQ0123456789", "account_name": "Test account", "balance": "12.12", "meta": { "id": "3", "timestamp": "1212121212" } }] return json.dumps({"data": result[:limit]}) def trigger_balance_delete(identity): try: for index in storage.query_indexes("balance_"+identity): storage.remove("balance_"+identity, index) storage.remove("trigger_balance", identity) return "" except Exception: traceback.print_exc() print("[trigger_balance_delete] ERROR: cannot delete trigger") return json.dumps({"errors": [{"message": "Cannot delete trigger"}]}), 400 def trigger_request(): try: data = request.get_json() print("[trigger_request] input: {}".format(json.dumps(data))) if "triggerFields" not in data or "account" not in data["triggerFields"]: print("[trigger_request] ERROR: account field missing!") return json.dumps({"errors": [{"message": "Invalid data"}]}), 400 account = data["triggerFields"]["account"] fields = data["triggerFields"] fieldsstr = json.dumps(fields) if "trigger_identity" not in data: print("[trigger_request] ERROR: trigger_identity field missing!") return json.dumps({"errors": [{"message": "Invalid data"}]}), 400 identity = data["trigger_identity"] limit = 50 if "limit" in data: limit = data["limit"] if account == "NL42BUNQ0123456789": return trigger_request_test(limit) timezone = "UTC" if "user" in data and "timezone" in data["user"]: timezone = data["user"]["timezone"] entity = storage.retrieve("trigger_request", identity) if entity is not None: if entity["account"] != account or json.dumps(entity["fields"]) != fieldsstr: storage.store("trigger_request", identity, { "account": account, 
"identity": identity, "fields": fields }) print("[trigger_request] updating trigger {} {}" .format(account, fieldsstr)) else: storage.store("trigger_request", identity, { "account": account, "identity": identity, "fields": fields }) print("[trigger_request] storing new trigger {} {}" .format(account, fieldsstr)) transactions = storage.get_value("trigger_request", identity+"_t") if transactions is None: transactions = [] for trans in transactions: trans["created_at"] = arrow.get(trans["created_at"]) .to(timezone).isoformat() print("[trigger_request] Found {} transactions" .format(len(transactions))) return json.dumps({"data": transactions[:limit]}) except Exception: traceback.print_exc() print("[trigger_request] ERROR: cannot retrieve requests") return json.dumps({"errors": [{"message": "Cannot retrieve requests"}]}), 400 def trigger_request_test(limit): result = [{ "created_at": "2018-01-05T11:25:15+00:00", "amount": "1.01", "account": "NL42BUNQ0123456789", "account_name": "Test account", "counterparty_account": "NL11BANK1111111111", "counterparty_name": "John Doe", "description": "Here you are", "request_id": "123e4567-e89b-12d3-a456-426655440001", "meta": { "id": "1", "timestamp": "1515151515" } }, { "created_at": "2014-10-24T09:03:34+00:00", "amount": "2.02", "account": "NL42BUNQ0123456789", "account_name": "Test account", "counterparty_account": "NL22BANK2222222222", "counterparty_name": "Jane Doe", "description": "What I owe you", "request_id": "123e4567-e89b-12d3-a456-426655440002", "meta": { "id": "2", "timestamp": "1414141414" } }, { "created_at": "2008-05-30T04:20:12+00:00", "amount": "-3.03", "account": "NL42BUNQ0123456789", "account_name": "Test account", "counterparty_account": "", "counterparty_name": "ACME Store Inc.", "description": "POS transaction 1234567890", "request_id": "123e4567-e89b-12d3-a456-426655440003", "meta": { "id": "3", "timestamp": "1212121212" } }] return json.dumps({"data": result[:limit]}) def trigger_request_delete(identity): try: for index in storage.query_indexes("request_"+identity): storage.remove("request_"+identity, index) storage.remove("trigger_request", identity) return "" except Exception: traceback.print_exc() print("[trigger_request_delete] ERROR: cannot delete trigger") return json.dumps({"errors": [{"message": "Cannot delete trigger"}]}), 400
MIT License
pytorch/fairseq
fairseq/modules/checkpoint_activations.py
unwrap_checkpoint
python
def unwrap_checkpoint(m: torch.nn.Module):
    for module in m.modules():
        if hasattr(module, "precheckpoint_forward"):
            module.forward = module.precheckpoint_forward
            del module.precheckpoint_forward
        if hasattr(module, "old_deepcopy_method"):
            module.__deepcopy__ = module.old_deepcopy_method
            del module.old_deepcopy_method
    return m
unwrap a module and its children from checkpoint_wrapper
https://github.com/pytorch/fairseq/blob/fcca32258c8e8bcc9f9890bf4714fa2f96b6b3e1/fairseq/modules/checkpoint_activations.py#L41-L52
import functools from typing import Any, Dict, List, Tuple, Union import torch import torch.utils.checkpoint as checkpoint from fairseq import utils def checkpoint_wrapper(m, offload_to_cpu=False): assert not hasattr( m, "precheckpoint_forward" ), "checkpoint function has already been applied?" m.precheckpoint_forward = m.forward m.forward = functools.partial( _checkpointed_forward, m.precheckpoint_forward, offload_to_cpu, ) return m
MIT License
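A minimal usage sketch for the unwrap_checkpoint record above, assuming fairseq and torch are installed; the tiny Sequential model is a hypothetical stand-in, and only the wrap/unwrap bookkeeping is exercised (forward is never called):

# Sketch only: wrap, then restore, a module; attribute names follow the record's code.
import torch
from fairseq.modules.checkpoint_activations import checkpoint_wrapper, unwrap_checkpoint

model = torch.nn.Sequential(torch.nn.Linear(8, 8), torch.nn.ReLU())
model = checkpoint_wrapper(model)                  # swaps forward for a checkpointed partial
assert hasattr(model, "precheckpoint_forward")
model = unwrap_checkpoint(model)                   # walks m.modules() and restores forward
assert not hasattr(model, "precheckpoint_forward")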
biolink/ontobio
ontobio/golr/golr_query.py
GolrSearchQuery.autocomplete
python
def autocomplete(self):
    self.facet = False
    params = self.solr_params()
    logger.info("PARAMS=" + str(params))
    results = self.solr.search(**params)
    logger.info("Docs found: {}".format(results.hits))
    return self._process_autocomplete_results(results)
Execute solr autocomplete
https://github.com/biolink/ontobio/blob/da9c5ff912785ee4ab98a8a39585562ecd2bdef5/ontobio/golr/golr_query.py#L568-L577
import json import logging import pysolr import re from dataclasses import asdict from typing import Dict, List import xml.etree.ElementTree as ET from collections import OrderedDict from ontobio.vocabulary.relations import HomologyTypes from ontobio.model.GolrResults import SearchResults, AutocompleteResult, Highlight from ontobio.util.user_agent import get_user_agent from prefixcommons.curie_util import expand_uri from ontobio.util.curie_map import get_curie_map from ontobio import ecomap INVOLVED_IN="involved_in" ACTS_UPSTREAM_OF_OR_WITHIN="acts_upstream_of_or_within" ISA_PARTOF_CLOSURE="isa_partof_closure" REGULATES_CLOSURE="regulates_closure" ecomapping = ecomap.EcoMap() iea_eco = ecomapping.coderef_to_ecoclass("IEA") logger = logging.getLogger(__name__) class GolrFields: ID='id' ASSOCIATION_TYPE='association_type' SOURCE='source' OBJECT_CLOSURE='object_closure' SOURCE_CLOSURE_MAP='source_closure_map' SUBJECT_TAXON_CLOSURE_LABEL='subject_taxon_closure_label' OBJECT_TAXON_CLOSURE_LABEL = 'object_taxon_closure_label' SUBJECT_GENE_CLOSURE_MAP='subject_gene_closure_map' SUBJECT_TAXON_LABEL_SEARCHABLE='subject_taxon_label_searchable' OBJECT_TAXON_LABEL_SEARCHABLE = 'object_taxon_label_searchable' IS_DEFINED_BY='is_defined_by' SUBJECT_GENE_CLOSURE_LABEL='subject_gene_closure_label' SUBJECT_TAXON_CLOSURE='subject_taxon_closure' OBJECT_TAXON_CLOSURE = 'object_taxon_closure' OBJECT_LABEL='object_label' SUBJECT_CATEGORY='subject_category' SUBJECT_GENE_LABEL='subject_gene_label' SUBJECT_TAXON_CLOSURE_LABEL_SEARCHABLE='subject_taxon_closure_label_searchable' OBJECT_TAXON_CLOSURE_LABEL_SEARCHABLE = 'object_taxon_closure_label_searchable' SUBJECT_GENE_CLOSURE='subject_gene_closure' SUBJECT_GENE_LABEL_SEARCHABLE='subject_gene_label_searchable' OBJECT_GENE_LABEL_SEARCHABLE = 'object_gene_label_searchable' SUBJECT='subject' SUBJECT_LABEL='subject_label' SUBJECT_CLOSURE_LABEL_SEARCHABLE='subject_closure_label_searchable' OBJECT_CLOSURE_LABEL_SEARCHABLE='object_closure_label_searchable' OBJECT_CLOSURE_LABEL='object_closure_label' SUBJECT_CLOSURE_LABEL='subject_closure_label' SUBJECT_GENE='subject_gene' SUBJECT_TAXON='subject_taxon' OBJECT_TAXON = 'object_taxon' OBJECT_LABEL_SEARCHABLE='object_label_searchable' OBJECT_CATEGORY='object_category' SUBJECT_TAXON_CLOSURE_MAP='subject_taxon_closure_map' OBJECT_TAXON_CLOSURE_MAP = 'object_taxon_closure_map' QUALIFIER='qualifier' SUBJECT_TAXON_LABEL='subject_taxon_label' OBJECT_TAXON_LABEL = 'object_taxon_label' SUBJECT_CLOSURE_MAP='subject_closure_map' SUBJECT_ORTHOLOG_CLOSURE='subject_ortholog_closure' SUBJECT_CLOSURE='subject_closure' OBJECT='object' OBJECT_CLOSURE_MAP='object_closure_map' SUBJECT_LABEL_SEARCHABLE='subject_label_searchable' EVIDENCE_OBJECT='evidence_object' EVIDENCE_OBJECT_CLOSURE_MAP='evidence_object_closure_map' EVIDENCE_OBJECT_LABEL='evidence_object_label' EVIDENCE_OBJECT_CLOSURE='evidence_object_closure' EVIDENCE_OBJECT_CLOSURE_LABEL='evidence_object_closure_label' EVIDENCE='evidence' EVIDENCE_LABEL='evidence_label' EVIDENCE_CLOSURE_MAP = 'evidence_closure_map' EVIDENCE_GRAPH = 'evidence_graph' _VERSION_='_version_' SUBJECT_GENE_CLOSURE_LABEL_SEARCHABLE='subject_gene_closure_label_searchable' ASPECT='aspect' RELATION='relation' RELATION_LABEL='relation_label' FREQUENCY='frequency' FREQUENCY_LABEL='frequency_label' ONSET='onset' ONSET_LABEL='onset_label' AMIGO_SPECIFIC_FIELDS = [ 'reference', 'qualifier', 'is_redundant_for', 'type', 'evidence', 'evidence_label', 'evidence_type', 'evidence_type_label', 'evidence_with', 'evidence_closure', 
'evidence_closure_label', 'evidence_subset_closure', 'evidence_subset_closure_label', 'evidence_type_closure', 'evidence_type_closure_label', 'aspect' ] def label_field(self, f): return f + "_label" def closure_field(self, f): return f + "_closure" M=GolrFields() INVERT_FIELDS_MAP = { M.SUBJECT: M.OBJECT, M.SUBJECT_CLOSURE: M.OBJECT_CLOSURE, M.SUBJECT_TAXON: M.OBJECT_TAXON, M.SUBJECT_CLOSURE_LABEL: M.OBJECT_CLOSURE_LABEL, M.SUBJECT_TAXON_CLOSURE_LABEL: M.OBJECT_TAXON_CLOSURE_LABEL, M.SUBJECT_TAXON_LABEL_SEARCHABLE: M.OBJECT_TAXON_LABEL_SEARCHABLE, M.SUBJECT_TAXON_CLOSURE: M.OBJECT_TAXON_CLOSURE, M.SUBJECT_LABEL: M.OBJECT_LABEL, M.SUBJECT_TAXON_CLOSURE_LABEL_SEARCHABLE: M.OBJECT_TAXON_CLOSURE_LABEL_SEARCHABLE, M.SUBJECT_CLOSURE_LABEL_SEARCHABLE: M.OBJECT_CLOSURE_LABEL_SEARCHABLE, M.SUBJECT_LABEL_SEARCHABLE: M.OBJECT_LABEL_SEARCHABLE, M.SUBJECT_CATEGORY: M.OBJECT_CATEGORY, M.SUBJECT_TAXON_CLOSURE_MAP: M.OBJECT_TAXON_CLOSURE_MAP, M.SUBJECT_TAXON_LABEL: M.OBJECT_TAXON_LABEL, M.SUBJECT_CLOSURE_MAP: M.OBJECT_CLOSURE_MAP, } ASPECT_MAP = { 'F': 'molecular_activity', 'P': 'biological_process', 'C': 'cellular_component' } PREFIX_NORMALIZATION_MAP = { 'MGI:MGI' : 'MGI', 'FB' : 'FlyBase', } def flip(d, x, y): dx = d.get(x) dy = d.get(y) d[x] = dy d[y] = dx def solr_quotify(v, operator="OR"): if isinstance(v, list): if len(v) == 1: return solr_quotify(v[0], operator) else: return '({})'.format(" {} ".format(operator).join([solr_quotify(x) for x in v])) else: return '"{}"'.format(v) def translate_facet_field(fcs, invert_subject_object = False): if 'facet_fields' not in fcs: return {} ffs = fcs['facet_fields'] rs={} for (facet, facetresults) in ffs.items(): if invert_subject_object: for (k,v) in INVERT_FIELDS_MAP.items(): if facet == k: facet = v break elif facet == v: facet = k break pairs = {} rs[facet] = pairs for i in range(int(len(facetresults)/2)): (fv,fc) = (facetresults[i*2],facetresults[i*2+1]) pairs[fv] = fc return rs def goassoc_fieldmap(relationship_type=ACTS_UPSTREAM_OF_OR_WITHIN): return { M.SUBJECT: 'bioentity', M.SUBJECT_CLOSURE: 'bioentity', M.SUBJECT_CATEGORY: None, M.SUBJECT_LABEL: 'bioentity_label', M.SUBJECT_TAXON: 'taxon', M.SUBJECT_TAXON_LABEL: 'taxon_label', M.SUBJECT_TAXON_CLOSURE: 'taxon_closure', M.RELATION: 'qualifier', M.OBJECT: 'annotation_class', M.OBJECT_CLOSURE: REGULATES_CLOSURE if relationship_type == ACTS_UPSTREAM_OF_OR_WITHIN else ISA_PARTOF_CLOSURE, M.OBJECT_LABEL: 'annotation_class_label', M.OBJECT_TAXON: 'object_taxon', M.OBJECT_TAXON_LABEL: 'object_taxon_label', M.OBJECT_TAXON_CLOSURE: 'object_taxon_closure', M.OBJECT_CATEGORY: None, M.EVIDENCE_OBJECT_CLOSURE: 'evidence_subset_closure', M.IS_DEFINED_BY: 'assigned_by' } def map_field(fn, m) : if m is None: return fn if fn in m: return m[fn] else: return fn class GolrServer(): pass class GolrAbstractQuery(): def get_config(self): if self.config is None: from ontobio.config import Config, get_config self.config = get_config() return self.config def _set_solr(self, url, timeout=2): self.solr = pysolr.Solr(url=url, timeout=timeout) return self.solr def _set_user_agent(self, user_agent): self.solr.get_session().headers['User-Agent'] = user_agent def _use_amigo_schema(self, object_category): if object_category is not None and object_category == 'function': return True ds = self.get_config().default_solr_schema if ds is not None and ds == 'amigo': return True return False class GolrSearchQuery(GolrAbstractQuery): def __init__(self, term=None, category=None, is_go=False, url=None, solr=None, config=None, fq=None, 
fq_string=None, hl=True, facet_fields=None, facet=True, search_fields=None, rows=100, start=None, prefix=None, boost_fx=None, boost_q=None, highlight_class=None, taxon=None, min_match=None, minimal_tokenizer=False, include_eqs=False, exclude_groups=False, user_agent=None): self.term = term self.category = category self.is_go = is_go self.url = url self.solr = solr self.config = config self.hl = hl self.facet = facet self.facet_fields = facet_fields self.search_fields = search_fields self.rows = rows self.start = start self.is_explicit_url = url is not None self.fq_string = fq_string if fq_string is not None else [] self.fq = fq if fq is not None else {} self.prefix = prefix self.boost_fx = boost_fx self.boost_q = boost_q self.highlight_class = highlight_class self.taxon = taxon self.min_match = min_match self.include_eqs = include_eqs self.exclude_groups = exclude_groups self.minimal_tokenizer = minimal_tokenizer self.user_agent = get_user_agent(modules=[requests, pysolr], caller_name=__name__) if user_agent is not None: self.user_agent += " {}".format(user_agent) if self.search_fields is None: self.search_fields = dict(id=3, label=2, synonym=1, definition=1, taxon_label=1, taxon_label_synonym=1, equivalent_curie=1) if self.is_go: if self.url is None: endpoint = self.get_config().amigo_solr_search solr_config = {'url': endpoint.url, 'timeout': endpoint.timeout} else: solr_config = {'url': self.url, 'timeout': 2} else: if self.url is None: endpoint = self.get_config().solr_search solr_config = {'url': endpoint.url, 'timeout': endpoint.timeout} else: solr_config = {'url': self.url, 'timeout': 2} self._set_solr(**solr_config) self._set_user_agent(self.user_agent) def update_solr_url(self, url, timeout=2): self.url = url solr_config = {'url': url, 'timeout': timeout} self._set_solr(**solr_config) self._set_user_agent(self.user_agent) def solr_params(self, mode=None): if self.facet_fields is None and self.facet: self.facet_fields = ['category', 'taxon_label'] if self.category is not None: self.fq['category'] = self.category suffixes = ['std', 'kw', 'eng'] if self.is_go: self.search_fields=dict(entity_label=3, general_blob=3) self.hl = False if 'taxon_label' in self.facet_fields: self.facet_fields.remove('taxon_label') suffixes = ['searchable'] self.fq['document_category'] = "general" qf = self._format_query_filter(self.search_fields, suffixes) if mode == 'search': for field, weight in qf.items(): if '_kw' in field: qf[field] += 2 elif '_std' in field: qf[field] += 1 if self.term is not None and ":" in self.term: qf["id_kw"] = 20 qf["equivalent_curie_kw"] = 20 if self.minimal_tokenizer: tokens = re.split(r'[\s|\'\",]+', self.term) if tokens[-1] == '': del tokens[-1] tokenized = "".join(['"{}"'.format(token) for token in tokens]) else: tokenized = self.term select_fields = ["*", "score"] params = { 'q': '{0} "{1}"'.format(tokenized, self.term), "qt": "standard", 'fl': ",".join(list(filter(None, select_fields))), "defType": "edismax", "qf": ["{}^{}".format(field, weight) for field, weight in qf.items()], 'rows': self.rows } if self.facet: params['facet'] = 'on' params['facet.field'] = self.facet_fields params['facet.limit'] = 25 params['facet.mincount'] = 1 if self.start is not None: params['start'] = self.start if self.hl: params['hl.simple.pre'] = "<em class=\"hilite\">" params['hl.snippets'] = "1000" params['hl'] = 'on' if self.fq is not None: filter_queries = ['{}:{}'.format(k,solr_quotify(v)) for (k,v) in self.fq.items()] params['fq'] = filter_queries else: params['fq'] = [] for fq in 
self.fq_string: params['fq'].append(fq) if self.prefix is not None: negative_filter = [p_filt[1:] for p_filt in self.prefix if p_filt.startswith('-')] positive_filter = [p_filt for p_filt in self.prefix if not p_filt.startswith('-')] if negative_filter: if self.include_eqs: single_filts = [ f'(-prefix:"{prefix}" OR -equivalent_curie:{prefix}\:*)' for prefix in negative_filter ] for filt in single_filts: params['fq'].append(filt) else: neg_filter = '({})'.format(" OR ".join([filt for filt in negative_filter])) params['fq'].append('-prefix:{}'.format(solr_quotify(negative_filter))) if positive_filter: if self.include_eqs: single_filts = [ f'(prefix:"{prefix}" OR equivalent_curie:{prefix}\:*)' for prefix in positive_filter ] pos_filter = '({})'.format(" OR ".join([filt for filt in single_filts])) params['fq'].append(pos_filter) else: params['fq'].append('prefix:{}'.format(solr_quotify(positive_filter))) if self.boost_fx is not None: params['bf'] = [] for boost in self.boost_fx: params['bf'].append(boost) if self.boost_q is not None: params['bq'] = [] for boost in self.boost_q: params['bq'].append(boost) if self.taxon is not None: for tax in self.taxon: params['fq'].append('taxon:"{}"'.format(tax)) if self.exclude_groups: params['fq'].append('leaf:1') if self.min_match is not None: params['mm'] = self.min_match if self.highlight_class is not None: params['hl.simple.pre'] = '<em class=\"{}\">'.format(self.highlight_class) return params def search(self): params = self.solr_params(mode='search') logger.info("PARAMS=" + str(params)) results = self.solr.search(**params) logger.info("Docs found: {}".format(results.hits)) return self._process_search_results(results)
BSD 3-Clause New or Revised License
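A short sketch of driving GolrSearchQuery.autocomplete from the record above; it assumes ontobio is installed and that the configured (or explicitly passed) GOlr/Monarch Solr endpoint is reachable, and the search term is arbitrary:

# Sketch only: needs network access to a Solr endpoint known to ontobio's config.
from ontobio.golr.golr_query import GolrSearchQuery

q = GolrSearchQuery(term="Fanconi", rows=5)   # constructor signature is shown in the record's context
hits = q.autocomplete()                       # builds solr params, runs the search, post-processes hits
print(hits)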
algomorph/cvcalib
stereo.py
Setting.generate_parser
python
def generate_parser(defaults, console_only=False, description="Description N/A", parents=None):
    if console_only:
        parser = ap.ArgumentParser(description=description, formatter_class=ap.RawDescriptionHelpFormatter,
                                   add_help=False)
    else:
        if parents is None:
            raise ValueError("A conf-file+console parser requires at least a console-only parser as a parent.")
        parser = ap.ArgumentParser(parents=parents)
    for item in Setting:
        if (item.value.console_only and console_only) or (not item.value.console_only and not console_only):
            if item.value.type == 'bool_flag':
                parser.add_argument(item.value.shorthand, '--' + item.name, action=item.value.action,
                                    default=defaults[item.name], required=item.value.required,
                                    help=item.value.help)
            else:
                parser.add_argument(item.value.shorthand, '--' + item.name, action=item.value.action,
                                    type=item.value.type, nargs=item.value.nargs, required=item.value.required,
                                    default=defaults[item.name], help=item.value.help)
    if not console_only:
        parser.set_defaults(**defaults)
    return parser
@rtype: argparse.ArgumentParser
@return: either a console-only or a config_file+console parser using the specified defaults
and, optionally, parents.
@type defaults: dict
@param defaults: dictionary of default settings and their values.
For a conf-file+console parser, these come from the config file.
For a console-only parser, these are generated.
@type console_only: bool
@type description: str
@param description: description of the program that uses the parser, to be used in the help file
@type parents: list[argparse.ArgumentParser] | None
https://github.com/algomorph/cvcalib/blob/4ed638ea523b6d1059556a135576c7afa3a4b07f/stereo.py#L123-L157
import sys import os.path as osp import argparse as ap from enum import Enum from yaml import load, dump from multistereo.stereo_matcher_app import StereoMatcherApp import re try: from yaml import CLoader as Loader, CDumper as Dumper except ImportError: from yaml import Loader, Dumper class Argument(object): def __init__(self, default, nargs='?', arg_type=str, action='store', arg_help="Documentation N/A", console_only=False, required=False, shorthand=None): self.default = default self.required = required self.console_only = console_only self.nargs = nargs self.type = arg_type self.action = action self.help = arg_help if shorthand is None: self.shorthand = None else: self.shorthand = "-" + shorthand class Setting(Enum): settings_file = Argument(None, '?', str, 'store', "File (absolute or relative-to-execution path) where to save and/or " + "load settings for the program in YAML format.", console_only=True, required=False) save_settings = Argument(False, '?', 'bool_flag', 'store_true', "Save (or update) setting file.", console_only=True, required=False) folder = Argument("./", '?', str, 'store', "Path to root folder to work in. If set to '!settings_file_location' and a " + " settings file is provided, will be set to the location of the settings file.", console_only=False, required=False) images = Argument(["left.png", "right.png"], nargs=2, arg_help="Paths from work folder to left & right stereo images.") input_calibration = Argument(None, arg_help="Path from work folder to left & right calibration files.") output = Argument("disparity.png", arg_help="Name of the output disparity image.") preview = Argument(False, arg_type='bool_flag', arg_help="Preview the generated disparity map before saving.") @staticmethod def generate_missing_shorthands(): for item in Setting: if item.value.shorthand is None: item.value.shorthand = "-" + "".join([item[1] for item in re.findall(r"(:?^|_)(\w)", item.name)]) @staticmethod def generate_defaults_dict(): dict = {} for item in Setting: dict[item.name] = item.value.default return dict @staticmethod
Apache License 2.0
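The generate_parser staticmethod above is meant to be combined with the other Setting helpers from the record's context; a sketch of the console-only branch, with a hypothetical argv:

# Sketch only: assumes the cvcalib checkout (stereo.py and its imports) is importable.
from stereo import Setting

Setting.generate_missing_shorthands()            # derive "-xy" style shorthands first
defaults = Setting.generate_defaults_dict()      # {setting_name: default_value}
console_parser = Setting.generate_parser(defaults, console_only=True,
                                         description="Stereo matcher settings")
args, _ = console_parser.parse_known_args(["--settings_file", "stereo.yaml"])
print(args.settings_file)                        # -> stereo.yaml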
blacklight/platypush
platypush/message/event/ping.py
PingEvent.__init__
python
def __init__(self, message=None, *args, **kwargs):
    super().__init__(message=message, *args, **kwargs)
:param message: Ping message
:type message: object
https://github.com/blacklight/platypush/blob/a5f1dc2638d7c6308325e0ca39dc7d5e262836aa/platypush/message/event/ping.py#L7-L13
from platypush.message.event import Event class PingEvent(Event):
MIT License
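The constructor above only forwards the optional message to the base Event class; a one-line sketch, assuming platypush is installed:

# Sketch only.
from platypush.message.event.ping import PingEvent

event = PingEvent(message='hello')   # `message` is handed straight to Event.__init__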
fredzzhang/pocket
examples/hicodet.py
transforms
python
def transforms(image, target):
    image = to_tensor(image)
    boxes_h = torch.as_tensor(target['boxes_h'])
    boxes_o = torch.as_tensor(target['boxes_o'])
    boxes = torch.zeros_like(boxes_h)
    boxes[:, :2] = torch.min(boxes_h[:, :2], boxes_o[:, :2])
    boxes[:, 2:] = torch.max(boxes_h[:, 2:], boxes_o[:, 2:])
    hoi = torch.as_tensor(target['hoi'])
    labels = torch.zeros(len(hoi), 600)
    min_iou = torch.min(box_iou(boxes_h, boxes_h), box_iou(boxes_o, boxes_o))
    match = torch.nonzero(min_iou > 0.5)
    labels[match[:, 0], hoi[match[:, 1]]] = 1
    return image, boxes, labels
Transform image and target to desired format for learning engine
https://github.com/fredzzhang/pocket/blob/3a6afadb87fc13a2a380a15aa3a68013241893c9/examples/hicodet.py#L45-L63
import os import torch from torch.utils.data import DataLoader from torchvision.transforms.functional import to_tensor from torchvision.ops.boxes import box_iou from pocket.data import HICODet from pocket.models import RoIFeatureExtractor, MultiLayerPerceptron from pocket.core import MultiLabelClassificationEngine class Net(torch.nn.Module): def __init__(self): super().__init__() self.ext = RoIFeatureExtractor() self.head = MultiLayerPerceptron([1024, 1024, 600], [True, True]) def forward(self, image, boxes): with torch.no_grad(): f = self.ext(image, boxes) return self.head(f) """Override methods to focus on classification head""" def parameters(self): return self.head.parameters() def state_dict(self): return self.head.state_dict() def load_state_dict(self, state_dict): self.head.load_state_dict(state_dict)
BSD 3-Clause New or Revised License
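A small synthetic example of the transforms function above, run in the same namespace as the record's hicodet.py (so transforms and its torch/torchvision imports are available); the single human/object box pair and HOI index are made up:

# Sketch only: synthetic input, just to show the expected output shapes.
from PIL import Image

img = Image.new('RGB', (64, 64))
target = {
    'boxes_h': [[0.0, 0.0, 10.0, 10.0]],   # one human box (x1, y1, x2, y2)
    'boxes_o': [[5.0, 5.0, 20.0, 20.0]],   # one object box
    'hoi': [3],                            # interaction class index in [0, 600)
}
image, boxes, labels = transforms(img, target)
print(image.shape, boxes.shape, labels.shape)   # (3, 64, 64), (1, 4), (1, 600)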
neo23x0/xorex
xorex.py
decrypt_pe
python
def decrypt_pe(input_file, valid_keys, output_path, only_valid):
    known_hashes = []
    print("\n" + Fore.BLACK + Back.WHITE + "=== Original File Recovery " + Style.RESET_ALL)
    with open(input_file, 'rb') as fh:
        fdata = fh.read()
    if not os.path.exists(output_path):
        os.makedirs(output_path)
    for vk in valid_keys:
        decrypted_data = de_xor(fdata[vk["mz_offset"]:], bytearray.fromhex(vk["key"]))
        marker = ""
        color = Fore.BLUE
        print("Decrypting file with key '%s' and offset '%d' ..." % (vk["key"], vk["mz_offset"]))
        if not test_pe(decrypted_data):
            if only_valid:
                print("The resulting PE file seems to be invalid - skipping write (due to --onlyvalid flag)")
                continue
            print("The resulting PE file seems to be invalid - writing it nonetheless to disk for you to examine it")
            marker = "_likely_INVALID"
            color = Fore.RED
        filename = os.path.join(output_path, "%s_decrypted_%s%s.exe" % (
            os.path.splitext(os.path.basename(input_file))[0],
            vk["key"],
            marker
        ))
        data_hash = hashlib.md5(decrypted_data).hexdigest()
        if data_hash not in known_hashes:
            print("Writing possible original file to " + color + "'%s'" % filename + Style.RESET_ALL + " ...")
            with open(filename, 'wb') as fh:
                fh.write(decrypted_data)
            known_hashes.append(data_hash)
        else:
            print("This file would be a duplicate. Skipping the output.")
Decrypt the data blob and create files
:param input_file:
:param valid_keys:
:param output_path:
:param only_valid:
:return:
https://github.com/neo23x0/xorex/blob/bed0fb2256d695ef1ec9a04e6403c3a8385203c5/xorex.py#L170-L217
import os import re import argparse import collections import hashlib import pefile import traceback from colorama import init, Fore, Back, Style __version__ = "0.3.0" KNOWN_STRINGS = [b'This program', b'DOS mode'] def extract_byte_chains(input_file, window_size_max=10): fdata = [] with open(input_file, 'rb') as fh: fdata = fh.read() all_stats = [] for ws in range(1, window_size_max+1): stats = { 'length': ws, 'byte_stats': collections.Counter() } for i in range(0, len(fdata)): byte_chain = fdata[i:(i+ws)] if is_usable(byte_chain): stats['byte_stats'].update([byte_chain.hex()]) all_stats.append(stats) return all_stats def is_usable(byte_chain): only_zeros = True for c in byte_chain: if c != 0x00: only_zeros = False if only_zeros: return False return True def xor(data, key): return bytearray(a ^ b for a, b in zip(*map(bytearray, [data, key]))) def de_xor(data, key): data_decoded = bytearray() i = 0 while i < len(data): data_decoded += xor(data[i:i+len(key)], key) i += len(key) return data_decoded def evaluate_keys(input_file, all_stats, top_values): print("\n" + Fore.BLACK + Back.WHITE + "=== Brute Forcing with the Evaluated Keys " + Style.RESET_ALL) fdata = [] valid_keys = [] with open(input_file, 'rb') as fh: fdata = fh.read() for s in KNOWN_STRINGS: print("Checking for known string: '%s' in the first %d bytes of the file" % (s.decode(), int(args.m))) ws = len(s) for set in all_stats: most_common = set['byte_stats'].most_common(top_values) for key, count in most_common: for i in range(0, int(args.m)): decrypted_code = de_xor(fdata[i:(i + ws)], bytearray.fromhex(key)) if s in decrypted_code: print("FOUND STRING IN DECRYPTED CODE WITH KEY: %s" % get_key_string(key)) print("DATA: '%s' OFFSET: %d DECRYPTED: '%s'" % (fdata[i:(i+ws)].hex(), i, decrypted_code.decode())) valid_keys.append({"key": key, "mz_offset": 0}) mz_offset, rotated_key = find_mz_with_key(fdata[:i], key) if rotated_key and mz_offset > 0: print("It seems that the file has some kind of prefix (shellcode, junk etc.)") print("Found MZ header at offset: %d" % mz_offset) print("Adjusted XOR key to: %s" % get_key_string(rotated_key)) valid_keys.append({"key": rotated_key, "mz_offset": mz_offset}) return valid_keys def get_key_string(key): ascii_addon = '' ascii_key = get_ascii(key) if ascii_key: ascii_addon = Style.RESET_ALL + "ASCII '" + Fore.GREEN + "%s" % ascii_key + Style.RESET_ALL + "'" key_string = "HEX: '" + Fore.GREEN + '%s' % key + Style.RESET_ALL + "' %s" % ascii_addon return key_string def find_mz_with_key(fdata, key): for j in range(0, int(len(key))): key_val = key[-j:] + key[:-j] for i in range(0, len(fdata)): decrypted_code = de_xor(fdata[i:(i + 2)], bytearray.fromhex(key_val)) if b'MZ' == decrypted_code: return i, key_val return 0, ''
Apache License 2.0
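decrypt_pe above expects the {"key", "mz_offset"} dictionaries that evaluate_keys produces in the record's context; a sketch with a made-up sample path and key:

# Sketch only: hypothetical file and key; assumes the xorex checkout (plus pefile/colorama) is importable.
from xorex import decrypt_pe

valid_keys = [{"key": "4142", "mz_offset": 0}]     # hex-encoded XOR key, offset of the MZ header
decrypt_pe("./samples/encrypted.bin", valid_keys,
           output_path="./decrypted",
           only_valid=True)                        # skip writing outputs that do not parse as PE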
ramonwill/data-app
Prescient/database_tools/Extracts.py
PositionSummary._set_direction
python
def _set_direction(self):
    if self.open_direction == "short" and self.net_position > 0:
        self.open_direction = "long"
    elif self.open_direction == "long" and self.net_position < 0:
        self.open_direction = "short"
Checks if there has been a reversal in the user's overall trade direction and sets that direction accordingly.
https://github.com/ramonwill/data-app/blob/e4b28704940546156f9521c88eced73f1443ce7e/Prescient/database_tools/Extracts.py#L119-L127
import pandas as pd from collections import deque, namedtuple class PositionSummary(object): def __init__(self, trade_history): self.trade_history = trade_history self.average_cost = None self.open_lots = None self.ticker = self.set_ticker() self.buy_quantities = deque([]) self.buy_prices = deque([]) self.buy_dates = deque([]) self.sell_quantities = deque([]) self.sell_prices = deque([]) self.sell_dates = deque([]) self.open_direction = None self.breakdown = [] self.net_position = 0 self._apply_fifo() def __repr__(self): return (f"<Ticker: {self.ticker}, Quantity: {self.net_position}>") def set_ticker(self): tickers = set([i[0] for i in self.trade_history]) if len(tickers) == 1: return self.trade_history[0][0] else: raise ValueError("The Trade History for this security contains multiple tickers") def total_open_lots(self): if self.open_direction == "long": return sum(self.buy_quantities) elif self.open_direction == "short": return sum(self.sell_quantities) else: return None def total_market_value(self): total = None if self.buy_quantities and self.open_direction == "long": zipped = zip(self.buy_quantities, self.buy_prices) total = (quantity*price for quantity, price in zipped) elif self.sell_quantities and self.open_direction == "short": zipped = zip(self.sell_quantities, self.sell_prices) total = (quantity*price for quantity, price in zipped) return sum(total) if total is not None else None def get_average_cost(self): open_lots = self.total_open_lots() if open_lots == 0 or not open_lots: return 0 return abs(self.total_market_value()/self.total_open_lots()) def remove_trade(self, direction): if direction == "buy": popped_quantity = self.buy_quantities.popleft() self.buy_prices.popleft() self.buy_dates.popleft() elif direction == "sell": popped_quantity = self.sell_quantities.popleft() self.sell_prices.popleft() self.sell_dates.popleft() else: raise NameError("why did this happen") return popped_quantity def _collapse_trade(self): if self.sell_quantities: if self.sell_quantities[0] >= 0: self.remove_trade("sell") if self.buy_quantities: if self.buy_quantities[0] <= 0: self.remove_trade("buy") def get_summary(self): Summary = namedtuple("Summary", ["ticker", "quantity", "average_price"]) ticker = self.ticker quantity = self.net_position average_price = round(self.average_cost, 4) return Summary(ticker, quantity, average_price) def add(self, side, units, price, date): if side == "buy": self.buy_quantities.append(units) self.buy_prices.append(price) self.buy_dates.append(date) elif side == "sell": self.sell_quantities.append(units) self.sell_prices.append(price) self.sell_dates.append(date)
MIT License
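A standalone illustration of the reversal rule encoded by _set_direction above (plain variables rather than the repo's PositionSummary object, since the trade-history row layout is not shown in the record):

# Standalone sketch of the same rule: a short book whose net quantity turned positive is now long.
open_direction, net_position = "short", 25        # e.g. sold 100 units, later bought back 125
if open_direction == "short" and net_position > 0:
    open_direction = "long"
elif open_direction == "long" and net_position < 0:
    open_direction = "short"
print(open_direction)                             # -> long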
vlsida/openram
compiler/base/wire_path.py
wire_path.create_rectangles
python
def create_rectangles(self):
    pl = self.position_list
    for index in range(len(pl) - 1):
        if pl[index][0] != pl[index + 1][0]:
            line_length = pl[index + 1][0] - pl[index][0]
            offset = [pl[index][0], pl[index][1] - 0.5 * self.layer_width]
            if line_length < 0:
                offset = [offset[0] + line_length, offset[1]]
            self.add_line(layer_name=self.layer_name,
                          length=abs(line_length),
                          offset=offset,
                          orientation="horizontal",
                          layer_width=self.layer_width)
        elif pl[index][1] != pl[index + 1][1]:
            line_length = pl[index + 1][1] - pl[index][1]
            offset = [pl[index][0] - 0.5 * self.layer_width, pl[index][1]]
            if line_length < 0:
                offset = [offset[0], offset[1] + line_length]
            self.add_line(layer_name=self.layer_name,
                          length=abs(line_length),
                          offset=offset,
                          orientation="vertical",
                          layer_width=self.layer_width)
Create the actual rectangles on the appropriate layers using the position list of the corners.
https://github.com/vlsida/openram/blob/f66aac3264598eeae31225c62b6a4af52412d407/compiler/base/wire_path.py#L85-L115
from tech import drc from tech import layer as techlayer import debug from vector import vector from utils import snap_to_grid def create_rectilinear_route(my_list): pl = [snap_to_grid(x) for x in my_list] my_list = [] for index in range(len(pl) - 1): if pl[index] != pl[index + 1]: my_list.append(vector(pl[index])) if (pl[index][0] != pl[index + 1][0]) and (pl[index][1] != pl[index + 1][1]): my_list.append(vector(pl[index][0], pl[index + 1][1])) my_list.append(vector(pl[-1])) return my_list class wire_path(): def __init__(self, obj, layer, position_list, width=None): self.obj = obj self.layer_name = layer self.layer_id = techlayer[layer] if width == None: self.layer_width = drc["minwidth_{0}".format(layer)] else: self.layer_width = width self.position_list = position_list self.pins = [] self.switch_pos_list = [] self.create_layout() def create_layout(self): self.create_rectilinear() self.connect_corner() self.create_rectangles() def create_rectilinear(self): self.position_list = create_rectilinear_route(self.position_list) def connect_corner(self): from itertools import tee, islice nwise = lambda g, n=2: zip(*(islice(g, i, None) for i, g in enumerate(tee(g, n)))) threewise=nwise(self.position_list, 3) for (a, offset, c) in list(threewise): if a[0] == c[0]: continue if a[1] == c[1]: continue corner_offset = [offset[0] - 0.5 * self.layer_width, offset[1] - 0.5 * self.layer_width] self.draw_corner_wire(corner_offset) def draw_corner_wire(self, offset): self.obj.add_rect(layer=self.layer_name, offset=offset, width=self.layer_width, height=self.layer_width)
BSD 3-Clause New or Revised License
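create_rectangles above is not called directly; per the record's context it runs from wire_path.__init__ via create_layout. A heavily hedged sketch from inside an OpenRAM design class (wire_path is imported from compiler/base/wire_path.py, the layer name depends on the loaded technology, and the enclosing design object is hypothetical):

# Sketch only: `self` is an OpenRAM design/module that provides add_rect (and add_line).
route = wire_path(obj=self,
                  layer="m1",                                 # technology-dependent layer name
                  position_list=[(0, 0), (0, 4.6), (3.2, 4.6)])
# __init__ -> create_layout() -> create_rectangles() emits one rectangle per route segment.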
pdm-project/pdm
pdm/_vendor/halo/halo.py
Halo._get_text
python
def _get_text(self, text):
    animation = self._animation
    stripped_text = text.strip()
    max_spinner_length = max([len(i) for i in self._spinner["frames"]])
    terminal_width = get_terminal_columns() - max_spinner_length - 1
    text_length = len(stripped_text)
    frames = []
    if terminal_width < text_length and animation:
        if animation == "bounce":
            for x in range(0, text_length - terminal_width + 1):
                frames.append(stripped_text[x : terminal_width + x])
            frames.extend(list(reversed(frames)))
        elif "marquee":
            stripped_text = stripped_text + " " + stripped_text[:terminal_width]
            for x in range(0, text_length + 1):
                frames.append(stripped_text[x : terminal_width + x])
    elif terminal_width < text_length and not animation:
        frames = [stripped_text[: terminal_width - 6] + " (...)"]
    else:
        frames = [stripped_text]
    return {"original": text, "frames": frames}
Creates frames based on the selected animation

Returns
-------
self
https://github.com/pdm-project/pdm/blob/a5ef1fa3b09b53d9d0575657f4d23c9cc2f2cc11/pdm/_vendor/halo/halo.py#L397-L437
from __future__ import absolute_import, unicode_literals import atexit import functools import sys import threading import time import pdm._vendor.halo.cursor as cursor from pdm._vendor.log_symbols.symbols import LogSymbols, is_supported from pdm._vendor.spinners.spinners import Spinners from pdm._vendor.halo._utils import ( colored_frame, decode_utf_8_text, get_environment, get_terminal_columns, is_text_type, encode_utf_8_text, ) class Halo(object): CLEAR_LINE = "\033[K" CLEAR_REST = "\033[J" SPINNER_PLACEMENTS = ( "left", "right", ) _instances = [] _lock = threading.Lock() def __init__( self, text="", color="cyan", text_color=None, spinner=None, animation=None, placement="left", interval=-1, enabled=True, indent="", stream=sys.stdout, ): self._color = color self._animation = animation self.spinner = spinner self.text = text self._text_color = text_color self._interval = ( int(interval) if int(interval) > 0 else self._spinner["interval"] ) self._stream = stream self.placement = placement self._frame_index = 0 self._text_index = 0 self._spinner_thread = None self._stop_spinner = None self._spinner_id = None self.enabled = enabled self._stopped = False self._content = "" self.indent = indent environment = get_environment() def clean_up(): self.stop() if environment in ("ipython", "jupyter"): from IPython import get_ipython ip = get_ipython() ip.events.register("post_run_cell", clean_up) else: atexit.register(clean_up) def __enter__(self): return self.start() def __exit__(self, type, value, traceback): self.stop() def __call__(self, f): @functools.wraps(f) def wrapped(*args, **kwargs): with self: return f(*args, **kwargs) return wrapped @property def spinner(self): return self._spinner @spinner.setter def spinner(self, spinner=None): self._spinner = self._get_spinner(spinner) self._frame_index = 0 self._text_index = 0 @property def text(self): return self._text["original"] @text.setter def text(self, text): self._text = self._get_text(text) @property def text_color(self): return self._text_color @text_color.setter def text_color(self, text_color): self._text_color = text_color @property def color(self): return self._color @color.setter def color(self, color): self._color = color @property def placement(self): return self._placement @placement.setter def placement(self, placement): if placement not in self.SPINNER_PLACEMENTS: raise ValueError( "Unknown spinner placement '{0}', available are {1}".format( placement, self.SPINNER_PLACEMENTS ) ) self._placement = placement @property def spinner_id(self): return self._spinner_id @property def animation(self): return self._animation @animation.setter def animation(self, animation): self._animation = animation self._text = self._get_text(self._text["original"]) def _check_stream(self): if self._stream.closed: return False try: check_stream_writable = self._stream.writable except AttributeError: pass else: return check_stream_writable() return True def _pop_stream_content_until_self(self, clear_self=False): erased_content = [] lines_to_erase = self._content.count("\n") if clear_self else 0 for inst in Halo._instances[::-1]: if inst is self: break erased_content.append(inst._content) lines_to_erase += inst._content.count("\n") if lines_to_erase > 0: self._write_stream("\033[{}A".format(lines_to_erase)) self._write_stream(self.CLEAR_REST) return "".join(reversed(erased_content)) def _write_stream(self, s): if self._check_stream(): self._stream.write(s) def _write(self, s, overwrite=False): if s.startswith("\r"): s = f"\r{self.indent}{s[1:]}" else: s = 
f"{self.indent}{s}" with Halo._lock: erased_content = self._pop_stream_content_until_self(overwrite) self._write_stream(s) self._write_stream(erased_content) self._content = s if overwrite else self._content + s def _hide_cursor(self): if self._check_stream() and self._stream.isatty(): cursor.hide(stream=self._stream) def _show_cursor(self): if self._check_stream() and self._stream.isatty(): cursor.show(stream=self._stream) def _get_spinner(self, spinner): default_spinner = Spinners["dots"].value if spinner and type(spinner) == dict: return spinner if is_supported(): if all([is_text_type(spinner), spinner in Spinners.__members__]): return Spinners[spinner].value else: return default_spinner else: return Spinners["line"].value
MIT License
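A sketch of what the frame generation above returns; the import path follows the record's vendored layout inside pdm, and a normal interactive terminal is assumed:

# Sketch only: call the (private) helper directly to inspect the generated frames.
from pdm._vendor.halo.halo import Halo

spinner = Halo(text="loading", animation="marquee")
print(spinner._get_text("loading")["frames"])      # fits the terminal -> a single frame
# A text wider than the terminal instead yields a list of sliding-window frames
# ("marquee") or back-and-forth windows ("bounce").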
exopy/exopy
tests/measurement/workspace/test_measurement_edition.py
edition_view
python
def edition_view(measurement, workspace, exopy_qtbot):
    pl = measurement.plugin
    pl.edited_measurements.add(measurement)
    measurement.root_task.add_child_task(0, BreakTask(name='Test'))
    item = MeasurementEditorDockItem(workspace=workspace,
                                     measurement=measurement,
                                     name='test')
    return DockItemTestingWindow(widget=item)
Start plugins and add measurements before creating the execution view.
https://github.com/exopy/exopy/blob/aeda9bcfad2d2f76903c7ad2800ea2110ff689b2/tests/measurement/workspace/test_measurement_edition.py#L31-L42
import pytest import enaml from exopy.testing.util import (handle_dialog, wait_for_window_displayed, wait_for_destruction, handle_question) from exopy.tasks.tasks.logic.loop_exceptions_tasks import BreakTask from exopy.utils.widgets.qt_clipboard import CLIPBOARD with enaml.imports(): from exopy.testing.windows import DockItemTestingWindow from exopy.measurement.workspace.measurement_edition import (MeasurementEditorDockItem, MeasureEditorDialog, SaveAction, build_task, TaskCopyAction, TaskPasteAction) from exopy.utils.widgets.qt_tree_menu import CutAction @pytest.fixture
BSD 3-Clause New or Revised License
wellcometrust/reach
base/elastic/import_refs_from_s3.py
clean_es
python
def clean_es(es):
    common.recreate_index(es, 'datalabs-references')
Empty the elasticsearch database.

Args:
    es: a living connection to elasticsearch
https://github.com/wellcometrust/reach/blob/1aa42c7d8aaf0a91d033af8448a33f37563b0365/base/elastic/import_refs_from_s3.py#L57-L64
import tempfile import csv import json import random import boto3 from urllib.parse import urlparse from argparse import ArgumentParser from elasticsearch import Elasticsearch from . import common parser = ArgumentParser() parser.add_argument('s3_url') parser.add_argument('-s', '--size', default=1024, type=int, help=('The megabytes to pull. Defaults to 100.' 'A negative value will pull the entire dataset')) parser.add_argument('-H', '--host', default='127.0.0.1', help='Address of the Elasticsearch server') parser.add_argument('-P', '--port', default='9200', help='Port of the Elasticsearch server') parser.add_argument('-C', '--clean', dest='clean', action='store_true', help='Clean the elasticsearch database before import') def write_to_es(es, line): body = json.dumps({ 'title': line['title'] }) es.index( index='datalabs-references', ignore=400, body=body, doc_type='reference' )
MIT License
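A sketch of wiring clean_es above to a local Elasticsearch node, mirroring the --host/--port arguments in the record's context; the address is hypothetical, the host list uses the elasticsearch-py 7.x style, and the reach checkout is assumed importable so clean_es and its sibling common module resolve:

# Sketch only: local ES node assumed; recreate_index comes from the sibling `common` module.
from elasticsearch import Elasticsearch

es = Elasticsearch([{'host': '127.0.0.1', 'port': 9200}])
clean_es(es)    # empties the index by recreating 'datalabs-references'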
sublimetext/packagedev
plugins/settings/known_settings.py
KnownSettings._theme_completions
python
def _theme_completions(key, default):
    hidden = get_setting('settings.exclude_theme_patterns') or []
    if int(sublime.version()) >= 4095 and key == 'theme':
        yield format_completion_item(value="auto", annotation="dark-/light switching")
    for theme_path in ResourcePath.glob_resources("*.sublime-theme"):
        if not any(hide in theme_path.name for hide in hidden):
            yield format_completion_item(
                value=theme_path.name, default=default, annotation="theme"
            )
Create completions of all visible themes.

default (string): The default `theme` value.

The set will not include color schemes matching at least one entry of
`"settings.exclude_theme_patterns": []` setting.

Returns:
    {(trigger, contents), ...}
        A set of all completions.
        - trigger (string): base file name of the theme
        - contents (string): the file name to commit to the settings
https://github.com/sublimetext/packagedev/blob/35a68969f94459d38341ea373ab2e583e60d8cda/plugins/settings/known_settings.py#L751-L773
import collections import logging import os import re import textwrap import time import weakref import sublime from sublime_lib import encodings, ResourcePath from ..lib.weakmethod import WeakMethodProxy from ..lib import get_setting from .region_math import VALUE_SCOPE, get_value_region_at, get_last_key_name_from logger = logging.getLogger(__name__) PREF_FILE = "Preferences.sublime-settings" PREF_FILE_ALIAS = "Base File.sublime-settings" KIND_SETTING = (sublime.KIND_ID_VARIABLE, "S", "Setting") def html_encode(string): return string.replace("&", "&amp;") .replace("<", "&lt;") .replace(">", "&gt;") .replace("\t", "&nbsp;&nbsp;") .replace(" ", "&nbsp;&nbsp;") .replace("\n", "<br>") if string else "" def format_completion_item(value, default=None, is_default=False, label=None, annotation=None): if isinstance(value, dict): raise ValueError("Cannot format dictionary value", value) if not is_default: is_default = value in default if isinstance(default, list) else value == default type_ = type(value).__name__ return sublime.CompletionItem( trigger=sublime.encode_value(label or value).strip('"'), annotation=("(default) {}" if is_default else "{}").format(annotation or ""), completion=value, kind=(sublime.KIND_ID_SNIPPET, type_[0], type_), ) def decode_value(string): if string.lower() == "true": return True if string.lower() == "false": return False try: return int(string) except ValueError: return float(string) class KnownSettings(object): cache = weakref.WeakValueDictionary() _is_initialized = False _is_loaded = False filename = None on_loaded_callbacks = None on_loaded_once_callbacks = None defaults = None comments = None fallback_settings = None def __new__(cls, filename, on_loaded=None, **kwargs): obj = cls.cache.get(filename) if obj: logger.debug("cache hit %r", filename) return cls.cache[filename] else: obj = super().__new__(cls, **kwargs) cls.cache[filename] = obj return obj def __init__(self, filename): if not self._is_initialized: self.filename = filename self.on_loaded_callbacks = [] self.on_loaded_once_callbacks = [] self._is_initialized = True self.defaults = collections.ChainMap() self.comments = collections.ChainMap() self.trigger_settings_reload() def add_on_loaded(self, on_loaded, once=False): if self._is_loaded: sublime.set_timeout_async(on_loaded, 0) if not once: self.on_loaded_callbacks.append(WeakMethodProxy(on_loaded)) elif not self._is_loaded: self.on_loaded_once_callbacks.append(WeakMethodProxy(on_loaded)) def __del__(self): logger.debug("deleting KnownSettings instance for %r", self.filename) def __iter__(self): return iter(self.defaults) def trigger_settings_reload(self): sublime.set_timeout_async(self._load_settings, 0) def _load_settings(self): ignored_patterns = frozenset(("/User/", "/Preferences Editor/")) logger.debug("loading defaults and comments for %r", self.filename) start_time = time.time() resources = sublime.find_resources(self.filename) resources += sublime.find_resources(self.filename + "-hints") if self.filename == PREF_FILE: resources += sublime.find_resources(PREF_FILE_ALIAS) logger.debug("found %d %r files", len(resources), self.filename) for resource in resources: if any(ignored in resource for ignored in ignored_patterns): logger.debug("ignoring %r", resource) continue try: logger.debug("parsing %r", resource) lines = sublime.load_resource(resource).splitlines() for key, value in self._parse_settings(lines).items(): self.defaults.setdefault(key, value) except Exception as e: logger.error("error parsing %r - %s%r", resource, e.__class__.__name__, 
e.args) duration = time.time() - start_time logger.debug("loading took %.3fs", duration) is_syntax_specific = self._is_syntax_specific() if is_syntax_specific and not self.fallback_settings: self.fallback_settings = KnownSettings(PREF_FILE) self.defaults.maps.append(self.fallback_settings.defaults) self.comments.maps.append(self.fallback_settings.comments) self.fallback_settings.add_on_loaded(self._has_loaded, once=True) else: if self.fallback_settings and not is_syntax_specific: self.fallback_settings = None self.defaults.maps.pop() self.comments.maps.pop() self._has_loaded() def _has_loaded(self): self._is_loaded = True for callback in self.on_loaded_once_callbacks: try: callback() except ReferenceError: pass self.on_loaded_once_callbacks.clear() for callback in tuple(self.on_loaded_callbacks): try: callback() except ReferenceError: logger.debug("removing gone-away weak on_loaded_callback reference") self.on_loaded_callbacks.remove(callback) def _is_syntax_specific(self): syntax_file_exts = (".sublime-syntax", ".tmLanguage") name_no_ext = os.path.splitext(self.filename)[0] for ext in syntax_file_exts: syntax_file_name = name_no_ext + ext resources = sublime.find_resources(syntax_file_name) if resources: logger.debug("syntax-specific settings file for %r", resources[0]) return True return False def _parse_settings(self, lines): content = [] comment = [] in_comment = False for line in lines: stripped = line.strip() if in_comment: if stripped.endswith("*/"): in_comment = False line = line.rstrip("*/ \t") if line: comment.append(line) elif stripped.startswith("* "): comment.append(stripped[2:]) else: comment.append(line) continue elif not stripped: continue if stripped.startswith("/*"): in_comment = True stripped = stripped[2:].lstrip("*") if stripped: comment.append(stripped) continue if stripped.startswith("//"): stripped = stripped[2:] if not stripped or not stripped.endswith("//"): comment.append(stripped) continue content.append(line) if comment: match = re.match(r'"((?:[^"]|\\.)*)":', stripped) if not match: continue key = match.group(1) if key not in self.comments: self.comments[key] = textwrap.dedent('\n'.join(comment)) comment.clear() return sublime.decode_value('\n'.join(content)) def build_tooltip(self, view, key): if key in self.defaults: comment = html_encode(self.comments.get(key) or "No description.") default = html_encode( sublime.encode_value(self.defaults.get(key), pretty=True)) else: comment, default = "No description.", "unknown setting" return ( "<h1>{key}</h1>" "<h2>Default: {default}</h2>" "<p>{comment}</p>" ).format(**locals()) def insert_snippet(self, view, key): value_regions = view.find_by_selector(VALUE_SCOPE) if not value_regions: selector = "meta.mapping" value_regions = view.find_by_selector(selector) if not value_regions: point = view.size() is_empty_line = not view.substr(view.line(point)).strip() bol = "{\n\t" if is_empty_line else "\n{\n\t" eol = ",$0\n}\n" else: point = value_regions[-1].end() - 1 bol, eol = "\t", "\n" else: value_region = value_regions[-1] value_str = view.substr(value_region) value_str_trimmed = value_str.rstrip() ws_length = len(value_str) - len(value_str_trimmed) point = view.line(value_region.end() - ws_length).end() if value_str_trimmed.endswith(","): bol, eol = "\n", "," else: bol, eol = ",\n", "" snippet = self._key_snippet(key, self.defaults[key], bol, eol) view.sel().clear() view.sel().add(point) view.run_command('insert_snippet', {'contents': snippet}) def key_completions(self, view, prefix, point): if view.match_selector(point 
- 1, "string"): completions = [ sublime.CompletionItem( trigger=key, completion=key, kind=KIND_SETTING, ) for key in self.defaults ] else: line = view.substr(view.line(point)).strip() eol = "," if len(line) == len(prefix) else ",\n" completions = [ sublime.CompletionItem( trigger=key, completion=self._key_snippet(key, value, eol=eol), completion_format=sublime.COMPLETION_FORMAT_SNIPPET, kind=KIND_SETTING, ) for key, value in self.defaults.items() ] return completions @staticmethod def _key_snippet(key, value, bol="", eol=",\n"): encoded = sublime.encode_value(value) encoded = encoded.replace("\\", "\\\\") encoded = encoded.replace("$", "\\$") encoded = encoded.replace("}", "\\}") if isinstance(value, str): fmt = '{bol}"{key}": "${{1:{encoded}}}"{eol}' encoded = encoded[1:-1] elif isinstance(value, list): fmt = '{bol}"{key}":\n[\n\t${{1:{encoded}}}\n]{eol}' encoded = encoded[1:-1] elif isinstance(value, dict): fmt = '{bol}"{key}":\n{{\n\t${{1:{encoded}}}\n}}{eol}' encoded = encoded[1:-1] else: fmt = '{bol}"{key}": ${{1:{encoded}}}{eol}' return fmt.format(**locals()) def value_completions(self, view, prefix, point): value_region = get_value_region_at(view, point) if not value_region: logger.debug("unable to find current key region") return None key = get_last_key_name_from(view, value_region.begin()) if not key: logger.debug("unable to find current key") return None completions_map = {c.trigger: c for c in self._value_completions_for(key)} completions = list(completions_map.values()) if not completions: logger.debug("no completions to offer") return None is_str = any( bool(isinstance(c.completion, str) or (isinstance(c.completion, list) and c.completion and isinstance(c.completion[0], str))) for c in completions ) in_str = view.match_selector(point, "string") logger.debug("completing a string (%s) within a string (%s)", is_str, in_str) is_list = isinstance(self.defaults.get(key), list) in_list = view.match_selector(point, "meta.sequence") logger.debug("completing a list item (%s) within a list (%s)", is_list, in_list) if in_str and not is_str: msg = "Cannot complete value set within a string" view.window().status_message(msg) logger.warning(msg) return None if in_str and is_str: completions = [ c for c in completions if isinstance(c.completion, str) ] else: typed_region = sublime.Region(value_region.begin(), point) typed = view.substr(typed_region).lstrip() for c in completions: value = c.completion if isinstance(value, frozenset): value = dict(value) if isinstance(value, float): value_str = str(value) if value_str.startswith(typed): offset = len(typed) - len(prefix) value_str = value_str[offset:] elif typed: continue else: value_str = sublime.encode_value(value) if is_list and not in_list: value_str = "[{}]".format(value_str) value_str = value_str.replace("$", "\\$") c.completion = value_str return completions def _value_completions_for(self, key): logger.debug("building completions for key %r", key) default = self.defaults.get(key) logger.debug("default value: %r", default) if key in ('color_scheme', 'dark_color_scheme', 'light_color_scheme'): yield from self._color_scheme_completions(key, default) elif key in ('default_encoding', 'fallback_encoding'): yield from self._encoding_completions(default) elif key in ('theme', 'dark_theme', 'light_theme'): yield from self._theme_completions(key, default) else: yield from self._completions_from_comment(key, default) yield from self._completions_from_default(key, default) def _completions_from_comment(self, key, default): comment = 
self.comments.get(key) if not comment: return for match in re.finditer(r"`([^`\n]+)`", comment): value = match.group(1) try: value = sublime.decode_value(value) except ValueError: pass if isinstance(value, list): for v in value: if not isinstance(v, dict): yield format_completion_item(v, default) elif isinstance(value, dict): pass else: yield format_completion_item(value, default) for match in re.finditer(r'"([\.\w]+)"', comment): value, = match.groups() try: value = decode_value(value) except ValueError: pass yield format_completion_item(value, default) @staticmethod def _completions_from_default(key, default): if default is None or default == "": return elif isinstance(default, bool): for value in [True, False]: yield format_completion_item(value, default=default) elif isinstance(default, list): for value in default: yield format_completion_item(value, is_default=True) elif isinstance(default, dict): return else: yield format_completion_item(default, is_default=True) @staticmethod def _color_scheme_completions(key, default): if int(sublime.version()) >= 4095 and key == 'color_scheme': yield format_completion_item(value="auto", annotation="dark-/light switching") hidden = get_setting('settings.exclude_color_scheme_patterns') or [] for scheme_path in sublime.find_resources("*.sublime-color-scheme"): if not any(hide in scheme_path for hide in hidden): try: root, package, *_, name = scheme_path.split("/") except ValueError: continue if root == 'Cache': continue yield format_completion_item(value=name, default=default, annotation=package) for scheme_path in sublime.find_resources("*.tmTheme"): if not any(hide in scheme_path for hide in hidden): try: root, package, *_, name = scheme_path.split("/") except ValueError: continue if root == 'Cache': continue yield format_completion_item( value=scheme_path, default=default, label=name, annotation=package ) @staticmethod def _encoding_completions(default): for enc in encodings.SUBLIME_TO_STANDARD.keys(): yield format_completion_item(value=enc, default=default, annotation="encoding") @staticmethod
MIT License
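_theme_completions above only works inside a running Sublime Text plugin host (it touches sublime.version(), package settings and packaged resources); a sketch of calling the staticmethod from the plugin console, with illustrative output:

# Sketch only: run inside Sublime Text, e.g. from the console of the PackageDev plugin.
completions = list(KnownSettings._theme_completions("theme", "Default.sublime-theme"))
for item in completions[:5]:
    print(item.trigger, item.annotation)    # e.g. "Default.sublime-theme (default) theme"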
pajbot/pajbot
pajbot/modules/playsound.py
PlaysoundModule.add_playsound_command
python
def add_playsound_command(self, bot, source, message, **rest):
    options, name, link = self.parse_playsound_arguments(message)
    if options is False or name is False or link is False or link is None:
        bot.whisper(
            source,
            "Invalid usage. Correct syntax: !add playsound <name> <link> "
            + "[--volume 0-100] [--cooldown 60/none] [--enabled/--disabled]",
        )
        return
    name = self.massage_name(name)
    if not self.validate_name(name):
        bot.whisper(
            source,
            "Invalid Playsound name. The playsound name may only contain lowercase latin letters, 0-9, -, or _. No spaces :rage:",
        )
        return
    with DBManager.create_session_scope() as session:
        count = session.query(Playsound).filter(Playsound.name == name).count()
        if count > 0:
            bot.whisper(
                source,
                "A Playsound with that name already exists. Use !edit playsound "
                + "or !remove playsound to edit or delete it.",
            )
            return
        playsound = Playsound(name=name)
        if not self.update_link(bot, source, playsound, link):
            return
        if not self.update_volume(bot, source, playsound, options):
            return
        if not self.update_cooldown(bot, source, playsound, options):
            return
        if not self.update_enabled(bot, source, playsound, options):
            return
        session.add(playsound)
        bot.whisper(source, "Successfully added your playsound.")
        log_msg = f"The {name} playsound has been added"
        AdminLogManager.add_entry("Playsound added", source, log_msg)
Method for creating playsounds.
Usage: !add playsound PLAYSOUNDNAME LINK [options]
Multiple options available:
--volume VOLUME
--cooldown COOLDOWN
--enabled/--disabled
https://github.com/pajbot/pajbot/blob/42e19a692eb663556bc78d0d86eef1a667728f46/pajbot/modules/playsound.py#L318-L376
import json
import logging
import re

from argparse import ArgumentParser

from pajbot.managers.db import DBManager
from pajbot.models.playsound import Playsound
from pajbot.modules import BaseModule
from pajbot.modules import ModuleSetting
from pajbot.models.command import Command
from pajbot.managers.adminlog import AdminLogManager

log = logging.getLogger(__name__)


class PlaysoundModule(BaseModule):
    ID = __name__.split(".")[-1]
    NAME = "Playsound"
    DESCRIPTION = "Play a sound on stream with !#playsound"
    CATEGORY = "Feature"
    SETTINGS = [
        ModuleSetting(
            key="command_name",
            label="Command name (e.g. #playsound)",
            type="text",
            required=True,
            placeholder="Command name (no !)",
            default="#playsound",
            constraints={"min_str_len": 2, "max_str_len": 15},
        ),
        ModuleSetting(
            key="point_cost",
            label="Point cost",
            type="number",
            required=True,
            placeholder="Point cost",
            default=200,
            constraints={"min_value": 0, "max_value": 1000000},
        ),
        ModuleSetting(
            key="token_cost",
            label="Token cost",
            type="number",
            required=True,
            placeholder="Token cost",
            default=0,
            constraints={"min_value": 0, "max_value": 1000000},
        ),
        ModuleSetting(
            key="global_cd",
            label="Global playsound cooldown (seconds)",
            type="number",
            required=True,
            placeholder="",
            default=10,
            constraints={"min_value": 0, "max_value": 600},
        ),
        ModuleSetting(
            key="default_sample_cd",
            label="Default per-sample cooldown (seconds)",
            type="number",
            required=True,
            placeholder="",
            default=30,
            constraints={"min_value": 0, "max_value": 600},
        ),
        ModuleSetting(
            key="user_cd",
            label="Per-user cooldown (seconds)",
            type="number",
            required=True,
            placeholder="",
            default=0,
            constraints={"min_value": 0, "max_value": 600},
        ),
        ModuleSetting(
            key="global_volume",
            label="Global volume (0-100)",
            type="number",
            required=True,
            placeholder="",
            default=40,
            constraints={"min_value": 0, "max_value": 100},
        ),
        ModuleSetting(key="sub_only", label="Subscribers only", type="boolean", required=True, default=False),
        ModuleSetting(key="can_whisper", label="Command can be whispered", type="boolean", required=True, default=True),
        ModuleSetting(
            key="confirmation_whisper",
            label="Send user a whisper when sound was successfully played",
            type="boolean",
            required=True,
            default=True,
        ),
        ModuleSetting(
            key="global_cd_whisper",
            label="Send user a whisper when playsounds are on global cooldown",
            type="boolean",
            required=True,
            default=True,
        ),
        ModuleSetting(
            key="user_cd_whisper",
            label="Send user a whisper when they hit the user-specific cooldown",
            type="boolean",
            required=True,
            default=True,
        ),
    ]

    def __init__(self, bot):
        super().__init__(bot)

        if bot:
            bot.socket_manager.add_handler("playsound.play", self.on_web_playsound)

        self.sample_cooldown = set()
        self.user_cooldown = set()
        self.global_cooldown = False

    def on_web_playsound(self, data):
        playsound_name = data["name"]

        with DBManager.create_session_scope() as session:
            playsound = session.query(Playsound).filter(Playsound.name == playsound_name).one_or_none()

            if playsound is None:
                log.warning(f'Web UI tried to play invalid playsound "{playsound_name}". Ignoring.')
                return

            payload = {
                "link": playsound.link,
                "volume": int(round(playsound.volume * self.settings["global_volume"] / 100)),
            }

            log.debug(f"Playsound module is emitting payload: {json.dumps(payload)}")
            self.bot.websocket_manager.emit("play_sound", payload)

    def reset_global_cd(self):
        self.global_cooldown = False

    def play_sound(self, bot, source, message, **rest):
        if not message:
            return

        playsound_name = self.massage_name(message.split(" ")[0])
        with DBManager.create_session_scope() as session:
            playsound = session.query(Playsound).filter_by(name=playsound_name).one_or_none()

            if playsound is None:
                bot.whisper(
                    source,
                    f"The playsound you gave does not exist. Check out all the valid playsounds here: https://{self.bot.bot_domain}/playsounds",
                )
                return False

            if self.global_cooldown and source.level < Command.BYPASS_DELAY_LEVEL:
                if self.settings["global_cd_whisper"]:
                    bot.whisper(
                        source,
                        f"Another user played a sample too recently. Please try again after the global cooldown of {self.settings['global_cd']} seconds has run out.",
                    )
                return False

            if source.id in self.user_cooldown and source.level < Command.BYPASS_DELAY_LEVEL:
                if self.settings["user_cd_whisper"]:
                    bot.whisper(
                        source,
                        f"You can only play a sound every {self.settings['user_cd']} seconds. Please wait until the cooldown has run out.",
                    )
                return False

            cooldown = playsound.cooldown
            if cooldown is None:
                cooldown = self.settings["default_sample_cd"]

            if playsound_name in self.sample_cooldown and source.level < Command.BYPASS_DELAY_LEVEL:
                bot.whisper(
                    source,
                    f"The playsound {playsound.name} was played too recently. Please wait until its cooldown of {cooldown} seconds has run out.",
                )
                return False

            if not playsound.enabled:
                bot.whisper(
                    source,
                    f"The playsound you gave is disabled. Check out all the valid playsounds here: https://{self.bot.bot_domain}/playsounds",
                )
                return False

            payload = {
                "link": playsound.link,
                "volume": int(round(playsound.volume * self.settings["global_volume"] / 100)),
            }

            log.debug(f"Playsound module is emitting payload: {json.dumps(payload)}")
            bot.websocket_manager.emit("play_sound", payload)

            if self.settings["confirmation_whisper"]:
                bot.whisper(source, f"Successfully played the sound {playsound_name} on stream!")

            self.global_cooldown = True
            self.user_cooldown.add(source.id)
            self.sample_cooldown.add(playsound.name)
            bot.execute_delayed(cooldown, self.sample_cooldown.remove, playsound.name)
            bot.execute_delayed(self.settings["user_cd"], self.user_cooldown.remove, source.id)
            bot.execute_delayed(self.settings["global_cd"], self.reset_global_cd)

    @staticmethod
    def parse_playsound_arguments(message):
        parser = ArgumentParser()
        parser.add_argument("--volume", dest="volume", type=int)
        parser.add_argument("--cooldown", dest="cooldown", type=str)
        parser.add_argument("--enabled", dest="enabled", action="store_true")
        parser.add_argument("--disabled", dest="enabled", action="store_false")
        parser.set_defaults(volume=None, cooldown=None, enabled=None)

        try:
            args, unknown = parser.parse_known_args(message.split())
        except SystemExit:
            return False, False, False
        except:
            log.exception("Unhandled exception in add_command")
            return False, False, False

        options = {k: v for k, v in vars(args).items() if v is not None}

        if len(unknown) < 1:
            return False, False, False

        name = unknown[0]
        link = None if len(unknown) < 2 else " ".join(unknown[1:])

        return options, name, link

    @staticmethod
    def massage_name(name):
        if name is not None:
            return name.lower()

        return name

    re_valid_names = re.compile("^[a-z0-9\\-_]+$")

    @staticmethod
    def validate_name(name):
        return name is not None and PlaysoundModule.re_valid_names.match(name)

    re_valid_links = re.compile("^https://\\S*$")

    @staticmethod
    def validate_link(link):
        return link is not None and PlaysoundModule.re_valid_links.match(link)

    def update_link(self, bot, source, playsound, link):
        if link is not None:
            if not self.validate_link(link):
                bot.whisper(
                    source, "Error: Invalid link. Valid links must start with https:// " "and cannot contain spaces"
                )
                return False
            playsound.link = link
        return True

    @staticmethod
    def validate_volume(volume):
        return volume is not None and 0 <= volume <= 100

    def update_volume(self, bot, source, playsound, parsed_options):
        if "volume" in parsed_options:
            if not self.validate_volume(parsed_options["volume"]):
                bot.whisper(source, "Error: Volume must be between 0 and 100.")
                return False
            playsound.volume = parsed_options["volume"]
        return True

    @staticmethod
    def validate_cooldown(cooldown):
        return cooldown is None or cooldown >= 0

    def update_cooldown(self, bot, source, playsound, parsed_options):
        if "cooldown" in parsed_options:
            if parsed_options["cooldown"].lower() == "none":
                cooldown_int = None
            else:
                try:
                    cooldown_int = int(parsed_options["cooldown"])
                except ValueError:
                    bot.whisper(source, 'Error: Cooldown must be a number or the string "none".')
                    return False

            if not self.validate_cooldown(cooldown_int):
                bot.whisper(source, "Error: Cooldown must be positive.")
                return False

            playsound.cooldown = cooldown_int
        return True

    @staticmethod
    def update_enabled(bot, source, playsound, parsed_options):
        if "enabled" in parsed_options:
            playsound.enabled = parsed_options["enabled"]

        return True
MIT License
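To make the record above easier to follow, here is a minimal, self-contained sketch of how PlaysoundModule.parse_playsound_arguments splits an !add playsound message into options, name and link. The sample message, the playsound name "doot" and the URL are invented for illustration, and the function body is reproduced standalone so the sketch runs without pajbot installed.

from argparse import ArgumentParser


def parse_playsound_arguments(message):
    # Mirrors PlaysoundModule.parse_playsound_arguments from the context above:
    # optional flags are handled by argparse, every unrecognised token becomes
    # the positional playsound name followed by the link.
    parser = ArgumentParser()
    parser.add_argument("--volume", dest="volume", type=int)
    parser.add_argument("--cooldown", dest="cooldown", type=str)
    parser.add_argument("--enabled", dest="enabled", action="store_true")
    parser.add_argument("--disabled", dest="enabled", action="store_false")
    parser.set_defaults(volume=None, cooldown=None, enabled=None)
    try:
        args, unknown = parser.parse_known_args(message.split())
    except SystemExit:
        return False, False, False
    options = {k: v for k, v in vars(args).items() if v is not None}
    if len(unknown) < 1:
        return False, False, False
    name = unknown[0]
    link = None if len(unknown) < 2 else " ".join(unknown[1:])
    return options, name, link


# Hypothetical command; the name and URL are made up.
options, name, link = parse_playsound_arguments("doot https://example.com/doot.mp3 --volume 50 --cooldown none")
print(options)  # {'volume': 50, 'cooldown': 'none'}  (enabled stays unset and is dropped)
print(name)     # doot
print(link)     # https://example.com/doot.mp3

add_playsound_command then passes this options dict to update_volume, update_cooldown and update_enabled, and any False returned by the parser is reported back to the user as invalid usage.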
oamg/leapp
leapp/repository/scan.py
scan_libraries
python
def scan_libraries(repo, path, repo_path):
    if os.listdir(path):
        repo.add(DefinitionKind.LIBRARIES, os.path.relpath(path, repo_path))
Scans libraries and adds them to the repository.

:param repo: Instance of the repository
:type repo: :py:class:`leapp.repository.Repository`
:param path: path to the libraries
:type path: str
:param repo_path: path to the repository
:type repo_path: str
https://github.com/oamg/leapp/blob/c8faeb1599edb3e7265b5451c55ce6c792519078/leapp/repository/scan.py#L212-L224
import os

from leapp.repository import Repository, DefinitionKind
from leapp.repository.manager import RepositoryManager
from leapp.repository.actor_definition import ActorDefinition
from leapp.exceptions import RepositoryConfigurationError
from leapp.utils.repository import get_global_repositories_data, get_user_config_repo_data, find_repos


def _make_repo_lookup(include_locals):
    data = {}
    for repo_id, entry in get_global_repositories_data().items():
        if entry['enabled']:
            data.update({entry['id']: entry['path']})
    if include_locals:
        data.update(get_user_config_repo_data().get('repos', {}))
    return data


def _resolve_repository_links(manager, include_locals):
    repo_lookup = _make_repo_lookup(include_locals=include_locals)
    if not repo_lookup and manager.get_missing_repo_links():
        raise RepositoryConfigurationError('No repos configured? Try adding some with "snactor repo find"')

    finished = False
    while not finished:
        missing = manager.get_missing_repo_links()
        for repo_id in missing:
            if repo_id in repo_lookup:
                manager.add_repo(scan_repo(repo_lookup[repo_id]))
                break
        else:
            finished = True

    if manager.get_missing_repo_links():
        raise RepositoryConfigurationError('Missing repositories detected: {}'.format(', '.join(missing)))


def find_and_scan_repositories(path, manager=None, include_locals=False):
    if os.path.exists(path):
        manager = manager or RepositoryManager()
        for repository in find_repos(path):
            manager.add_repo(scan_repo(repository))
        _resolve_repository_links(manager=manager, include_locals=include_locals)
    return manager


def scan_repo(path):
    path = os.path.abspath(path)
    return scan(Repository(path), path)


def scan(repository, path):
    repository.log.debug("Scanning path %s", path)
    scan_tasks = (
        ('topics', scan_topics),
        ('models', scan_models),
        ('actors', scan_actors),
        ('tags', scan_tags),
        ('workflows', scan_workflows),
        ('files', scan_files),
        ('libraries', scan_libraries),
        ('tests', scan_tests),
        ('tools', scan_tools),
        ('apis', scan_apis))

    dirs = [e for e in os.listdir(path) if os.path.isdir(os.path.join(path, e))]
    for name, task in scan_tasks:
        if name in dirs:
            task(repository, os.path.join(path, name), path)

    return repository


def scan_topics(repo, path, repo_path):
    for root, unused, files in os.walk(path):
        for module in files:
            unused, ext = os.path.splitext(module)
            if ext == '.py':
                path = os.path.join(root, module)
                repo.add(DefinitionKind.TOPIC, os.path.relpath(path, repo_path))


def scan_actors(repo, path, repo_path):
    for root, unused, files in os.walk(path):
        for module in files:
            if module == 'actor.py':
                rel_path = os.path.relpath(root, repo_path)
                repo.add(DefinitionKind.ACTOR, scan(ActorDefinition(rel_path, repo_path, log=repo.log), root))


def scan_tags(repo, path, repo_path):
    for root, unused, files in os.walk(path):
        for module in files:
            unused, ext = os.path.splitext(module)
            if ext == '.py':
                path = os.path.join(root, module)
                repo.add(DefinitionKind.TAG, os.path.relpath(path, repo_path))


def scan_models(repo, path, repo_path):
    for root, unused, files in os.walk(path):
        for module in files:
            unused, ext = os.path.splitext(module)
            if ext == '.py':
                path = os.path.join(root, module)
                repo.add(DefinitionKind.MODEL, os.path.relpath(path, repo_path))


def scan_workflows(repo, path, repo_path):
    for root, unused, files in os.walk(path):
        for module in files:
            unused, ext = os.path.splitext(module)
            if ext == '.py':
                path = os.path.join(root, module)
                repo.add(DefinitionKind.WORKFLOW, os.path.relpath(path, repo_path))


def scan_files(repo, path, repo_path):
    if os.listdir(path):
        repo.add(DefinitionKind.FILES, os.path.relpath(path, repo_path))
Apache License 2.0
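As a rough usage sketch of the scan_libraries record above: the function only registers the libraries directory when it is non-empty, and it stores the path relative to the repository root. The FakeRepo stand-in, the temporary directory layout and the plain "libraries" string (in place of DefinitionKind.LIBRARIES) below are hypothetical, chosen so the sketch runs without leapp installed.

import os
import tempfile


class FakeRepo:
    # Hypothetical stand-in for leapp.repository.Repository, only to show what
    # scan_libraries records; the real repo.add() takes a DefinitionKind.
    def __init__(self):
        self.added = []

    def add(self, kind, rel_path):
        self.added.append((kind, rel_path))


def scan_libraries(repo, path, repo_path):
    # Same logic as the record above, with the DefinitionKind constant replaced
    # by a plain string for this standalone sketch.
    if os.listdir(path):
        repo.add("libraries", os.path.relpath(path, repo_path))


# Hypothetical repository layout: <tmp>/libraries/shared.py
repo_path = tempfile.mkdtemp()
lib_path = os.path.join(repo_path, "libraries")
os.makedirs(lib_path)
open(os.path.join(lib_path, "shared.py"), "w").close()

repo = FakeRepo()
scan_libraries(repo, lib_path, repo_path)
print(repo.added)  # [('libraries', 'libraries')]

In the real scan() dispatcher shown in the context, scan_libraries is only invoked when a directory literally named libraries exists at the repository root, so a missing or empty directory simply leaves the repository without a LIBRARIES entry.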