import collections


# Deque-backed reader whose readline()/readexactly() are generators: they
# yield while starved for data and deliver their result via StopIteration.value.
class BytesDequeStreamReader:
    def __init__(self):
        self.buffer = collections.deque()
        self.eof = False

    def readline(self):
        b = []
        while True:
            # Block (by yielding) until a chunk is available.
            while True:
                try:
                    c = self.buffer.popleft()
                except IndexError:
                    yield
                else:
                    break
            n = c.find(b'\n') + 1
            if n == len(c):
                b.append(c)
                break
            elif n > 0:
                b.append(c[:n])
                self.buffer.appendleft(c[n:])
                break
            else:
                b.append(c)
        return b''.join(b)

    def readexactly(self, n):
        if n == 0:
            return b''
        b = []
        while True:
            while True:
                try:
                    c = self.buffer.popleft()
                except IndexError:
                    yield
                else:
                    break
            n -= len(c)
            if n == 0:
                b.append(c)
                break
            elif n < 0:
                b.append(c[:n])
                self.buffer.appendleft(c[n:])
                break
            else:
                b.append(c)
        return b''.join(b)

    def feed_data(self, data):
        self.buffer.append(data)

    def feed_eof(self):
        self.eof = True

    def at_eof(self):
        return self.eof and not self.buffer
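# Minimal driving sketch for the reader above: readline() is a generator, so a
# caller advances it with next() and receives the completed line through
# StopIteration.value once enough bytes have been fed.
reader = BytesDequeStreamReader()
reader.feed_data(b'hello ')
reader.feed_data(b'world\nrest')
try:
    next(reader.readline())  # resumes until a full line is buffered
except StopIteration as stop:
    assert stop.value == b'hello world\n'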
def apply_fixups(reporoot: Path):
    output_dir = reporoot / 'docs/reference'
    remove_extra_files(output_dir)
    page_paths = output_dir.glob('qualtran/**/*.md')
    for path in page_paths:
        if fixup_all_symbols_page(path):
            continue
        with path.open('r') as f:
            content = f.read()
        content = fixup_suffix(content)
        with path.open('w') as f:
            f.write(content)
def create_parser():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description='learn BPE-based word segmentation')
    parser.add_argument(
        '--input', '-i', type=argparse.FileType('r'), default=sys.stdin,
        metavar='PATH', help='Input file (default: standard input).')
    parser.add_argument(
        '--codes', '-c', type=argparse.FileType('r'), metavar='PATH', required=True,
        help='File with BPE codes (created by learn_bpe.py).')
    parser.add_argument(
        '--merges', '-m', type=int, default=-1, metavar='INT',
        help='Use this many BPE operations (<= number of learned symbols); '
             'default: Apply all the learned merge operations')
    parser.add_argument(
        '--output', '-o', type=argparse.FileType('w'), default=sys.stdout,
        metavar='PATH', help='Output file (default: standard output)')
    parser.add_argument(
        '--separator', '-s', type=str, default='@@', metavar='STR',
        help="Separator between non-final subword units (default: '%(default)s')")
    parser.add_argument(
        '--vocabulary', type=argparse.FileType('r'), default=None, metavar='PATH',
        help='Vocabulary file (built with get_vocab.py). If provided, this script '
             'reverts any merge operations that produce an OOV.')
    parser.add_argument(
        '--vocabulary-threshold', type=int, default=None, metavar='INT',
        help='Vocabulary threshold. If vocabulary is provided, any word with '
             'frequency < threshold will be treated as OOV')
    parser.add_argument(
        '--glossaries', type=str, nargs='+', default=None, metavar='STR',
        help='Glossaries. The strings provided in glossaries will not be affected '
             'by the BPE (i.e. they will neither be broken into subwords, nor '
             'concatenated with other subwords)')
    return parser
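# Usage sketch: parsing a typical apply-BPE-style invocation with the parser
# above (the file name is illustrative; --codes opens the file for reading).
# args = create_parser().parse_args(['--codes', 'codes.bpe', '--merges', '5000'])
# assert args.separator == '@@' and args.merges == 5000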
class AddressBookUI(UserInterface):
    def assemble(self):
        page_layout = PageLayout(
            document_layout=Container(),
            contents_layout=ColumnLayout(
                ColumnOptions('main', size=ResponsiveSize(lg=6))).with_slots())
        self.define_page(HTML5Page).use_layout(page_layout)
        find = self.define_view('/', title='Addresses')
        find.set_slot('main', AddressBookPanel.factory())
class TestNeuTral(unittest.TestCase):
    def setUp(self):
        self.n_train = 200
        self.n_test = 100
        self.contamination = 0.1
        self.roc_floor = 0.8
        self.X_train, self.X_test, self.y_train, self.y_test = generate_data(
            n_train=self.n_train, n_test=self.n_test, n_features=10,
            contamination=self.contamination, random_state=42)
        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        self.clf = NeuTraL(epochs=1, device=device)
        self.clf.fit(self.X_train)

    def test_parameters(self):
        assert hasattr(self.clf, 'decision_scores_') and self.clf.decision_scores_ is not None
        assert hasattr(self.clf, 'labels_') and self.clf.labels_ is not None
        assert hasattr(self.clf, 'threshold_') and self.clf.threshold_ is not None

    def test_prediction_scores(self):
        pred_scores = self.clf.decision_function(self.X_test)
        assert_equal(pred_scores.shape[0], self.X_test.shape[0])

    def test_prediction_labels(self):
        pred_labels = self.clf.predict(self.X_test)
        assert_equal(pred_labels.shape, self.y_test.shape)

    def test_prediction_labels_confidence(self):
        pred_labels, confidence = self.clf.predict(self.X_test, return_confidence=True)
        assert_equal(pred_labels.shape, self.y_test.shape)
        assert_equal(confidence.shape, self.y_test.shape)
        assert confidence.min() >= 0
        assert confidence.max() <= 1

    def tearDown(self):
        pass
class TestAssertionRewrite:
    def test_place_initial_imports(self) -> None:
        s = "'Doc string'\nother = stuff"
        m = rewrite(s)
        assert isinstance(m.body[0], ast.Expr)
        for imp in m.body[1:3]:
            assert isinstance(imp, ast.Import)
            assert imp.lineno == 2
            assert imp.col_offset == 0
        assert isinstance(m.body[3], ast.Assign)

        s = 'from __future__ import division\nother_stuff'
        m = rewrite(s)
        assert isinstance(m.body[0], ast.ImportFrom)
        for imp in m.body[1:3]:
            assert isinstance(imp, ast.Import)
            assert imp.lineno == 2
            assert imp.col_offset == 0
        assert isinstance(m.body[3], ast.Expr)

        s = "'doc string'\nfrom __future__ import division"
        m = rewrite(s)
        assert isinstance(m.body[0], ast.Expr)
        assert isinstance(m.body[1], ast.ImportFrom)
        for imp in m.body[2:4]:
            assert isinstance(imp, ast.Import)
            assert imp.lineno == 2
            assert imp.col_offset == 0

        s = "'doc string'\nfrom __future__ import division\nother"
        m = rewrite(s)
        assert isinstance(m.body[0], ast.Expr)
        assert isinstance(m.body[1], ast.ImportFrom)
        for imp in m.body[2:4]:
            assert isinstance(imp, ast.Import)
            assert imp.lineno == 3
            assert imp.col_offset == 0
        assert isinstance(m.body[4], ast.Expr)

        s = 'from . import relative\nother_stuff'
        m = rewrite(s)
        for imp in m.body[:2]:
            assert isinstance(imp, ast.Import)
            assert imp.lineno == 1
            assert imp.col_offset == 0
        assert isinstance(m.body[3], ast.Expr)

    def test_location_is_set(self) -> None:
        # Whitespace inside this source string was collapsed in extraction; it is
        # reconstructed here so the lineno/col_offset assertions below hold.
        s = textwrap.dedent('\n\nassert False, (\n\n    "Ouch"\n  )\n\n')
        m = rewrite(s)
        for node in m.body:
            if isinstance(node, ast.Import):
                continue
            for n in [node, *ast.iter_child_nodes(node)]:
                assert n.lineno == 3
                assert n.col_offset == 0
                assert n.end_lineno == 6
                assert n.end_col_offset == 3

    def test_dont_rewrite(self) -> None:
        s = "'PYTEST_DONT_REWRITE'\nassert 14"
        m = rewrite(s)
        assert len(m.body) == 2
        assert isinstance(m.body[1], ast.Assert)
        assert m.body[1].msg is None

    def test_dont_rewrite_plugin(self, pytester: Pytester) -> None:
        contents = {
            'conftest.py': "pytest_plugins = 'plugin'; import plugin",
            'plugin.py': "'PYTEST_DONT_REWRITE'",
            'test_foo.py': 'def test_foo(): pass',
        }
        pytester.makepyfile(**contents)
        result = pytester.runpytest_subprocess()
        assert 'warning' not in ''.join(result.outlines)

    def test_rewrites_plugin_as_a_package(self, pytester: Pytester) -> None:
        pkgdir = pytester.mkpydir('plugin')
        # The fixture decorator was stripped in extraction; it is restored here
        # because the test below injects special_asserter as a fixture.
        pkgdir.joinpath('__init__.py').write_text(
            'import pytest\n'
            '@pytest.fixture\n'
            'def special_asserter():\n'
            '    def special_assert(x, y):\n'
            '        assert x == y\n'
            '    return special_assert\n',
            encoding='utf-8',
        )
        pytester.makeconftest('pytest_plugins = ["plugin"]')
        pytester.makepyfile('def test(special_asserter): special_asserter(1, 2)\n')
        result = pytester.runpytest()
        result.stdout.fnmatch_lines(['*assert 1 == 2*'])

    def test_honors_pep_235(self, pytester: Pytester, monkeypatch) -> None:
        pytester.makepyfile(test_y='x = 1')
        xdir = pytester.mkdir('x')
        pytester.mkpydir(str(xdir.joinpath('test_Y')))
        xdir.joinpath('test_Y').joinpath('__init__.py').write_text('x = 2', encoding='utf-8')
        pytester.makepyfile(
            'import test_y\n'
            'import test_Y\n'
            'def test():\n'
            '    assert test_y.x == 1\n'
            '    assert test_Y.x == 2\n'
        )
        monkeypatch.syspath_prepend(str(xdir))
        pytester.runpytest().assert_outcomes(passed=1)

    def test_name(self, request) -> None:
        def f1() -> None:
            assert False

        assert getmsg(f1) == 'assert False'

        def f2() -> None:
            f = False
            assert f

        assert getmsg(f2) == 'assert False'

        def f3() -> None:
            assert a_global

        assert getmsg(f3, {'a_global': False}) == 'assert False'

        def f4() -> None:
            assert sys == 42

        msg = getmsg(f4, {'sys': sys})
        assert msg == 'assert sys == 42'

        def f5() -> None:
            assert cls == 42

        class X:
            pass

        msg = getmsg(f5, {'cls': X})
        assert msg is not None
        lines = msg.splitlines()
        assert lines == ['assert cls == 42']

    def test_assertrepr_compare_same_width(self, request) -> None:
        def f() -> None:
            assert ('' * 5) + 'A' == ('' * 5) + 'B'

        msg = getmsg(f)
        assert msg is not None
        line = msg.splitlines()[0]
        if request.config.getoption('verbose') > 1:
            assert line == "assert 'A' == 'B'"
        else:
            assert line == "assert '...A' == '...B'"

    def test_dont_rewrite_if_hasattr_fails(self, request) -> None:
        class Y:
            def __getattr__(self, attribute_name):
                raise KeyError()

            def __repr__(self) -> str:
                return 'Y'

            def __init__(self) -> None:
                self.foo = 3

        def f() -> None:
            assert cls().foo == 2

        msg = getmsg(f, {'cls': Y})
        assert msg is not None
        lines = msg.splitlines()
        assert lines == ['assert 3 == 2', ' +  where 3 = Y.foo', ' +  where Y = cls()']

    def test_assert_already_has_message(self) -> None:
        def f():
            assert False, 'something bad!'

        assert getmsg(f) == 'AssertionError: something bad!\nassert False'

    def test_assertion_message(self, pytester: Pytester) -> None:
        pytester.makepyfile(
            """
            def test_foo():
                assert 1 == 2, "The failure message"
            """
        )
        result = pytester.runpytest()
        assert result.ret == 1
        result.stdout.fnmatch_lines(['*AssertionError*The failure message*', '*assert 1 == 2*'])

    def test_assertion_message_multiline(self, pytester: Pytester) -> None:
        pytester.makepyfile(
            """
            def test_foo():
                assert 1 == 2, "A multiline\\nfailure message"
            """
        )
        result = pytester.runpytest()
        assert result.ret == 1
        result.stdout.fnmatch_lines(['*AssertionError*A multiline*', '*failure message*', '*assert 1 == 2*'])

    def test_assertion_message_tuple(self, pytester: Pytester) -> None:
        pytester.makepyfile(
            """
            def test_foo():
                assert 1 == 2, (1, 2)
            """
        )
        result = pytester.runpytest()
        assert result.ret == 1
        result.stdout.fnmatch_lines(['*AssertionError*%s*' % repr((1, 2)), '*assert 1 == 2*'])

    def test_assertion_message_expr(self, pytester: Pytester) -> None:
        pytester.makepyfile(
            """
            def test_foo():
                assert 1 == 2, 1 + 2
            """
        )
        result = pytester.runpytest()
        assert result.ret == 1
        result.stdout.fnmatch_lines(['*AssertionError*3*', '*assert 1 == 2*'])

    def test_assertion_message_escape(self, pytester: Pytester) -> None:
        pytester.makepyfile(
            """
            def test_foo():
                assert 1 == 2, 'To be escaped: %'
            """
        )
        result = pytester.runpytest()
        assert result.ret == 1
        result.stdout.fnmatch_lines(['*AssertionError: To be escaped: %', '*assert 1 == 2'])

    def test_assertion_messages_bytes(self, pytester: Pytester) -> None:
        pytester.makepyfile("def test_bytes_assertion():\n    assert False, b'ohai!'\n")
        result = pytester.runpytest()
        assert result.ret == 1
        result.stdout.fnmatch_lines(["*AssertionError: b'ohai!'", '*assert False'])

    def test_boolop(self) -> None:
        def f1() -> None:
            f = g = False
            assert f and g

        assert getmsg(f1) == 'assert (False)'

        def f2() -> None:
            f = True
            g = False
            assert f and g

        assert getmsg(f2) == 'assert (True and False)'

        def f3() -> None:
            f = False
            g = True
            assert f and g

        assert getmsg(f3) == 'assert (False)'

        def f4() -> None:
            f = g = False
            assert f or g

        assert getmsg(f4) == 'assert (False or False)'

        def f5() -> None:
            f = g = False
            assert not f and not g

        getmsg(f5, must_pass=True)

        def x() -> bool:
            return False

        def f6() -> None:
            assert x() and x()

        assert getmsg(f6, {'x': x}) == 'assert (False)\n +  where False = x()'

        def f7() -> None:
            assert False or x()

        assert getmsg(f7, {'x': x}) == 'assert (False or False)\n +  where False = x()'

        def f8() -> None:
            assert 1 in {} and 2 in {}

        assert getmsg(f8) == 'assert (1 in {})'

        def f9() -> None:
            x = 1
            y = 2
            assert x in {1: None} and y in {}

        assert getmsg(f9) == 'assert (1 in {1: None} and 2 in {})'

        def f10() -> None:
            f = True
            g = False
            assert f or g

        getmsg(f10, must_pass=True)

        def f11() -> None:
            f = g = h = lambda: True
            assert f() and g() and h()

        getmsg(f11, must_pass=True)

    def test_short_circuit_evaluation(self) -> None:
        def f1() -> None:
            assert True or explode

        getmsg(f1, must_pass=True)

        def f2() -> None:
            x = 1
            assert x == 1 or x == 2

        getmsg(f2, must_pass=True)

    def test_unary_op(self) -> None:
        def f1() -> None:
            x = True
            assert not x

        assert getmsg(f1) == 'assert not True'

        def f2() -> None:
            x = 0
            assert ~x + 1

        assert getmsg(f2) == 'assert (~0 + 1)'

        def f3() -> None:
            x = 3
            assert -x + x

        assert getmsg(f3) == 'assert (-3 + 3)'

        def f4() -> None:
            x = 0
            assert +x + x

        assert getmsg(f4) == 'assert (+0 + 0)'

    def test_binary_op(self) -> None:
        def f1() -> None:
            x = 1
            y = -1
            assert x + y

        assert getmsg(f1) == 'assert (1 + -1)'

        def f2() -> None:
            assert not 5 % 4

        assert getmsg(f2) == 'assert not (5 % 4)'

    def test_boolop_percent(self) -> None:
        def f1() -> None:
            assert 3 % 2 and False

        assert getmsg(f1) == 'assert ((3 % 2) and False)'

        def f2() -> None:
            assert False or 4 % 2

        assert getmsg(f2) == 'assert (False or (4 % 2))'

    def test_at_operator_issue1290(self, pytester: Pytester) -> None:
        pytester.makepyfile(
            """
            class Matrix(object):
                def __init__(self, num):
                    self.num = num
                def __matmul__(self, other):
                    return self.num * other.num

            def test_multmat_operator():
                assert Matrix(2) @ Matrix(3) == 6
            """
        )
        pytester.runpytest().assert_outcomes(passed=1)

    def test_starred_with_side_effect(self, pytester: Pytester) -> None:
        pytester.makepyfile(
            """
            def test():
                f = lambda x: x
                x = iter([1, 2, 3])
                assert 2 * next(x) == f(*[next(x)])
            """
        )
        pytester.runpytest().assert_outcomes(passed=1)

    def test_call(self) -> None:
        def g(a=42, *args, **kwargs) -> bool:
            return False

        ns = {'g': g}

        def f1() -> None:
            assert g()

        assert getmsg(f1, ns) == 'assert False\n +  where False = g()'

        def f2() -> None:
            assert g(1)

        assert getmsg(f2, ns) == 'assert False\n +  where False = g(1)'

        def f3() -> None:
            assert g(1, 2)

        assert getmsg(f3, ns) == 'assert False\n +  where False = g(1, 2)'

        def f4() -> None:
            assert g(1, g=42)

        assert getmsg(f4, ns) == 'assert False\n +  where False = g(1, g=42)'

        def f5() -> None:
            assert g(1, 3, g=23)

        assert getmsg(f5, ns) == 'assert False\n +  where False = g(1, 3, g=23)'

        def f6() -> None:
            seq = [1, 2, 3]
            assert g(*seq)

        assert getmsg(f6, ns) == 'assert False\n +  where False = g(*[1, 2, 3])'

        def f7() -> None:
            x = 'a'
            assert g(**{x: 2})

        assert getmsg(f7, ns) == "assert False\n +  where False = g(**{'a': 2})"

    def test_attribute(self) -> None:
        class X:
            g = 3

        ns = {'x': X}

        def f1() -> None:
            assert not x.g

        assert getmsg(f1, ns) == 'assert not 3\n +  where 3 = x.g'

        def f2() -> None:
            x.a = False
            assert x.a

        assert getmsg(f2, ns) == 'assert False\n +  where False = x.a'

    def test_comparisons(self) -> None:
        def f1() -> None:
            a, b = range(2)
            assert b < a

        assert getmsg(f1) == 'assert 1 < 0'

        def f2() -> None:
            a, b, c = range(3)
            assert a > b > c

        assert getmsg(f2) == 'assert 0 > 1'

        def f3() -> None:
            a, b, c = range(3)
            assert a < b > c

        assert getmsg(f3) == 'assert 1 > 2'

        def f4() -> None:
            a, b, c = range(3)
            assert a < b <= c

        getmsg(f4, must_pass=True)

        def f5() -> None:
            a, b, c = range(3)
            assert a < b
            assert b < c

        getmsg(f5, must_pass=True)

    def test_len(self, request) -> None:
        def f():
            values = list(range(10))
            assert len(values) == 11

        msg = getmsg(f)
        assert msg == 'assert 10 == 11\n +  where 10 = len([0, 1, 2, 3, 4, 5, ...])'

    def test_custom_reprcompare(self, monkeypatch) -> None:
        def my_reprcompare1(op, left, right) -> str:
            return '42'

        monkeypatch.setattr(util, '_reprcompare', my_reprcompare1)

        def f1() -> None:
            assert 42 < 3

        assert getmsg(f1) == 'assert 42'

        def my_reprcompare2(op, left, right) -> str:
            return f'{left} {op} {right}'

        monkeypatch.setattr(util, '_reprcompare', my_reprcompare2)

        def f2() -> None:
            assert 1 < 3 < 5 <= 4 < 7

        assert getmsg(f2) == 'assert 5 <= 4'

    def test_assert_raising__bool__in_comparison(self) -> None:
        def f() -> None:
            class A:
                def __bool__(self):
                    raise ValueError(42)

                def __lt__(self, other):
                    return A()

                def __repr__(self):
                    return '<MY42 object>'

            def myany(x) -> bool:
                return False

            assert myany(A() < 0)

        msg = getmsg(f)
        assert msg is not None
        assert '<MY42 object> < 0' in msg

    def test_assert_handling_raise_in__iter__(self, pytester: Pytester) -> None:
        pytester.makepyfile(
            """
            class A:
                def __iter__(self):
                    raise ValueError()

                def __eq__(self, o: object) -> bool:
                    return self is o

                def __repr__(self):
                    return "<A object>"

            assert A() == A()
            """
        )
        result = pytester.runpytest()
        result.stdout.fnmatch_lines(['*E*assert <A object> == <A object>'])

    def test_formatchar(self) -> None:
        def f() -> None:
            assert '%test' == 'test'

        msg = getmsg(f)
        assert msg is not None
        assert msg.startswith("assert '%test' == 'test'")

    def test_custom_repr(self, request) -> None:
        def f() -> None:
            class Foo:
                a = 1

                def __repr__(self):
                    return '\n{ \n~ \n}'

            f = Foo()
            assert 0 == f.a

        msg = getmsg(f)
        assert msg is not None
        lines = util._format_lines([msg])
        assert lines == ['assert 0 == 1\n +  where 1 = \\n{ \\n~ \\n}.a']

    def test_custom_repr_non_ascii(self) -> None:
        def f() -> None:
            class A:
                name = 'ä'

                def __repr__(self):
                    return self.name.encode('UTF-8')

            a = A()
            assert not a.name

        msg = getmsg(f)
        assert msg is not None
        assert 'UnicodeDecodeError' not in msg
        assert 'UnicodeEncodeError' not in msg
class WideResNet(nn.Module):
    def __init__(self, depth, num_classes, widen_factor=1, dropRate=0.0):
        super(WideResNet, self).__init__()
        nChannels = [16, 16 * widen_factor, 32 * widen_factor, 64 * widen_factor]
        assert (depth - 4) % 6 == 0, 'depth should be 6n+4'
        n = (depth - 4) // 6
        block = BasicBlock
        self.conv1 = nn.Conv2d(3, nChannels[0], kernel_size=3, stride=1, padding=1, bias=False)
        self.block1 = NetworkBlock(n, nChannels[0], nChannels[1], block, 1, dropRate)
        self.block2 = NetworkBlock(n, nChannels[1], nChannels[2], block, 2, dropRate)
        self.block3 = NetworkBlock(n, nChannels[2], nChannels[3], block, 2, dropRate)
        self.bn1 = nn.BatchNorm2d(nChannels[3])
        self.relu = nn.ReLU(inplace=True)
        self.fc = nn.Linear(nChannels[3], num_classes)
        self.nChannels = nChannels[3]
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2.0 / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                m.bias.data.zero_()

    def forward(self, x, is_feat=False, preact=False):
        out = self.conv1(x)
        out = self.block1(out)
        out = self.block2(out)
        out = self.block3(out)
        out = self.relu(self.bn1(out))
        out = F.avg_pool2d(out, 8)
        out = out.view(-1, self.nChannels)
        embedding = out
        out = self.fc(out)
        return out, embedding
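# Shape sketch: WRN-28-10 on CIFAR-sized input. forward() returns the logits
# together with the pre-classifier embedding (64 * widen_factor channels).
# model = WideResNet(depth=28, num_classes=10, widen_factor=10)
# logits, embedding = model(torch.randn(2, 3, 32, 32))
# logits.shape == (2, 10); embedding.shape == (2, 640)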
class Metric:
    def __init__(self, n_jobs=1, device='cpu', batch_size=512, **kwargs):
        self.n_jobs = n_jobs
        self.device = device
        self.batch_size = batch_size
        for k, v in kwargs.items():
            setattr(self, k, v)

    def __call__(self, ref=None, gen=None, pref=None, pgen=None):
        assert (ref is None) != (pref is None), 'specify ref xor pref'
        assert (gen is None) != (pgen is None), 'specify gen xor pgen'
        if pref is None:
            pref = self.precalc(ref)
        if pgen is None:
            pgen = self.precalc(gen)
        return self.metric(pref, pgen)

    def precalc(self, molecules):
        raise NotImplementedError

    def metric(self, pref, pgen):
        raise NotImplementedError
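# Usage sketch (hypothetical subclass): an implementation only needs precalc(),
# which turns raw molecules into a cached representation, and metric(), which
# compares two such caches.
class CountMetric(Metric):
    def precalc(self, molecules):
        return {'n': len(molecules)}

    def metric(self, pref, pgen):
        return min(pref['n'], pgen['n']) / max(pref['n'], pgen['n'])

# CountMetric()(ref=['CCO', 'CCC'], gen=['CCO']) -> 0.5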
class _AcquireContext:
    __slots__ = ['_acquire', '_conn']

    def __init__(self, acquire):
        self._acquire = acquire
        self._conn = None

    async def __aenter__(self):
        self._conn = await self._acquire()
        return self._conn

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        conn, self._conn = self._conn, None
        await conn.release()

    def __await__(self):
        return self._acquire().__await__()
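# Usage sketch with a stand-in acquire() coroutine: the `async with` form
# releases the connection on exit, while awaiting the object directly skips
# the automatic release.
import asyncio

class _FakeConn:
    async def release(self):
        print('released')

async def _fake_acquire():
    return _FakeConn()

async def _demo():
    async with _AcquireContext(_fake_acquire) as conn:
        print('acquired', conn)  # conn.release() is awaited on exit

# asyncio.run(_demo())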
class LossWriter(SummaryWriter):
    def __init__(self, log_dir=None, comment=''):
        if log_dir is None:
            log_dir = './logs/tensorboard/' + time.strftime(
                '%Y-%m-%d--%H-%M-%S', time.localtime(time.time()))
        super(LossWriter, self).__init__(log_dir=log_dir, comment=comment)

    def write_loss(self, loss_name, scalar, n_iter):
        self.add_scalar('Loss/' + loss_name, scalar, n_iter)
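# Usage sketch: scalars written with write_loss() appear under the "Loss/"
# namespace in TensorBoard, in a timestamped run directory by default.
# writer = LossWriter()
# for step, loss in enumerate([0.9, 0.5, 0.3]):
#     writer.write_loss('train', loss, step)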
def paginate_query(query, limit=50, sort_field_name=None, page_number=None, offset_val=0):
    results = list(query)
    page_token = None
    if len(results) > limit:
        start_index = getattr(results[limit], sort_field_name or 'id')
        is_datetime = False
        if isinstance(start_index, datetime):
            start_index = start_index.isoformat() + 'Z'
            # Equal datetime sort keys at the page boundary require an offset
            # so the next page does not repeat or skip rows.
            if getattr(results[limit], sort_field_name) == getattr(results[limit - 1], sort_field_name):
                offset_val = limit + offset_val
            is_datetime = True
        page_token = {
            'start_index': start_index,
            'page_number': (page_number + 1) if page_number else 1,
            'is_datetime': is_datetime,
            'offset_val': offset_val,
        }
    return results[0:limit], page_token
class WidgetGallery(QDialog):
    def __init__(self, parent=None):
        super(WidgetGallery, self).__init__(parent)
        self.originalPalette = QApplication.palette()

        styleComboBox = QComboBox()
        styleComboBox.addItems(QStyleFactory.keys())
        styleLabel = QLabel('&Style:')
        styleLabel.setBuddy(styleComboBox)
        self.useStylePaletteCheckBox = QCheckBox("&Use style's standard palette")
        self.useStylePaletteCheckBox.setChecked(True)
        disableWidgetsCheckBox = QCheckBox('&Disable widgets')

        self.createTopLeftGroupBox()
        self.createTopRightGroupBox()
        self.createBottomLeftTabWidget()
        self.createBottomRightGroupBox()
        self.createProgressBar()

        styleComboBox.textActivated.connect(self.changeStyle)
        self.useStylePaletteCheckBox.toggled.connect(self.changePalette)
        disableWidgetsCheckBox.toggled.connect(self.topLeftGroupBox.setDisabled)
        disableWidgetsCheckBox.toggled.connect(self.topRightGroupBox.setDisabled)
        disableWidgetsCheckBox.toggled.connect(self.bottomLeftTabWidget.setDisabled)
        disableWidgetsCheckBox.toggled.connect(self.bottomRightGroupBox.setDisabled)

        topLayout = QHBoxLayout()
        topLayout.addWidget(styleLabel)
        topLayout.addWidget(styleComboBox)
        topLayout.addStretch(1)
        topLayout.addWidget(self.useStylePaletteCheckBox)
        topLayout.addWidget(disableWidgetsCheckBox)

        mainLayout = QGridLayout()
        mainLayout.addLayout(topLayout, 0, 0, 1, 2)
        mainLayout.addWidget(self.topLeftGroupBox, 1, 0)
        mainLayout.addWidget(self.topRightGroupBox, 1, 1)
        mainLayout.addWidget(self.bottomLeftTabWidget, 2, 0)
        mainLayout.addWidget(self.bottomRightGroupBox, 2, 1)
        mainLayout.addWidget(self.progressBar, 3, 0, 1, 2)
        mainLayout.setRowStretch(1, 1)
        mainLayout.setRowStretch(2, 1)
        mainLayout.setColumnStretch(0, 1)
        mainLayout.setColumnStretch(1, 1)
        self.setLayout(mainLayout)

        self.setWindowTitle('Styles')
        self.changeStyle('Windows')

    def changeStyle(self, styleName):
        QApplication.setStyle(QStyleFactory.create(styleName))
        self.changePalette()

    def changePalette(self):
        if self.useStylePaletteCheckBox.isChecked():
            QApplication.setPalette(QApplication.style().standardPalette())
        else:
            QApplication.setPalette(self.originalPalette)

    def advanceProgressBar(self):
        curVal = self.progressBar.value()
        maxVal = self.progressBar.maximum()
        self.progressBar.setValue(curVal + (maxVal - curVal) // 100)

    def createTopLeftGroupBox(self):
        self.topLeftGroupBox = QGroupBox('Group 1')
        radioButton1 = QRadioButton('Radio button 1')
        radioButton2 = QRadioButton('Radio button 2')
        radioButton3 = QRadioButton('Radio button 3')
        radioButton1.setChecked(True)
        checkBox = QCheckBox('Tri-state check box')
        checkBox.setTristate(True)
        checkBox.setCheckState(Qt.CheckState.PartiallyChecked)
        layout = QVBoxLayout()
        layout.addWidget(radioButton1)
        layout.addWidget(radioButton2)
        layout.addWidget(radioButton3)
        layout.addWidget(checkBox)
        layout.addStretch(1)
        self.topLeftGroupBox.setLayout(layout)

    def createTopRightGroupBox(self):
        self.topRightGroupBox = QGroupBox('Group 2')
        defaultPushButton = QPushButton('Default Push Button')
        defaultPushButton.setDefault(True)
        togglePushButton = QPushButton('Toggle Push Button')
        togglePushButton.setCheckable(True)
        togglePushButton.setChecked(True)
        flatPushButton = QPushButton('Flat Push Button')
        flatPushButton.setFlat(True)
        layout = QVBoxLayout()
        layout.addWidget(defaultPushButton)
        layout.addWidget(togglePushButton)
        layout.addWidget(flatPushButton)
        layout.addStretch(1)
        self.topRightGroupBox.setLayout(layout)

    def createBottomLeftTabWidget(self):
        self.bottomLeftTabWidget = QTabWidget()
        self.bottomLeftTabWidget.setSizePolicy(QSizePolicy.Policy.Preferred, QSizePolicy.Policy.Ignored)
        tab1 = QWidget()
        tableWidget = QTableWidget(10, 10)
        tab1hbox = QHBoxLayout()
        tab1hbox.setContentsMargins(5, 5, 5, 5)
        tab1hbox.addWidget(tableWidget)
        tab1.setLayout(tab1hbox)
        tab2 = QWidget()
        textEdit = QTextEdit()
        textEdit.setPlainText(
            'Twinkle, twinkle, little star,\n'
            'How I wonder what you are.\n'
            'Up above the world so high,\n'
            'Like a diamond in the sky.\n'
            'Twinkle, twinkle, little star,\n'
            'How I wonder what you are!\n'
        )
        tab2hbox = QHBoxLayout()
        tab2hbox.setContentsMargins(5, 5, 5, 5)
        tab2hbox.addWidget(textEdit)
        tab2.setLayout(tab2hbox)
        self.bottomLeftTabWidget.addTab(tab1, '&Table')
        self.bottomLeftTabWidget.addTab(tab2, 'Text &Edit')

    def createBottomRightGroupBox(self):
        self.bottomRightGroupBox = QGroupBox('Group 3')
        self.bottomRightGroupBox.setCheckable(True)
        self.bottomRightGroupBox.setChecked(True)
        lineEdit = QLineEdit('s3cRe7')
        lineEdit.setEchoMode(QLineEdit.EchoMode.Password)
        spinBox = QSpinBox(self.bottomRightGroupBox)
        spinBox.setValue(50)
        dateTimeEdit = QDateTimeEdit(self.bottomRightGroupBox)
        dateTimeEdit.setDateTime(QDateTime.currentDateTime())
        slider = QSlider(Qt.Orientation.Horizontal, self.bottomRightGroupBox)
        slider.setValue(40)
        scrollBar = QScrollBar(Qt.Orientation.Horizontal, self.bottomRightGroupBox)
        scrollBar.setValue(60)
        dial = QDial(self.bottomRightGroupBox)
        dial.setValue(30)
        dial.setNotchesVisible(True)
        layout = QGridLayout()
        layout.addWidget(lineEdit, 0, 0, 1, 2)
        layout.addWidget(spinBox, 1, 0, 1, 2)
        layout.addWidget(dateTimeEdit, 2, 0, 1, 2)
        layout.addWidget(slider, 3, 0)
        layout.addWidget(scrollBar, 4, 0)
        layout.addWidget(dial, 3, 1, 2, 1)
        layout.setRowStretch(5, 1)
        self.bottomRightGroupBox.setLayout(layout)

    def createProgressBar(self):
        self.progressBar = QProgressBar()
        self.progressBar.setRange(0, 10000)
        self.progressBar.setValue(0)
        timer = QTimer(self)
        timer.timeout.connect(self.advanceProgressBar)
        timer.start(1000)
def test_BL_correction():
    wl = np.linspace(800, 950, 4) * 1e-09
    GaAs = material('GaAs')()
    thick_cell = SolarCell([Layer(material=GaAs, width=si('20um'))])
    opts = State()
    opts.position = None
    prepare_solar_cell(thick_cell, opts)
    position = np.arange(0, thick_cell.width, 1e-09)
    opts.position = position
    opts.recalculate_absorption = True
    opts.no_back_reflection = False
    opts.BL_correction = False
    opts.wavelength = wl
    solve_tmm(thick_cell, **opts)
    no_corr = thick_cell.absorbed
    opts.BL_correction = True
    solve_tmm(thick_cell, **opts)
    with_corr = thick_cell.absorbed
    assert with_corr == approx(np.array([0.0, 0.0, 0.0, 0.0]))
    assert no_corr == approx(np.array([0.0, 0.0, 0.0, 0.0]))
class Density:
    def __init__(self, gdf, values, spatial_weights, unique_id, areas=None, verbose=True):
        self.gdf = gdf
        self.sw = spatial_weights
        self.id = gdf[unique_id]
        results_list = []
        data = gdf.copy()
        if values is not None and not isinstance(values, str):
            data['mm_v'] = values
            values = 'mm_v'
        self.values = data[values]
        if areas is not None:
            if not isinstance(areas, str):
                data['mm_a'] = areas
                areas = 'mm_a'
        else:
            data['mm_a'] = data.geometry.area
            areas = 'mm_a'
        self.areas = data[areas]
        data = data.set_index(unique_id)
        for index in tqdm(data.index, total=data.shape[0], disable=not verbose):
            if index in spatial_weights.neighbors:
                neighbours = [index]
                neighbours += spatial_weights.neighbors[index]
                subset = data.loc[neighbours]
                values_list = subset[values]
                areas_list = subset[areas]
                results_list.append(np.sum(values_list) / np.sum(areas_list))
            else:
                results_list.append(np.nan)
        self.series = pd.Series(results_list, index=gdf.index)
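# Usage sketch (names are illustrative): the ratio of summed values to summed
# areas over each element's spatial-weights neighbourhood.
# density = Density(buildings, values='floor_area', spatial_weights=sw,
#                   unique_id='uID')
# buildings['density'] = density.series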
class UnixConfigLexer(RegexLexer):
    name = 'Unix/Linux config files'
    aliases = ['unixconfig', 'linuxconfig']
    filenames = []
    url = ''  # URL value truncated in the source
    version_added = '2.12'

    tokens = {
        'root': [
            (r'^#.*', Comment),
            (r'\n', Whitespace),
            (r':', Punctuation),
            (r'[0-9]+', Number),
            (r'((?!\n)[a-zA-Z0-9\_\-\s\(\),]){2,}', Text),
            (r'[^:\n]+', String),
        ],
    }
import heapq


def subset_by_max_weight(constant_counts, max_weight):
    assert max_weight > 0, max_weight
    heap = [(0, [])]
    for constant, count in constant_counts:
        weight = get_weight(count)
        tot_weight, subset = heap[0]
        if not tot_weight or tot_weight + weight <= max_weight:
            subset.append(constant)
            heapq.heapreplace(heap, (tot_weight + weight, subset))
        else:
            heapq.heappush(heap, (weight, [constant]))
    heap.sort(key=_largest_subset_sort_key)
    return heap
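# Stand-in helpers for the packer above (get_weight and
# _largest_subset_sort_key are assumed elsewhere in the module; these stubs
# are illustrative only).
def get_weight(count):
    return count

def _largest_subset_sort_key(entry):
    tot_weight, subset = entry
    return -tot_weight, subset

# subset_by_max_weight([('a', 3), ('b', 2), ('c', 4)], max_weight=5)
# packs 'a' and 'b' together (weight 5) and starts a new subset for 'c'.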
def when_program_starts_1(self):
    self.say(1.0 + 1.0)
    self.wait(1.0)
    self.say(3.0 - 1.0)
    self.wait(1.0)
    self.say(1.0 * 2.0)
    self.wait(1.0)
    self.say(2.0 / 1.0)
    self.wait(1.0)
    self.say(2.0 / 0.0)  # raises ZeroDivisionError in plain Python; Scratch reports Infinity
    self.wait(1.0)
    self.say(self.pick_random(1.0, 10.0))
    self.wait(1.0)
    self.say(2 > 1)
    self.wait(1.0)
    self.say(2 < 1)
    self.wait(1.0)
    self.say(50 == 50)
    self.wait(1.0)
    self.say(51 == 50)
    self.wait(1.0)
    self.say(121 > 50 and 2 < 50)
    self.wait(1.0)
    self.say(51 > 50 or 3 > 50)
    self.wait(1.0)
    self.say(not 51 > 50)
class ComposeMessage(Mutation):
    class Arguments:
        body = String()
        recipient = String()

    feedback = String()

    @login_required  # the '@'-prefixed decorator was stripped in extraction; restored from the '_required' residue
    def mutate(_root, info, body, recipient):
        sender = info.context.user
        if len(body) < 3:
            return ComposeMessage(feedback=_("can't you write down something more?"))
        try:
            recipient_ = Author.objects.get(username=recipient)
            validate_user_text(body)
        except Author.DoesNotExist:
            return ComposeMessage(feedback=_('no such person though'))
        except ValidationError as error:
            return ComposeMessage(feedback=error.message)
        sent = Message.objects.compose(sender, recipient_, body)
        if not sent:
            return ComposeMessage(feedback=_("we couldn't send your message"))
        return ComposeMessage(feedback=_('your message has been successfully sent'))
def fftconvolve(in1, in2, mode='full', axes=None):
    in1 = cp.ascontiguousarray(in1)
    in2 = cp.ascontiguousarray(in2)
    noaxes = axes is None
    if in1.ndim == in2.ndim == 0:
        return in1 * in2
    elif in1.ndim != in2.ndim:
        raise ValueError('in1 and in2 should have the same dimensionality')
    elif in1.size == 0 or in2.size == 0:
        return cp.array([])
    _, axes = _init_nd_shape_and_axes_sorted(in1, shape=None, axes=axes)
    axes = cp.asnumpy(axes)
    if not noaxes and not axes.size:
        raise ValueError('when provided, axes cannot be empty')
    if noaxes:
        other_axes = np.array([], dtype=cp.intc)
    else:
        other_axes = np.setdiff1d(np.arange(in1.ndim), axes)
    s1 = np.array(in1.shape)
    s2 = np.array(in2.shape)
    if not np.all((s1[other_axes] == s2[other_axes])
                  | (s1[other_axes] == 1) | (s2[other_axes] == 1)):
        raise ValueError('incompatible shapes for in1 and in2: {0} and {1}'.format(in1.shape, in2.shape))
    complex_result = (np.issubdtype(in1.dtype, np.complexfloating)
                      or np.issubdtype(in2.dtype, cp.complexfloating))
    shape = np.maximum(s1, s2)
    shape[axes] = s1[axes] + s2[axes] - 1
    if _inputs_swap_needed(mode, s1, s2):
        in1, s1, in2, s2 = in2, s2, in1, s1
    fshape = [next_fast_len(d) for d in shape[axes]]
    fslice = tuple(slice(sz) for sz in shape)
    if not complex_result:
        sp1 = cp.fft.rfftn(in1, fshape, axes=axes)
        sp2 = cp.fft.rfftn(in2, fshape, axes=axes)
        ret = cp.fft.irfftn(sp1 * sp2, fshape, axes=axes)[fslice].copy()
    else:
        sp1 = cp.fft.fftn(in1, fshape, axes=axes)
        sp2 = cp.fft.fftn(in2, fshape, axes=axes)
        ret = cp.fft.ifftn(sp1 * sp2, axes=axes)[fslice].copy()
    if mode == 'full':
        return ret
    elif mode == 'same':
        return _centered(ret, s1)
    elif mode == 'valid':
        shape_valid = shape.copy()
        shape_valid[axes] = s1[axes] - s2[axes] + 1
        return _centered(ret, shape_valid)
    else:
        raise ValueError("acceptable mode flags are 'valid', 'same', or 'full'")
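# Usage sketch, mirroring scipy.signal.fftconvolve semantics on the GPU:
# 'same'-mode smoothing of a 1-D signal with a normalized box kernel.
# sig = cp.repeat(cp.asarray([0.0, 1.0, 0.0]), 100)
# win = cp.ones(50) / 50.0
# smoothed = fftconvolve(sig, win, mode='same')  # same length as sig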
@mock.patch('translation_canary.translatable._tests', [_picky_test])
class TestTestPOT(unittest.TestCase):
    def test_success(self):
        with tempfile.NamedTemporaryFile(suffix='.pot') as potfile:
            poobj = POFile()
            poobj.append(POEntry(msgstr='test string'))
            poobj.save(potfile.name)
            self.assertTrue(testPOT(potfile.name))

    def test_some_failure(self):
        with tempfile.NamedTemporaryFile(suffix='.pot') as potfile:
            poobj = POFile()
            poobj.append(POEntry(msgstr='test string'))
            poobj.append(POEntry(msgstr='pest string'))
            poobj.save(potfile.name)
            self.assertFalse(testPOT(potfile.name))

    def test_all_failure(self):
        with tempfile.NamedTemporaryFile(suffix='.pot') as potfile:
            poobj = POFile()
            poobj.append(POEntry(msgstr='pest string'))
            poobj.append(POEntry(msgstr='past string'))
            poobj.save(potfile.name)
            self.assertFalse(testPOT(potfile.name))
class QuantReLU(nn.ReLU):
    def __init__(self, inplace: bool = False, bit=5, power=False, additive=True, grad_scale=None):
        super(QuantReLU, self).__init__(inplace)
        self.layer_type = 'QuantReLU'
        self.bit = bit
        self.power = power
        self.grad_scale = grad_scale
        if self.bit < 32:
            if power:
                if self.bit > 2:
                    self.proj_set_weight = build_power_value(B=self.bit - 1, additive=additive)
                self.proj_set_act = build_power_value(B=self.bit, additive=additive)
            self.act_alpha = torch.nn.Parameter(torch.tensor(6.0))

    def forward(self, x):
        if self.bit == 32:
            return F.relu(x, inplace=self.inplace)
        x = F.relu(x, inplace=self.inplace)
        if self.power:
            x = apot_quantization(x, self.act_alpha, self.proj_set_act, False, self.grad_scale)
        else:
            x = uniform_quantization(x, self.act_alpha, self.bit, False, self.grad_scale)
        return x

    def show_params(self):
        if self.bit != 32:
            act_alpha = round(self.act_alpha.data.item(), 3)
            print('clipping threshold activation alpha: {:2f}'.format(act_alpha))
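# Usage sketch: a drop-in ReLU that fake-quantizes activations (additive
# powers-of-two when power=True); with bit=32 it degrades to a plain ReLU.
# act = QuantReLU(bit=4, power=True)
# y = act(torch.randn(8, 16))
# act.show_params()  # prints the learned clipping threshold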
def test_ddm_form():
    options, stochastic_model, limit_state = setup('ffd')
    form_ffd = ra.Form(analysis_options=options, stochastic_model=stochastic_model, limit_state=limit_state)
    form_ffd.run()

    options, stochastic_model, limit_state = setup('ddm')
    form_ddm = ra.Form(analysis_options=options, stochastic_model=stochastic_model, limit_state=limit_state)
    form_ddm.run()

    # Finite-difference and direct-differentiation gradients should agree on beta.
    assert pytest.approx(form_ffd.beta, abs=1e-05) == form_ddm.beta
class ObjectProxy(object):
    def __init__(self, processId, proxyId, typeStr='', parent=None):
        object.__init__(self)
        self.__dict__['_processId'] = processId
        self.__dict__['_typeStr'] = typeStr
        self.__dict__['_proxyId'] = proxyId
        self.__dict__['_attributes'] = ()
        self.__dict__['_proxyOptions'] = {
            'callSync': None,
            'timeout': None,
            'returnType': None,
            'deferGetattr': None,
            'noProxyTypes': None,
            'autoProxy': None,
        }
        self.__dict__['_handler'] = RemoteEventHandler.getHandler(processId)
        self.__dict__['_handler'].registerProxy(self)

    def _setProxyOptions(self, **kwds):
        for k in kwds:
            if k not in self._proxyOptions:
                raise KeyError("Unrecognized proxy option '%s'" % k)
        self._proxyOptions.update(kwds)

    def _getValue(self):
        return self._handler.getObjValue(self)

    def _getProxyOption(self, opt):
        val = self._proxyOptions[opt]
        if val is None:
            return self._handler.getProxyOption(opt)
        return val

    def _getProxyOptions(self):
        return dict([(k, self._getProxyOption(k)) for k in self._proxyOptions])

    def __reduce__(self):
        return (unpickleObjectProxy, (self._processId, self._proxyId, self._typeStr, self._attributes))

    def __repr__(self):
        return '<ObjectProxy for process %d, object 0x%x: %s >' % (self._processId, self._proxyId, self._typeStr)

    def __getattr__(self, attr, **kwds):
        opts = self._getProxyOptions()
        for k in opts:
            if '_' + k in kwds:
                opts[k] = kwds.pop('_' + k)
        if opts['deferGetattr'] is True:
            return self._deferredAttr(attr)
        else:
            return self._handler.getObjAttr(self, attr, **opts)

    def _deferredAttr(self, attr):
        return DeferredObjectProxy(self, attr)

    def __call__(self, *args, **kwds):
        opts = self._getProxyOptions()
        for k in opts:
            if '_' + k in kwds:
                opts[k] = kwds.pop('_' + k)
        return self._handler.callObj(obj=self, args=args, kwds=kwds, **opts)

    def _getSpecialAttr(self, attr):
        return self._deferredAttr(attr)

    # Container and operator protocol methods are all forwarded the same way;
    # in-place operators use _callSync='off' so they do not wait for a result.
    def __getitem__(self, *args): return self._getSpecialAttr('__getitem__')(*args)
    def __setitem__(self, *args): return self._getSpecialAttr('__setitem__')(*args, _callSync='off')
    def __setattr__(self, *args): return self._getSpecialAttr('__setattr__')(*args, _callSync='off')
    def __str__(self, *args): return self._getSpecialAttr('__str__')(*args, _returnType='value')
    def __len__(self, *args): return self._getSpecialAttr('__len__')(*args)
    def __add__(self, *args): return self._getSpecialAttr('__add__')(*args)
    def __sub__(self, *args): return self._getSpecialAttr('__sub__')(*args)
    def __div__(self, *args): return self._getSpecialAttr('__div__')(*args)
    def __truediv__(self, *args): return self._getSpecialAttr('__truediv__')(*args)
    def __floordiv__(self, *args): return self._getSpecialAttr('__floordiv__')(*args)
    def __mul__(self, *args): return self._getSpecialAttr('__mul__')(*args)
    def __pow__(self, *args): return self._getSpecialAttr('__pow__')(*args)
    def __iadd__(self, *args): return self._getSpecialAttr('__iadd__')(*args, _callSync='off')
    def __isub__(self, *args): return self._getSpecialAttr('__isub__')(*args, _callSync='off')
    def __idiv__(self, *args): return self._getSpecialAttr('__idiv__')(*args, _callSync='off')
    def __itruediv__(self, *args): return self._getSpecialAttr('__itruediv__')(*args, _callSync='off')
    def __ifloordiv__(self, *args): return self._getSpecialAttr('__ifloordiv__')(*args, _callSync='off')
    def __imul__(self, *args): return self._getSpecialAttr('__imul__')(*args, _callSync='off')
    def __ipow__(self, *args): return self._getSpecialAttr('__ipow__')(*args, _callSync='off')
    def __rshift__(self, *args): return self._getSpecialAttr('__rshift__')(*args)
    def __lshift__(self, *args): return self._getSpecialAttr('__lshift__')(*args)
    def __irshift__(self, *args): return self._getSpecialAttr('__irshift__')(*args, _callSync='off')
    def __ilshift__(self, *args): return self._getSpecialAttr('__ilshift__')(*args, _callSync='off')
    def __eq__(self, *args): return self._getSpecialAttr('__eq__')(*args)
    def __ne__(self, *args): return self._getSpecialAttr('__ne__')(*args)
    def __lt__(self, *args): return self._getSpecialAttr('__lt__')(*args)
    def __gt__(self, *args): return self._getSpecialAttr('__gt__')(*args)
    def __le__(self, *args): return self._getSpecialAttr('__le__')(*args)
    def __ge__(self, *args): return self._getSpecialAttr('__ge__')(*args)
    def __and__(self, *args): return self._getSpecialAttr('__and__')(*args)
    def __or__(self, *args): return self._getSpecialAttr('__or__')(*args)
    def __xor__(self, *args): return self._getSpecialAttr('__xor__')(*args)
    def __iand__(self, *args): return self._getSpecialAttr('__iand__')(*args, _callSync='off')
    def __ior__(self, *args): return self._getSpecialAttr('__ior__')(*args, _callSync='off')
    def __ixor__(self, *args): return self._getSpecialAttr('__ixor__')(*args, _callSync='off')
    def __mod__(self, *args): return self._getSpecialAttr('__mod__')(*args)
    def __radd__(self, *args): return self._getSpecialAttr('__radd__')(*args)
    def __rsub__(self, *args): return self._getSpecialAttr('__rsub__')(*args)
    def __rdiv__(self, *args): return self._getSpecialAttr('__rdiv__')(*args)
    def __rfloordiv__(self, *args): return self._getSpecialAttr('__rfloordiv__')(*args)
    def __rtruediv__(self, *args): return self._getSpecialAttr('__rtruediv__')(*args)
    def __rmul__(self, *args): return self._getSpecialAttr('__rmul__')(*args)
    def __rpow__(self, *args): return self._getSpecialAttr('__rpow__')(*args)
    def __rrshift__(self, *args): return self._getSpecialAttr('__rrshift__')(*args)
    def __rlshift__(self, *args): return self._getSpecialAttr('__rlshift__')(*args)
    def __rand__(self, *args): return self._getSpecialAttr('__rand__')(*args)
    def __ror__(self, *args): return self._getSpecialAttr('__ror__')(*args)
    def __rxor__(self, *args): return self._getSpecialAttr('__rxor__')(*args)
    def __rmod__(self, *args): return self._getSpecialAttr('__rmod__')(*args)

    def __hash__(self):
        return id(self)
@fixture(params=[True, False])
def blacklisted_emails(request):
    mock_blacklisted_domains = {'BLACKLISTED_EMAIL_DOMAINS': ['blacklisted.com', 'blacklisted.net']}
    with patch('features.BLACKLISTED_EMAILS', request.param):
        with patch.dict('data.model.config.app_config', mock_blacklisted_domains):
            yield
@ratiotile_config
def test_ratiotile_basic(manager):
    manager.test_window('one')
    manager.test_window('two')
    manager.test_window('three')
    sleep(0.1)

    assert manager.c.window.info()['width'] == 264
    assert manager.c.window.info()['height'] == 598
    assert manager.c.window.info()['x'] == 0
    assert manager.c.window.info()['y'] == 0
    assert manager.c.window.info()['name'] == 'three'

    manager.c.group.next_window()
    assert manager.c.window.info()['width'] == 264
    assert manager.c.window.info()['height'] == 598
    assert manager.c.window.info()['x'] == 266
    assert manager.c.window.info()['y'] == 0
    assert manager.c.window.info()['name'] == 'two'

    manager.c.group.next_window()
    assert manager.c.window.info()['width'] == 266
    assert manager.c.window.info()['height'] == 598
    assert manager.c.window.info()['x'] == 532
    assert manager.c.window.info()['y'] == 0
    assert manager.c.window.info()['name'] == 'one'
def test_feature_group_sub_group():
    m = folium.Map([0.0, 0.0], zoom_start=6)
    fg = folium.FeatureGroup()
    m.add_child(fg)
    g1 = plugins.FeatureGroupSubGroup(fg, 'g1')
    m.add_child(g1)
    folium.Marker([1, 1]).add_to(g1)
    folium.Marker([-1, -1]).add_to(g1)
    g2 = plugins.FeatureGroupSubGroup(fg, 'g2')
    folium.Marker([-1, 1]).add_to(g2)
    folium.Marker([1, -1]).add_to(g2)
    m.add_child(g2)
    folium.LayerControl().add_to(m)
    out = normalize(m._parent.render())

    # The plugin's JS bundle must be referenced (the full script URL was
    # truncated in the source, so only the tag prefix is checked here).
    assert '<script src="' in out

    tmpl = Template('\n var {{ this.get_name() }} = L.featureGroup.subGroup(\n {{ this._group.get_name() }}\n );\n ')
    assert normalize(tmpl.render(this=g1)) in out
    assert normalize(tmpl.render(this=g2)) in out

    tmpl = Template('{{ this.get_name() }}.addTo({{ this._parent.get_name() }});')
    assert normalize(tmpl.render(this=g1)) in out
    assert normalize(tmpl.render(this=g2)) in out
class WithDataPortal(WithAdjustmentReader, WithBcolzEquityMinuteBarReader, WithBcolzFutureMinuteBarReader):
    DATA_PORTAL_USE_DAILY_DATA = True
    DATA_PORTAL_USE_MINUTE_DATA = True
    DATA_PORTAL_USE_ADJUSTMENTS = True
    DATA_PORTAL_FIRST_TRADING_DAY = None
    DATA_PORTAL_LAST_AVAILABLE_SESSION = None
    DATA_PORTAL_LAST_AVAILABLE_MINUTE = None
    DATA_PORTAL_MINUTE_HISTORY_PREFETCH = DEFAULT_MINUTE_HISTORY_PREFETCH
    DATA_PORTAL_DAILY_HISTORY_PREFETCH = DEFAULT_DAILY_HISTORY_PREFETCH

    def make_data_portal(self):
        if self.DATA_PORTAL_FIRST_TRADING_DAY is None:
            if self.DATA_PORTAL_USE_MINUTE_DATA:
                self.DATA_PORTAL_FIRST_TRADING_DAY = (
                    self.bcolz_equity_minute_bar_reader.first_trading_day)
            elif self.DATA_PORTAL_USE_DAILY_DATA:
                self.DATA_PORTAL_FIRST_TRADING_DAY = (
                    self.bcolz_equity_daily_bar_reader.first_trading_day)
        return DataPortal(
            self.asset_finder,
            self.trading_calendar,
            first_trading_day=self.DATA_PORTAL_FIRST_TRADING_DAY,
            equity_daily_reader=(self.bcolz_equity_daily_bar_reader
                                 if self.DATA_PORTAL_USE_DAILY_DATA else None),
            equity_minute_reader=(self.bcolz_equity_minute_bar_reader
                                  if self.DATA_PORTAL_USE_MINUTE_DATA else None),
            adjustment_reader=(self.adjustment_reader
                               if self.DATA_PORTAL_USE_ADJUSTMENTS else None),
            future_minute_reader=(self.bcolz_future_minute_bar_reader
                                  if self.DATA_PORTAL_USE_MINUTE_DATA else None),
            future_daily_reader=(
                MinuteResampleSessionBarReader(
                    self.bcolz_future_minute_bar_reader.trading_calendar,
                    self.bcolz_future_minute_bar_reader)
                if self.DATA_PORTAL_USE_MINUTE_DATA else None),
            last_available_session=self.DATA_PORTAL_LAST_AVAILABLE_SESSION,
            last_available_minute=self.DATA_PORTAL_LAST_AVAILABLE_MINUTE,
            minute_history_prefetch_length=self.DATA_PORTAL_MINUTE_HISTORY_PREFETCH,
            daily_history_prefetch_length=self.DATA_PORTAL_DAILY_HISTORY_PREFETCH,
        )

    def init_instance_fixtures(self):
        super(WithDataPortal, self).init_instance_fixtures()
        self.data_portal = self.make_data_portal()
@qtile_cmd_config  # decorator prefix lost in extraction; only '_config' survived, so this name is assumed
def test_qtile_cmd(manager):
    manager.test_window('foo')
    wid = manager.c.window.info()['id']

    for obj in ['window', 'group', 'screen']:
        assert run_qtile_cmd('-s {} -o {} -f info'.format(manager.sockfile, obj))

    layout = run_qtile_cmd('-s {} -o layout -f info'.format(manager.sockfile))
    assert layout['name'] == 'stack'
    assert layout['group'] == 'a'

    window = run_qtile_cmd('-s {} -o window {} -f info'.format(manager.sockfile, wid))
    assert window['id'] == wid
    assert window['name'] == 'foo'
    assert window['group'] == 'a'

    group = run_qtile_cmd('-s {} -o group {} -f info'.format(manager.sockfile, 'a'))
    assert group['name'] == 'a'
    assert group['screen'] == 0
    assert group['layouts'] == ['stack', 'stack', 'stack']
    assert group['focus'] == 'foo'

    assert run_qtile_cmd('-s {} -o screen {} -f info'.format(manager.sockfile, 0)) == \
        {'height': 600, 'index': 0, 'width': 800, 'x': 0, 'y': 0}

    bar = run_qtile_cmd('-s {} -o bar {} -f info'.format(manager.sockfile, 'bottom'))
    assert bar['height'] == 20
    assert bar['width'] == 800
    assert bar['size'] == 20
    assert bar['position'] == 'bottom'
class Abstractor(object):
    def __init__(self, abs_dir, max_len=30, cuda=True):
        abs_meta = json.load(open(join(abs_dir, 'meta.json')))
        assert abs_meta['net'] == 'base_abstractor'
        abs_args = abs_meta['net_args']
        abs_ckpt = load_best_ckpt(abs_dir)
        word2id = pkl.load(open(join(abs_dir, 'vocab.pkl'), 'rb'))
        abstractor = CopySumm(**abs_args)
        abstractor.load_state_dict(abs_ckpt)
        self._device = torch.device('cuda' if cuda else 'cpu')
        self._net = abstractor.to(self._device)
        self._word2id = word2id
        self._id2word = {i: w for w, i in word2id.items()}
        self._max_len = max_len

    def _prepro(self, raw_article_sents):
        ext_word2id = dict(self._word2id)
        ext_id2word = dict(self._id2word)
        for raw_words in raw_article_sents:
            for w in raw_words:
                if w not in ext_word2id:
                    ext_word2id[w] = len(ext_word2id)
                    ext_id2word[len(ext_id2word)] = w
        articles = conver2id(UNK, self._word2id, raw_article_sents)
        art_lens = [len(art) for art in articles]
        article = pad_batch_tensorize(articles, PAD, cuda=False).to(self._device)
        extend_arts = conver2id(UNK, ext_word2id, raw_article_sents)
        extend_art = pad_batch_tensorize(extend_arts, PAD, cuda=False).to(self._device)
        extend_vsize = len(ext_word2id)
        dec_args = (article, art_lens, extend_art, extend_vsize, START, END, UNK, self._max_len)
        return dec_args, ext_id2word

    def __call__(self, raw_article_sents):
        self._net.eval()
        dec_args, id2word = self._prepro(raw_article_sents)
        decs, attns = self._net.batch_decode(*dec_args)

        def argmax(arr, keys):
            return arr[max(range(len(arr)), key=lambda i: keys[i].item())]

        dec_sents = []
        for i, raw_words in enumerate(raw_article_sents):
            dec = []
            for id_, attn in zip(decs, attns):
                if id_[i] == END:
                    break
                elif id_[i] == UNK:
                    dec.append(argmax(raw_words, attn[i]))
                else:
                    dec.append(id2word[id_[i].item()])
            dec_sents.append(dec)
        return dec_sents
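# Usage sketch (the model directory is illustrative): the abstractor maps
# tokenized article sentences to abstractive summary sentences, copying
# out-of-vocabulary tokens via the attention weights.
# abstractor = Abstractor('pretrained/abstractor', max_len=30, cuda=False)
# summary = abstractor([['the', 'cat', 'sat', 'on', 'the', 'mat', '.']])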
class ResourcesViewFull(StatsView):
    name = 'resourcesViewFull'
    contexts = ['drone', 'fighter', 'cargo']

    def __init__(self, parent):
        StatsView.__init__(self)
        self.parent = parent
        self.mainFrame = gui.mainFrame.MainFrame.getInstance()
        self.mainFrame.additionsPane.notebook.Bind(EVT_NOTEBOOK_PAGE_CHANGED, self.pageChanged)

    def pageChanged(self, event):
        page = self.mainFrame.additionsPane.getName(event.GetSelection())
        if page == 'Cargo':
            self.toggleContext('cargo')
        elif page == 'Fighters':
            self.toggleContext('fighter')
        else:
            self.toggleContext('drone')
        event.Skip()

    def toggleContext(self, context):
        for x in self.contexts:
            bitmap = getattr(self, 'bitmapFull{}Bay'.format(x.capitalize()))
            base = getattr(self, 'baseFull{}Bay'.format(x.capitalize()))
            if context == x:
                bitmap.Show()
                base.Show(True)
            else:
                bitmap.Hide()
                base.Hide(True)
        fighter_sizer = getattr(self, 'boxSizerFighter')
        drone_sizer = getattr(self, 'boxSizerDrones')
        if context != 'fighter':
            fighter_sizer.ShowItems(False)
            drone_sizer.ShowItems(True)
        else:
            fighter_sizer.ShowItems(True)
            drone_sizer.ShowItems(False)
        self.panel.Layout()
        self.headerPanel.Layout()

    def getHeaderText(self, fit):
        return _t('Resources')

    def getTextExtentW(self, text):
        width, height = self.parent.GetTextExtent(text)
        return width

    def populatePanel(self, contentPanel, headerPanel):
        contentSizer = contentPanel.GetSizer()
        root = wx.BoxSizer(wx.VERTICAL)
        contentSizer.Add(root, 0, wx.EXPAND, 0)
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        root.Add(sizer, 0, wx.EXPAND)
        root.Add(wx.StaticLine(contentPanel, wx.ID_ANY, style=wx.HORIZONTAL), 0, wx.EXPAND)
        sizerResources = wx.BoxSizer(wx.HORIZONTAL)
        root.Add(sizerResources, 1, wx.EXPAND, 0)
        parent = self.panel = contentPanel
        self.headerPanel = headerPanel
        panel = 'full'
        base = sizerResources
        sizer.AddStretchSpacer()
        tooltipText = {
            'turret': _t('Turret hardpoints'),
            'launcher': _t('Launcher hardpoints'),
            'drones': _t('Drones active'),
            'fighter': _t('Fighter squadrons active'),
            'calibration': _t('Calibration'),
        }
        for type_ in ('turret', 'launcher', 'drones', 'fighter', 'calibration'):
            box = wx.BoxSizer(wx.HORIZONTAL)
            bitmap = BitmapLoader.getStaticBitmap('%s_big' % type_, parent, 'gui')
            tooltip = wx.ToolTip(tooltipText[type_])
            bitmap.SetToolTip(tooltip)
            box.Add(bitmap, 0, wx.ALIGN_CENTER)
            sizer.Add(box, 0, wx.ALIGN_CENTER)
            suffix = {'turret': 'Hardpoints', 'launcher': 'Hardpoints', 'drones': 'Active',
                      'fighter': 'Tubes', 'calibration': 'Points'}
            lbl = wx.StaticText(parent, wx.ID_ANY, '0')
            setattr(self, 'label%sUsed%s%s' % (panel.capitalize(), type_.capitalize(), suffix[type_].capitalize()), lbl)
            box.Add(lbl, 0, wx.ALIGN_CENTER | wx.LEFT, 5)
            box.Add(wx.StaticText(parent, wx.ID_ANY, '/'), 0, wx.ALIGN_CENTER)
            lbl = wx.StaticText(parent, wx.ID_ANY, '0')
            setattr(self, 'label%sTotal%s%s' % (panel.capitalize(), type_.capitalize(), suffix[type_].capitalize()), lbl)
            box.Add(lbl, 0, wx.ALIGN_CENTER)
            setattr(self, 'boxSizer{}'.format(type_.capitalize()), box)
            if type_ != 'drones':
                sizer.AddStretchSpacer()
        gauge_font = wx.Font(fonts.NORMAL, wx.SWISS, wx.NORMAL, wx.NORMAL, False)
        tooltipText = {
            'cpu': _t('CPU'),
            'pg': _t('PowerGrid'),
            'droneBay': _t('Drone bay'),
            'fighterBay': _t('Fighter bay'),
            'droneBandwidth': _t('Drone bandwidth'),
            'cargoBay': _t('Cargo bay'),
        }
        for i, group in enumerate((('cpu', 'pg'), ('cargoBay', 'droneBay', 'fighterBay', 'droneBandwidth'))):
            main = wx.BoxSizer(wx.VERTICAL)
            base.Add(main, 1, wx.ALIGN_CENTER)
            for type_ in group:
                capitalizedType = type_[0].capitalize() + type_[1:]
                bitmap = BitmapLoader.getStaticBitmap(type_ + '_big', parent, 'gui')
                tooltip = wx.ToolTip(tooltipText[type_])
                bitmap.SetToolTip(tooltip)
                stats = wx.BoxSizer(wx.VERTICAL)
                absolute = wx.BoxSizer(wx.HORIZONTAL)
                stats.Add(absolute, 0, wx.EXPAND)
                b = wx.BoxSizer(wx.HORIZONTAL)
                main.Add(b, 1, wx.ALIGN_CENTER)
                b.Add(bitmap, 0, wx.ALIGN_BOTTOM)
                b.Add(stats, 1, wx.EXPAND)
                lbl = wx.StaticText(parent, wx.ID_ANY, '0')
                setattr(self, 'label%sUsed%s' % (panel.capitalize(), capitalizedType), lbl)
                absolute.Add(lbl, 0, wx.ALIGN_LEFT | wx.LEFT, 3)
                absolute.Add(wx.StaticText(parent, wx.ID_ANY, '/'), 0, wx.ALIGN_LEFT)
                lbl = wx.StaticText(parent, wx.ID_ANY, '0')
                setattr(self, 'label%sTotal%s' % (panel.capitalize(), capitalizedType), lbl)
                absolute.Add(lbl, 0, wx.ALIGN_LEFT)
                units = {'cpu': ' tf', 'pg': ' MW', 'droneBandwidth': ' mbit/s',
                         'droneBay': ' m3', 'fighterBay': ' m3', 'cargoBay': ' m3'}
                lbl = wx.StaticText(parent, wx.ID_ANY, '%s' % units[type_])
                absolute.Add(lbl, 0, wx.ALIGN_LEFT)
                gauge = PyGauge(parent, gauge_font, 1)
                gauge.SetValueRange(0, 0)
                gauge.SetMinSize((self.getTextExtentW('1.999M/1.99M MW'), 23))
                gauge.SetFractionDigits(2)
                setattr(self, 'gauge%s%s' % (panel.capitalize(), capitalizedType), gauge)
                stats.Add(gauge, 0, wx.ALIGN_CENTER)
                setattr(self, 'base%s%s' % (panel.capitalize(), capitalizedType), b)
                setattr(self, 'bitmap%s%s' % (panel.capitalize(), capitalizedType), bitmap)
        self.toggleContext('drone')

    def refreshPanel(self, fit):
        stats = (
            ('label%sUsedTurretHardpoints', lambda: fit.getHardpointsUsed(FittingHardpoint.TURRET), 0, 0, 0),
            ('label%sTotalTurretHardpoints', lambda: fit.ship.getModifiedItemAttr('turretSlotsLeft'), 0, 0, 0),
            ('label%sUsedLauncherHardpoints', lambda: fit.getHardpointsUsed(FittingHardpoint.MISSILE), 0, 0, 0),
            ('label%sTotalLauncherHardpoints', lambda: fit.ship.getModifiedItemAttr('launcherSlotsLeft'), 0, 0, 0),
            ('label%sUsedDronesActive', lambda: fit.activeDrones, 0, 0, 0),
            ('label%sTotalDronesActive', lambda: fit.extraAttributes['maxActiveDrones'], 0, 0, 0),
            ('label%sUsedFighterTubes', lambda: fit.fighterTubesUsed, 3, 0, 9),
            ('label%sTotalFighterTubes', lambda: fit.fighterTubesTotal, 3, 0, 9),
            ('label%sUsedCalibrationPoints', lambda: fit.calibrationUsed, 0, 0, 0),
            ('label%sTotalCalibrationPoints', lambda: fit.ship.getModifiedItemAttr('upgradeCapacity'), 0, 0, 0),
            ('label%sUsedPg', lambda: fit.pgUsed, 4, 0, 9),
            ('label%sUsedCpu', lambda: fit.cpuUsed, 4, 0, 9),
            ('label%sTotalPg', lambda: fit.ship.getModifiedItemAttr('powerOutput'), 4, 0, 9),
            ('label%sTotalCpu', lambda: fit.ship.getModifiedItemAttr('cpuOutput'), 4, 0, 9),
            ('label%sUsedDroneBay', lambda: fit.droneBayUsed, 3, 0, 9),
            ('label%sUsedFighterBay', lambda: fit.fighterBayUsed, 3, 0, 9),
            ('label%sUsedDroneBandwidth', lambda: fit.droneBandwidthUsed, 3, 0, 9),
            ('label%sTotalDroneBay', lambda: fit.ship.getModifiedItemAttr('droneCapacity'), 3, 0, 9),
            ('label%sTotalDroneBandwidth', lambda: fit.ship.getModifiedItemAttr('droneBandwidth'), 3, 0, 9),
            ('label%sTotalFighterBay', lambda: fit.ship.getModifiedItemAttr('fighterCapacity'), 3, 0, 9),
            ('label%sUsedCargoBay', lambda: fit.cargoBayUsed, 3, 0, 9),
            ('label%sTotalCargoBay', lambda: fit.ship.getModifiedItemAttr('capacity'), 3, 0, 9),
        )
        panel = 'Full'
        usedTurretHardpoints = 0
        labelUTH = ''
        totalTurretHardpoints = 0
        labelTTH = ''
        usedLauncherHardpoints = 0
        labelULH = ''
        totalLauncherHardPoints = 0
        labelTLH = ''
        usedDronesActive = 0
        labelUDA = ''
        totalDronesActive = 0
        labelTDA = ''
        usedFighterTubes = 0
        labelUFT = ''
        totalFighterTubes = 0
        labelTFT = ''
        usedCalibrationPoints = 0
        labelUCP = ''
        totalCalibrationPoints = 0
        labelTCP = ''
        for labelName, value, prec, lowest, highest in stats:
            label = getattr(self, labelName % panel)
            value = value() if fit is not None else 0
            value = value if value is not None else 0
            if (labelName % panel) == ('label%sUsedTurretHardpoints' % panel):
                usedTurretHardpoints = value
                labelUTH = label
            elif (labelName % panel) == ('label%sTotalTurretHardpoints' % panel):
                totalTurretHardpoints = value
                labelTTH = label
            elif (labelName % panel) == ('label%sUsedLauncherHardpoints' % panel):
                usedLauncherHardpoints = value
                labelULH = label
            elif (labelName % panel) == ('label%sTotalLauncherHardpoints' % panel):
                totalLauncherHardPoints = value
                labelTLH = label
            elif (labelName % panel) == ('label%sUsedDronesActive' % panel):
                usedDronesActive = value
                labelUDA = label
            elif (labelName % panel) == ('label%sTotalDronesActive' % panel):
                totalDronesActive = value
                labelTDA = label
            elif (labelName % panel) == ('label%sUsedFighterTubes' % panel):
                usedFighterTubes = value
                labelUFT = label
            elif (labelName % panel) == ('label%sTotalFighterTubes' % panel):
                totalFighterTubes = value
                labelTFT = label
            elif (labelName % panel) == ('label%sUsedCalibrationPoints' % panel):
                usedCalibrationPoints = value
                labelUCP = label
            elif (labelName % panel) == ('label%sTotalCalibrationPoints' % panel):
                totalCalibrationPoints = value
                labelTCP = label
            if isinstance(value, str):
                label.SetLabel(value)
                label.SetToolTip(wx.ToolTip(value))
            else:
                label.SetLabel(formatAmount(value, prec, lowest, highest))
                label.SetToolTip(wx.ToolTip('%.1f' % value))
            label.InvalidateBestSize()
        colorWarn = wx.Colour(204, 51, 51)
        colorNormal = wx.SystemSettings.GetColour(wx.SYS_COLOUR_WINDOWTEXT)
        colorT = colorWarn if usedTurretHardpoints > totalTurretHardpoints else colorNormal
        colorL = colorWarn if usedLauncherHardpoints > totalLauncherHardPoints else colorNormal
        colorD = colorWarn if usedDronesActive > totalDronesActive else colorNormal
        colorF = colorWarn if usedFighterTubes > totalFighterTubes else colorNormal
        colorC = colorWarn if usedCalibrationPoints > totalCalibrationPoints else colorNormal
        labelUTH.SetForegroundColour(colorT)
        labelTTH.SetForegroundColour(colorT)
        labelULH.SetForegroundColour(colorL)
        labelTLH.SetForegroundColour(colorL)
        labelUDA.SetForegroundColour(colorD)
        labelTDA.SetForegroundColour(colorD)
        labelUFT.SetForegroundColour(colorF)
        labelTFT.SetForegroundColour(colorF)
        labelUCP.SetForegroundColour(colorC)
        labelTCP.SetForegroundColour(colorC)
        if fit is not None:
            resMax = (
                lambda: fit.ship.getModifiedItemAttr('cpuOutput'),
                lambda: fit.ship.getModifiedItemAttr('powerOutput'),
                lambda: fit.ship.getModifiedItemAttr('droneCapacity'),
                lambda: fit.ship.getModifiedItemAttr('fighterCapacity'),
                lambda: fit.ship.getModifiedItemAttr('droneBandwidth'),
                lambda: fit.ship.getModifiedItemAttr('capacity'),
            )
        else:
            resMax = None
        i = 0
        for resourceType in ('cpu', 'pg', 'droneBay', 'fighterBay', 'droneBandwidth', 'cargoBay'):
            capitalizedType = resourceType[0].capitalize() + resourceType[1:]
            gauge = getattr(self, 'gauge%s%s' % (panel, capitalizedType))
            if fit is not None:
                resUsed = getattr(fit, '%sUsed' % resourceType)
                gauge.SetValueRange(resUsed or 0, resMax[i]() or 0)
            else:
                gauge.SetValueRange(0, 0)
            i += 1
        self.panel.Layout()
        self.headerPanel.Layout()
def state_dict() -> Dict[(str, Any)]: state = base.state_dict() lights_state: Dict[(str, Any)] = {} lights_state['ringConnected'] = redis.get('ring_initialized') lights_state['ringProgram'] = storage.get('ring_program') lights_state['ringBrightness'] = storage.get('ring_brightness') lights_state['ringMonochrome'] = storage.get('ring_monochrome') lights_state['wledLedCount'] = storage.get('wled_led_count') lights_state['wledIp'] = storage.get('wled_ip') lights_state['wledPort'] = storage.get('wled_port') lights_state['wledConnected'] = redis.get('wled_initialized') lights_state['wledProgram'] = storage.get('wled_program') lights_state['wledBrightness'] = storage.get('wled_brightness') lights_state['wledMonochrome'] = storage.get('wled_monochrome') lights_state['stripConnected'] = redis.get('strip_initialized') lights_state['stripProgram'] = storage.get('strip_program') lights_state['stripBrightness'] = storage.get('strip_brightness') lights_state['screenConnected'] = redis.get('screen_initialized') lights_state['screenProgram'] = storage.get('screen_program') lights_state['initialResolution'] = util.format_resolution(storage.get('initial_resolution')) lights_state['dynamicResolution'] = storage.get('dynamic_resolution') lights_state['currentResolution'] = util.format_resolution(redis.get('current_resolution')) lights_state['currentFps'] = f"{redis.get('current_fps'):.2f}" lights_state['ups'] = storage.get('ups') lights_state['programSpeed'] = storage.get('program_speed') (red, green, blue) = (int((val * 255)) for val in storage.get('fixed_color')) lights_state['fixedColor'] = f'#{red:02x}{green:02x}{blue:02x}' state['lights'] = lights_state return state
def delete_all_deployment_namespace(kubecli: KrknKubernetes, namespace: str): try: deployments = kubecli.get_deployment_ns(namespace) for deployment in deployments: logging.info(('Deleting deployment ' + deployment)) kubecli.delete_deployment(deployment, namespace) except Exception as e: logging.error('Exception when calling delete_all_deployment_namespace: %s\n', str(e)) raise e return deployments
def contractreceivechannelwithdraw_from_event(event: DecodedEvent, fee_config: MediationFeeConfig) -> ContractReceiveChannelWithdraw: data = event.event_data args = data['args'] channel_identifier = args['channel_identifier'] participant = args['participant'] total_withdraw = args['total_withdraw'] return ContractReceiveChannelWithdraw(canonical_identifier=CanonicalIdentifier(chain_identifier=event.chain_id, token_network_address=TokenNetworkAddress(event.originating_contract), channel_identifier=channel_identifier), total_withdraw=total_withdraw, participant=participant, transaction_hash=event.transaction_hash, block_number=event.block_number, block_hash=event.block_hash, fee_config=fee_config)
class ToolConsumer(ToolOutbound): def has_required_params(self): return all([self.launch_params.get(x) for x in LAUNCH_PARAMS_REQUIRED]) def set_config(self, config): if (self.launch_url is None): self.launch_url = config.launch_url self.launch_params.update(config.custom_params)
@require_torch @require_pytesseract class LayoutLMv2ProcessorIntegrationTests(unittest.TestCase): @cached_property def get_images(self): from datasets import load_dataset ds = load_dataset('hf-internal-testing/fixtures_docvqa', split='test') image_1 = Image.open(ds[0]['file']).convert('RGB') image_2 = Image.open(ds[1]['file']).convert('RGB') return (image_1, image_2) @cached_property def get_tokenizers(self): slow_tokenizer = LayoutLMv2Tokenizer.from_pretrained('microsoft/layoutlmv2-base-uncased') fast_tokenizer = LayoutLMv2TokenizerFast.from_pretrained('microsoft/layoutlmv2-base-uncased') return [slow_tokenizer, fast_tokenizer] def test_processor_case_1(self): image_processor = LayoutLMv2ImageProcessor() tokenizers = self.get_tokenizers images = self.get_images for tokenizer in tokenizers: processor = LayoutLMv2Processor(image_processor=image_processor, tokenizer=tokenizer) input_image_proc = image_processor(images[0], return_tensors='pt') input_processor = processor(images[0], return_tensors='pt') expected_keys = ['attention_mask', 'bbox', 'image', 'input_ids', 'token_type_ids'] actual_keys = sorted(input_processor.keys()) self.assertListEqual(actual_keys, expected_keys) self.assertAlmostEqual(input_image_proc['pixel_values'].sum(), input_processor['image'].sum(), delta=0.01) expected_decoding = '[CLS] 11 : 14 to 11 : 39 a. m 11 : 39 to 11 : 44 a. m. 11 : 44 a. m. to 12 : 25 p. m. 12 : 25 to 12 : 58 p. m. 12 : 58 to 4 : 00 p. m. 2 : 00 to 5 : 00 p. m. coffee break coffee will be served for men and women in the lobby adjacent to exhibit area. please move into exhibit area. ( exhibits open ) trrf general session ( part | ) presiding : lee a. waller trrf vice president introductory remarks lee a. waller, trrf vice presi - dent individual interviews with trrf public board members and sci - entific advisory council mem - bers conducted by trrf treasurer philip g. kuehn to get answers which the public refrigerated warehousing industry is looking for. plus questions from the floor. dr. emil m. mrak, university of cal - ifornia, chairman, trrf board ; sam r. cecil, university of georgia college of agriculture ; dr. stanley charm, tufts university school of medicine ; dr. robert h. cotton, itt continental baking company ; dr. owen fennema, university of wis - consin ; dr. robert e. hardenburg, usda. questions and answers exhibits open capt. jack stoney room trrf scientific advisory council meeting ballroom foyer [SEP]' decoding = processor.decode(input_processor.input_ids.squeeze().tolist()) self.assertSequenceEqual(decoding, expected_decoding) input_image_proc = image_processor(images, return_tensors='pt') input_processor = processor(images, padding=True, return_tensors='pt') expected_keys = ['attention_mask', 'bbox', 'image', 'input_ids', 'token_type_ids'] actual_keys = sorted(input_processor.keys()) self.assertListEqual(actual_keys, expected_keys) self.assertAlmostEqual(input_image_proc['pixel_values'].sum(), input_processor['image'].sum(), delta=0.01) expected_decoding = "[CLS] 7 itc limited report and accounts 2013 itc s brands : an asset for the nation the consumer needs and aspirations they fulfil, the benefit they generate for millions across itc s value chains, the future - ready capabilities that support them, and the value that they create for the country, have made itc s brands national assets, adding to india s competitiveness. it is itc s aspiration to be the no 1 fmcg player in the country, driven by its new fmcg businesses.
a recent nielsen report has highlighted that itc's new fmcg businesses are the fastest growing among the top consumer goods companies operating in india. itc takes justifiable pride that, along with generating economic value, these celebrated indian brands also drive the creation of larger societal capital through the virtuous cycle of sustainable and inclusive growth. di wills * ; love delightfully soft skin? aia ans source : : / / www. industrydocuments. ucsf. edu / docs / snbx0223 [SEP] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD] [PAD]" decoding = processor.decode(input_processor.input_ids[1].tolist()) self.assertSequenceEqual(decoding, expected_decoding) def test_processor_case_2(self): image_processor = LayoutLMv2ImageProcessor(apply_ocr=False) tokenizers = self.get_tokenizers images = self.get_images for tokenizer in tokenizers: processor = LayoutLMv2Processor(image_processor=image_processor, tokenizer=tokenizer) words = ['hello', 'world'] boxes = [[1, 2, 3, 4], [5, 6, 7, 8]] input_processor = processor(images[0], words, boxes=boxes, return_tensors='pt') expected_keys = ['input_ids', 'bbox', 'token_type_ids', 'attention_mask', 'image'] actual_keys = list(input_processor.keys()) for key in expected_keys: self.assertIn(key, actual_keys) expected_decoding = '[CLS] hello world [SEP]' decoding = processor.decode(input_processor.input_ids.squeeze().tolist()) self.assertSequenceEqual(decoding, expected_decoding) words = [['hello', 'world'], ['my', 'name', 'is', 'niels']] boxes = [[[1, 2, 3, 4], [5, 6, 7, 8]], [[3, 2, 5, 1], [6, 7, 4, 2], [3, 9, 2, 4], [1, 1, 2, 3]]] input_processor = processor(images, words, boxes=boxes, padding=True, return_tensors='pt') expected_keys = ['attention_mask', 'bbox', 'image', 'input_ids', 'token_type_ids'] actual_keys = sorted(input_processor.keys()) self.assertListEqual(actual_keys, expected_keys) expected_decoding = '[CLS] hello world [SEP] [PAD] [PAD] [PAD]' decoding = processor.decode(input_processor.input_ids[0].tolist()) self.assertSequenceEqual(decoding, expected_decoding) expected_bbox = [[0, 0, 0, 0], [3, 2, 5, 1], [6, 7, 4, 2], [3, 9, 2, 4], [1, 1, 2, 3], [1, 1, 2, 3], [1000, 1000, 1000, 1000]] self.assertListEqual(input_processor.bbox[1].tolist(), expected_bbox) def test_processor_case_3(self): image_processor = LayoutLMv2ImageProcessor(apply_ocr=False) tokenizers = self.get_tokenizers images = self.get_images for tokenizer in tokenizers: processor = LayoutLMv2Processor(image_processor=image_processor, tokenizer=tokenizer) words = ['weirdly', 'world'] boxes = [[1, 2, 3, 4], [5, 6, 7, 8]] word_labels = [1, 2] input_processor = processor(images[0], words, boxes=boxes, word_labels=word_labels, return_tensors='pt') expected_keys = ['attention_mask', 'bbox', 'image', 'input_ids', 'labels', 'token_type_ids'] actual_keys = sorted(input_processor.keys()) self.assertListEqual(actual_keys, expected_keys) expected_decoding = '[CLS] weirdly world [SEP]' decoding = processor.decode(input_processor.input_ids.squeeze().tolist()) self.assertSequenceEqual(decoding, expected_decoding) expected_labels = [(- 100), 1, (- 100), 2, (- 100)] self.assertListEqual(input_processor.labels.squeeze().tolist(), 
expected_labels) words = [['hello', 'world'], ['my', 'name', 'is', 'niels']] boxes = [[[1, 2, 3, 4], [5, 6, 7, 8]], [[3, 2, 5, 1], [6, 7, 4, 2], [3, 9, 2, 4], [1, 1, 2, 3]]] word_labels = [[1, 2], [6, 3, 10, 2]] input_processor = processor(images, words, boxes=boxes, word_labels=word_labels, padding=True, return_tensors='pt') expected_keys = ['attention_mask', 'bbox', 'image', 'input_ids', 'labels', 'token_type_ids'] actual_keys = sorted(input_processor.keys()) self.assertListEqual(actual_keys, expected_keys) expected_decoding = '[CLS] my name is niels [SEP]' decoding = processor.decode(input_processor.input_ids[1].tolist()) self.assertSequenceEqual(decoding, expected_decoding) expected_bbox = [[0, 0, 0, 0], [3, 2, 5, 1], [6, 7, 4, 2], [3, 9, 2, 4], [1, 1, 2, 3], [1, 1, 2, 3], [1000, 1000, 1000, 1000]] self.assertListEqual(input_processor.bbox[1].tolist(), expected_bbox) expected_labels = [(- 100), 6, 3, 10, 2, (- 100), (- 100)] self.assertListEqual(input_processor.labels[1].tolist(), expected_labels) def test_processor_case_4(self): image_processor = LayoutLMv2ImageProcessor() tokenizers = self.get_tokenizers images = self.get_images for tokenizer in tokenizers: processor = LayoutLMv2Processor(image_processor=image_processor, tokenizer=tokenizer) question = "What's his name?" input_processor = processor(images[0], question, return_tensors='pt') expected_keys = ['attention_mask', 'bbox', 'image', 'input_ids', 'token_type_ids'] actual_keys = sorted(input_processor.keys()) self.assertListEqual(actual_keys, expected_keys) expected_decoding = "[CLS] what's his name? [SEP] 11 : 14 to 11 : 39 a. m 11 : 39 to 11 : 44 a. m. 11 : 44 a. m. to 12 : 25 p. m. 12 : 25 to 12 : 58 p. m. 12 : 58 to 4 : 00 p. m. 2 : 00 to 5 : 00 p. m. coffee break coffee will be served for men and women in the lobby adjacent to exhibit area. please move into exhibit area. ( exhibits open ) trrf general session ( part | ) presiding : lee a. waller trrf vice president introductory remarks lee a. waller, trrf vice presi - dent individual interviews with trrf public board members and sci - entific advisory council mem - bers conducted by trrf treasurer philip g. kuehn to get answers which the public refrigerated warehousing industry is looking for. plus questions from the floor. dr. emil m. mrak, university of cal - ifornia, chairman, trrf board ; sam r. cecil, university of georgia college of agriculture ; dr. stanley charm, tufts university school of medicine ; dr. robert h. cotton, itt continental baking company ; dr. owen fennema, university of wis - consin ; dr. robert e. hardenburg, usda. questions and answers exhibits open capt. 
jack stoney room trrf scientific advisory council meeting ballroom foyer [SEP]" decoding = processor.decode(input_processor.input_ids.squeeze().tolist()) self.assertSequenceEqual(decoding, expected_decoding) questions = ['How old is he?', "what's the time"] input_processor = processor(images, questions, padding='max_length', max_length=20, truncation=True, return_tensors='pt') expected_keys = ['attention_mask', 'bbox', 'image', 'input_ids', 'token_type_ids'] actual_keys = sorted(input_processor.keys()) self.assertListEqual(actual_keys, expected_keys) expected_decoding = "[CLS] what's the time [SEP] 7 itc limited report and accounts 2013 itc s [SEP]" decoding = processor.decode(input_processor.input_ids[1].tolist()) self.assertSequenceEqual(decoding, expected_decoding) expected_bbox = [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [1000, 1000, 1000, 1000], [0, 45, 67, 80], [72, 56, 109, 67], [72, 56, 109, 67], [116, 56, 189, 67], [198, 59, 253, 66], [257, 59, 285, 66], [289, 59, 365, 66], [372, 59, 407, 66], [74, 136, 161, 158], [74, 136, 161, 158], [74, 136, 161, 158], [74, 136, 161, 158], [1000, 1000, 1000, 1000]] self.assertListEqual(input_processor.bbox[1].tolist(), expected_bbox) def test_processor_case_5(self): image_processor = LayoutLMv2ImageProcessor(apply_ocr=False) tokenizers = self.get_tokenizers images = self.get_images for tokenizer in tokenizers: processor = LayoutLMv2Processor(image_processor=image_processor, tokenizer=tokenizer) question = "What's his name?" words = ['hello', 'world'] boxes = [[1, 2, 3, 4], [5, 6, 7, 8]] input_processor = processor(images[0], question, words, boxes, return_tensors='pt') expected_keys = ['attention_mask', 'bbox', 'image', 'input_ids', 'token_type_ids'] actual_keys = sorted(input_processor.keys()) self.assertListEqual(actual_keys, expected_keys) expected_decoding = "[CLS] what's his name? [SEP] hello world [SEP]" decoding = processor.decode(input_processor.input_ids.squeeze().tolist()) self.assertSequenceEqual(decoding, expected_decoding) questions = ['How old is he?', "what's the time"] words = [['hello', 'world'], ['my', 'name', 'is', 'niels']] boxes = [[[1, 2, 3, 4], [5, 6, 7, 8]], [[3, 2, 5, 1], [6, 7, 4, 2], [3, 9, 2, 4], [1, 1, 2, 3]]] input_processor = processor(images, questions, words, boxes, padding=True, return_tensors='pt') expected_keys = ['attention_mask', 'bbox', 'image', 'input_ids', 'token_type_ids'] actual_keys = sorted(input_processor.keys()) self.assertListEqual(actual_keys, expected_keys) expected_decoding = '[CLS] how old is he? [SEP] hello world [SEP] [PAD] [PAD] [PAD]' decoding = processor.decode(input_processor.input_ids[0].tolist()) self.assertSequenceEqual(decoding, expected_decoding) expected_decoding = "[CLS] what's the time [SEP] my name is niels [SEP]" decoding = processor.decode(input_processor.input_ids[1].tolist()) self.assertSequenceEqual(decoding, expected_decoding) expected_bbox = [[6, 7, 4, 2], [3, 9, 2, 4], [1, 1, 2, 3], [1, 1, 2, 3], [1000, 1000, 1000, 1000]] self.assertListEqual(input_processor.bbox[1].tolist()[(- 5):], expected_bbox)
@pytest.mark.parametrize('properties, inc_counter, expected', [([('name', 'gorilla')], False, 'GM-gorilla'), ([('namespace', 'apes'), ('name', 'gorilla')], False, 'GM-apes/gorilla'), ([('name', 'gorilla')], True, 'GM-gorilla-2'), ([('namespace', 'apes'), ('name', 'gorilla')], True, 'GM-apes/gorilla-2')]) def test_full_name(properties, inc_counter, expected): script = greasemonkey.GreasemonkeyScript(properties, code='') if inc_counter: script.dedup_suffix += 1 assert (script.full_name() == expected)
@pytest.mark.parametrize(('tag', 'expected_version'), [('1.1', '1.1'), ('release-1.1', '1.1'), pytest.param('3.3.1-rc26', '3.3.1rc26', marks=pytest.mark.issue(266))]) def test_tag_to_version(tag: str, expected_version: str) -> None: version = str(tag_to_version(tag, c)) assert (version == expected_version)
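A minimal sketch of the tag normalisation these cases exercise, under stated assumptions: naive_tag_to_version and its regex are illustrative stand-ins, not the library's tag_to_version, and the pre-release canonicalisation is delegated to packaging.

import re
from packaging.version import Version

def naive_tag_to_version(tag: str) -> str:
    # Drop any non-numeric prefix such as 'release-' and let packaging
    # canonicalise pre-release spellings ('3.3.1-rc26' -> '3.3.1rc26').
    match = re.search(r'\d.*', tag)
    assert match is not None, f'no version found in tag {tag!r}'
    return str(Version(match.group(0)))

assert naive_tag_to_version('release-1.1') == '1.1'
assert naive_tag_to_version('3.3.1-rc26') == '3.3.1rc26'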
class TestDuplicate(unittest.TestCase): def setUp(self): self.test_dir = tempfile.TemporaryDirectory(prefix='qiime2-test-temp-') self.dst1 = os.path.join(self.test_dir.name, 'dst1') self.dst2 = os.path.join(self.test_dir.name, 'dst2') self.dir = os.path.join(self.test_dir.name, 'dir') with open(self.dst2, 'w') as fh: fh.write('This is not the secret') os.mkdir(self.dir) self.src = os.path.join(self.test_dir.name, 'src') self.missing = os.path.join(self.test_dir.name, 'missing') with open(self.src, 'w') as fh: fh.write(SECRET) def tearDown(self): self.test_dir.cleanup() def test_src_not_exists(self): with self.assertRaisesRegex(FileNotFoundError, self.missing): util.duplicate(self.missing, self.dst1) def test_src_dir(self): with self.assertRaisesRegex(IsADirectoryError, self.dir): util.duplicate(self.dir, self.dst1) def test_dst_not_exists(self): util.duplicate(self.src, self.dst1) assert os.path.exists(self.dst1) with open(self.dst1) as fh: self.assertEqual(fh.read(), SECRET) def test_dst_exists(self): with self.assertRaisesRegex(FileExistsError, self.dst2): util.duplicate(self.src, self.dst2) def test_dst_dir(self): with self.assertRaisesRegex(IsADirectoryError, self.dir): util.duplicate(self.src, self.dir) @unittest.mock.patch('qiime2.util.os.link', side_effect=EACCES) def test_perm_error_EACCES(self, mocked_link): with self.assertRaisesRegex(PermissionError, 'insufficient r/w permissions'): util.duplicate(self.src, self.dst1) assert mocked_link.called @unittest.mock.patch('qiime2.util.os.link', side_effect=EPERM) def test_perm_error_EPERM(self, mocked_link): util.duplicate(self.src, self.dst1) assert mocked_link.called assert os.path.exists(self.dst1) with open(self.dst1) as fh: self.assertEqual(fh.read(), SECRET) @unittest.mock.patch('qiime2.util.os.link', side_effect=EXDEV) def test_cross_device_src_not_exists(self, mocked_link): with self.assertRaisesRegex(FileNotFoundError, self.missing): util.duplicate(self.missing, self.dst1) @unittest.mock.patch('qiime2.util.os.link', side_effect=EXDEV) def test_cross_device_src_dir(self, mocked_link): with self.assertRaisesRegex(IsADirectoryError, self.dir): util.duplicate(self.dir, self.dst1) @unittest.mock.patch('qiime2.util.os.link', side_effect=EXDEV) def test_cross_device_dst_not_exists(self, mocked_link): util.duplicate(self.src, self.dst1) assert mocked_link.called assert os.path.exists(self.dst1) with open(self.dst1) as fh: self.assertEqual(fh.read(), SECRET) @unittest.mock.patch('qiime2.util.os.link', side_effect=EXDEV) def test_cross_device_dst_exists(self, mocked_link): with self.assertRaisesRegex(FileExistsError, self.dst2): util.duplicate(self.src, self.dst2) @unittest.mock.patch('qiime2.util.os.link', side_effect=EXDEV) def test_cross_device_dst_dir(self, mocked_link): with self.assertRaisesRegex(IsADirectoryError, self.dir): util.duplicate(self.src, self.dir) @unittest.mock.patch('qiime2.util.os.link', side_effect=EXDEV) @unittest.mock.patch('qiime2.util.shutil.copyfile', side_effect=EACCES) def test_cross_device_perm_error(self, mocked_link, mocked_copyfile): with self.assertRaisesRegex(PermissionError, 'insufficient r/w permissions'): util.duplicate(self.src, self.dst1) assert mocked_link.called assert mocked_copyfile.called
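For orientation, a hedged sketch of the link-then-copy fallback these tests pin down; duplicate_sketch is a hypothetical stand-in for qiime2's util.duplicate and omits its EPERM/EACCES handling.

import errno
import os
import shutil

def duplicate_sketch(src: str, dst: str) -> None:
    # Prefer a cheap hard link; fall back to a full copy when src and dst
    # live on different devices (EXDEV), as the cross-device tests expect.
    try:
        os.link(src, dst)
    except OSError as e:
        if e.errno == errno.EXDEV:
            shutil.copyfile(src, dst)
        else:
            raise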
@pytest.mark.skip('read_csv_cirrus time parsing is broken') @pytest.mark.parametrize('filename', cirrus_files) def test_read_csv_cirrus_details(filename): from acoustics.aio import read_csv_cirrus file = ('_'.join(filename) + '.csv') csv_path = os.path.join(data_path(), 'cirrus', file) data = read_csv_cirrus(csv_path) if (filename[0] == 'det_global'): np.sum(data.LAeq) else: np.sum(data['125Hz'])
class Incompatibility(): def __init__(self, terms: list[Term], cause: IncompatibilityCause) -> None: if ((len(terms) != 1) and isinstance(cause, ConflictCause) and any(((term.is_positive() and term.dependency.is_root) for term in terms))): terms = [term for term in terms if ((not term.is_positive()) or (not term.dependency.is_root))] if ((len(terms) != 1) and ((len(terms) != 2) or (terms[0].dependency.complete_name == terms[(- 1)].dependency.complete_name))): by_name: dict[(str, dict[(str, Term)])] = {} for term in terms: if (term.dependency.complete_name not in by_name): by_name[term.dependency.complete_name] = {} by_ref = by_name[term.dependency.complete_name] ref = term.dependency.complete_name if (ref in by_ref): value = by_ref[ref].intersect(term) err_msg = f"Package '{ref}' is listed as a dependency of itself." assert (value is not None), err_msg by_ref[ref] = value else: by_ref[ref] = term new_terms = [] for by_ref in by_name.values(): positive_terms = [term for term in by_ref.values() if term.is_positive()] if positive_terms: new_terms += positive_terms continue new_terms += list(by_ref.values()) terms = new_terms self._terms = terms self._cause = cause @property def terms(self) -> list[Term]: return self._terms @property def cause(self) -> IncompatibilityCause: return self._cause @property def external_incompatibilities(self) -> Iterator[Incompatibility]: if isinstance(self._cause, ConflictCause): cause: ConflictCause = self._cause (yield from cause.conflict.external_incompatibilities) (yield from cause.other.external_incompatibilities) else: (yield self) def is_failure(self) -> bool: return ((len(self._terms) == 0) or ((len(self._terms) == 1) and self._terms[0].dependency.is_root)) def __str__(self) -> str: if isinstance(self._cause, DependencyCause): assert (len(self._terms) == 2) depender = self._terms[0] dependee = self._terms[1] assert depender.is_positive() assert (not dependee.is_positive()) return f'{self._terse(depender, allow_every=True)} depends on {self._terse(dependee)}' elif isinstance(self._cause, PythonCause): assert (len(self._terms) == 1) assert self._terms[0].is_positive() text = f'{self._terse(self._terms[0], allow_every=True)} requires ' text += f'Python {self._cause.python_version}' return text elif isinstance(self._cause, PlatformCause): assert (len(self._terms) == 1) assert self._terms[0].is_positive() text = f'{self._terse(self._terms[0], allow_every=True)} requires ' text += f'platform {self._cause.platform}' return text elif isinstance(self._cause, NoVersionsCause): assert (len(self._terms) == 1) assert self._terms[0].is_positive() return f'no versions of {self._terms[0].dependency.name} match {self._terms[0].constraint}' elif isinstance(self._cause, RootCause): assert (len(self._terms) == 1) assert (not self._terms[0].is_positive()) assert self._terms[0].dependency.is_root return f'{self._terms[0].dependency.name} is {self._terms[0].dependency.constraint}' elif self.is_failure(): return 'version solving failed' if (len(self._terms) == 1): term = self._terms[0] verb = ('forbidden' if term.is_positive() else 'required') return f'{term.dependency.name} is {verb}' if (len(self._terms) == 2): term1 = self._terms[0] term2 = self._terms[1] if (term1.is_positive() == term2.is_positive()): if (not term1.is_positive()): return f'either {self._terse(term1)} or {self._terse(term2)}' package1 = (term1.dependency.name if term1.constraint.is_any() else self._terse(term1)) package2 = (term2.dependency.name if term2.constraint.is_any() else self._terse(term2)) return f'{package1} is incompatible
with {package2}' positive = [] negative = [] for term in self._terms: if term.is_positive(): positive.append(self._terse(term)) else: negative.append(self._terse(term)) if (positive and negative): if (len(positive) != 1): return f"if {' and '.join(positive)} then {' or '.join(negative)}" positive_term = next((term for term in self._terms if term.is_positive())) return f"{self._terse(positive_term, allow_every=True)} requires {' or '.join(negative)}" elif positive: return f"one of {' or '.join(positive)} must be false" else: return f"one of {' or '.join(negative)} must be true" def and_to_string(self, other: Incompatibility, this_line: (int | None), other_line: (int | None)) -> str: requires_both = self._try_requires_both(other, this_line, other_line) if (requires_both is not None): return requires_both requires_through = self._try_requires_through(other, this_line, other_line) if (requires_through is not None): return requires_through requires_forbidden = self._try_requires_forbidden(other, this_line, other_line) if (requires_forbidden is not None): return requires_forbidden buffer = [str(self)] if (this_line is not None): buffer.append(f' {this_line!s}') buffer.append(f' and {other!s}') if (other_line is not None): buffer.append(f' {other_line!s}') return '\n'.join(buffer) def _try_requires_both(self, other: Incompatibility, this_line: (int | None), other_line: (int | None)) -> (str | None): if ((len(self._terms) == 1) or (len(other.terms) == 1)): return None this_positive = self._single_term_where((lambda term: term.is_positive())) if (this_positive is None): return None other_positive = other._single_term_where((lambda term: term.is_positive())) if (other_positive is None): return None if (this_positive.dependency != other_positive.dependency): return None this_negatives = ' or '.join([self._terse(term) for term in self._terms if (not term.is_positive())]) other_negatives = ' or '.join([self._terse(term) for term in other.terms if (not term.is_positive())]) buffer = [(self._terse(this_positive, allow_every=True) + ' ')] is_dependency = (isinstance(self.cause, DependencyCause) and isinstance(other.cause, DependencyCause)) if is_dependency: buffer.append('depends on') else: buffer.append('requires') buffer.append(f' both {this_negatives}') if (this_line is not None): buffer.append(f' ({this_line})') buffer.append(f' and {other_negatives}') if (other_line is not None): buffer.append(f' ({other_line})') return ''.join(buffer) def _try_requires_through(self, other: Incompatibility, this_line: (int | None), other_line: (int | None)) -> (str | None): if ((len(self._terms) == 1) or (len(other.terms) == 1)): return None this_negative = self._single_term_where((lambda term: (not term.is_positive()))) other_negative = other._single_term_where((lambda term: (not term.is_positive()))) if ((this_negative is None) and (other_negative is None)): return None this_positive = self._single_term_where((lambda term: term.is_positive())) other_positive = other._single_term_where((lambda term: term.is_positive())) if ((this_negative is not None) and (other_positive is not None) and (this_negative.dependency.name == other_positive.dependency.name) and this_negative.inverse.satisfies(other_positive)): prior = self prior_negative = this_negative prior_line = this_line latter = other latter_line = other_line elif ((other_negative is not None) and (this_positive is not None) and (other_negative.dependency.name == this_positive.dependency.name) and other_negative.inverse.satisfies(this_positive)): prior = other
prior_negative = other_negative prior_line = other_line latter = self latter_line = this_line else: return None prior_positives = [term for term in prior.terms if term.is_positive()] buffer = [] if (len(prior_positives) > 1): prior_string = ' or '.join([self._terse(term) for term in prior_positives]) buffer.append(f'if {prior_string} then ') else: if isinstance(prior.cause, DependencyCause): verb = 'depends on' else: verb = 'requires' buffer.append(f'{self._terse(prior_positives[0], allow_every=True)} {verb} ') buffer.append(self._terse(prior_negative)) if (prior_line is not None): buffer.append(f' ({prior_line})') buffer.append(' which ') if isinstance(latter.cause, DependencyCause): buffer.append('depends on ') else: buffer.append('requires ') buffer.append(' or '.join([self._terse(term) for term in latter.terms if (not term.is_positive())])) if (latter_line is not None): buffer.append(f' ({latter_line})') return ''.join(buffer) def _try_requires_forbidden(self, other: Incompatibility, this_line: (int | None), other_line: (int | None)) -> (str | None): if ((len(self._terms) != 1) and (len(other.terms) != 1)): return None if (len(self.terms) == 1): prior = other latter = self prior_line = other_line latter_line = this_line else: prior = self latter = other prior_line = this_line latter_line = other_line negative = prior._single_term_where((lambda term: (not term.is_positive()))) if (negative is None): return None if (not negative.inverse.satisfies(latter.terms[0])): return None positives = [t for t in prior.terms if t.is_positive()] buffer = [] if (len(positives) > 1): prior_string = ' or '.join([self._terse(term) for term in positives]) buffer.append(f'if {prior_string} then ') else: buffer.append(self._terse(positives[0], allow_every=True)) if isinstance(prior.cause, DependencyCause): buffer.append(' depends on ') else: buffer.append(' requires ') buffer.append((self._terse(latter.terms[0]) + ' ')) if (prior_line is not None): buffer.append(f'({prior_line}) ') if isinstance(latter.cause, PythonCause): cause: PythonCause = latter.cause buffer.append(f'which requires Python {cause.python_version}') elif isinstance(latter.cause, NoVersionsCause): buffer.append("which doesn't match any versions") else: buffer.append('which is forbidden') if (latter_line is not None): buffer.append(f' ({latter_line})') return ''.join(buffer) def _terse(self, term: Term, allow_every: bool=False) -> str: if (allow_every and term.constraint.is_any()): return f'every version of {term.dependency.complete_name}' if term.dependency.is_root: pretty_name: str = term.dependency.pretty_name return pretty_name if term.dependency.source_type: return str(term.dependency) return f'{term.dependency.pretty_name} ({term.dependency.pretty_constraint})' def _single_term_where(self, callable: Callable[([Term], bool)]) -> (Term | None): found = None for term in self._terms: if (not callable(term)): continue if (found is not None): return None found = term return found def __repr__(self) -> str: return f'<Incompatibility {self!s}>'
def process_refs(x, dag): if (type(x) == dict): for (k, v) in x.items(): x[k] = process_refs(v, dag) return x elif (type(x) == list): for (i, e) in enumerate(x): x[i] = process_refs(e, dag) return x elif (type(x) == outputReference): return x.pointer.resolve(dag.getNode(x.stepid).result) else: return x
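The same walk-and-substitute shape, reduced to a self-contained sketch with a plain resolver callback standing in for outputReference/dag; unlike process_refs, it rebuilds the structure rather than mutating in place.

def substitute(x, resolve):
    # Recurse through dicts and lists, resolving every leaf via the callback.
    if isinstance(x, dict):
        return {k: substitute(v, resolve) for k, v in x.items()}
    if isinstance(x, list):
        return [substitute(e, resolve) for e in x]
    return resolve(x)

results = {'step1': 42}
tree = {'a': ['step1', 'literal'], 'b': 'step1'}
assert substitute(tree, lambda v: results.get(v, v)) == {'a': [42, 'literal'], 'b': 42}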
def lookup_team_invite(code, user_obj=None): try: found = TeamMemberInvite.get((TeamMemberInvite.invite_token == code)) except TeamMemberInvite.DoesNotExist: raise DataModelException('Invalid confirmation code.') if (user_obj and (found.user != user_obj)): raise DataModelException('Invalid confirmation code.') return found
@pytest.mark.xdist_group('stickerset') class TestStickerSetWithRequest(): async def test_create_sticker_set(self, bot, chat_id, sticker_file, animated_sticker_file, video_sticker_file): test_by = f'test_by_{bot.username}' for sticker_set in [test_by, f'animated_{test_by}', f'video_{test_by}']: try: ss = (await bot.get_sticker_set(sticker_set)) assert isinstance(ss, StickerSet) except BadRequest as e: if (not (e.message == 'Stickerset_invalid')): raise e if sticker_set.startswith(test_by): s = (await bot.create_new_sticker_set(chat_id, name=sticker_set, title='Sticker Test', stickers=[InputSticker(sticker_file, emoji_list=[''])], sticker_format=StickerFormat.STATIC)) assert s elif sticker_set.startswith('animated'): a = (await bot.create_new_sticker_set(chat_id, name=sticker_set, title='Animated Test', stickers=[InputSticker(animated_sticker_file, emoji_list=[''])], sticker_format=StickerFormat.ANIMATED)) assert a elif sticker_set.startswith('video'): v = (await bot.create_new_sticker_set(chat_id, name=sticker_set, title='Video Test', stickers=[InputSticker(video_sticker_file, emoji_list=[''])], sticker_format=StickerFormat.VIDEO)) assert v async def test_delete_sticker_set(self, bot, chat_id, sticker_file): name = f"{''.join(random.choices(string.ascii_lowercase, k=5))}_temp_set_by_{bot.username}" assert (await bot.create_new_sticker_set(chat_id, name=name, title='Stickerset delete Test', stickers=[InputSticker(sticker_file, emoji_list=[''])], sticker_format=StickerFormat.STATIC)) (await asyncio.sleep(1)) assert (await bot.delete_sticker_set(name)) async def test_set_custom_emoji_sticker_set_thumbnail(self, bot, chat_id, animated_sticker_file): ss_name = f'custom_emoji_set_by_{bot.username}' try: ss = (await bot.get_sticker_set(ss_name)) assert (ss.sticker_type == Sticker.CUSTOM_EMOJI) except BadRequest: assert (await bot.create_new_sticker_set(chat_id, name=ss_name, title='Custom Emoji Sticker Set', stickers=[InputSticker(animated_sticker_file, emoji_list=[''])], sticker_format=StickerFormat.ANIMATED, sticker_type=Sticker.CUSTOM_EMOJI)) assert (await bot.set_custom_emoji_sticker_set_thumbnail(ss_name, '')) async def test_bot_methods_1_png(self, bot, chat_id, sticker_file): with data_file('telegram_sticker.png').open('rb') as f: file = (await bot.upload_sticker_file(chat_id, sticker=f, sticker_format=StickerFormat.STATIC)) assert file (await asyncio.sleep(1)) tasks = asyncio.gather(bot.add_sticker_to_set(chat_id, f'test_by_{bot.username}', sticker=InputSticker(sticker=file.file_id, emoji_list=[''])), bot.add_sticker_to_set(chat_id, f'test_by_{bot.username}', sticker=InputSticker(sticker=sticker_file, emoji_list=[''], mask_position=MaskPosition(MaskPosition.EYES, (- 1), 1, 2)))) assert all((await tasks)) async def test_bot_methods_1_tgs(self, bot, chat_id): (await asyncio.sleep(1)) assert (await bot.add_sticker_to_set(chat_id, f'animated_test_by_{bot.username}', sticker=InputSticker(sticker=data_file('telegram_animated_sticker.tgs').open('rb'), emoji_list=['']))) async def test_bot_methods_1_webm(self, bot, chat_id): (await asyncio.sleep(1)) with data_file('telegram_video_sticker.webm').open('rb') as f: assert (await bot.add_sticker_to_set(chat_id, f'video_test_by_{bot.username}', sticker=InputSticker(sticker=f, emoji_list=['']))) async def test_bot_methods_2_png(self, bot, sticker_set): (await asyncio.sleep(1)) file_id = sticker_set.stickers[0].file_id assert (await bot.set_sticker_position_in_set(file_id, 1)) async def test_bot_methods_2_tgs(self, bot, animated_sticker_set): (await asyncio.sleep(1))
file_id = animated_sticker_set.stickers[0].file_id assert (await bot.set_sticker_position_in_set(file_id, 1)) async def test_bot_methods_2_webm(self, bot, video_sticker_set): (await asyncio.sleep(1)) file_id = video_sticker_set.stickers[0].file_id assert (await bot.set_sticker_position_in_set(file_id, 1)) async def test_bot_methods_3_png(self, bot, chat_id, sticker_set_thumb_file): (await asyncio.sleep(1)) assert (await bot.set_sticker_set_thumbnail(f'test_by_{bot.username}', chat_id, sticker_set_thumb_file)) async def test_bot_methods_3_tgs(self, bot, chat_id, animated_sticker_file, animated_sticker_set): (await asyncio.sleep(1)) animated_test = f'animated_test_by_{bot.username}' file_id = animated_sticker_set.stickers[(- 1)].file_id tasks = asyncio.gather(bot.set_sticker_set_thumbnail(animated_test, chat_id, animated_sticker_file), bot.set_sticker_set_thumbnail(animated_test, chat_id, file_id)) assert all((await tasks)) @pytest.mark.skip("Skipped for now since Telegram throws a 'File is too big' error regardless of the .webm file size.") def test_bot_methods_3_webm(self, bot, chat_id, video_sticker_file, video_sticker_set): pass async def test_bot_methods_4_png(self, bot, sticker_set): if (len(sticker_set.stickers) <= 1): pytest.skip('Sticker set only has one sticker, deleting it will delete the set.') (await asyncio.sleep(1)) file_id = sticker_set.stickers[(- 1)].file_id assert (await bot.delete_sticker_from_set(file_id)) async def test_bot_methods_4_tgs(self, bot, animated_sticker_set): if (len(animated_sticker_set.stickers) <= 1): pytest.skip('Sticker set only has one sticker, deleting it will delete the set.') (await asyncio.sleep(1)) file_id = animated_sticker_set.stickers[(- 1)].file_id assert (await bot.delete_sticker_from_set(file_id)) async def test_bot_methods_4_webm(self, bot, video_sticker_set): if (len(video_sticker_set.stickers) <= 1): pytest.skip('Sticker set only has one sticker, deleting it will delete the set.') (await asyncio.sleep(1)) file_id = video_sticker_set.stickers[(- 1)].file_id assert (await bot.delete_sticker_from_set(file_id)) async def test_bot_methods_5_png(self, bot, sticker_set): file_id = sticker_set.stickers[(- 1)].file_id assert (await bot.set_sticker_emoji_list(file_id, ['', ''])) ss = (await bot.get_sticker_set(f'test_by_{bot.username}')) assert (ss.stickers[(- 1)].emoji == '') async def test_bot_methods_5_tgs(self, bot, animated_sticker_set): file_id = animated_sticker_set.stickers[(- 1)].file_id assert (await bot.set_sticker_emoji_list(file_id, ['', ''])) ss = (await bot.get_sticker_set(f'animated_test_by_{bot.username}')) assert (ss.stickers[(- 1)].emoji == '') async def test_bot_methods_5_webm(self, bot, video_sticker_set): file_id = video_sticker_set.stickers[(- 1)].file_id assert (await bot.set_sticker_emoji_list(file_id, ['', ''])) ss = (await bot.get_sticker_set(f'video_test_by_{bot.username}')) assert (ss.stickers[(- 1)].emoji == '') async def test_bot_methods_6_png(self, bot): assert (await bot.set_sticker_set_title(f'test_by_{bot.username}', 'new title')) ss = (await bot.get_sticker_set(f'test_by_{bot.username}')) assert (ss.title == 'new title') async def test_bot_methods_6_tgs(self, bot): assert (await bot.set_sticker_set_title(f'animated_test_by_{bot.username}', 'new title')) ss = (await bot.get_sticker_set(f'animated_test_by_{bot.username}')) assert (ss.title == 'new title') async def test_bot_methods_6_webm(self, bot): assert (await bot.set_sticker_set_title(f'video_test_by_{bot.username}', 'new title')) ss = (await
bot.get_sticker_set(f'video_test_by_{bot.username}')) assert (ss.title == 'new title') async def test_bot_methods_7_png(self, bot, sticker_set): file_id = sticker_set.stickers[(- 1)].file_id assert (await bot.set_sticker_keywords(file_id, ['test', 'test2'])) async def test_bot_methods_7_tgs(self, bot, animated_sticker_set): file_id = animated_sticker_set.stickers[(- 1)].file_id assert (await bot.set_sticker_keywords(file_id, ['test', 'test2'])) async def test_bot_methods_7_webm(self, bot, video_sticker_set): file_id = video_sticker_set.stickers[(- 1)].file_id assert (await bot.set_sticker_keywords(file_id, ['test', 'test2']))
def features_for(report): features = [] dedupe_vulns = {} for (pkg_id, pkg) in report['packages'].items(): pkg_env = report['environments'][pkg_id][0] pkg_vulns = [] for vuln_id in report['package_vulnerabilities'].get(pkg_id, []): vuln_key = ((((pkg['name'] + '_') + pkg['version']) + '_') + report['vulnerabilities'][vuln_id].get('name', '')) if (not dedupe_vulns.get(vuln_key, False)): pkg_vulns.append(report['vulnerabilities'][vuln_id]) dedupe_vulns[vuln_key] = True enrichments = ({key: sorted(val, key=(lambda x: x['baseScore']), reverse=True)[0] for (key, val) in list(report['enrichments'].values())[0][0].items()} if report.get('enrichments', {}) else {}) features.append(Feature(pkg['name'], '', '', pkg_env['introduced_in'], pkg['version'], [Vulnerability(fetch_vuln_severity(vuln, enrichments), vuln['updater'], vuln['links'], maybe_urlencoded((vuln['fixed_in_version'] if (vuln['fixed_in_version'] != '0') else '')), vuln['description'], vuln['name'], Metadata(vuln['updater'], vuln.get('repository', {}).get('name'), vuln.get('repository', {}).get('uri'), vuln.get('distribution', {}).get('name'), vuln.get('distribution', {}).get('version'), NVD(CVSSv3(enrichments.get(vuln['id'], {}).get('vectorString', ''), enrichments.get(vuln['id'], {}).get('baseScore', ''))))) for vuln in pkg_vulns])) return features
class LimboLexer(RegexLexer): name = 'Limbo' url = ' aliases = ['limbo'] filenames = ['*.b'] mimetypes = ['text/limbo'] version_added = '2.0' tokens = {'whitespace': [('^(\\s*)([a-zA-Z_]\\w*:)(\\s*\\n)', bygroups(Whitespace, Name.Label, Whitespace)), ('\\n', Whitespace), ('\\s+', Whitespace), ('#(\\n|(.|\\n)*?[^\\\\]\\n)', Comment.Single)], 'string': [('"', String, '#pop'), ('\\\\([\\\\abfnrtv"\\\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape), ('[^\\\\"\\n]+', String), ('\\\\', String)], 'statements': [('"', String, 'string'), ("'(\\\\.|\\\\[0-7]{1,3}|\\\\x[a-fA-F0-9]{1,2}|[^\\\\\\'\\n])'", String.Char), ('(\\d+\\.\\d*|\\.\\d+|\\d+)[eE][+-]?\\d+', Number.Float), ('(\\d+\\.\\d*|\\.\\d+|\\d+[fF])', Number.Float), ('16r[0-9a-fA-F]+', Number.Hex), ('8r[0-7]+', Number.Oct), ('((([1-3]\\d)|([2-9]))r)?(\\d+)', Number.Integer), ('[()\\[\\],.]', Punctuation), ('[~!%^&*+=|?:<>/-]|(->)|(<-)|(=>)|(::)', Operator), ('(alt|break|case|continue|cyclic|do|else|exitfor|hd|if|implement|import|include|len|load|orpick|return|spawn|tagof|tl|to|while)\\b', Keyword), ('(byte|int|big|real|string|array|chan|list|adt|fn|ref|of|module|self|type)\\b', Keyword.Type), ('(con|iota|nil)\\b', Keyword.Constant), ('[a-zA-Z_]\\w*', Name)], 'statement': [include('whitespace'), include('statements'), ('[{}]', Punctuation), (';', Punctuation, '#pop')], 'root': [include('whitespace'), default('statement')]} def analyse_text(text): if re.search('^implement \\w+;', text, re.MULTILINE): return 0.7
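Assuming the lexer above is the one shipped with Pygments (exported as pygments.lexers.LimboLexer), exercising it follows the standard highlight() pattern; the Limbo snippet is illustrative only.

from pygments import highlight
from pygments.lexers import LimboLexer
from pygments.formatters import TerminalFormatter

code = 'implement Hello; include "sys.m"; greeting : con "hello";'
# Tokenise with the lexer and render ANSI-coloured output to the terminal.
print(highlight(code, LimboLexer(), TerminalFormatter()))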
class ModelSaver(ModelSaverBase): def _save(self, step, model): model_state_dict = model.state_dict() model_state_dict = {k: v for (k, v) in model_state_dict.items() if ('generator' not in k)} generator_state_dict = model.generator.state_dict() vocab = deepcopy(self.fields) for side in ['src', 'tgt']: keys_to_pop = [] if hasattr(vocab[side], 'fields'): unk_token = vocab[side].fields[0][1].vocab.itos[0] for (key, value) in vocab[side].fields[0][1].vocab.stoi.items(): if ((value == 0) and (key != unk_token)): keys_to_pop.append(key) for key in keys_to_pop: vocab[side].fields[0][1].vocab.stoi.pop(key, None) checkpoint = {'model': model_state_dict, 'generator': generator_state_dict, 'vocab': vocab, 'opt': self.model_opt, 'optim': self.optim.state_dict()} logger.info(('Saving checkpoint %s_step_%d.pt' % (self.base_path, step))) checkpoint_path = ('%s_step_%d.pt' % (self.base_path, step)) torch.save(checkpoint, checkpoint_path) return (checkpoint, checkpoint_path) def _rm_checkpoint(self, name): os.remove(name)
def build_parser(): parser = argparse.ArgumentParser() parser.add_argument('--files', '-f', type=str, nargs='*', help='Fragment files') parser.add_argument('--genome', '-g', type=str, default='hg38', choices=['hg38', 'hg19'], help='Genome to use') parser.add_argument('--dry', action='store_true', help='Dry run') return parser
def preprocess_for_eval(image, output_height, output_width): tf.summary.image('image', tf.expand_dims(image, 0)) image = tf.to_float(image) resized_image = tf.image.resize_image_with_crop_or_pad(image, output_height, output_width) tf.summary.image('resized_image', tf.expand_dims(resized_image, 0)) return tf.image.per_image_standardization(resized_image)
def set_suction_state(state, must_succeed=True): rospy.wait_for_service('/set_suction') set_suction = rospy.ServiceProxy('/set_suction', _SetSuction) try: response = set_suction(state) except rospy.ServiceException: if must_succeed: raise Exception('Could not set robot suction state.') return False if (must_succeed and (not response.success)): raise Exception('Could not set robot suction state.') return response.success
class TestExperiments(TestCase): def test_discharge_rest_charge(self): experiment = pybamm.Experiment(['Discharge at C/2 for 1 hour', 'Rest for 1 hour', 'Charge at C/2 for 1 hour'], period='0.5 hours') model = pybamm.lithium_ion.SPM() sim = pybamm.Simulation(model, experiment=experiment, solver=pybamm.CasadiSolver()) sim.solve() np.testing.assert_array_almost_equal(sim._solution['Time [h]'].entries, np.array([0, 0.5, 1, (1 + 1e-09), 1.5, 2, (2 + 1e-09), 2.5, 3])) cap = model.default_parameter_values['Nominal cell capacity [A.h]'] np.testing.assert_array_almost_equal(sim._solution['Current [A]'].entries, ((([(cap / 2)] * 3) + ([0] * 3)) + ([((- cap) / 2)] * 3))) def test_rest_discharge_rest(self): experiment = pybamm.Experiment(['Rest for 5 minutes', 'Discharge at 0.1C until 3V', 'Rest for 30 minutes'], period='1 minute') parameter_values = pybamm.ParameterValues('Chen2020') model = pybamm.lithium_ion.DFN() sim = pybamm.Simulation(model, parameter_values=parameter_values, experiment=experiment, solver=pybamm.CasadiSolver()) sol = sim.solve() np.testing.assert_array_almost_equal(sol['Current [A]'].data[:5], 0) np.testing.assert_array_almost_equal(sol['Current [A]'].data[(- 29):], 0) def test_gitt(self): experiment = pybamm.Experiment((['Discharge at C/20 for 1 hour', 'Rest for 1 hour'] * 10), period='6 minutes') model = pybamm.lithium_ion.SPM() sim = pybamm.Simulation(model, experiment=experiment, solver=pybamm.CasadiSolver()) sim.solve() cap = model.default_parameter_values['Nominal cell capacity [A.h]'] np.testing.assert_array_almost_equal(sim._solution['Current [A]'].entries, ((([(cap / 20)] * 11) + ([0] * 11)) + ((([(cap / 20)] * 11) + ([0] * 11)) * 9))) def test_infeasible(self): experiment = pybamm.Experiment(([('Discharge at 1C for 0.5 hours',)] * 4)) model = pybamm.lithium_ion.SPM() sim = pybamm.Simulation(model, experiment=experiment, solver=pybamm.CasadiSolver()) sol = sim.solve() self.assertEqual(len(sol.cycles), 3) def test_drive_cycle(self): drive_cycle = np.array([np.arange(100), (5 * np.ones(100))]).T c_step = pybamm.step.current(value=drive_cycle, duration=100, termination=['4.00 V']) experiment = pybamm.Experiment([c_step]) model = pybamm.lithium_ion.SPM() param = pybamm.ParameterValues('Chen2020') sim = pybamm.Simulation(model, experiment=experiment, parameter_values=param, solver=pybamm.CasadiSolver()) sol = sim.solve() assert np.all((sol['Terminal voltage [V]'].entries >= 4.0))
def test_multi_upblks(do_test): a = CaseTwoUpblksSliceComp.DUT() a._rtlir_test_ref = {'multi_upblks_1': CombUpblk('multi_upblks_1', [Assign([Slice(Attribute(Base(a), 'out'), Number(0), Number(4))], Attribute(Base(a), 'in_'), True)]), 'multi_upblks_2': CombUpblk('multi_upblks_2', [Assign([Slice(Attribute(Base(a), 'out'), Number(4), Number(8))], Attribute(Base(a), 'in_'), True)])} do_test(a)
class ModelParallelTransformerEncoderLayer(TransformerEncoderLayer): def build_fc1(self, input_dim, output_dim, q_noise, qn_block_size): if (q_noise > 0): raise NotImplementedError return ColumnParallelLinear(input_dim, output_dim, gather_output=False) def build_fc2(self, input_dim, output_dim, q_noise, qn_block_size): if (q_noise > 0): raise NotImplementedError return RowParallelLinear(input_dim, output_dim, input_is_parallel=True) def build_self_attention(self, embed_dim, args, **unused_kwargs): return ModelParallelMultiheadAttention(embed_dim, args.encoder_attention_heads, dropout=args.attention_dropout, self_attention=True)
def test_clone_new_inputs(): x = pt.tensor(dtype=np.float64, shape=(None,)) y = pt.tensor(dtype=np.float64, shape=(1,)) z = pt.add(x, y) assert (z.type.shape == (None,)) x_new = pt.tensor(dtype=np.float64, shape=(1,)) z_node_new = z.owner.clone_with_new_inputs([x_new, y]) assert (z_node_new.outputs[0].type.shape == (1,)) assert (z_node_new.inputs[0].type.shape == (1,)) assert (z_node_new.inputs[1].type.shape == (1,)) z = pt.add(x_new, y) assert (z.type.shape == (1,)) z_node_new = z.owner.clone_with_new_inputs([x, y], strict=True) assert (z_node_new.outputs[0].type.shape == (1,)) assert (z_node_new.inputs[0].type.shape == (1,)) assert (z_node_new.inputs[1].type.shape == (1,))
@app.route('/api/conversations/get_folder_list', methods=['POST']) def get_folder_list() -> Response: user_id = DEFAULT_USER_ID folders = [] try: db = get_user_conversation_storage() folder_list = db.folder.find({'user_id': user_id}) for folder in folder_list: folders.append({'id': str(folder['_id']), 'name': folder['name'], 'type': 'chat'}) return jsonify({'success': True, 'data': folders}) except Exception as e: return Response(response=None, status=f'{INTERNAL} error fetching folder list')
def _role_to_node_properties(role: Role, start_idx: int, privileged: bool=False) -> Dict[(str, object)]: role.mounts += get_device_mounts(role.resource.devices) mount_points = [] volumes = [] devices = [] for (i, mount) in enumerate(role.mounts): name = f'mount_{i}' if isinstance(mount, BindMount): volumes.append({'name': name, 'host': {'sourcePath': mount.src_path}}) mount_points.append({'containerPath': mount.dst_path, 'readOnly': mount.read_only, 'sourceVolume': name}) elif isinstance(mount, VolumeMount): volumes.append({'name': name, 'efsVolumeConfiguration': {'fileSystemId': mount.src}}) mount_points.append({'containerPath': mount.dst_path, 'readOnly': mount.read_only, 'sourceVolume': name}) elif isinstance(mount, DeviceMount): perm_map = {'r': 'READ', 'w': 'WRITE', 'm': 'MKNOD'} devices.append({'hostPath': mount.src_path, 'containerPath': mount.dst_path, 'permissions': [perm_map[p] for p in mount.permissions]}) else: raise TypeError(f'unknown mount type {mount}') container = {'command': ([role.entrypoint] + role.args), 'image': role.image, 'environment': [{'name': k, 'value': v} for (k, v) in role.env.items()], 'privileged': privileged, 'resourceRequirements': resource_requirements_from_resource(role.resource), 'linuxParameters': {'sharedMemorySize': role.resource.memMB, 'devices': devices}, 'logConfiguration': {'logDriver': 'awslogs'}, 'mountPoints': mount_points, 'volumes': volumes} if (role.num_replicas > 1): instance_type = instance_type_from_resource(role.resource) if (instance_type is not None): container['instanceType'] = instance_type return {'targetNodes': f'{start_idx}:{((start_idx + role.num_replicas) - 1)}', 'container': container}
def setup_raiden(matrix_server: str, print_step: StepPrinter, contracts_version, eth_rpc_endpoint: str, web3: Web3, base_datadir: Path, keystore: Path, free_port_generator: Iterator[Port]) -> RaidenTestSetup: print_step('Deploying Raiden contracts') client = JSONRPCClient(web3, get_private_key(keystore)) contract_manager = ContractManager(contracts_precompiled_path(contracts_version)) proxy_manager = ProxyManager(rpc_client=client, contract_manager=contract_manager, metadata=ProxyManagerMetadata(token_network_registry_deployed_at=GENESIS_BLOCK_NUMBER, filters_start_at=GENESIS_BLOCK_NUMBER)) token = deploy_token(deploy_client=client, contract_manager=contract_manager, initial_amount=TokenAmount((1000 * denoms.ether)), decimals=18, token_name='TKN', token_symbol='TKN', token_contract_name=CONTRACT_CUSTOM_TOKEN) contract_addresses = deploy_smoketest_contracts(client=client, chain_id=CHAINNAME_TO_ID['smoketest'], contract_manager=contract_manager, token_address=token.address) confirmed_block_identifier = client.get_confirmed_blockhash() registry = proxy_manager.token_network_registry(TokenNetworkRegistryAddress(contract_addresses[CONTRACT_TOKEN_NETWORK_REGISTRY]), block_identifier=confirmed_block_identifier) registry.add_token(token_address=TokenAddress(to_canonical_address(token.address)), channel_participant_deposit_limit=TokenAmount(UINT256_MAX), token_network_deposit_limit=TokenAmount(UINT256_MAX), given_block_identifier=confirmed_block_identifier) service_registry = proxy_manager.service_registry(ServiceRegistryAddress(contract_addresses[CONTRACT_SERVICE_REGISTRY]), block_identifier=confirmed_block_identifier) price = service_registry.current_price(confirmed_block_identifier) amount = TokenAmount(price) token_proxy = proxy_manager.token(TokenAddress(to_canonical_address(token.address)), confirmed_block_identifier) token_proxy.approve(Address(service_registry.address), amount) assert (price <= token_proxy.balance_of(client.address)), 'must have enough balance' service_registry.deposit(BLOCK_ID_LATEST, amount) pfs_port = next(free_port_generator) pfs_url = f'http://localhost:{pfs_port}' service_registry.set_url(pfs_url) user_deposit_contract_address = to_checksum_address(contract_addresses[CONTRACT_USER_DEPOSIT]) print_step('Starting dummy PFS') pfs_greenlet = gevent.spawn(_start_dummy_pfs, pfs_url, to_checksum_address(registry.address), user_deposit_contract_address) print_step('Setting up Raiden') args = {'address': to_checksum_address(TEST_ACCOUNT_ADDRESS), 'datadir': keystore, 'eth_rpc_endpoint': eth_rpc_endpoint, 'gas_price': 'fast', 'keystore_path': keystore, 'matrix_server': matrix_server, 'chain_id': str(CHAINNAME_TO_ID['smoketest']), 'password_file': os.path.join(base_datadir, 'pw'), 'user_deposit_contract_address': user_deposit_contract_address, 'sync_check': False, 'environment_type': Environment.DEVELOPMENT, 'pathfinding_service_address': pfs_url} current_block = client.block_number() target_block_number = (current_block + DEFAULT_NUMBER_OF_BLOCK_CONFIRMATIONS) while (current_block < target_block_number): current_block = client.block_number() sleep(0.5) return RaidenTestSetup(args=args, token=token, contract_addresses=contract_addresses, pfs_greenlet=pfs_greenlet)
@cli.argument('-n', '--no', arg_only=True, action='store_true', help='Answer no to all questions') @cli.argument('-y', '--yes', arg_only=True, action='store_true', help='Answer yes to all questions') @cli.argument('--baseurl', arg_only=True, default=default_base, help=('The URL all git operations start from. Default: %s' % default_base)) @cli.argument('-b', '--branch', arg_only=True, default=default_branch, help=('The branch to clone. Default: %s' % default_branch)) @cli.argument('-H', '--home', arg_only=True, default=Path(os.environ['QMK_HOME']), type=Path, help=('The location for QMK Firmware. Default: %s' % os.environ['QMK_HOME'])) @cli.argument('fork', arg_only=True, default=default_fork, nargs='?', help=('The qmk_firmware fork to clone. Default: %s' % default_fork)) @cli.subcommand('Setup your computer for qmk_firmware.') def setup(cli): clone_prompt = ('Would you like to clone {fg_cyan}%s{fg_reset} to {fg_cyan}%s{fg_reset}?' % (cli.args.fork, shlex.quote(str(cli.args.home)))) home_prompt = ('Would you like to set {fg_cyan}%s{fg_reset} as your QMK home?' % (shlex.quote(str(cli.args.home)),)) if (cli.args.yes and cli.args.no): cli.log.error("Can't use both --yes and --no at the same time.") exit(1) if is_qmk_firmware(cli.args.home): cli.log.info('Found qmk_firmware at %s.', str(cli.args.home)) elif (cli.args.home.exists() and any(cli.args.home.iterdir())): path_str = str(cli.args.home) if (cli.args.home.name != 'qmk_firmware'): cli.log.warning(('Warning: %s does not end in "qmk_firmware". Did you mean to use "--home %s/qmk_firmware"?' % (path_str, path_str))) cli.log.error("Path '%s' exists but is not a qmk_firmware clone!", path_str) exit(1) else: cli.log.error('Could not find qmk_firmware!') if yesno(clone_prompt): git_url = '/'.join((cli.args.baseurl, cli.args.fork)) if git_clone(git_url, cli.args.home, cli.args.branch): git_upstream(cli.args.home) else: exit(1) else: cli.log.warning('Not cloning qmk_firmware due to user input or --no flag.') if ((str(cli.args.home) != os.environ['QMK_HOME']) and yesno(home_prompt)): cli.config['user']['qmk_home'] = str(cli.args.home.absolute()) cli.config_source['user']['qmk_home'] = 'config_file' cli.write_config_option('user', 'qmk_home') if cli.args.home.exists(): color = ('--color' if cli.config.general.color else '--no-color') unicode = ('--unicode' if cli.config.general.unicode else '--no-unicode') doctor_command = [Path(sys.argv[0]).as_posix(), color, unicode, 'doctor'] if cli.args.no: doctor_command.append('-n') if cli.args.yes: doctor_command.append('-y') cli.run(doctor_command, stdin=None, capture_output=False, cwd=cli.args.home)
class Dir(Task): def __init__(self, file): Task.__init__(self, file, 'Dir') def IsVerify(self): if (('Path' in self.__dict__) and ('Depth' in self.__dict__) and ('Mask' in self.__dict__)): if ((self.Path == '*') and (self.Mask == '*') and (self.Depth == '0')): self.Concerns = 'This is full recursive dirwalk' return True return False def CreateCommandLine(self): str = 'dir' if ('Path' in self.__dict__): str = ('%s -path %s' % (str, self.Path)) if ('Mask' in self.__dict__): str = ('%s -mask %s' % (str, self.Mask)) if ('Depth' in self.__dict__): if (self.Depth == '0'): str = ('%s -recursive' % str) if ('Maximum' in self.__dict__): str = ('%s -max %s' % (str, self.Maximum)) if ('Listall' in self.__dict__): if (not bool(self.Listall)): str = ('%s -dirsonly' % str) if (('Direction' in self.__dict__) and ('Time' in self.__dict__)): str = ('%s -%s %s' % (str, self.Direction, self.Time)) if ('timeType' in self.__dict__): str = ('%s -time %s' % (str, self.timeType)) return [str] def Display(self): dsz.ui.Echo('Directory listing') cmds = self.CreateCommandLine() for cmd in cmds: dsz.ui.Echo((' `%s`' % cmd)) dsz.ui.Echo(('%s' % self.Concerns), dsz.WARNING)
def test_fileread_empty_key_raises(): context = Context({'fileRead': {'path': '/arb', 'key': ''}}) with pytest.raises(KeyInContextHasNoValueError) as err_info: fileread.run_step(context) assert (str(err_info.value) == "context['fileRead']['key'] must have a value for pypyr.steps.fileread.")
def Lie_Group():
    Print_Function()
    coords = symbols('t x y z', real=True)
    (st4d, g0, g1, g2, g3) = Ga.build('gamma*t|x|y|z', g=[1, (- 1), (- 1), (- 1)], coords=coords)
    I = st4d.i
    a = st4d.mv('a', 'vector')
    B = st4d.mv('B', 'bivector')
    print('a =', a)
    print('B =', B)
    print('a|B =', (a | B))
    print(((a | B) | B).simplify().Fmt(3, '(a|B)|B'))
    print((((a | B) | B) | B).simplify().Fmt(3, '((a|B)|B)|B'))
    return
def _resolve_deps(source_names: Iterable[str]) -> List[str]:
    requirements = {source: dependencies.intersection(source_names)
                    for (source, dependencies) in SOURCE_DEPENDENCIES.items()
                    if (source in source_names)}
    solution: List[str] = []
    while requirements:
        satisfied = {source for (source, targets) in requirements.items() if (not targets)}
        if (not satisfied):
            raise RuntimeError(f'Missing dependencies or circular dependency in: {requirements}')
        for source in satisfied:
            del requirements[source]
        for dependencies in requirements.values():
            dependencies -= satisfied
        solution.extend(satisfied)
    return solution
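# A minimal usage sketch for _resolve_deps. The SOURCE_DEPENDENCIES mapping
# below is an illustrative stand-in for the real module-level table: each
# source maps to the set of sources it depends on. Dependencies always come
# out of the solver before their dependents.
SOURCE_DEPENDENCIES = {
    'base': set(),
    'stats': {'base'},
    'report': {'stats'},
}

assert _resolve_deps(['report', 'stats', 'base']) == ['base', 'stats', 'report']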
def run(settings):
    settings.description = 'ATOM IoUNet with default settings, for DeT Tracker.'
    settings.batch_size = 64
    settings.num_workers = 8
    settings.print_interval = 1
    settings.normalize_mean = [0.485, 0.456, 0.406]
    settings.normalize_std = [0.229, 0.224, 0.225]
    settings.search_area_factor = 5.0
    settings.feature_sz = 18
    settings.output_sz = (settings.feature_sz * 16)
    settings.center_jitter_factor = {'train': 0, 'test': 4.5}
    settings.scale_jitter_factor = {'train': 0, 'test': 0.5}
    input_dtype = 'rgbcolormap'
    coco_train = MSCOCOSeq_depth(settings.env.cocodepth_dir, dtype=input_dtype)
    lasot_depth_train = Lasot_depth(root=settings.env.lasotdepth_dir, dtype=input_dtype)
    depthtrack_train = DepthTrack(root=settings.env.depthtrack_dir, split='train', dtype=input_dtype)
    depthtrack_val = DepthTrack(root=settings.env.depthtrack_dir, split='val', dtype=input_dtype)
    transform_joint = tfm.Transform(tfm.ToGrayscale(probability=0.05))
    transform_train = tfm.Transform(tfm.ToTensorAndJitter(0.2), tfm.Normalize(mean=settings.normalize_mean, std=settings.normalize_std))
    transform_val = tfm.Transform(tfm.ToTensor(), tfm.Normalize(mean=settings.normalize_mean, std=settings.normalize_std))
    proposal_params = {'min_iou': 0.1, 'boxes_per_frame': 16, 'sigma_factor': [0.01, 0.05, 0.1, 0.2, 0.3]}
    data_processing_train = processing.ATOMProcessing(search_area_factor=settings.search_area_factor, output_sz=settings.output_sz, center_jitter_factor=settings.center_jitter_factor, scale_jitter_factor=settings.scale_jitter_factor, mode='sequence', proposal_params=proposal_params, transform=transform_train, joint_transform=transform_joint)
    data_processing_val = processing.ATOMProcessing(search_area_factor=settings.search_area_factor, output_sz=settings.output_sz, center_jitter_factor=settings.center_jitter_factor, scale_jitter_factor=settings.scale_jitter_factor, mode='sequence', proposal_params=proposal_params, transform=transform_val, joint_transform=transform_joint)
    dataset_train = sampler.ATOMSampler([lasot_depth_train, depthtrack_train, coco_train], [1, 1, 1], samples_per_epoch=(1000 * settings.batch_size), max_gap=50, processing=data_processing_train)
    loader_train = LTRLoader('train', dataset_train, training=True, batch_size=settings.batch_size, num_workers=settings.num_workers, shuffle=True, drop_last=True, stack_dim=1)
    dataset_val = sampler.ATOMSampler([depthtrack_val], [1], samples_per_epoch=(500 * settings.batch_size), max_gap=50, processing=data_processing_val)
    loader_val = LTRLoader('val', dataset_val, training=False, batch_size=settings.batch_size, num_workers=settings.num_workers, shuffle=False, drop_last=True, epoch_interval=5, stack_dim=1)
    net = atom_models.atom_resnet18_DeT(backbone_pretrained=True, merge_type='max')
    objective = nn.MSELoss()
    actor = actors.AtomActor(net=net, objective=objective)
    optimizer = optim.Adam([{'params': actor.net.bb_regressor.parameters()}, {'params': actor.net.feature_extractor.parameters(), 'lr': 2e-05}, {'params': actor.net.feature_extractor_depth.parameters(), 'lr': 2e-05}], lr=0.001)
    lr_scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=15, gamma=0.2)
    trainer = LTRTrainer(actor, [loader_train, loader_val], optimizer, settings, lr_scheduler)
    trainer.train(80, load_latest=True, fail_safe=True)
@pytest.mark.parametrize('username,password', users)
@pytest.mark.parametrize('project_id', projects)
def test_project_update_import_post_import_file_empty(db, settings, client, username, password, project_id):
    client.login(username=username, password=password)
    projects_count = Project.objects.count()
    project = Project.objects.get(pk=project_id)
    project_updated = project.updated
    project_snapshot_count = project.snapshots.count()
    project_snapshot_values_count = project.values.filter(snapshot=None).count()
    project_values_count = project.values.count()
    url = reverse('project_update_import', args=[project_id])
    xml_file = os.path.join(settings.BASE_DIR, 'xml', 'project.xml')
    with open(xml_file, encoding='utf8') as f:
        response = client.post(url, {'method': 'upload_file', 'uploaded_file': f})
    if (project_id in change_project_permission_map.get(username, [])):
        assert (response.status_code == 302)
        assert response.url.startswith(f'/projects/{project_id}/import/')
        response = client.get(response.url)
        assert (response.status_code == 200)
        response = client.post(url, {'method': 'import_file'})
        for file_value in Value.objects.filter(value_type=VALUE_TYPE_FILE):
            assert Path(settings.MEDIA_ROOT).joinpath(file_value.file.name).exists()
        project = Project.objects.get(pk=project_id)
        assert (Project.objects.count() == projects_count)
        assert (project.snapshots.count() == project_snapshot_count)
        assert (project.values.count() == project_values_count)
        assert (project.values.filter(snapshot=None).count() == project_snapshot_values_count)
        assert (project.updated == project_updated)
        assert (response.status_code == 302)
        assert (response.url == f'/projects/{project_id}/')
    elif password:
        assert (response.status_code == 403)
    else:
        assert (response.status_code == 302)
        assert response.url.startswith('/account/login/')
class MultiQueryClient(RouterQueryClient):

    def __init__(self, *clients: RouterQueryClient, hooks: Optional[ExecutionHooks]=None) -> None:
        self.clients = clients
        self.metrics_wrapper = StreamingErrorHandler(hooks)

    async def stream(self, model: str, prompt: Prompt, request: Request, priority: QueuePriority) -> AsyncIterator[AviaryModelResponse]:
        (client, _) = (await self._find_client_for_model(model))
        if (not client):
            raise HTTPException(status.HTTP_404_NOT_FOUND, f'Unable to find {model}. Please ensure that the model exists and you have permission.')
        with step('aviary_request', request.state.request_id, baggage={'model_id': model}) as span:
            request.state.aviary_request_span = span
            async for x in self.metrics_wrapper.handle_failure(model, request, prompt, client.stream(model, prompt, request, priority)):
                (yield x)

    async def _find_client_for_model(self, model: str, raise_if_missing=True) -> Tuple[(RouterQueryClient, ModelData)]:
        for client in self.clients:
            model_def = (await client.model(model))
            if model_def:
                return (client, model_def)
        return (None, None)

    async def models(self):
        all_model_data: List[Dict[(str, ModelData)]] = (await asyncio.gather(*(client.models() for client in self.clients)))
        return {k: v for model_data in reversed(all_model_data) for (k, v) in model_data.items()}

    async def model(self, model_id: str):
        (_, model_data) = (await self._find_client_for_model(model_id))
        return model_data
def get_hits(first_blood, cnt, filename):
    if (cnt % 2):
        start = another_player(first_blood)
    else:
        start = first_blood
    data = pd.read_csv(filename)
    result = pd.DataFrame([])
    # The original relied on an implicit `hitting` list; initialize it here.
    hitting = []
    for i in range(len(data['frame_num'])):
        hitting.append(start)
        start = another_player(start)
        if (isinstance(data['lose_reason'][i], str) and isinstance(data['getpoint_player'][i], str)):
            start = data['getpoint_player'][i]
    result['hitting'] = hitting
    data['hitting'] = result['hitting']
    data.to_csv(filename, index=False)
class CryptHash(BaseHash):

    def __init__(self, algorithm):
        super(CryptHash, self).__init__(algorithm)
        if (not HAS_CRYPT):
            # Chain the import-time failure instead of the invalid
            # Exception(..., orig_exc=...) call the original used.
            raise Exception("crypt.crypt cannot be used as the 'crypt' python library is not installed or is unusable.") from CRYPT_E
        if sys.platform.startswith('darwin'):
            raise Exception('crypt.crypt not supported on Mac OS X/Darwin, install passlib python module')
        if (algorithm not in self.algorithms):
            raise Exception(("crypt.crypt does not support '%s' algorithm" % self.algorithm))
        self.algo_data = self.algorithms[algorithm]

    def hash(self, secret, salt=None, salt_size=None, rounds=None, ident=None):
        salt = self._salt(salt, salt_size)
        rounds = self._rounds(rounds)
        ident = self._ident(ident)
        return self._hash(secret, salt, rounds, ident)

    def _salt(self, salt, salt_size):
        salt_size = (salt_size or self.algo_data.salt_size)
        ret = (salt or random_salt(salt_size))
        if re.search('[^./0-9A-Za-z]', ret):
            raise Exception('invalid characters in salt')
        if ((self.algo_data.salt_exact and (len(ret) != self.algo_data.salt_size)) or ((not self.algo_data.salt_exact) and (len(ret) > self.algo_data.salt_size))):
            raise Exception('invalid salt size')
        return ret

    def _rounds(self, rounds):
        if (rounds == self.algo_data.implicit_rounds):
            return None
        else:
            return rounds

    def _ident(self, ident):
        if (not ident):
            return self.algo_data.crypt_id
        if (self.algorithm == 'bcrypt'):
            return ident
        return None

    def _hash(self, secret, salt, rounds, ident):
        saltstring = ''
        if ident:
            saltstring = ('$%s' % ident)
        if rounds:
            saltstring += ('$rounds=%d' % rounds)
        saltstring += ('$%s' % salt)
        try:
            result = Crypto.crypt(secret, saltstring)
            orig_exc = None
        except OSError as e:
            result = None
            orig_exc = e
        if (not result):
            raise Exception(("crypt.crypt does not support '%s' algorithm" % self.algorithm)) from orig_exc
        return result
class LogFilter():

    def __init__(self, config: Dict[(str, str)], default_level: str):
        self._should_log: Dict[(Tuple[(str, str)], bool)] = {}
        self._default_level = config.get('', default_level)
        self._log_rules = [((logger.split('.') if logger else []), level) for (logger, level) in config.items()]

    def _get_log_level(self, logger_name: str) -> str:
        best_match_length = 0
        best_match_level = self._default_level
        for module in self._log_rules:
            (match_length, level) = _match_list(module, logger_name)
            if ((match_length > best_match_length) and (level is not None)):
                best_match_length = match_length
                best_match_level = level
        return best_match_level

    def should_log(self, logger_name: str, level: str) -> bool:
        if ((logger_name, level) not in self._should_log):
            log_level_per_rule = self._get_log_level(logger_name)
            log_level_per_rule_numeric = getattr(logging, log_level_per_rule.upper(), 10)
            log_level_event_numeric = getattr(logging, level.upper(), 10)
            should_log = (log_level_event_numeric >= log_level_per_rule_numeric)
            self._should_log[(logger_name, level)] = should_log
        return self._should_log[(logger_name, level)]
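# A sketch of how LogFilter is meant to be driven, assuming the _match_list
# helper defined alongside this class does longest-prefix matching on dotted
# logger names. Config keys are dotted names; '' sets the default level.
log_filter = LogFilter({'': 'INFO', 'raiden.network': 'DEBUG'}, default_level='INFO')
assert log_filter.should_log('raiden.network.transport', 'DEBUG')   # rule allows DEBUG
assert not log_filter.should_log('raiden.tasks', 'DEBUG')           # default is INFO
assert log_filter.should_log('raiden.tasks', 'WARNING')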
@dataclass(frozen=True)
class ActionInitMediator(BalanceProofStateChange):
    from_hop: HopState
    candidate_route_states: List[RouteState]
    from_transfer: LockedTransferSignedState

    def __post_init__(self) -> None:
        super().__post_init__()
        typecheck(self.from_hop, HopState)
        typecheck(self.from_transfer, LockedTransferSignedState)
class SQLAlchemyMetaDataProvider(MetaDataProvider):

    def __init__(self, url: str, engine_kwargs: Optional[Dict[(str, Any)]]=None):
        super().__init__()
        self.metadata_obj = MetaData()
        try:
            if (engine_kwargs is None):
                engine_kwargs = {}
            self.engine = create_engine(url, **engine_kwargs)
        except NoSuchModuleError as e:
            u = make_url(url)
            raise MetaDataProviderException(f'SQLAlchemy dialect driver {u.drivername} is not installed correctly') from e
        try:
            self.engine.connect()
        except OperationalError as e:
            raise MetaDataProviderException(f'Could not connect to {url}') from e

    def _get_table_columns(self, schema: str, table: str, **kwargs) -> List[str]:
        columns = []
        try:
            sqlalchemy_table = Table(table, self.metadata_obj, schema=schema, autoload_with=self.engine)
            columns = [c.name for c in sqlalchemy_table.columns]
        except (NoSuchTableError, OperationalError):
            logger.warning('error listing columns for table %s.%s in %s, return empty list instead', schema, table, self.engine.url)
        return columns
@pytest.mark.parametrize('has_output_dir', [False, True])
def test_on_output_file_button_exists(skip_qtbot, tmp_path, mocker, has_output_dir):
    mock_prompt = mocker.patch('randovania.gui.lib.common_qt_lib.prompt_user_for_output_file', autospec=True)
    if has_output_dir:
        output_directory = tmp_path.joinpath('output_path')
        expected_default_name = str(tmp_path.joinpath('output_path', 'Echoes Randomizer - MyHash'))
        output_directory.mkdir()
    else:
        output_directory = None
        expected_default_name = 'Echoes Randomizer - MyHash'
    options = MagicMock()
    options.options_for_game.return_value = EchoesPerGameOptions(cosmetic_patches=EchoesCosmeticPatches.default(), output_directory=output_directory)
    window = EchoesGameExportDialog(options, {}, 'MyHash', True, [])
    mock_prompt.return_value = tmp_path.joinpath('foo', 'game.iso')
    skip_qtbot.mouseClick(window.output_file_button, QtCore.Qt.MouseButton.LeftButton)
    mock_prompt.assert_called_once_with(window, (expected_default_name + '.iso'), ['iso'])
    assert (window.output_file_edit.text() == str(tmp_path.joinpath('foo', 'game.iso')))
    assert tmp_path.joinpath('foo').is_dir()
@pytest.mark.parametrize('provider', providers)
def test_select_table_join_multiple_wildcards(provider: MetaDataProvider):
    sql = 'insert into test_v\n select a, h\n from (\n select x.*, y.* from db.tbl_x x\n join db.tbl_y y on x.id = y.id\n ) t\n '
    assert_column_lineage_equal(
        sql,
        [(ColumnQualifierTuple('a', 'db.tbl_x'), ColumnQualifierTuple('a', '<default>.test_v')),
         (ColumnQualifierTuple('h', 'db.tbl_y'), ColumnQualifierTuple('h', '<default>.test_v'))],
        metadata_provider=provider,
    )
@attr.s
class EvalConfig():
    config = attr.ib()
    config_args = attr.ib()
    logdir = attr.ib()
    section = attr.ib()
    inferred = attr.ib()
    output = attr.ib()
    eval_tb_dir = attr.ib()
    vis_dir = attr.ib()
    part = attr.ib(default='spider')
    data = attr.ib(default=None)
    virtuoso_server = attr.ib(default=None)
def test_cmdfinalization_hook_exception(capsys):
    app = PluggedApp()
    app.register_cmdfinalization_hook(app.cmdfinalization_hook_exception)
    stop = app.onecmd_plus_hooks('say hello')
    (out, err) = capsys.readouterr()
    assert (not stop)
    assert (out == 'hello\n')
    assert err
    assert (app.called_cmdfinalization == 1)
    app.reset_counters()
    app.register_cmdfinalization_hook(app.cmdfinalization_hook)
    stop = app.onecmd_plus_hooks('say hello')
    (out, err) = capsys.readouterr()
    assert (not stop)
    assert (out == 'hello\n')
    assert err
    assert (app.called_cmdfinalization == 1)
class ConstructorHooks(PyObjectSequenceOption[_ConstructorHook]):
    name = 'constructor_hooks'

    @classmethod
    def get_constructor(cls, typ: type, options: Options) -> _HookReturn:
        for hook in options.get_value_for(cls):
            result = hook(typ)
            if (result is not None):
                return result
        return None
@PIPELINES.register_module()
class LoadAnnotations(object):

    def __init__(self, reduce_zero_label=False, file_client_args=dict(backend='disk'), imdecode_backend='pillow'):
        self.reduce_zero_label = reduce_zero_label
        self.file_client_args = file_client_args.copy()
        self.file_client = None
        self.imdecode_backend = imdecode_backend

    def __call__(self, results):
        if (self.file_client is None):
            self.file_client = mmcv.FileClient(**self.file_client_args)
        if (results.get('seg_prefix', None) is not None):
            filename = osp.join(results['seg_prefix'], results['ann_info']['seg_map'])
        else:
            filename = results['ann_info']['seg_map']
        img_bytes = self.file_client.get(filename)
        gt_semantic_seg = mmcv.imfrombytes(img_bytes, flag='unchanged', backend=self.imdecode_backend).squeeze().astype(np.uint8)
        if (results.get('label_map', None) is not None):
            for (old_id, new_id) in results['label_map'].items():
                gt_semantic_seg[(gt_semantic_seg == old_id)] = new_id
        if self.reduce_zero_label:
            gt_semantic_seg[(gt_semantic_seg == 0)] = 255
            gt_semantic_seg = (gt_semantic_seg - 1)
            gt_semantic_seg[(gt_semantic_seg == 254)] = 255
        results['gt_semantic_seg'] = gt_semantic_seg
        results['seg_fields'].append('gt_semantic_seg')
        return results

    def __repr__(self):
        repr_str = self.__class__.__name__
        repr_str += f'(reduce_zero_label={self.reduce_zero_label},'
        repr_str += f"imdecode_backend='{self.imdecode_backend}')"
        return repr_str
def test_fit_start(inference_spec, simple_model):
    mu_init = 17
    mu_sigma_init = 13
    with simple_model:
        if (type(inference_spec()) == ASVGD):
            return
        elif (type(inference_spec()) == ADVI):
            has_start_sigma = True
        else:
            has_start_sigma = False
    kw = {'start': {'mu': mu_init}}
    if has_start_sigma:
        kw.update({'start_sigma': {'mu': mu_sigma_init}})
    with simple_model:
        inference = inference_spec(**kw)
    [observed_value] = [simple_model.rvs_to_values[obs] for obs in simple_model.observed_RVs]
    if observed_value.name.startswith('minibatch'):
        warn_ctxt = pytest.warns(UserWarning, match='Could not extract data from symbolic observation')
    else:
        warn_ctxt = nullcontext()
    try:
        with warn_ctxt:
            trace = inference.fit(n=0).sample(10000)
    except NotImplementedInference as e:
        pytest.skip(str(e))
    np.testing.assert_allclose(np.mean(trace.posterior['mu']), mu_init, rtol=0.05)
    if has_start_sigma:
        np.testing.assert_allclose(np.std(trace.posterior['mu']), mu_sigma_init, rtol=0.05)
def host_from_dict(host_dict):
    if (not host_dict):
        return None
    # Replace the Python 2 dict.has_key() calls with `in` checks.
    name = (host_dict['host_name'] if ('host_name' in host_dict) else None)
    domain_name = (host_dict['domain_name'] if ('domain_name' in host_dict) else None)
    ip = (host_dict['ip_address'] if ('ip_address' in host_dict) else None)
    if ('mac_address' in host_dict):
        mac = host_dict['mac_address']
        mac = ('%s (%s)' % (mac, getoui(mac)))
    else:
        mac = None
    if ((ip == '') or (ip == '0.0.0.0') or (ip == 'unknown')):
        ip = None
        mac = None
    if ((mac == '') or (mac == '00:00:00:00:00:00')):
        mac = None
    try:
        host = Host(name, domain_name, ip, mac)
    except Exception:
        host = None
    return host
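# Illustrative input for host_from_dict (assumes the Host model and getoui
# OUI lookup used above; the dict values here are made up). Empty or zeroed
# addresses are normalized to None before the Host is built.
host = host_from_dict({
    'host_name': 'printer01',
    'domain_name': 'lan.example',
    'ip_address': '192.168.1.42',
    'mac_address': '00:11:22:33:44:55',
})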
class MSDilationBlock(nn.Module):

    def __init__(self, in_channels, kernel_size=3, dilation=[1, 1, 1, 1], bias=True):
        super(MSDilationBlock, self).__init__()
        self.conv_blocks = nn.ModuleList()
        for i in range(4):
            self.conv_blocks.append(conv_block(in_channels, in_channels, kernel_size, dilation=dilation[i], bias=bias))
        self.conv_fusion = SpectralNorm(nn.Conv2d((in_channels * 4), in_channels, kernel_size=kernel_size, stride=1, padding=((kernel_size - 1) // 2), bias=bias))

    def forward(self, x):
        out = []
        for i in range(4):
            out.append(self.conv_blocks[i](x))
        out = torch.cat(out, 1)
        out = (self.conv_fusion(out) + x)
        return out
class MockFTP(ftplib.FTP):

    def __init__(self):
        self._files = None
        self._size = 0
        self._dirlist = None
        self._exists = True
        self._stack = deque()
        self._contents = ''

    def storbinary(self, command, f):
        f.seek(0, os.SEEK_END)
        self._size = f.tell()

    def retrbinary(self, command, callback):
        callback(self._contents)
        return

    def pwd(self):
        return '/'.join(self._stack)

    def nlst(self, dirname=None):
        return self._files

    def mkd(self, dirname):
        return

    def rmd(self, dirname):
        return

    def delete(self, filename):
        if (not self._exists):
            raise Exception("Doesn't exist")
        return True

    def rename(self, fromname, toname):
        return

    def cwd(self, pathname):
        if (not self._exists):
            self._exists = True
            raise Exception("Doesn't exist")
        for dir_ in pathname.split('/'):
            if (dir_ == '..'):
                self._stack.pop()
            else:
                self._stack.append(dir_)

    def size(self, filename):
        return self._size

    def dir(self, dirname, callback):
        for line in self._dirlist.splitlines():
            callback(line)

    def sendcmd(self, command):
        return command

    def set_pasv(self, passive):
        return passive

    def quit(self):
        raise Exception('Fake a problem with quit')

    def close(self):
        return True

    def _set_files(self, files):
        self._files = files

    def _set_dirlist(self, dirlist):
        self._dirlist = dirlist

    def _set_exists(self, exists):
        self._exists = exists

    def _set_contents(self, contents):
        self._contents = contents
@add_arg_scope
def avg_pool3d(inputs, kernel_size, stride=2, padding='VALID', data_format=DATA_FORMAT_NDHWC, outputs_collections=None, scope=None):
    if (data_format not in (DATA_FORMAT_NCDHW, DATA_FORMAT_NDHWC)):
        raise ValueError('data_format has to be either NCDHW or NDHWC.')
    with ops.name_scope(scope, 'AvgPool3D', [inputs]) as sc:
        inputs = ops.convert_to_tensor(inputs)
        df = ('channels_first' if (data_format and data_format.startswith('NC')) else 'channels_last')
        layer = pooling_layers.AveragePooling3D(pool_size=kernel_size, strides=stride, padding=padding, data_format=df, _scope=sc)
        outputs = layer.apply(inputs)
        return utils.collect_named_outputs(outputs_collections, sc, outputs)
class TestPayloadUtility():
    # The float64/uint16 bytes in both payloads were garbled in extraction;
    # they are reconstructed here from the values the decoder tests expect
    # (6.25 as float64, then uint16 1): '\x19@' little endian, '@\x19' big endian.
    little_endian_payload = b'\x01\x02\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\xff\xfe\xff\xfd\xff\xff\xff\xfc\xff\xff\xff\xff\xff\xff\xff\x00\x00\xa0?\x00\x00\x00\x00\x00\x00\x19@\x01\x00test\x11'
    big_endian_payload = b'\x01\x00\x02\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x04\xff\xff\xfe\xff\xff\xff\xfd\xff\xff\xff\xff\xff\xff\xff\xfc?\xa0\x00\x00@\x19\x00\x00\x00\x00\x00\x00\x00\x01test\x11'
    bitstring = [True, False, False, False, True, False, False, False]

    def test_little_endian_payload_builder(self):
        builder = BinaryPayloadBuilder(byteorder=Endian.LITTLE, wordorder=Endian.LITTLE)
        builder.add_8bit_uint(1)
        builder.add_16bit_uint(2)
        builder.add_32bit_uint(3)
        builder.add_64bit_uint(4)
        builder.add_8bit_int((- 1))
        builder.add_16bit_int((- 2))
        builder.add_32bit_int((- 3))
        builder.add_64bit_int((- 4))
        builder.add_32bit_float(1.25)
        builder.add_64bit_float(6.25)
        builder.add_16bit_uint(1)
        builder.add_string('test')
        builder.add_bits(self.bitstring)
        assert (self.little_endian_payload == builder.encode())

    def test_big_endian_payload_builder(self):
        builder = BinaryPayloadBuilder(byteorder=Endian.BIG)
        builder.add_8bit_uint(1)
        builder.add_16bit_uint(2)
        builder.add_32bit_uint(3)
        builder.add_64bit_uint(4)
        builder.add_8bit_int((- 1))
        builder.add_16bit_int((- 2))
        builder.add_32bit_int((- 3))
        builder.add_64bit_int((- 4))
        builder.add_32bit_float(1.25)
        builder.add_64bit_float(6.25)
        builder.add_16bit_uint(1)
        builder.add_string('test')
        builder.add_bits(self.bitstring)
        assert (self.big_endian_payload == builder.encode())

    def test_payload_builder_reset(self):
        builder = BinaryPayloadBuilder()
        builder.add_8bit_uint(18)
        builder.add_8bit_uint(52)
        builder.add_8bit_uint(86)
        builder.add_8bit_uint(120)
        assert (builder.encode() == b'\x124Vx')
        assert (builder.build() == [b'\x124', b'Vx'])
        builder.reset()
        assert (not builder.encode())
        assert (not builder.build())

    def test_payload_builder_with_raw_payload(self):
        _coils1 = [False, False, True, True, False, True, False, False, False, False, False, True, False, False, True, False, False, True, True, True, True, False, False, False, False, True, False, True, False, True, True, False]
        _coils2 = [False, False, False, True, False, False, True, False, False, False, True, True, False, True, False, False, False, True, False, True, False, True, True, False, False, True, True, True, True, False, False, False]
        builder = BinaryPayloadBuilder([b'\x12', b'4', b'V', b'x'], repack=True)
        assert (builder.encode() == b'\x124Vx')
        assert (builder.to_registers() == [13330, 30806])
        coils = builder.to_coils()
        assert (_coils1 == coils)
        builder = BinaryPayloadBuilder([b'\x12', b'4', b'V', b'x'], byteorder=Endian.BIG)
        assert (builder.encode() == b'\x124Vx')
        assert (builder.to_registers() == [4660, 22136])
        assert (str(builder) == '\x124Vx')
        coils = builder.to_coils()
        assert (_coils2 == coils)

    def test_little_endian_payload_decoder(self):
        decoder = BinaryPayloadDecoder(self.little_endian_payload, byteorder=Endian.LITTLE, wordorder=Endian.LITTLE)
        assert (decoder.decode_8bit_uint() == 1)
        assert (decoder.decode_16bit_uint() == 2)
        assert (decoder.decode_32bit_uint() == 3)
        assert (decoder.decode_64bit_uint() == 4)
        assert (decoder.decode_8bit_int() == (- 1))
        assert (decoder.decode_16bit_int() == (- 2))
        assert (decoder.decode_32bit_int() == (- 3))
        assert (decoder.decode_64bit_int() == (- 4))
        assert (decoder.decode_32bit_float() == 1.25)
        assert (decoder.decode_64bit_float() == 6.25)
        assert (not decoder.skip_bytes(2))
        assert (decoder.decode_string(4).decode() == 'test')
        assert (self.bitstring == decoder.decode_bits())

    def test_big_endian_payload_decoder(self):
        decoder = BinaryPayloadDecoder(self.big_endian_payload, byteorder=Endian.BIG)
        assert (decoder.decode_8bit_uint() == 1)
        assert (decoder.decode_16bit_uint() == 2)
        assert (decoder.decode_32bit_uint() == 3)
        assert (decoder.decode_64bit_uint() == 4)
        assert (decoder.decode_8bit_int() == (- 1))
        assert (decoder.decode_16bit_int() == (- 2))
        assert (decoder.decode_32bit_int() == (- 3))
        assert (decoder.decode_64bit_int() == (- 4))
        assert (decoder.decode_32bit_float() == 1.25)
        assert (decoder.decode_64bit_float() == 6.25)
        assert (not decoder.skip_bytes(2))
        assert (decoder.decode_string(4) == b'test')
        assert (self.bitstring == decoder.decode_bits())

    def test_payload_decoder_reset(self):
        decoder = BinaryPayloadDecoder(b'\x124')
        assert (decoder.decode_8bit_uint() == 18)
        assert (decoder.decode_8bit_uint() == 52)
        decoder.reset()
        assert (decoder.decode_16bit_uint() == 13330)

    def test_payload_decoder_register_factory(self):
        payload = [1, 2, 3, 4]
        decoder = BinaryPayloadDecoder.fromRegisters(payload, byteorder=Endian.LITTLE)
        encoded = b'\x00\x01\x00\x02\x00\x03\x00\x04'
        assert (encoded == decoder.decode_string(8))
        decoder = BinaryPayloadDecoder.fromRegisters(payload, byteorder=Endian.BIG)
        encoded = b'\x00\x01\x00\x02\x00\x03\x00\x04'
        assert (encoded == decoder.decode_string(8))
        with pytest.raises(ParameterException):
            BinaryPayloadDecoder.fromRegisters('abcd')

    def test_payload_decoder_coil_factory(self):
        payload = [1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1]
        decoder = BinaryPayloadDecoder.fromCoils(payload, byteorder=Endian.LITTLE)
        encoded = b'\x88\x11'
        assert (encoded == decoder.decode_string(2))
        decoder = BinaryPayloadDecoder.fromCoils(payload, byteorder=Endian.BIG)
        encoded = b'\x88\x11'
        assert (encoded == decoder.decode_string(2))
        with pytest.raises(ParameterException):
            BinaryPayloadDecoder.fromCoils('abcd')
@pytest.mark.parametrize('info1, info2, count', [
    (message.MessageInfo(usertypes.MessageLevel.info, 'test'), message.MessageInfo(usertypes.MessageLevel.info, 'test'), 1),
    (message.MessageInfo(usertypes.MessageLevel.info, 'test'), message.MessageInfo(usertypes.MessageLevel.info, 'test2'), 2),
    (message.MessageInfo(usertypes.MessageLevel.info, 'test'), message.MessageInfo(usertypes.MessageLevel.error, 'test'), 2),
    (message.MessageInfo(usertypes.MessageLevel.info, 'test', rich=True), message.MessageInfo(usertypes.MessageLevel.info, 'test', rich=False), 2),
    (message.MessageInfo(usertypes.MessageLevel.info, 'test'), message.MessageInfo(usertypes.MessageLevel.info, 'test', replace='test'), 2),
])
def test_show_message_twice(view, info1, info2, count):
    view.show_message(info1)
    view.show_message(info2)
    assert (len(view._messages) == count)
def accum_slots(usr_act_turns):
    inform_hist = {}
    book_inform_hist = {}
    output_str = []
    for usr_act in usr_act_turns:
        if (usr_act.act in ['inform_type', 'inform_type_change']):
            inform_hist.update(usr_act.parameters)
        elif (usr_act.act in ['make_reservation', 'make_reservation_change_time']):
            book_inform_hist.update(usr_act.parameters)
    for slot_name in inform_hist.keys():
        output_str.append(inform_hist[slot_name])
    output_str.append('EOS_Z1')
    for slot_name in book_inform_hist.keys():
        output_str.append(book_inform_hist[slot_name])
    output_str.append('EOS_Z3')
    if (usr_act_turns[(- 1)].act in ['request']):
        for slot in usr_act_turns[(- 1)].parameters:
            output_str.append(slot)
    output_str.append('EOS_Z2')
    return ' '.join(output_str)
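# A small sketch of the turn format accum_slots expects: objects with an
# `act` string and a `parameters` dict. UsrAct below is a stand-in for the
# real user-act class. The EOS_Z* tokens delimit the inform, book, and
# request slot sections of the output string.
from collections import namedtuple

UsrAct = namedtuple('UsrAct', ['act', 'parameters'])
turns = [
    UsrAct('inform_type', {'food': 'thai', 'area': 'north'}),
    UsrAct('make_reservation', {'time': '19:00'}),
]
print(accum_slots(turns))  # 'thai north EOS_Z1 19:00 EOS_Z3 EOS_Z2'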
class TestTargets(unittest.TestCase):

    def test_reverse_target(self):
        input_expected = [([0], [0]), ([0, 1], [1, 0]), ([1, 1], [1, 1]), ([1, 0, 1], [1, 0, 1]), ([0, 0, 1, 1], [1, 1, 0, 0])]
        env = alg.reverse.ReverseEnv()
        for (input_arr, expected) in input_expected:
            target = env.target_from_input_data(input_arr)
            self.assertEqual(target, expected)

    def test_reversed_addition_target(self):
        env = alg.reversed_addition.ReversedAdditionEnv(base=3)
        input_expected = [([[1, 1], [1, 1]], [2, 2]), ([[2, 2], [0, 1]], [1, 2]), ([[2, 1], [1, 1], [1, 1], [1, 0]], [0, 0, 0, 2])]
        for (input_grid, expected_target) in input_expected:
            self.assertEqual(env.target_from_input_data(input_grid), expected_target)

    def test_reversed_addition_3rows(self):
        env = alg.reversed_addition.ReversedAdditionEnv(base=3, rows=3)
        input_expected = [([[1, 1, 0], [0, 1, 1]], [2, 2]), ([[1, 1, 2], [0, 1, 1]], [1, 0, 1])]
        for (input_grid, expected_target) in input_expected:
            self.assertEqual(env.target_from_input_data(input_grid), expected_target)

    def test_copy_target(self):
        env = alg.copy_.CopyEnv()
        self.assertEqual(env.target_from_input_data([0, 1, 2]), [0, 1, 2])

    def test_duplicated_input_target(self):
        env = alg.duplicated_input.DuplicatedInputEnv(duplication=2)
        self.assertEqual(env.target_from_input_data([0, 0, 0, 0, 1, 1]), [0, 0, 1])

    def test_repeat_copy_target(self):
        env = alg.repeat_copy.RepeatCopyEnv()
        self.assertEqual(env.target_from_input_data([0, 1, 2]), [0, 1, 2, 2, 1, 0, 0, 1, 2])
def backlight(action):
    def f(qtile):
        brightness = int(subprocess.run(['xbacklight', '-get'], stdout=subprocess.PIPE).stdout)
        if ((brightness != 1) or (action != 'dec')):
            if (((brightness > 49) and (action == 'dec')) or ((brightness > 39) and (action == 'inc'))):
                subprocess.run(['xbacklight', f'-{action}', '10', '-fps', '10'])
            else:
                subprocess.run(['xbacklight', f'-{action}', '1'])
    return f
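# How backlight() is typically wired into a qtile config: lazy.function
# calls f(qtile) when the key fires. The XF86 key names are the usual
# brightness media keys and are assumed here; xbacklight must be on PATH.
from libqtile.config import Key
from libqtile.lazy import lazy

keys = [
    Key([], 'XF86MonBrightnessUp', lazy.function(backlight('inc'))),
    Key([], 'XF86MonBrightnessDown', lazy.function(backlight('dec'))),
]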
class ConverterTests(unittest.IsolatedAsyncioTestCase):

    @classmethod
    def setUpClass(cls):
        # Use a MagicMock instance rather than the MagicMock class itself,
        # so the `author` attribute does not leak onto the class globally.
        cls.context = MagicMock()
        cls.context.author = 'bob'
        cls.fixed_utc_now = datetime.fromisoformat('2019-01-01T00:00:00+00:00')

    async def test_package_name_for_valid(self):
        test_values = ('foo', 'le_mon', 'num83r')
        for name in test_values:
            with self.subTest(identifier=name):
                conversion = (await PackageName.convert(self.context, name))
                self.assertEqual(name, conversion)

    async def test_package_name_for_invalid(self):
        test_values = ('text_with_a_dot.', 'UpperCaseName', 'dashed-name')
        for name in test_values:
            with self.subTest(identifier=name), self.assertRaises(BadArgument):
                (await PackageName.convert(self.context, name))

    async def test_duration_converter_for_valid(self):
        test_values = (
            ('1Y', {'years': 1}),
            ('1y', {'years': 1}),
            ('1year', {'years': 1}),
            ('1years', {'years': 1}),
            ('1m', {'months': 1}),
            ('1month', {'months': 1}),
            ('1months', {'months': 1}),
            ('1w', {'weeks': 1}),
            ('1W', {'weeks': 1}),
            ('1week', {'weeks': 1}),
            ('1weeks', {'weeks': 1}),
            ('1d', {'days': 1}),
            ('1D', {'days': 1}),
            ('1day', {'days': 1}),
            ('1days', {'days': 1}),
            ('1h', {'hours': 1}),
            ('1H', {'hours': 1}),
            ('1hour', {'hours': 1}),
            ('1hours', {'hours': 1}),
            ('1M', {'minutes': 1}),
            ('1minute', {'minutes': 1}),
            ('1minutes', {'minutes': 1}),
            ('1s', {'seconds': 1}),
            ('1S', {'seconds': 1}),
            ('1second', {'seconds': 1}),
            ('1seconds', {'seconds': 1}),
            ('1y1m1w1d1H1M1S', {'years': 1, 'months': 1, 'weeks': 1, 'days': 1, 'hours': 1, 'minutes': 1, 'seconds': 1}),
            ('5y100S', {'years': 5, 'seconds': 100}),
            ('2w28H', {'weeks': 2, 'hours': 28}),
            ('1 year 2 months', {'years': 1, 'months': 2}),
            ('1d 2H', {'days': 1, 'hours': 2}),
            ('1 week2 days', {'weeks': 1, 'days': 2}),
        )
        converter = Duration()
        for (duration, duration_dict) in test_values:
            expected_datetime = (self.fixed_utc_now + relativedelta(**duration_dict))
            with patch('bot.converters.datetime') as mock_datetime:
                mock_datetime.now.return_value = self.fixed_utc_now
                with self.subTest(duration=duration, duration_dict=duration_dict):
                    converted_datetime = (await converter.convert(self.context, duration))
                    self.assertEqual(converted_datetime, expected_datetime)

    async def test_duration_converter_for_invalid(self):
        test_values = ('1d1w', '1s1y', '1 year 2 years', '1 M 10 minutes', '1MVes', '1y3breads', 'ym', ' 1y', '1S ', '1y 1m', 'Guido van Rossum', 'lemon lemon lemon lemon lemon lemon lemon')
        converter = Duration()
        for invalid_duration in test_values:
            with self.subTest(invalid_duration=invalid_duration):
                exception_message = f'`{invalid_duration}` is not a valid duration string.'
                with self.assertRaisesRegex(BadArgument, re.escape(exception_message)):
                    (await converter.convert(self.context, invalid_duration))

    @patch('bot.converters.datetime')
    async def test_duration_converter_out_of_range(self, mock_datetime):
        mock_datetime.__add__.side_effect = ValueError
        mock_datetime.now.return_value = mock_datetime
        duration = f'{MAXYEAR}y'
        exception_message = f'`{duration}` results in a datetime outside the supported range.'
        with self.assertRaisesRegex(BadArgument, re.escape(exception_message)):
            (await Duration().convert(self.context, duration))

    async def test_isodatetime_converter_for_valid(self):
        test_values = (
            ('2019-09-02T02:03:05Z', datetime(2019, 9, 2, 2, 3, 5, tzinfo=UTC)),
            ('2019-09-02 02:03:05Z', datetime(2019, 9, 2, 2, 3, 5, tzinfo=UTC)),
            ('2019-09-02T03:18:05+01:15', datetime(2019, 9, 2, 2, 3, 5, tzinfo=UTC)),
            ('2019-09-02 03:18:05+01:15', datetime(2019, 9, 2, 2, 3, 5, tzinfo=UTC)),
            ('2019-09-02T00:48:05-01:15', datetime(2019, 9, 2, 2, 3, 5, tzinfo=UTC)),
            ('2019-09-02 00:48:05-01:15', datetime(2019, 9, 2, 2, 3, 5, tzinfo=UTC)),
            ('2019-09-02T03:18:05+0115', datetime(2019, 9, 2, 2, 3, 5, tzinfo=UTC)),
            ('2019-09-02 03:18:05+0115', datetime(2019, 9, 2, 2, 3, 5, tzinfo=UTC)),
            ('2019-09-02T00:48:05-0115', datetime(2019, 9, 2, 2, 3, 5, tzinfo=UTC)),
            ('2019-09-02 00:48:05-0115', datetime(2019, 9, 2, 2, 3, 5, tzinfo=UTC)),
            ('2019-09-02 03:03:05+01', datetime(2019, 9, 2, 2, 3, 5, tzinfo=UTC)),
            ('2019-09-02T01:03:05-01', datetime(2019, 9, 2, 2, 3, 5, tzinfo=UTC)),
            ('2019-09-02T02:03:05', datetime(2019, 9, 2, 2, 3, 5, tzinfo=UTC)),
            ('2019-09-02 02:03:05', datetime(2019, 9, 2, 2, 3, 5, tzinfo=UTC)),
            ('2019-11-12T09:15', datetime(2019, 11, 12, 9, 15, tzinfo=UTC)),
            ('2019-11-12 09:15', datetime(2019, 11, 12, 9, 15, tzinfo=UTC)),
            ('2019-04-01', datetime(2019, 4, 1, tzinfo=UTC)),
            ('2019-02-01', datetime(2019, 2, 1, tzinfo=UTC)),
            ('2025', datetime(2025, 1, 1, tzinfo=UTC)),
        )
        converter = ISODateTime()
        for (datetime_string, expected_dt) in test_values:
            with self.subTest(datetime_string=datetime_string, expected_dt=expected_dt):
                converted_dt = (await converter.convert(self.context, datetime_string))
                self.assertEqual(converted_dt, expected_dt)

    async def test_isodatetime_converter_for_invalid(self):
        test_values = ('1Y', '1d', '1H', '10:10:10', '10:00', '19-01-01', 'fisk the tag master')
        converter = ISODateTime()
        for datetime_string in test_values:
            with self.subTest(datetime_string=datetime_string):
                exception_message = f'`{datetime_string}` is not a valid ISO-8601 datetime string'
                with self.assertRaisesRegex(BadArgument, re.escape(exception_message)):
                    (await converter.convert(self.context, datetime_string))

    async def test_hush_duration_converter_for_valid(self):
        test_values = (('0', 0), ('15', 15), ('10', 10), ('5m', 5), ('5M', 5), ('forever', (- 1)))
        converter = HushDurationConverter()
        for (minutes_string, expected_minutes) in test_values:
            with self.subTest(minutes_string=minutes_string, expected_minutes=expected_minutes):
                converted = (await converter.convert(self.context, minutes_string))
                self.assertEqual(expected_minutes, converted)

    async def test_hush_duration_converter_for_invalid(self):
        test_values = (('16', 'Duration must be at most 15 minutes.'), ('10d', '10d is not a valid minutes duration.'), ('-1', '-1 is not a valid minutes duration.'))
        converter = HushDurationConverter()
        for (invalid_minutes_string, exception_message) in test_values:
            with self.subTest(invalid_minutes_string=invalid_minutes_string, exception_message=exception_message), self.assertRaisesRegex(BadArgument, re.escape(exception_message)):
                (await converter.convert(self.context, invalid_minutes_string))
def _is_an_unambiguous_user_argument(argument: str) -> bool:
    has_id_or_mention = bool((IDConverter()._get_id_match(argument) or RE_USER_MENTION.match(argument)))
    # Strip a leading '@' so 'user#1234' and '@user#1234' are treated alike
    # (the prefix character was lost in extraction; '@' is the evident intent).
    argument = argument.removeprefix('@')
    has_username = ((len(argument) > 5) and (argument[(- 5)] == '#'))
    return (has_id_or_mention or has_username)
def test_singlestep_cases_first(singlestage_cases, foregroundasync_backend):
    wflow = singlestage_cases
    inputdata = {'par': 0}
    wflow.view().init(inputdata)
    right_taskspec = wflow.rules[0].rule.stagespec['cases'][0]['step']
    assert (wflow.rules[0].applicable(wflow) == False)
    wflow.view().rules[(- 1)].apply(wflow)
    c = frommodel_controller('', {}, wflow)
    c.sync_backend()
    assert (len(wflow.dag.nodes()) == 1)
    assert (wflow.rules[0].applicable(wflow) == True)
    wflow.rules[0].apply(wflow)
    assert (len(wflow.dag.nodes()) == 2)
    assert (wflow.dag.getNodeByName('hello_world').task.spec == right_taskspec)
def get_things_to_delete(root):
    extensions = ['.exe', '.pyd', '.dll']
    all_libs = set()
    needed = set()
    for (base, dirs, files) in os.walk(root):
        for f in files:
            lib = f.lower()
            path = os.path.join(base, f)
            ext_lower = os.path.splitext(f)[(- 1)].lower()
            if (ext_lower in extensions):
                if (ext_lower == '.exe'):
                    needed.add(lib)
                all_libs.add(f.lower())
                for lib in get_dependencies(path):
                    all_libs.add(lib)
                    needed.add(lib)
                    if (not find_lib(root, lib)):
                        print('MISSING:', path, lib)
    for (namespace, version, lib) in get_required_by_typelibs():
        all_libs.add(lib)
        needed.add(lib)
        if (not find_lib(root, lib)):
            print('MISSING:', namespace, version, lib)
    to_delete = []
    for not_depended_on in (all_libs - needed):
        path = get_lib_path(root, not_depended_on)
        if path:
            to_delete.append(path)
    return to_delete
def test_identifiers(requests_mock):
    requests_mock.get(f'{API_V1}/observations/identifiers', json=j_observation_identifiers, status_code=200)
    results = iNatClient().observations.identifiers(place_id=125323, iconic_taxa='Amphibia')
    assert ((len(results) == 3) and isinstance(results[0], User))
    assert (results[0].id == 112514)
    assert (results[0].login == 'earthstephen')
    assert (results[0].count == 1)
class ConvTr1d_t(nn.Module):

    def __init__(self, in_c, out_c, k_size, **args):
        super().__init__()
        self.conv_tr = nn.ConvTranspose1d((in_c + 1), out_c, k_size, **args)
        self.init_params()

    def init_params(self):
        nn.init.xavier_uniform_(self.conv_tr.weight)
        nn.init.zeros_(self.conv_tr.bias)

    def forward(self, t, x):
        (b_x, c_x, h) = x.size()
        time = (torch.ones((b_x, 1, h)).to(x) * t)
        x_aug = torch.cat([x, time], dim=1)
        out = self.conv_tr(x_aug)
        return out
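# Shape check for ConvTr1d_t (assuming torch is imported as in the module
# above). The time channel is concatenated onto the input, so the transposed
# conv sees in_c + 1 channels; with stride 1, padding 0, and kernel 3, the
# length grows from 16 to (16 - 1) + 3 = 18.
layer = ConvTr1d_t(in_c=4, out_c=8, k_size=3)
x = torch.randn(2, 4, 16)
out = layer(torch.tensor(0.5), x)
assert out.shape == (2, 8, 18)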
class Migration(migrations.Migration):
    dependencies = [('views', '0029_view_order')]
    operations = [
        migrations.AlterField(model_name='view', name='help_lang1', field=models.TextField(blank=True, help_text='The help text for this view (in the primary language).', verbose_name='Help (primary)')),
        migrations.AlterField(model_name='view', name='help_lang2', field=models.TextField(blank=True, help_text='The help text for this view (in the secondary language).', verbose_name='Help (secondary)')),
        migrations.AlterField(model_name='view', name='help_lang3', field=models.TextField(blank=True, help_text='The help text for this view (in the tertiary language).', verbose_name='Help (tertiary)')),
        migrations.AlterField(model_name='view', name='help_lang4', field=models.TextField(blank=True, help_text='The help text for this view (in the quaternary language).', verbose_name='Help (quaternary)')),
        migrations.AlterField(model_name='view', name='help_lang5', field=models.TextField(blank=True, help_text='The help text for this view (in the quinary language).', verbose_name='Help (quinary)')),
        migrations.AlterField(model_name='view', name='title_lang1', field=models.CharField(blank=True, help_text='The title for this view (in the primary language).', max_length=256, verbose_name='Title (primary)')),
        migrations.AlterField(model_name='view', name='title_lang2', field=models.CharField(blank=True, help_text='The title for this view (in the secondary language).', max_length=256, verbose_name='Title (secondary)')),
        migrations.AlterField(model_name='view', name='title_lang3', field=models.CharField(blank=True, help_text='The title for this view (in the tertiary language).', max_length=256, verbose_name='Title (tertiary)')),
        migrations.AlterField(model_name='view', name='title_lang4', field=models.CharField(blank=True, help_text='The title for this view (in the quaternary language).', max_length=256, verbose_name='Title (quaternary)')),
        migrations.AlterField(model_name='view', name='title_lang5', field=models.CharField(blank=True, help_text='The title for this view (in the quinary language).', max_length=256, verbose_name='Title (quinary)')),
    ]
def setup_logger():
    Path('results/config_sweeper_logs/').mkdir(parents=True, exist_ok=True)
    logger = logging.getLogger(__name__)
    for handler in logger.root.handlers:
        if (type(handler) is logging.StreamHandler):
            handler.setLevel(logging.ERROR)
    formatter = logging.Formatter('%(asctime)s - %(message)s')
    now = datetime.datetime.now().__format__('%Y-%m-%d_%H-%M-%S')
    file_handler = logging.FileHandler(f'results/config_sweeper_logs/{now}.log', 'a')
    file_handler.setFormatter(formatter)
    file_handler.setLevel(logging.INFO)
    logger.addHandler(file_handler)
    logger.setLevel(logging.INFO)
    return logger
def get_indexed_dataset_(data_prefix, data_impl, skip_warmup):
    print_rank_0(' > building dataset index ...')
    start_time = time.time()
    indexed_dataset = make_indexed_dataset(data_prefix, data_impl, skip_warmup)
    assert (indexed_dataset.sizes.shape[0] == indexed_dataset.doc_idx[(- 1)])
    print_rank_0(' > finished creating indexed dataset in {:4f} seconds'.format((time.time() - start_time)))
    print_rank_0(' > indexed dataset stats:')
    print_rank_0('    number of documents: {}'.format((indexed_dataset.doc_idx.shape[0] - 1)))
    print_rank_0('    number of sentences: {}'.format(indexed_dataset.sizes.shape[0]))
    return indexed_dataset
class LBX(object):

    def __init__(self, formula, use_cld=False, solver_name='m22', use_timer=False):
        self.oracle = Solver(name=solver_name, bootstrap_with=formula.hard, use_timer=use_timer)
        self.solver = solver_name
        if (isinstance(formula, WCNFPlus) and formula.atms):
            assert self.oracle.supports_atmost(), '{0} does not support native cardinality constraints. Make sure you use the right type of formula.'.format(solver_name)
            for atm in formula.atms:
                self.oracle.add_atmost(*atm)
        self.topv = formula.nv
        self.soft = formula.soft
        self.sels = []
        self.ucld = use_cld
        VariableMap = collections.namedtuple('VariableMap', ['e2i', 'i2e'])
        self.vmap = VariableMap(e2i={}, i2e={})
        for v in range(1, (formula.nv + 1)):
            self.vmap.e2i[v] = v
            self.vmap.i2e[v] = v
        for cl in self.soft:
            sel = cl[0]
            if ((len(cl) > 1) or (cl[0] < 0)):
                self.topv += 1
                sel = self.topv
                self.oracle.add_clause((cl + [(- sel)]))
            self.sels.append(sel)

    def __del__(self):
        self.delete()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.delete()

    def delete(self):
        if self.oracle:
            self.oracle.delete()
            self.oracle = None

    def add_clause(self, clause, soft=False):
        cl = list(map((lambda l: self._map_extlit(l)), (clause if ((not (len(clause) == 2)) or (not (type(clause[0]) in (list, tuple, set)))) else clause[0])))
        if (not soft):
            if ((not (len(clause) == 2)) or (not (type(clause[0]) in (list, tuple, set)))):
                self.oracle.add_clause(cl)
            else:
                assert self.oracle.supports_atmost(), '{0} does not support native cardinality constraints. Make sure you use the right type of formula.'.format(self.solver)
                self.oracle.add_atmost(cl, clause[1])
        else:
            self.soft.append(cl)
            sel = cl[0]
            if ((len(cl) > 1) or (cl[0] < 0)):
                self.topv += 1
                sel = self.topv
                self.oracle.add_clause((cl + [(- sel)]))
            self.sels.append(sel)

    def compute(self, enable=[]):
        self.setd = []
        self.satc = [False for cl in self.soft]
        self.solution = None
        self.bb_assumps = []
        self.ss_assumps = []
        if self.oracle.solve(assumptions=[self.sels[(cl_id - 1)] for cl_id in enable]):
            self._filter_satisfied(update_setd=True)
            self._compute()
            self.solution = list(map((lambda i: (i + 1)), filter((lambda i: (not self.satc[i])), range(len(self.soft)))))
        return self.solution

    def enumerate(self):
        done = False
        while (not done):
            mcs = self.compute()
            if (mcs is not None):
                (yield mcs)
            else:
                done = True

    def block(self, mcs):
        self.oracle.add_clause([self.sels[(cl_id - 1)] for cl_id in mcs])

    def _satisfied(self, cl, model):
        for l in cl:
            if ((len(model) < abs(l)) or (model[(abs(l) - 1)] == l)):
                return True
        return False

    def _filter_satisfied(self, update_setd=False):
        model = self.oracle.get_model()
        setd = set()
        for (i, cl) in enumerate(self.soft):
            if (not self.satc[i]):
                if self._satisfied(cl, model):
                    self.satc[i] = True
                    self.ss_assumps.append(self.sels[i])
                else:
                    setd = setd.union(set(cl))
        if update_setd:
            self.setd = sorted(setd)

    def _compute(self):
        i = 0
        while (i < len(self.setd)):
            if self.ucld:
                self.do_cld_check(self.setd[i:])
                i = 0
            if self.setd:
                if self.oracle.solve(assumptions=((self.ss_assumps + self.bb_assumps) + [self.setd[i]])):
                    self._filter_satisfied()
                else:
                    self.bb_assumps.append((- self.setd[i]))
            i += 1

    def do_cld_check(self, cld):
        self.topv += 1
        sel = self.topv
        cld.append((- sel))
        self.oracle.add_clause(cld)
        if self.oracle.solve(assumptions=((self.ss_assumps + self.bb_assumps) + [sel])):
            self._filter_satisfied(update_setd=True)
        else:
            self.bb_assumps.extend([(- l) for l in cld[:(- 1)]])
            self.setd = []
        self.oracle.add_clause([(- sel)])

    def _map_extlit(self, l):
        v = abs(l)
        if (v in self.vmap.e2i):
            return int(copysign(self.vmap.e2i[v], l))
        else:
            self.topv += 1
            self.vmap.e2i[v] = self.topv
            self.vmap.i2e[self.topv] = v
            return int(copysign(self.topv, l))

    def oracle_time(self):
        return self.oracle.time_accum()
def _cobaltscript_generator(url, logger, jd, ppn, queue=None, run_job=None):
    cobalt_params = ''
    total_cpu_count = 1
    if jd.name:
        cobalt_params += ('#COBALT --jobname %s\n' % jd.name)
    if jd.working_directory:
        if (not os.path.isabs(jd.working_directory)):
            jd.working_directory = ('$HOME/%s' % jd.working_directory)
        cobalt_params += ('#COBALT --cwd %s\n' % jd.working_directory)
    if jd.output:
        if (jd.working_directory and (not os.path.isabs(jd.output))):
            std_output = ('%s/%s' % (jd.working_directory, jd.output))
        else:
            std_output = jd.output
        cobalt_params += ('#COBALT --output %s\n' % std_output)
    if jd.error:
        if (jd.working_directory and (not os.path.isabs(jd.error))):
            std_error = ('%s/%s' % (jd.working_directory, jd.error))
        else:
            std_error = jd.error
        cobalt_params += ('#COBALT --error %s\n' % std_error)
    if jd.wall_time_limit:
        hours = (jd.wall_time_limit // 60)
        minutes = (jd.wall_time_limit % 60)
        cobalt_params += ('#COBALT --time %s:%s:00\n' % (str(hours).zfill(2), str(minutes).zfill(2)))
    queue = (queue or jd.queue)
    if queue:
        cobalt_params += ('#COBALT --queue %s\n' % queue)
    if jd.project:
        cobalt_params += ('#COBALT --project %s\n' % str(jd.project))
    if jd.job_contact:
        cobalt_params += ('#COBALT --notify %s\n' % str(jd.job_contact))
    if jd.attribute_exists('total_cpu_count'):
        total_cpu_count = jd.total_cpu_count
    if jd.attribute_exists('processes_per_host'):
        ppn = jd.processes_per_host
    # Use integer division so node counts stay integral under Python 3.
    number_of_nodes = (total_cpu_count // ppn)
    if ((total_cpu_count % ppn) > 0):
        number_of_nodes += 1
    if jd.attribute_exists('number_of_processes'):
        number_of_processes = jd.number_of_processes
    else:
        logger.debug('number_of_processes not specified. default: 1 per node')
        number_of_processes = number_of_nodes
    cobalt_params += ('#COBALT --nodecount %d\n' % number_of_nodes)
    cobalt_params += ('#COBALT --proccount %d\n' % number_of_processes)
    cobalt_attrs = jd.system_architecture.get('options', [])
    if cobalt_attrs:
        cobalt_params += ('#COBALT --attrs %s\n' % ':'.join(cobalt_attrs))
    if jd.environment:
        cobalt_params += ('#COBALT --env %s\n' % ':'.join([('%s=%s' % (k, str(v).replace(':', '\\:').replace('=', '\\='))) for (k, v) in jd.environment.items()]))
    cobalt_params += ('#COBALT --env SAGA_PPN=%d\n' % ppn)
    exec_n_args = ''
    if jd.executable:
        exec_n_args += ('%s ' % jd.executable)
    if jd.arguments:
        for arg in jd.arguments:
            exec_n_args += ('%s ' % arg)
    exec_n_args = exec_n_args.replace('$', '\\$').strip()
    cobaltscript = ('\n#!/bin/bash\n%s\n%s\n\n' % (cobalt_params, exec_n_args))
    cobaltscript = cobaltscript.replace('"', '\\"')
    return cobaltscript
class TestPredictor(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        test_path = os.path.dirname(os.path.realpath(__file__))
        src = SourceField()
        trg = TargetField()
        dataset = torchtext.data.TabularDataset(path=os.path.join(test_path, 'data/eng-fra.txt'), format='tsv', fields=[('src', src), ('trg', trg)])
        src.build_vocab(dataset)
        trg.build_vocab(dataset)
        encoder = EncoderRNN(len(src.vocab), 10, 10, rnn_cell='lstm')
        decoder = DecoderRNN(len(trg.vocab), 10, 10, trg.sos_id, trg.eos_id, rnn_cell='lstm')
        seq2seq = Seq2seq(encoder, decoder)
        cls.predictor = Predictor(seq2seq, src.vocab, trg.vocab)

    def test_predict(self):
        src_seq = 'I am fat'
        tgt_seq = self.predictor.predict(src_seq.split(' '))
        for tok in tgt_seq:
            self.assertTrue((tok in self.predictor.tgt_vocab.stoi))
class Solution(object):

    def hIndex(self, citations):
        ls = len(citations)
        papers = ([0] * (ls + 1))
        for c in citations:
            papers[min(ls, c)] += 1
        (k, s) = (ls, papers[ls])
        while (k > s):
            k -= 1
            s += papers[k]
        return k
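# Quick check of the counting-sort h-index: [3, 0, 6, 1, 5] has three papers
# with at least 3 citations each, so h = 3.
assert Solution().hIndex([3, 0, 6, 1, 5]) == 3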
class HandledBoosterList(HandledList):

    def append(self, booster):
        if booster.isInvalid:
            HandledList.append(self, booster)
            self.remove(booster)
            return
        if self.__slotCheck(booster):
            HandledList.append(self, booster)
            self.remove(booster)
            return
        HandledList.append(self, booster)

    def insert(self, idx, booster):
        if booster.isInvalid:
            HandledList.insert(self, idx, booster)
            self.remove(booster)
            return
        if self.__slotCheck(booster):
            HandledList.insert(self, idx, booster)
            self.remove(booster)
            return
        HandledList.insert(self, idx, booster)

    def makeRoom(self, booster):
        oldObj = next((b for b in self if (b.slot == booster.slot)), None)
        if (oldObj is not None):
            pyfalog.info('Slot {0} occupied with {1}, replacing with {2}', booster.slot, oldObj.item.name, booster.item.name)
            position = self.index(oldObj)
            from gui.fitCommands.helpers import BoosterInfo
            boosterInfo = BoosterInfo.fromBooster(oldObj)
            oldObj.itemID = 0
            self.remove(oldObj)
            return (position, boosterInfo)
        return (None, None)

    def __slotCheck(self, booster):
        return any(((b.slot == booster.slot) for b in self))
def make_github_table(reduced_by_error):
    header = '| no. | error | status |'
    sep = '|-:|:-|:-|'
    lines = [header, sep]
    for error in reduced_by_error:
        count = reduced_by_error[error]['count']
        line = f'| {count} | {error[:100]} | |'
        lines.append(line)
    return '\n'.join(lines)
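# Example input/output for make_github_table: keys are error strings and
# each value carries a 'count' (the dict shape is inferred from the code).
reduced = {'TypeError: unhashable type': {'count': 4}}
print(make_github_table(reduced))
# | no. | error | status |
# |-:|:-|:-|
# | 4 | TypeError: unhashable type | |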