""" test_markup ~~~~~~~~~~~ Test various Sphinx-specific markup extensions. :copyright: Copyright 2007-2021 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. """<import_stmt>re<import_stmt>pytest<import_from_stmt>docutils frontend nodes utils<import_from_stmt>docutils.parsers.rst Parser<as>RstParser<import_from_stmt>sphinx addnodes<import_from_stmt>sphinx.builders.html.transforms KeyboardTransform<import_from_stmt>sphinx.builders.latex LaTeXBuilder<import_from_stmt>sphinx.roles XRefRole<import_from_stmt>sphinx.testing.util Struct assert_node<import_from_stmt>sphinx.transforms SphinxSmartQuotes<import_from_stmt>sphinx.util docutils texescape<import_from_stmt>sphinx.util.docutils sphinx_domains<import_from_stmt>sphinx.writers.html HTMLTranslator HTMLWriter<import_from_stmt>sphinx.writers.latex LaTeXTranslator LaTeXWriter<line_sep>@pytest.fixture<def_stmt>settings app<block_start>texescape.init()# otherwise done by the latex builder optparser=frontend.OptionParser(components=(RstParser HTMLWriter LaTeXWriter))<line_sep>settings=optparser.get_default_values()<line_sep>settings.smart_quotes=<true><line_sep>settings.env=app.builder.env<line_sep>settings.env.temp_data['docname']='dummy'<line_sep>settings.contentsname='dummy'<line_sep>settings.rfc_base_url='http://tools.ietf.org/html/'<line_sep>domain_context=sphinx_domains(settings.env)<line_sep>domain_context.enable()<line_sep><yield>settings<line_sep>domain_context.disable()<block_end>@pytest.fixture<def_stmt>new_document settings<block_start><def_stmt>create <block_start>document=utils.new_document('test data' settings)<line_sep>document['file']='dummy'<line_sep><return>document<block_end><return>create<block_end>@pytest.fixture<def_stmt>inliner new_document<block_start>document=new_document()<line_sep>document.reporter.get_source_and_line=<lambda>line=1:('dummy.rst' line)<line_sep><return>Struct(document=document reporter=document.reporter)<block_end>@pytest.fixture<def_stmt>parse new_document<block_start><def_stmt>parse_ rst<block_start>document=new_document()<line_sep>parser=RstParser()<line_sep>parser.parse(rst document)<line_sep>SphinxSmartQuotes(document startnode=<none>).apply()<for_stmt>msg document.traverse(nodes.system_message)<block_start><if_stmt>msg['level']<eq>1<block_start>msg.replace_self([])<block_end><block_end><return>document<block_end><return>parse_<block_end># since we're not resolving the markup afterwards, these nodes may remain <class_stmt>ForgivingTranslator<block_start><def_stmt>visit_pending_xref self node<block_start><pass><block_end><def_stmt>depart_pending_xref self node<block_start><pass><block_end><block_end><class_stmt>ForgivingHTMLTranslator(HTMLTranslator ForgivingTranslator)<block_start><pass><block_end><class_stmt>ForgivingLaTeXTranslator(LaTeXTranslator ForgivingTranslator)<block_start><pass><block_end>@pytest.fixture<def_stmt>verify_re_html app parse<block_start><def_stmt>verify rst html_expected<block_start>document=parse(rst)<line_sep>KeyboardTransform(document).apply()<line_sep>html_translator=ForgivingHTMLTranslator(document app.builder)<line_sep>document.walkabout(html_translator)<line_sep>html_translated=''.join(html_translator.fragment).strip()<assert_stmt>re.match(html_expected html_translated) 'from '+rst<block_end><return>verify<block_end>@pytest.fixture<def_stmt>verify_re_latex app parse<block_start><def_stmt>verify rst 
latex_expected<block_start>document=parse(rst)<line_sep>app.builder=LaTeXBuilder(app)<line_sep>app.builder.set_environment(app.env)<line_sep>app.builder.init()<line_sep>theme=app.builder.themes.get('manual')<line_sep>latex_translator=ForgivingLaTeXTranslator(document app.builder theme)<line_sep>latex_translator.first_document=-1# don't write \begin{document} document.walkabout(latex_translator)<line_sep>latex_translated=''.join(latex_translator.body).strip()<assert_stmt>re.match(latex_expected latex_translated) 'from '+repr(rst)<block_end><return>verify<block_end>@pytest.fixture<def_stmt>verify_re verify_re_html verify_re_latex<block_start><def_stmt>verify_re_ rst html_expected latex_expected<block_start><if_stmt>html_expected<block_start>verify_re_html(rst html_expected)<block_end><if_stmt>latex_expected<block_start>verify_re_latex(rst latex_expected)<block_end><block_end><return>verify_re_<block_end>@pytest.fixture<def_stmt>verify verify_re_html verify_re_latex<block_start><def_stmt>verify_ rst html_expected latex_expected<block_start><if_stmt>html_expected<block_start>verify_re_html(rst re.escape(html_expected)+'$')<block_end><if_stmt>latex_expected<block_start>verify_re_latex(rst re.escape(latex_expected)+'$')<block_end><block_end><return>verify_<block_end>@pytest.fixture<def_stmt>get_verifier verify verify_re<block_start>v={'verify':verify 'verify_re':verify_re }<def_stmt>get name<block_start><return>v[name]<block_end><return>get<block_end>@pytest.mark.parametrize('type,rst,html_expected,latex_expected' [(# pep role 'verify' ':pep:`8`' ('<p><span class="target" id="index-0"></span><a class="pep reference external" '<concat>'href="http://www.python.org/dev/peps/pep-0008"><strong>PEP 8</strong></a></p>') ('\\sphinxAtStartPar\n'<concat>'\\index{Python Enhancement Proposals@\\spxentry{Python Enhancement Proposals}'<concat>'!PEP 8@\\spxentry{PEP 8}}\\sphinxhref{http://www.python.org/dev/peps/pep-0008}'<concat>'{\\sphinxstylestrong{PEP 8}}')) (# pep role with anchor 'verify' ':pep:`8#id1`' ('<p><span class="target" id="index-0"></span><a class="pep reference external" '<concat>'href="http://www.python.org/dev/peps/pep-0008#id1">'<concat>'<strong>PEP 8#id1</strong></a></p>') ('\\sphinxAtStartPar\n'<concat>'\\index{Python Enhancement Proposals@\\spxentry{Python Enhancement Proposals}'<concat>'!PEP 8\\#id1@\\spxentry{PEP 8\\#id1}}\\sphinxhref'<concat>'{http://www.python.org/dev/peps/pep-0008\\#id1}'<concat>'{\\sphinxstylestrong{PEP 8\\#id1}}')) (# rfc role 'verify' ':rfc:`2324`' ('<p><span class="target" id="index-0"></span><a class="rfc reference external" '<concat>'href="http://tools.ietf.org/html/rfc2324.html"><strong>RFC 2324</strong></a></p>') ('\\sphinxAtStartPar\n'<concat>'\\index{RFC@\\spxentry{RFC}!RFC 2324@\\spxentry{RFC 2324}}'<concat>'\\sphinxhref{http://tools.ietf.org/html/rfc2324.html}'<concat>'{\\sphinxstylestrong{RFC 2324}}')) (# rfc role with anchor 'verify' ':rfc:`2324#id1`' ('<p><span class="target" id="index-0"></span><a class="rfc reference external" '<concat>'href="http://tools.ietf.org/html/rfc2324.html#id1">'<concat>'<strong>RFC 2324#id1</strong></a></p>') ('\\sphinxAtStartPar\n'<concat>'\\index{RFC@\\spxentry{RFC}!RFC 2324\\#id1@\\spxentry{RFC 2324\\#id1}}'<concat>'\\sphinxhref{http://tools.ietf.org/html/rfc2324.html\\#id1}'<concat>'{\\sphinxstylestrong{RFC 2324\\#id1}}')) (# correct interpretation of code with whitespace 'verify_re' '``code sample``' ('<p><code class="(samp )?docutils literal notranslate"><span class="pre">'<concat>'code</span>&#160;&#160; <span 
class="pre">sample</span></code></p>') r'\\sphinxAtStartPar\n\\sphinxcode{\\sphinxupquote{code sample}}' ) (# interpolation of arrows in menuselection 'verify' ':menuselection:`a --> b`' ('<p><span class="menuselection">a \N{TRIANGULAR BULLET} b</span></p>') '\\sphinxAtStartPar\n\\sphinxmenuselection{a \\(\\rightarrow\\) b}' ) (# interpolation of ampersands in menuselection 'verify' ':menuselection:`&Foo -&&- &Bar`' ('<p><span class="menuselection"><span class="accelerator">F</span>oo '<concat>'-&amp;- <span class="accelerator">B</span>ar</span></p>') ('\\sphinxAtStartPar\n'<concat>r'\sphinxmenuselection{\sphinxaccelerator{F}oo \sphinxhyphen{}'<concat>r'\&\sphinxhyphen{} \sphinxaccelerator{B}ar}') ) (# interpolation of ampersands in guilabel 'verify' ':guilabel:`&Foo -&&- &Bar`' ('<p><span class="guilabel"><span class="accelerator">F</span>oo '<concat>'-&amp;- <span class="accelerator">B</span>ar</span></p>') ('\\sphinxAtStartPar\n'<concat>r'\sphinxguilabel{\sphinxaccelerator{F}oo \sphinxhyphen{}\&\sphinxhyphen{} \sphinxaccelerator{B}ar}') ) (# no ampersands in guilabel 'verify' ':guilabel:`Foo`' '<p><span class="guilabel">Foo</span></p>' '\\sphinxAtStartPar\n\\sphinxguilabel{Foo}' ) (# kbd role 'verify' ':kbd:`space`' '<p><kbd class="kbd docutils literal notranslate">space</kbd></p>' '\\sphinxAtStartPar\n\\sphinxkeyboard{\\sphinxupquote{space}}' ) (# kbd role 'verify' ':kbd:`Control+X`' ('<p><kbd class="kbd compound docutils literal notranslate">'<concat>'<kbd class="kbd docutils literal notranslate">Control</kbd>'<concat>'+'<concat>'<kbd class="kbd docutils literal notranslate">X</kbd>'<concat>'</kbd></p>') '\\sphinxAtStartPar\n\\sphinxkeyboard{\\sphinxupquote{Control+X}}' ) (# kbd role 'verify' ':kbd:`Alt+^`' ('<p><kbd class="kbd compound docutils literal notranslate">'<concat>'<kbd class="kbd docutils literal notranslate">Alt</kbd>'<concat>'+'<concat>'<kbd class="kbd docutils literal notranslate">^</kbd>'<concat>'</kbd></p>') ('\\sphinxAtStartPar\n'<concat>'\\sphinxkeyboard{\\sphinxupquote{Alt+\\textasciicircum{}}}') ) (# kbd role 'verify' ':kbd:`M-x M-s`' ('<p><kbd class="kbd compound docutils literal notranslate">'<concat>'<kbd class="kbd docutils literal notranslate">M</kbd>'<concat>'-'<concat>'<kbd class="kbd docutils literal notranslate">x</kbd>'<concat>' '<concat>'<kbd class="kbd docutils literal notranslate">M</kbd>'<concat>'-'<concat>'<kbd class="kbd docutils literal notranslate">s</kbd>'<concat>'</kbd></p>') ('\\sphinxAtStartPar\n'<concat>'\\sphinxkeyboard{\\sphinxupquote{M\\sphinxhyphen{}x M\\sphinxhyphen{}s}}') ) (# kbd role 'verify' ':kbd:`-`' '<p><kbd class="kbd docutils literal notranslate">-</kbd></p>' ('\\sphinxAtStartPar\n'<concat>'\\sphinxkeyboard{\\sphinxupquote{\\sphinxhyphen{}}}') ) (# kbd role 'verify' ':kbd:`Caps Lock`' '<p><kbd class="kbd docutils literal notranslate">Caps Lock</kbd></p>' ('\\sphinxAtStartPar\n'<concat>'\\sphinxkeyboard{\\sphinxupquote{Caps Lock}}') ) (# non-interpolation of dashes in option role 'verify_re' ':option:`--with-option`' ('<p><code( class="xref std std-option docutils literal notranslate")?>'<concat>'<span class="pre">--with-option</span></code></p>$') (r'\\sphinxAtStartPar\n'<concat>r'\\sphinxcode{\\sphinxupquote{\\sphinxhyphen{}\\sphinxhyphen{}with\\sphinxhyphen{}option}}$') ) (# verify smarty-pants quotes 'verify' '"John"' '<p>“John”</p>' "\\sphinxAtStartPar\n“John”" ) (# ... 
but not in literal text 'verify' '``"John"``' ('<p><code class="docutils literal notranslate"><span class="pre">'<concat>'&quot;John&quot;</span></code></p>') '\\sphinxAtStartPar\n\\sphinxcode{\\sphinxupquote{"John"}}' ) (# verify classes for inline roles 'verify' ':manpage:`mp(1)`' '<p><em class="manpage">mp(1)</em></p>' '\\sphinxAtStartPar\n\\sphinxstyleliteralemphasis{\\sphinxupquote{mp(1)}}' ) (# correct escaping in normal mode 'verify' 'Γ\\\\∞$' <none> '\\sphinxAtStartPar\nΓ\\textbackslash{}\\(\\infty\\)\\$' ) (# in verbatim code fragments 'verify' '::\n\n @Γ\\∞${}' <none> ('\\begin{sphinxVerbatim}[commandchars=\\\\\\{\\}]\n'<concat>'@Γ\\PYGZbs{}\\(\\infty\\)\\PYGZdl{}\\PYGZob{}\\PYGZcb{}\n'<concat>'\\end{sphinxVerbatim}') ) (# in URIs 'verify_re' '`test <https://www.google.com/~me/>`_' <none> r'\\sphinxAtStartPar\n\\sphinxhref{https://www.google.com/~me/}{test}.*' ) (# description list: simple 'verify' 'term\n description' '<dl class="docutils">\n<dt>term</dt><dd>description</dd>\n</dl>' <none> ) (# description list: with classifiers 'verify' 'term : class1 : class2\n description' ('<dl class="docutils">\n<dt>term<span class="classifier">class1</span>'<concat>'<span class="classifier">class2</span></dt><dd>description</dd>\n</dl>') <none> ) (# glossary (description list): multiple terms 'verify' '.. glossary::\n\n term1\n term2\n description' ('<dl class="glossary docutils">\n'<concat>'<dt id="term-term1">term1<a class="headerlink" href="#term-term1"'<concat>' title="Permalink to this term">¶</a></dt>'<concat>'<dt id="term-term2">term2<a class="headerlink" href="#term-term2"'<concat>' title="Permalink to this term">¶</a></dt>'<concat>'<dd>description</dd>\n</dl>') <none> ) ])<def_stmt>test_inline get_verifier type rst html_expected latex_expected<block_start>verifier=get_verifier(type)<line_sep>verifier(rst html_expected latex_expected)<block_end>@pytest.mark.parametrize('type,rst,html_expected,latex_expected' [('verify' r'4 backslashes \\\\' r'<p>4 backslashes \\</p>' <none> ) ])@pytest.mark.skipif(docutils.__version_info__<l>(0 16) reason='docutils-0.16 or above is required')<def_stmt>test_inline_docutils16 get_verifier type rst html_expected latex_expected<block_start>verifier=get_verifier(type)<line_sep>verifier(rst html_expected latex_expected)<block_end>@pytest.mark.sphinx(confoverrides={'latex_engine':'xelatex'})@pytest.mark.parametrize('type,rst,html_expected,latex_expected' [(# in verbatim code fragments 'verify' '::\n\n @Γ\\∞${}' <none> ('\\begin{sphinxVerbatim}[commandchars=\\\\\\{\\}]\n'<concat>'@Γ\\PYGZbs{}∞\\PYGZdl{}\\PYGZob{}\\PYGZcb{}\n'<concat>'\\end{sphinxVerbatim}') ) ])<def_stmt>test_inline_for_unicode_latex_engine get_verifier type rst html_expected latex_expected<block_start>verifier=get_verifier(type)<line_sep>verifier(rst html_expected latex_expected)<block_end><def_stmt>test_samp_role parse# no braces <block_start>text=':samp:`a{b}c`'<line_sep>doctree=parse(text)<line_sep>assert_node(doctree[0] [nodes.paragraph nodes.literal ("a" [nodes.emphasis "b"] "c")])<line_sep># nested braces text=':samp:`a{{b}}c`'<line_sep>doctree=parse(text)<line_sep>assert_node(doctree[0] [nodes.paragraph nodes.literal ("a" [nodes.emphasis "{b"] "}c")])<line_sep># half-opened braces text=':samp:`a{bc`'<line_sep>doctree=parse(text)<line_sep>assert_node(doctree[0] [nodes.paragraph nodes.literal "a{bc"])<line_sep># escaped braces text=':samp:`a\\\\{b}c`'<line_sep>doctree=parse(text)<line_sep>assert_node(doctree[0] [nodes.paragraph nodes.literal "a{b}c"])<line_sep># no braces 
(whitespaces are keeped as is) text=':samp:`code sample`'<line_sep>doctree=parse(text)<line_sep>assert_node(doctree[0] [nodes.paragraph nodes.literal "code sample"])<block_end><def_stmt>test_download_role parse# implicit <block_start>text=':download:`sphinx.rst`'<line_sep>doctree=parse(text)<line_sep>assert_node(doctree[0] [nodes.paragraph addnodes.download_reference nodes.literal "sphinx.rst"])<line_sep>assert_node(doctree[0][0] refdoc='dummy' refdomain='' reftype='download' refexplicit=<false> reftarget='sphinx.rst' refwarn=<false>)<line_sep>assert_node(doctree[0][0][0] classes=['xref' 'download'])<line_sep># explicit text=':download:`reftitle <sphinx.rst>`'<line_sep>doctree=parse(text)<line_sep>assert_node(doctree[0] [nodes.paragraph addnodes.download_reference nodes.literal "reftitle"])<line_sep>assert_node(doctree[0][0] refdoc='dummy' refdomain='' reftype='download' refexplicit=<true> reftarget='sphinx.rst' refwarn=<false>)<line_sep>assert_node(doctree[0][0][0] classes=['xref' 'download'])<block_end><def_stmt>test_XRefRole inliner<block_start>role=XRefRole()<line_sep># implicit doctrees,errors=role('ref' 'rawtext' 'text' 5 inliner {} [])<assert_stmt>len(doctrees)<eq>1<line_sep>assert_node(doctrees[0] [addnodes.pending_xref nodes.literal 'text'])<line_sep>assert_node(doctrees[0] refdoc='dummy' refdomain='' reftype='ref' reftarget='text' refexplicit=<false> refwarn=<false>)<assert_stmt>errors<eq>[]<line_sep># explicit doctrees,errors=role('ref' 'rawtext' 'title <target>' 5 inliner {} [])<line_sep>assert_node(doctrees[0] [addnodes.pending_xref nodes.literal 'title'])<line_sep>assert_node(doctrees[0] refdoc='dummy' refdomain='' reftype='ref' reftarget='target' refexplicit=<true> refwarn=<false>)<line_sep># bang doctrees,errors=role('ref' 'rawtext' '!title <target>' 5 inliner {} [])<line_sep>assert_node(doctrees[0] [nodes.literal 'title <target>'])<line_sep># refdomain doctrees,errors=role('test:doc' 'rawtext' 'text' 5 inliner {} [])<line_sep>assert_node(doctrees[0] [addnodes.pending_xref nodes.literal 'text'])<line_sep>assert_node(doctrees[0] refdoc='dummy' refdomain='test' reftype='doc' reftarget='text' refexplicit=<false> refwarn=<false>)<line_sep># fix_parens role=XRefRole(fix_parens=<true>)<line_sep>doctrees,errors=role('ref' 'rawtext' 'text()' 5 inliner {} [])<line_sep>assert_node(doctrees[0] [addnodes.pending_xref nodes.literal 'text()'])<line_sep>assert_node(doctrees[0] refdoc='dummy' refdomain='' reftype='ref' reftarget='text' refexplicit=<false> refwarn=<false>)<line_sep># lowercase role=XRefRole(lowercase=<true>)<line_sep>doctrees,errors=role('ref' 'rawtext' 'TEXT' 5 inliner {} [])<line_sep>assert_node(doctrees[0] [addnodes.pending_xref nodes.literal 'TEXT'])<line_sep>assert_node(doctrees[0] refdoc='dummy' refdomain='' reftype='ref' reftarget='text' refexplicit=<false> refwarn=<false>)<block_end>@pytest.mark.sphinx('dummy' testroot='prolog')<def_stmt>test_rst_prolog app status warning<block_start>app.builder.build_all()<line_sep>rst=app.env.get_doctree('restructuredtext')<line_sep>md=app.env.get_doctree('markdown')<line_sep># rst_prolog assert_node(rst[0] nodes.paragraph)<line_sep>assert_node(rst[0][0] nodes.emphasis)<line_sep>assert_node(rst[0][0][0] nodes.Text)<assert_stmt>rst[0][0][0]<eq>'Hello world'<line_sep># rst_epilog assert_node(rst[-1] nodes.section)<line_sep>assert_node(rst[-1][-1] nodes.paragraph)<line_sep>assert_node(rst[-1][-1][0] nodes.emphasis)<line_sep>assert_node(rst[-1][-1][0][0] nodes.Text)<assert_stmt>rst[-1][-1][0][0]<eq>'Good-bye world'<line_sep># 
rst_prolog & rst_epilog on exlucding reST parser <assert_stmt><not>md.rawsource.startswith('*Hello world*.')<assert_stmt><not>md.rawsource.endswith('*Good-bye world*.\n')<block_end>@pytest.mark.sphinx('dummy' testroot='keep_warnings')<def_stmt>test_keep_warnings_is_True app status warning<block_start>app.builder.build_all()<line_sep>doctree=app.env.get_doctree('index')<line_sep>assert_node(doctree[0] nodes.section)<assert_stmt>len(doctree[0])<eq>2<line_sep>assert_node(doctree[0][1] nodes.system_message)<block_end>@pytest.mark.sphinx('dummy' testroot='keep_warnings' confoverrides={'keep_warnings':<false>})<def_stmt>test_keep_warnings_is_False app status warning<block_start>app.builder.build_all()<line_sep>doctree=app.env.get_doctree('index')<line_sep>assert_node(doctree[0] nodes.section)<assert_stmt>len(doctree[0])<eq>1<block_end>@pytest.mark.sphinx('dummy' testroot='refonly_bullet_list')<def_stmt>test_compact_refonly_bullet_list app status warning<block_start>app.builder.build_all()<line_sep>doctree=app.env.get_doctree('index')<line_sep>assert_node(doctree[0] nodes.section)<assert_stmt>len(doctree[0])<eq>5<assert_stmt>doctree[0][1].astext()<eq>'List A:'<line_sep>assert_node(doctree[0][2] nodes.bullet_list)<line_sep>assert_node(doctree[0][2][0][0] addnodes.compact_paragraph)<assert_stmt>doctree[0][2][0][0].astext()<eq>'genindex'<assert_stmt>doctree[0][3].astext()<eq>'List B:'<line_sep>assert_node(doctree[0][4] nodes.bullet_list)<line_sep>assert_node(doctree[0][4][0][0] nodes.paragraph)<assert_stmt>doctree[0][4][0][0].astext()<eq>'Hello'<block_end>@pytest.mark.sphinx('dummy' testroot='default_role')<def_stmt>test_default_role1 app status warning<block_start>app.builder.build_all()<line_sep># default-role: pep doctree=app.env.get_doctree('index')<line_sep>assert_node(doctree[0] nodes.section)<line_sep>assert_node(doctree[0][1] nodes.paragraph)<line_sep>assert_node(doctree[0][1][0] addnodes.index)<line_sep>assert_node(doctree[0][1][1] nodes.target)<line_sep>assert_node(doctree[0][1][2] nodes.reference classes=["pep"])<line_sep># no default-role doctree=app.env.get_doctree('foo')<line_sep>assert_node(doctree[0] nodes.section)<line_sep>assert_node(doctree[0][1] nodes.paragraph)<line_sep>assert_node(doctree[0][1][0] nodes.title_reference)<line_sep>assert_node(doctree[0][1][1] nodes.Text)<block_end>@pytest.mark.sphinx('dummy' testroot='default_role' confoverrides={'default_role':'guilabel'})<def_stmt>test_default_role2 app status warning<block_start>app.builder.build_all()<line_sep># default-role directive is stronger than configratuion doctree=app.env.get_doctree('index')<line_sep>assert_node(doctree[0] nodes.section)<line_sep>assert_node(doctree[0][1] nodes.paragraph)<line_sep>assert_node(doctree[0][1][0] addnodes.index)<line_sep>assert_node(doctree[0][1][1] nodes.target)<line_sep>assert_node(doctree[0][1][2] nodes.reference classes=["pep"])<line_sep># default_role changes the default behavior doctree=app.env.get_doctree('foo')<line_sep>assert_node(doctree[0] nodes.section)<line_sep>assert_node(doctree[0][1] nodes.paragraph)<line_sep>assert_node(doctree[0][1][0] nodes.inline classes=["guilabel"])<line_sep>assert_node(doctree[0][1][1] nodes.Text)<block_end>
# sql/default_comparator.py # Copyright (C) 2005-2018 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Default implementation of SQL comparison operations. """<import_from_stmt>.. exc util<import_from_stmt>. type_api<import_from_stmt>. operators<import_from_stmt>.elements BindParameter True_ False_ BinaryExpression Null _const_expr _clause_element_as_expr ClauseList ColumnElement TextClause UnaryExpression collate _is_literal _literal_as_text ClauseElement and_ or_ Slice Visitable _literal_as_binds CollectionAggregate<import_from_stmt>.selectable SelectBase Alias Selectable ScalarSelect<def_stmt>_boolean_compare expr op obj negate=<none> reverse=<false> _python_is_types=(util.NoneType bool) result_type=<none> **kwargs<block_start><if_stmt>result_type<is><none><block_start>result_type=type_api.BOOLEANTYPE<block_end><if_stmt>isinstance(obj _python_is_types+(Null True_ False_))# allow x ==/!= True/False to be treated as a literal. # this comes out to "== / != true/false" or "1/0" if those # constants aren't supported and works on all platforms <block_start><if_stmt>op<in>(operators.eq operators.ne)<and>isinstance(obj (bool True_ False_))<block_start><return>BinaryExpression(expr _literal_as_text(obj) op type_=result_type negate=negate modifiers=kwargs)<block_end><elif_stmt>op<in>(operators.is_distinct_from operators.isnot_distinct_from)<block_start><return>BinaryExpression(expr _literal_as_text(obj) op type_=result_type negate=negate modifiers=kwargs)<block_end><else_stmt># all other None/True/False uses IS, IS NOT <block_start><if_stmt>op<in>(operators.eq operators.is_)<block_start><return>BinaryExpression(expr _const_expr(obj) operators.is_ negate=operators.isnot type_=result_type)<block_end><elif_stmt>op<in>(operators.ne operators.isnot)<block_start><return>BinaryExpression(expr _const_expr(obj) operators.isnot negate=operators.is_ type_=result_type)<block_end><else_stmt><block_start><raise>exc.ArgumentError("Only '=', '!=', 'is_()', 'isnot()', "<concat>"'is_distinct_from()', 'isnot_distinct_from()' "<concat>"operators can be used with None/True/False")<block_end><block_end><block_end><else_stmt><block_start>obj=_check_literal(expr op obj)<block_end><if_stmt>reverse<block_start><return>BinaryExpression(obj expr op type_=result_type negate=negate modifiers=kwargs)<block_end><else_stmt><block_start><return>BinaryExpression(expr obj op type_=result_type negate=negate modifiers=kwargs)<block_end><block_end><def_stmt>_custom_op_operate expr op obj reverse=<false> result_type=<none> **kw<block_start><if_stmt>result_type<is><none><block_start><if_stmt>op.return_type<block_start>result_type=op.return_type<block_end><elif_stmt>op.is_comparison<block_start>result_type=type_api.BOOLEANTYPE<block_end><block_end><return>_binary_operate(expr op obj reverse=reverse result_type=result_type **kw)<block_end><def_stmt>_binary_operate expr op obj reverse=<false> result_type=<none> **kw<block_start>obj=_check_literal(expr op obj)<if_stmt>reverse<block_start>left,right=obj expr<block_end><else_stmt><block_start>left,right=expr obj<block_end><if_stmt>result_type<is><none><block_start>op,result_type=left.comparator._adapt_expression(op right.comparator)<block_end><return>BinaryExpression(left right op type_=result_type modifiers=kw)<block_end><def_stmt>_conjunction_operate expr op other **kw<block_start><if_stmt>op<is>operators.and_<block_start><return>and_(expr 
other)<block_end><elif_stmt>op<is>operators.or_<block_start><return>or_(expr other)<block_end><else_stmt><block_start><raise>NotImplementedError()<block_end><block_end><def_stmt>_scalar expr op fn **kw<block_start><return>fn(expr)<block_end><def_stmt>_in_impl expr op seq_or_selectable negate_op **kw<block_start>seq_or_selectable=_clause_element_as_expr(seq_or_selectable)<if_stmt>isinstance(seq_or_selectable ScalarSelect)<block_start><return>_boolean_compare(expr op seq_or_selectable negate=negate_op)<block_end><elif_stmt>isinstance(seq_or_selectable SelectBase)# TODO: if we ever want to support (x, y, z) IN (select x, # y, z from table), we would need a multi-column version of # as_scalar() to produce a multi- column selectable that # does not export itself as a FROM clause <block_start><return>_boolean_compare(expr op seq_or_selectable.as_scalar() negate=negate_op **kw)<block_end><elif_stmt>isinstance(seq_or_selectable (Selectable TextClause))<block_start><return>_boolean_compare(expr op seq_or_selectable negate=negate_op **kw)<block_end><elif_stmt>isinstance(seq_or_selectable ClauseElement)<block_start><if_stmt>isinstance(seq_or_selectable BindParameter)<and>seq_or_selectable.expanding<block_start><return>_boolean_compare(expr op seq_or_selectable negate=negate_op)<block_end><else_stmt><block_start><raise>exc.InvalidRequestError('in_() accepts'<concat>' either a list of expressions, '<concat>'a selectable, or an "expanding" bound parameter: %r'%seq_or_selectable)<block_end><block_end># Handle non selectable arguments as sequences args=[]<for_stmt>o seq_or_selectable<block_start><if_stmt><not>_is_literal(o)<block_start><if_stmt><not>isinstance(o operators.ColumnOperators)<block_start><raise>exc.InvalidRequestError('in_() accepts'<concat>' either a list of expressions, '<concat>'a selectable, or an "expanding" bound parameter: %r'%o)<block_end><block_end><elif_stmt>o<is><none><block_start>o=Null()<block_end><else_stmt><block_start>o=expr._bind_param(op o)<block_end>args.append(o)<block_end><if_stmt>len(args)<eq>0<block_start>op,negate_op=(operators.empty_in_op operators.empty_notin_op)<if>op<is>operators.in_op<else>(operators.empty_notin_op operators.empty_in_op)<block_end><return>_boolean_compare(expr op ClauseList(*args).self_group(against=op) negate=negate_op)<block_end><def_stmt>_getitem_impl expr op other **kw<block_start><if_stmt>isinstance(expr.type type_api.INDEXABLE)<block_start>other=_check_literal(expr op other)<line_sep><return>_binary_operate(expr op other **kw)<block_end><else_stmt><block_start>_unsupported_impl(expr op other **kw)<block_end><block_end><def_stmt>_unsupported_impl expr op *arg **kw<block_start><raise>NotImplementedError("Operator '%s' is not supported on "<concat>"this expression"%op.__name__)<block_end><def_stmt>_inv_impl expr op **kw<block_start>"""See :meth:`.ColumnOperators.__inv__`."""<if_stmt>hasattr(expr 'negation_clause')<block_start><return>expr.negation_clause<block_end><else_stmt><block_start><return>expr._negate()<block_end><block_end><def_stmt>_neg_impl expr op **kw<block_start>"""See :meth:`.ColumnOperators.__neg__`."""<line_sep><return>UnaryExpression(expr operator=operators.neg type_=expr.type)<block_end><def_stmt>_match_impl expr op other **kw<block_start>"""See :meth:`.ColumnOperators.match`."""<line_sep><return>_boolean_compare(expr operators.match_op _check_literal(expr operators.match_op other) result_type=type_api.MATCHTYPE negate=operators.notmatch_op<if>op<is>operators.match_op<else>operators.match_op 
**kw)<block_end><def_stmt>_distinct_impl expr op **kw<block_start>"""See :meth:`.ColumnOperators.distinct`."""<line_sep><return>UnaryExpression(expr operator=operators.distinct_op type_=expr.type)<block_end><def_stmt>_between_impl expr op cleft cright **kw<block_start>"""See :meth:`.ColumnOperators.between`."""<line_sep><return>BinaryExpression(expr ClauseList(_check_literal(expr operators.and_ cleft) _check_literal(expr operators.and_ cright) operator=operators.and_ group=<false> group_contents=<false>) op negate=operators.notbetween_op<if>op<is>operators.between_op<else>operators.between_op modifiers=kw)<block_end><def_stmt>_collate_impl expr op other **kw<block_start><return>collate(expr other)<block_end># a mapping of operators with the method they use, along with # their negated operator for comparison operators operator_lookup={"and_":(_conjunction_operate ) "or_":(_conjunction_operate ) "inv":(_inv_impl ) "add":(_binary_operate ) "mul":(_binary_operate ) "sub":(_binary_operate ) "div":(_binary_operate ) "mod":(_binary_operate ) "truediv":(_binary_operate ) "custom_op":(_custom_op_operate ) "json_path_getitem_op":(_binary_operate ) "json_getitem_op":(_binary_operate ) "concat_op":(_binary_operate ) "any_op":(_scalar CollectionAggregate._create_any) "all_op":(_scalar CollectionAggregate._create_all) "lt":(_boolean_compare operators.ge) "le":(_boolean_compare operators.gt) "ne":(_boolean_compare operators.eq) "gt":(_boolean_compare operators.le) "ge":(_boolean_compare operators.lt) "eq":(_boolean_compare operators.ne) "is_distinct_from":(_boolean_compare operators.isnot_distinct_from) "isnot_distinct_from":(_boolean_compare operators.is_distinct_from) "like_op":(_boolean_compare operators.notlike_op) "ilike_op":(_boolean_compare operators.notilike_op) "notlike_op":(_boolean_compare operators.like_op) "notilike_op":(_boolean_compare operators.ilike_op) "contains_op":(_boolean_compare operators.notcontains_op) "startswith_op":(_boolean_compare operators.notstartswith_op) "endswith_op":(_boolean_compare operators.notendswith_op) "desc_op":(_scalar UnaryExpression._create_desc) "asc_op":(_scalar UnaryExpression._create_asc) "nullsfirst_op":(_scalar UnaryExpression._create_nullsfirst) "nullslast_op":(_scalar UnaryExpression._create_nullslast) "in_op":(_in_impl operators.notin_op) "notin_op":(_in_impl operators.in_op) "is_":(_boolean_compare operators.is_) "isnot":(_boolean_compare operators.isnot) "collate":(_collate_impl ) "match_op":(_match_impl ) "notmatch_op":(_match_impl ) "distinct_op":(_distinct_impl ) "between_op":(_between_impl ) "notbetween_op":(_between_impl ) "neg":(_neg_impl ) "getitem":(_getitem_impl ) "lshift":(_unsupported_impl ) "rshift":(_unsupported_impl ) "contains":(_unsupported_impl ) }<def_stmt>_check_literal expr operator other bindparam_type=<none><block_start><if_stmt>isinstance(other (ColumnElement TextClause))<block_start><if_stmt>isinstance(other BindParameter)<and>other.type._isnull<block_start>other=other._clone()<line_sep>other.type=expr.type<block_end><return>other<block_end><elif_stmt>hasattr(other '__clause_element__')<block_start>other=other.__clause_element__()<block_end><elif_stmt>isinstance(other type_api.TypeEngine.Comparator)<block_start>other=other.expr<block_end><if_stmt>isinstance(other (SelectBase Alias))<block_start><return>other.as_scalar()<block_end><elif_stmt><not>isinstance(other Visitable)<block_start><return>expr._bind_param(operator other type_=bindparam_type)<block_end><else_stmt><block_start><return>other<block_end><block_end>
# The MIT License (MIT)
#
# Copyright (c) 2020 <NAME> for Adafruit Industries LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Make a key (button) repeat when held down
"""

import time


class KeyRepeat:
    """Track the state of a button and, while it is held, output a press every
    'rate' seconds."""

    def __init__(self, getter, rate=0.5):
        self.getter = getter
        self.rate_ns = round(rate * 1e9)
        self.next = -1

    @property
    def value(self):
        """True when a button is first pressed, or once every 'rate' seconds
        thereafter."""
        state = self.getter()
        if not state:
            self.next = -1
            return False
        now = time.monotonic_ns()
        if state and now > self.next:
            self.next = now + self.rate_ns
            return True
        return False
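
# --- Hedged usage sketch (not part of the original module) ---
# A minimal illustration of how KeyRepeat might be polled; `read_button` is a
# hypothetical stand-in for any zero-argument callable that returns True while
# the key is held (e.g. a digital pin read). The rate and sleep values are
# illustrative only.

def read_button():
    return False  # placeholder for a real hardware input


repeater = KeyRepeat(read_button, rate=0.25)
while True:
    if repeater.value:
        # Fires once on the initial press, then every 0.25 s while held.
        print("key repeat event")
    time.sleep(0.01)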
import pytest
from pathlib import Path

from blendtorch import btt

BLENDDIR = Path(__file__).parent / 'blender'


class MyEnv(btt.env.OpenAIRemoteEnv):
    def __init__(self, background=True, **kwargs):
        super().__init__(version='1.0.0')
        self.launch(scene=BLENDDIR / 'env.blend', script=BLENDDIR / 'env.blend.py',
                    background=background, **kwargs)
        # For Blender 2.9, if we pass scene='', the tests below fail since
        # _env_post_step() is not called. It's currently unclear why this happens.


def _run_remote_env(background):
    env = MyEnv(background=background)

    obs = env.reset()
    assert obs == 0.
    obs, reward, done, info = env.step(0.1)
    assert obs == pytest.approx(0.1)
    assert reward == 0.
    assert not done
    assert info['count'] == 2  # 1 is already set by reset()
    obs, reward, done, info = env.step(0.6)
    assert obs == pytest.approx(0.6)
    assert reward == 1.
    assert not done
    assert info['count'] == 3
    for _ in range(8):
        obs, reward, done, info = env.step(0.6)
    assert done

    obs = env.reset()
    assert obs == 0.
    obs, reward, done, info = env.step(0.1)
    assert obs == pytest.approx(0.1)
    assert reward == 0.
    assert not done
    assert info['count'] == 2

    env.close()


@pytest.mark.background
def test_remote_env():
    _run_remote_env(background=True)


def test_remote_env_ui():
    _run_remote_env(background=False)
import torch
import torch.nn as nn
import numpy as np
import torch.nn.functional as F
from pytorch_ares.attack_torch.utils import loss_adv


class MIM(object):
    '''Momentum Iterative Method (MIM) attack.'''

    def __init__(self, net, epsilon, p, stepsize, steps, decay_factor, data_name, target, loss, device):
        self.epsilon = epsilon
        self.p = p
        self.net = net
        self.decay_factor = decay_factor
        self.stepsize = stepsize
        self.target = target
        self.steps = steps
        self.loss = loss
        self.data_name = data_name
        self.device = device
        if self.data_name == "cifar10" and self.target:
            raise AssertionError('cifar10 does not support targeted attack')

    def forward(self, image, label, target_labels):
        image, label = image.to(self.device), label.to(self.device)
        if target_labels is not None:
            target_labels = target_labels.to(self.device)
        batchsize = image.shape[0]
        advimage = image
        momentum = torch.zeros_like(image).detach()
        # momentum iterative updates to get the adversarial example
        for i in range(self.steps):
            # clone the advimage as the next iteration input
            advimage = advimage.clone().detach().requires_grad_(True)
            netOut = self.net(advimage)

            loss = loss_adv(self.loss, netOut, label, target_labels, self.target, self.device)
            grad = torch.autograd.grad(loss, [advimage])[0].detach()
            grad_norm = torch.norm(nn.Flatten()(grad), p=1, dim=1)
            grad = grad / grad_norm.view([-1] + [1] * (len(grad.shape) - 1))
            grad = grad + momentum * self.decay_factor
            momentum = grad

            if self.p == np.inf:
                updates = grad.sign()
            else:
                normVal = torch.norm(grad.view(batchsize, -1), self.p, 1)
                updates = grad / normVal.view(batchsize, 1, 1, 1)
            updates = updates * self.stepsize
            advimage = advimage + updates

            # project the disturbed image onto the feasible set if needed
            delta = advimage - image
            if self.p == np.inf:
                delta = torch.clamp(delta, -self.epsilon, self.epsilon)
            else:
                normVal = torch.norm(delta.view(batchsize, -1), self.p, 1)
                mask = normVal <= self.epsilon
                scaling = self.epsilon / normVal
                scaling[mask] = 1
                delta = delta * scaling.view(batchsize, 1, 1, 1)
            advimage = image + delta

            advimage = torch.clamp(advimage, 0, 1)  # cifar10 (-1, 1)

        return advimage
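
# --- Hedged usage sketch (not part of the original module) ---
# Illustrates one untargeted attack call on a throwaway linear "classifier" and a
# random batch, just to show the constructor/forward shapes. The hyper-parameter
# values and the 'ce' loss name are assumptions, not values taken from pytorch_ares.

if __name__ == '__main__':
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    model = nn.Sequential(nn.Flatten(), nn.Linear(3 * 32 * 32, 10)).to(device)
    images = torch.rand(4, 3, 32, 32, device=device)   # batch of inputs in [0, 1]
    labels = torch.randint(0, 10, (4,), device=device)

    attack = MIM(net=model, epsilon=8 / 255, p=np.inf, stepsize=2 / 255, steps=10,
                 decay_factor=1.0, data_name='imagenet', target=False, loss='ce',
                 device=device)
    adv_images = attack.forward(images, labels, target_labels=None)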
'''
The randomization defense method, which applies random rescaling and padding to the input.
'''
import tensorflow as tf

from ares.defense.input_transformation import input_transformation


def randomize(xs, scale_min=0.875, pad_value=0.0):
    ''' Apply random rescaling and padding to xs.

    :param xs: A batch of inputs for some classifier.
    :param scale_min: The random rescaling rate would be chosen between ``scale_min`` and 1.0.
    :param pad_value: ``constant_values`` parameter for the ``tf.pad`` method.
    :return: A new tensor with same shape and dtype as xs.
    '''
    ratio = tf.random.uniform((), minval=scale_min, maxval=1.0)
    height, width = tf.cast(xs.shape[1].value * ratio, tf.int32), tf.cast(xs.shape[2].value * ratio, tf.int32)
    xs_rescaled = tf.image.resize(xs, (height, width), method=tf.image.ResizeMethod.NEAREST_NEIGHBOR,
                                  align_corners=True, preserve_aspect_ratio=False)
    height_rem, width_rem = xs.shape[1].value - height, xs.shape[2].value - width
    pad_left = tf.random_uniform((), 0, width_rem, dtype=tf.int32)
    pad_right = width_rem - pad_left
    pad_top = tf.random_uniform((), 0, height_rem, dtype=tf.int32)
    pad_bottom = height_rem - pad_top
    xs_padded = tf.pad(xs_rescaled,
                       [[0, 0], [pad_top, pad_bottom], [pad_left, pad_right], [0, 0]],
                       constant_values=pad_value)
    xs_padded.set_shape(xs.shape)
    return xs_padded


def randomization(scale_min=0.875, pad_value=0.0):
    ''' A decorator to apply random rescaling and padding to the input of the classifier.

    :param scale_min: The random rescaling rate would be chosen between ``scale_min`` and 1.0.
    :param pad_value: ``constant_values`` parameter for the ``tf.pad`` method.
    '''
    def args_fn(_):
        return (scale_min, pad_value)

    def kwargs_fn(_):
        return {}

    return lambda rs_class: input_transformation(rs_class, randomize, args_fn, kwargs_fn)
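
# --- Hedged usage sketch (not part of the original module) ---
# `randomize` can be exercised directly on a TF1-style placeholder (the code above
# relies on TF1 APIs such as `shape[i].value` and `tf.random_uniform`); the input
# shape here is only an example. The `randomization(...)` decorator form would be
# placed on an ares classifier class instead.

xs = tf.placeholder(tf.float32, shape=(None, 224, 224, 3))
xs_randomized = randomize(xs, scale_min=0.875, pad_value=0.0)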
################################################################################ # Copyright (c) 2015-2018 Skymind, Inc. # # This program and the accompanying materials are made available under the # terms of the Apache License, Version 2.0 which is available at # https://www.apache.org/licenses/LICENSE-2.0. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # SPDX-License-Identifier: Apache-2.0 ################################################################################ <import_from_stmt>.java_classes *<import_stmt>numpy<as>np<import_stmt>ctypes<import_stmt>warnings<line_sep>native_ops=NativeOpsHolder.getInstance().getDeviceNativeOps()<line_sep># DATA TYPE MANAGEMENT DOUBLE=DataType.DOUBLE<line_sep>FLOAT=DataType.FLOAT<line_sep>HALF=DataType.HALF<line_sep>LONG=DataType.LONG<line_sep>INT=DataType.INT<line_sep>SHORT=DataType.SHORT<line_sep>UBYTE=DataType.UBYTE<line_sep>BYTE=DataType.BYTE<line_sep>BOOL=DataType.BOOL<line_sep>UTF8=DataType.UTF8<line_sep>COMPRESSED=DataType.COMPRESSED<line_sep>UNKNOWN=DataType.UNKNOWN<line_sep>SUPPORTED_JAVA_DTYPES=[DOUBLE FLOAT HALF LONG INT SHORT BOOL#UTF8 ]<line_sep>SUPPORTED_PYTHON_DTYPES=[np.float64 np.float32 np.float16 np.int64 np.int32 np.int16 np.bool_#np.str_ ]<line_sep>_PY2J={SUPPORTED_PYTHON_DTYPES[i]:SUPPORTED_JAVA_DTYPES[i]<for>i range(len(SUPPORTED_JAVA_DTYPES))}<line_sep>_J2PY={SUPPORTED_JAVA_DTYPES[i]:SUPPORTED_PYTHON_DTYPES[i]<for>i range(len(SUPPORTED_JAVA_DTYPES))}<def_stmt>_dtype_py2j dtype<block_start><if_stmt>isinstance(dtype str)<block_start>dtype=np.dtype(dtype).type<block_end><elif_stmt>isinstance(dtype np.dtype)<block_start>dtype=dtype.type<block_end>jtype=_PY2J.get(dtype)<if_stmt>jtype<is><none><block_start><raise>NotImplementedError("Unsupported type: "+dtype.name)<block_end><return>jtype<block_end><def_stmt>_dtype_j2py dtype<block_start>pytype=_J2PY.get(dtype)<if_stmt>pytype<is><none><block_start><raise>NotImplementedError("Unsupported type: "+(str(dtype)))<block_end><return>pytype<block_end><def_stmt>set_context_dtype dtype<block_start>''' Sets the dtype for nd4j # Arguments dtype: 'float' or 'double' '''<line_sep>dtype_map={'float32':'float' 'float64':'double'}<line_sep>dtype=dtype_map.get(dtype dtype)<if_stmt>dtype<not><in>['float' 'double']<block_start><raise>ValueError("Invalid dtype '{}'. Available dtypes are 'float' and 'double'.".format(dtype))<block_end>dtype_=DataTypeUtil.getDtypeFromContext(dtype)<line_sep>DataTypeUtil.setDTypeForContext(dtype_)<if_stmt>get_context_dtype()<ne>dtype<block_start>warnings.warn("Can not set context dtype now. 
Set it at the beginning of your program.")<block_end><block_end><def_stmt>get_context_dtype <block_start>''' Returns the nd4j dtype '''<line_sep>dtype=DataTypeUtil.getDtypeFromContext()<line_sep><return>DataTypeUtil.getDTypeForName(dtype)<block_end>_refs=[]<def_stmt>_from_numpy np_array<block_start>''' Convert numpy array to nd4j array '''<line_sep>pointer_address,_=np_array.__array_interface__['data']<line_sep>_refs.append(np_array)<line_sep>pointer=native_ops.pointerForAddress(pointer_address)<line_sep>size=np_array.size<line_sep>pointer.limit(size)<line_sep>jdtype=_dtype_py2j(np_array.dtype)<line_sep>''' mapping = { DOUBLE: DoublePointer, FLOAT: FloatPointer, HALF: HalfPointer, LONG: LongPointer, INT: IntPointer, SHORT: ShortPointer, BOOL: BoolPointer } pc = mapping[jdtype] #pointer = pc(pointer) '''<line_sep>buff=Nd4j.createBuffer(pointer size jdtype)<assert_stmt>buff.address()<eq>pointer_address<line_sep>_refs.append(buff)<line_sep>elem_size=buff.getElementSize()<assert_stmt>elem_size<eq>np_array.dtype.itemsize<line_sep>strides=np_array.strides<line_sep>strides=[dim/elem_size<for>dim strides]<line_sep>shape=np_array.shape<line_sep>nd4j_array=Nd4j.create(buff shape strides 0)<assert_stmt>buff.address()<eq>nd4j_array.data().address()<line_sep><return>nd4j_array<block_end><def_stmt>_to_numpy nd4j_array<block_start>''' Convert nd4j array to numpy array '''<line_sep>buff=nd4j_array.data()<line_sep>address=buff.pointer().address()<line_sep>dtype=nd4j_array.dataType().toString()<line_sep>mapping={'DOUBLE':ctypes.c_double 'FLOAT':ctypes.c_float 'HALF':ctypes.c_short 'LONG':ctypes.c_long 'INT':ctypes.c_int 'SHORT':ctypes.c_short 'BOOL':ctypes.c_bool}<line_sep>Pointer=ctypes.POINTER(mapping[dtype])<line_sep>pointer=ctypes.cast(address Pointer)<line_sep>np_array=np.ctypeslib.as_array(pointer tuple(nd4j_array.shape()))<line_sep><return>np_array<block_end><def_stmt>_indarray x<block_start>typ=type(x)<if_stmt>typ<is>INDArray<block_start><return>x<block_end><elif_stmt>typ<is>ndarray<block_start><return>x.array<block_end><elif_stmt>'numpy'<in>str(typ)<block_start><return>_from_numpy(x)<block_end><elif_stmt>typ<in>(list tuple)<block_start><return>_from_numpy(np.array(x))<block_end><elif_stmt>typ<in>(int float)<block_start><return>Nd4j.scalar(x)<block_end><else_stmt><block_start><raise>Exception('Data type not understood :'+str(typ))<block_end><block_end><def_stmt>_nparray x<block_start>typ=type(x)<if_stmt>typ<is>INDArray<block_start><return>ndarray(x).numpy()<block_end><elif_stmt>typ<is>ndarray<block_start><return>x.numpy()<block_end><elif_stmt>'numpy'<in>str(typ)<block_start><return>x<block_end><elif_stmt>typ<in>(list tuple)<block_start><return>np.array(x)<block_end><elif_stmt>typ<in>(int float)<block_start><return>np.array(x)<block_end><else_stmt><block_start><raise>Exception('Data type not understood :'+str(typ))<block_end><block_end><def_stmt>broadcast_like y x<block_start>xs=x.shape()<line_sep>ys=y.shape()<if_stmt>xs<eq>ys<block_start><return>y<block_end>_xs=tuple(xs)<line_sep>_ys=tuple(ys)<line_sep>nx=len(xs)<line_sep>ny=len(ys)<if_stmt>nx<g>ny<block_start>diff=nx-ny<line_sep>ys=([1]<times>diff)+ys<line_sep>y=y.reshape(ys)<line_sep>ny=nx<block_end><elif_stmt>ny<g>nx<block_start><raise>Exception('Unable to broadcast shapes '+str(_xs)+''<concat>' and '+str(_ys))<block_end>yt=[]<line_sep>rep_y=<false><for_stmt>xd,yd zip(xs ys)<block_start><if_stmt>xd<eq>yd<block_start>yt.append(1)<block_end><elif_stmt>xd<eq>1<block_start><raise>Exception('Unable to broadcast shapes '+str(_xs)+''<concat>' and 
'+str(_ys))<block_end><elif_stmt>yd<eq>1<block_start>yt.append(xd)<line_sep>rep_y=<true><block_end><else_stmt><block_start><raise>Exception('Unable to broadcast shapes '+str(_xs)+''<concat>' and '+str(_ys))<block_end><block_end><if_stmt>rep_y<block_start>y=y.repmat(*yt)<block_end><return>y<block_end><def_stmt>broadcast x y<block_start>xs=x.shape()<line_sep>ys=y.shape()<if_stmt>xs<eq>ys<block_start><return>x y<block_end>_xs=tuple(xs)<line_sep>_ys=tuple(ys)<line_sep>nx=len(xs)<line_sep>ny=len(ys)<if_stmt>nx<g>ny<block_start>diff=nx-ny<line_sep>ys=([1]<times>diff)+ys<line_sep>y=y.reshape(*ys)<line_sep>ny=nx<block_end><elif_stmt>ny<g>nx<block_start>diff=ny-nx<line_sep>xs=([1]<times>diff)+xs<line_sep>x=x.reshape(*xs)<line_sep>nx=ny<block_end>xt=[]<line_sep>yt=[]<line_sep>rep_x=<false><line_sep>rep_y=<false><for_stmt>xd,yd zip(xs ys)<block_start><if_stmt>xd<eq>yd<block_start>xt.append(1)<line_sep>yt.append(1)<block_end><elif_stmt>xd<eq>1<block_start>xt.append(yd)<line_sep>yt.append(1)<line_sep>rep_x=<true><block_end><elif_stmt>yd<eq>1<block_start>xt.append(1)<line_sep>yt.append(xd)<line_sep>rep_y=<true><block_end><else_stmt><block_start><raise>Exception('Unable to broadcast shapes '+str(_xs)+''<concat>' and '+str(_ys))<block_end><block_end><if_stmt>rep_x<block_start>x=Nd4j.tile(x *xt)<block_end><if_stmt>rep_y<block_start><try_stmt><block_start>y=Nd4j.tile(y *yt)<block_end><except_stmt><block_start>y=Nd4j.tile(y *yt)<block_end><block_end><return>x y<block_end><class_stmt>ndarray(object)<block_start><def_stmt>__init__ self data dtype=<none># we ignore dtype for now <block_start>typ=type(data)<if_stmt>'nd4j'<in>typ.__name__# Note that we don't make a copy here <block_start>self.array=data<block_end><elif_stmt>typ<is>ndarray<block_start>self.array=data.array.dup()<block_end><else_stmt><block_start><if_stmt>typ<is><not>np.ndarray<block_start>data=np.array(data)<block_end>self.array=_from_numpy(data)<block_end><block_end><def_stmt>numpy self<block_start><try_stmt><block_start><return>self.np_array<block_end><except_stmt>AttributeError<block_start>self.np_array=_to_numpy(self.array)<line_sep><return>self.np_array<block_end><block_end>@property<def_stmt>size self<block_start><return>self.array.length()<block_end>@property<def_stmt>shape self<block_start><return>tuple(self.array.shape())<block_end>@shape.setter<def_stmt>shape self value<block_start>arr=self.reshape(value)<line_sep>self.array=arr.array<block_end>@property<def_stmt>ndim self<block_start><return>len(self.array.shape())<block_end><def_stmt>__getitem__ self key<block_start><return>ndarray(self.numpy()[key])<if_stmt>type(key)<is>int<block_start><return>ndarray(self.array.get(NDArrayIndex.point(key)))<block_end><if_stmt>type(key)<is>slice<block_start>start=key.start<line_sep>stop=key.stop<line_sep>step=key.step<if_stmt>start<is><none><block_start>start=0<block_end><if_stmt>stop<is><none><block_start>shape=self.array.shape()<if_stmt>shape[0]<eq>1<block_start>stop=shape[1]<block_end><else_stmt><block_start>stop=shape[0]<block_end><block_end><if_stmt>stop-start<le>0<block_start><return><none><block_end><if_stmt>step<is><none><or>step<eq>1<block_start><return>ndarray(self.array.get(NDArrayIndex.interval(start stop)))<block_end><else_stmt><block_start><return>ndarray(self.array.get(NDArrayIndex.interval(start step stop)))<block_end><block_end><if_stmt>type(key)<is>list<block_start><raise>NotImplementedError('Sorry, this type of indexing is not supported 
yet.')<block_end><if_stmt>type(key)<is>tuple<block_start>key=list(key)<line_sep>shape=self.array.shape()<line_sep>ndim=len(shape)<line_sep>nk=len(key)<line_sep>key<augadd>[slice(<none>)]<times>(ndim-nk)<line_sep>args=[]<for_stmt>i,dim enumerate(key)<block_start><if_stmt>type(dim)<is>int<block_start>args.append(NDArrayIndex.point(dim))<block_end><elif_stmt>type(dim)<is>slice<block_start><if_stmt>dim<eq>slice(<none>)<block_start>args.append(NDArrayIndex.all())<block_end><else_stmt><block_start>start=dim.start<line_sep>stop=dim.stop<line_sep>step=dim.step<if_stmt>start<is><none><block_start>start=0<block_end><if_stmt>stop<is><none><block_start>stop=shape[i]<block_end><if_stmt>stop-start<le>0<block_start><return><none><block_end><if_stmt>step<is><none><or>step<eq>1<block_start>args.append(NDArrayIndex.interval(start stop))<block_end><else_stmt><block_start>args.append(NDArrayIndex.interval(start step stop))<block_end><block_end><block_end><elif_stmt>type(dim)<in>(list tuple)<block_start><raise>NotImplementedError('Sorry, this type of indexing is not supported yet.')<block_end><block_end><return>ndarray(self.array.get(*args))<block_end><block_end><def_stmt>__setitem__ self key other<block_start>self.numpy()[key]=_nparray(other)<line_sep><return><line_sep>other=_indarray(other)<line_sep>view=self[key]<if_stmt>view<is><none><block_start><return><block_end>view=view.array<line_sep>other=broadcast_like(other view)<line_sep>view.assign(other)<block_end><def_stmt>__add__ self other<block_start><return>ndarray(self.numpy()+_nparray(other))<line_sep>other=_indarray(other)<line_sep>x,y=broadcast(self.array other)<line_sep><return>ndarray(x.add(y))<block_end><def_stmt>__sub__ self other<block_start><return>ndarray(self.numpy()-_nparray(other))<line_sep>other=_indarray(other)<line_sep>x,y=broadcast(self.array other)<line_sep><return>ndarray(x.sub(y))<block_end><def_stmt>__mul__ self other<block_start><return>ndarray(self.numpy()<times>_nparray(other))<line_sep>other=_indarray(other)<line_sep>x,y=broadcast(self.array other)<line_sep><return>ndarray(x.mul(y))<block_end><def_stmt>__div__ self other<block_start><return>ndarray(self.numpy()/_nparray(other))<line_sep>other=_indarray(other)<line_sep>x,y=broadcast(self.array other)<line_sep><return>ndarray(x.div(y))<block_end><def_stmt>__pow__ self other<block_start><return>ndarray(self.numpy()<power>_nparray(other))<line_sep>other=_indarray(other)<line_sep>x,y=broadcast(self.array other)<line_sep><return>ndarray(Transforms.pow(x y))<block_end><def_stmt>__iadd__ self other<block_start>self.numpy().__iadd__(_nparray(other))<line_sep><return>self<line_sep>other=_indarray(other)<if_stmt>self.array.shape()<eq>other.shape()<block_start>self.array=self.array.addi(other)<block_end><else_stmt><block_start>x,y=broadcast(self.array other)<line_sep>self.array=x.add(y)<block_end><return>self<block_end><def_stmt>__isub__ self other<block_start>self.numpy().__isub__(_nparray(other))<line_sep><return>self<line_sep>other=_indarray(other)<if_stmt>self.array.shape()<eq>other.shape()<block_start>self.array=self.array.subi(other)<block_end><else_stmt><block_start>x,y=broadcast(self.array other)<line_sep>self.array=x.sub(y)<block_end><return>self<block_end><def_stmt>__imul__ self other<block_start>self.numpy().__imul__(_nparray(other))<line_sep><return>self<line_sep>other=_indarray(other)<if_stmt>self.array.shape()<eq>other.shape()<block_start>self.array=self.array.muli(other)<block_end><else_stmt><block_start>x,y=broadcast(self.array 
other)<line_sep>self.array=x.mul(y)<block_end><return>self<block_end><def_stmt>__idiv__ self other<block_start>self.numpy().__idiv__(_nparray(other))<line_sep><return>self<line_sep>other=_indarray(other)<if_stmt>self.array.shape()<eq>other.shape()<block_start>self.array=self.array.divi(other)<block_end><else_stmt><block_start>x,y=broadcast(self.array other)<line_sep>self.array=x.div(y)<block_end><return>self<block_end><def_stmt>__ipow__ self other<block_start>self.numpy().__ipow__(_nparray(other))<line_sep><return>self<line_sep>other=_indarray(other)<if_stmt>self.array.shape()<eq>other.shape()<block_start>self.array=self.array.divi(other)<block_end><else_stmt><block_start>x,y=broadcast(self.array other)<line_sep>self.array=Transforms.pow(x y)<block_end><return>self<block_end><def_stmt>__getattr__ self attr<block_start><import_stmt>ops<line_sep>f=getattr(ops attr)<line_sep>setattr(ndarray attr f)<line_sep><return>getattr(self attr)<block_end><def_stmt>__int__ self<block_start><if_stmt>self.array.length()<eq>1<block_start><return>self.array.getInt(0)<block_end><raise>Exception('Applicable only for scalars')<block_end><def_stmt>__float__ self<block_start><if_stmt>self.array.length()<eq>1<block_start><return>self.array.getDouble(0)<block_end><raise>Exception('Applicable only for scalars')<block_end>@property<def_stmt>T self<block_start><return>self.transpose()<block_end><block_end><def_stmt>array *args **kwargs<block_start><return>ndarray(*args **kwargs)<block_end>
import numpy as np
import scipy as sp
import scipy.sparse.linalg as splinalg


def eig2_nL(g, tol_eigs=1.0e-6, normalize: bool = True, dim: int = 1):
    """
    DESCRIPTION
    -----------

    Computes the eigenvector that corresponds to the second smallest eigenvalue
    of the normalized Laplacian matrix, then uses sweep cut to round the solution.

    PARAMETERS (mandatory)
    ----------------------

    g: graph object

    PARAMETERS (optional)
    ---------------------

    dim: positive, int
         default == 1
         The number of eigenvectors or dimensions to compute.

    tol_eigs: positive float, double
              default == 1.0e-6
              Tolerance for computation of the eigenvector that corresponds to
              the second smallest eigenvalue of the normalized Laplacian matrix.

    normalize: bool,
               default == True
               True if we should return the eigenvectors of the generalized
               eigenvalue problem associated with the normalized Laplacian.
               This should be on unless you know what you are doing.

    RETURNS
    -------

    p: Eigenvector or eigenvector matrix that corresponds to the second smallest
       eigenvalue of the normalized Laplacian matrix, plus larger eigenvectors if dim >= 0.
    """
    n = g.adjacency_matrix.shape[0]

    D_sqrt_neg = sp.sparse.spdiags(g.dn_sqrt.transpose(), 0, n, n)

    L = sp.sparse.identity(n) - D_sqrt_neg.dot((g.adjacency_matrix.dot(D_sqrt_neg)))

    emb_eig_val, p = splinalg.eigsh(L, which='SM', k=1 + dim, tol=tol_eigs)

    F = np.real(p[:, 1:])

    if normalize:
        F *= g.dn_sqrt[:, np.newaxis]
    return F, emb_eig_val


"""
Random walks and local cuts in graphs, Chung, LAA 2007
We just form the sub-matrix of the Laplacian and use the eigenvector there.
"""


def eig2nL_subgraph(g, ref_nodes, tol_eigs=1.0e-6, normalize: bool = True):
    A_sub = g.adjacency_matrix.tocsr()[ref_nodes, :].tocsc()[:, ref_nodes]
    nref = len(ref_nodes)
    D_sqrt_neg = sp.sparse.spdiags(g.dn_sqrt[ref_nodes].transpose(), 0, nref, nref)

    L_sub = sp.sparse.identity(nref) - D_sqrt_neg.dot((A_sub.dot(D_sqrt_neg)))

    emb_eig_val, emb_eig = splinalg.eigsh(L_sub, which='SM', k=1, tol=tol_eigs)
    emb_eig *= -1 if max(emb_eig) < 0 else 1
    f = emb_eig[:, 0]

    if normalize:
        f *= g.dn_sqrt[ref_nodes]

    return ((ref_nodes, f), emb_eig_val)
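
# --- Hedged usage sketch (not part of the original module) ---
# Both functions expect a graph object exposing `adjacency_matrix` (a scipy sparse
# matrix) and `dn_sqrt` (elementwise 1/sqrt of the node degrees), as in a
# localgraphclustering-style GraphLocal object. The tiny stub below imitates just
# those two attributes so the call shape is visible; it is an assumption, not the
# real API. It reuses the np/sp imports above.

class _GraphStub:
    def __init__(self, A):
        self.adjacency_matrix = sp.sparse.csr_matrix(A)
        degrees = np.asarray(self.adjacency_matrix.sum(axis=1)).ravel()
        self.dn_sqrt = 1.0 / np.sqrt(degrees)


if __name__ == '__main__':
    # 4-cycle graph; its normalized Laplacian has eigenvalues 0, 1, 1, 2.
    A = np.array([[0, 1, 0, 1],
                  [1, 0, 1, 0],
                  [0, 1, 0, 1],
                  [1, 0, 1, 0]], dtype=float)
    g = _GraphStub(A)
    F, eig_vals = eig2_nL(g, dim=1)
    print(F.shape, eig_vals)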
# Copyright (C) 2015-2021 Regents of the University of California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>enum<import_stmt>logging<import_stmt>os<import_stmt>shutil<import_from_stmt>abc ABC abstractmethod<import_from_stmt>argparse ArgumentParser _ArgumentGroup<import_from_stmt>contextlib contextmanager<import_from_stmt>typing Any Callable ContextManager Dict Iterator List Optional Tuple Type TypeVar Union NamedTuple <import_from_stmt>toil.common Toil cacheDirName Config<import_from_stmt>toil.deferred DeferredFunctionManager<import_from_stmt>toil.fileStores.abstractFileStore AbstractFileStore<import_from_stmt>toil.job JobDescription<import_from_stmt>toil.resource Resource<line_sep>logger=logging.getLogger(__name__)<line_sep># Value to use as exitStatus in UpdatedBatchJobInfo.exitStatus when status is not available. EXIT_STATUS_UNAVAILABLE_VALUE=255<class_stmt>BatchJobExitReason(enum.Enum)<block_start>FINISHED:int=1# Successfully finished. FAILED:int=2# Job finished, but failed. LOST:int=3# Preemptable failure (job's executing host went away). KILLED:int=4# Job killed before finishing. ERROR:int=5# Internal error. MEMLIMIT:int=6<block_end># Job hit batch system imposed memory limit <class_stmt>UpdatedBatchJobInfo(NamedTuple)<block_start>jobID:int<line_sep>exitStatus:int<line_sep>""" The exit status (integer value) of the job. 0 implies successful. EXIT_STATUS_UNAVAILABLE_VALUE is used when the exit status is not available (e.g. job is lost). """<line_sep>exitReason:Optional[BatchJobExitReason]<line_sep>wallTime:Union[float int <none>]<block_end># Information required for worker cleanup on shutdown of the batch system. <class_stmt>WorkerCleanupInfo(NamedTuple)<block_start>workDir:str<line_sep>"""workdir path (where the cache would go)"""<line_sep>workflowID:str<line_sep>"""used to identify files specific to this workflow"""<line_sep>cleanWorkDir:str<block_end><class_stmt>AbstractBatchSystem(ABC)<block_start>""" An abstract (as far as Python currently allows) base class to represent the interface the batch system must provide to Toil. """<line_sep>@classmethod@abstractmethod<def_stmt>supportsAutoDeployment cls<arrow>bool<block_start>""" Whether this batch system supports auto-deployment of the user script itself. If it does, the :meth:`.setUserScript` can be invoked to set the resource object representing the user script. Note to implementors: If your implementation returns True here, it should also override """<line_sep><raise>NotImplementedError()<block_end>@classmethod@abstractmethod<def_stmt>supportsWorkerCleanup cls<arrow>bool<block_start>""" Indicates whether this batch system invokes :meth:`BatchSystemSupport.workerCleanup` after the last job for a particular workflow invocation finishes. Note that the term *worker* refers to an entire node, not just a worker process. A worker process may run more than one job sequentially, and more than one concurrent worker process may exist on a worker node, for the same workflow. 
The batch system is said to *shut down* after the last worker process terminates. """<line_sep><raise>NotImplementedError()<block_end><def_stmt>setUserScript self userScript:Resource<arrow><none><block_start>""" Set the user script for this workflow. This method must be called before the first job is issued to this batch system, and only if :meth:`.supportsAutoDeployment` returns True, otherwise it will raise an exception. :param userScript: the resource object representing the user script or module and the modules it depends on. """<line_sep><raise>NotImplementedError()<block_end>@abstractmethod<def_stmt>issueBatchJob self jobDesc:JobDescription job_environment:Optional[Dict[str str]]=<none><arrow>int<block_start>""" Issues a job with the specified command to the batch system and returns a unique jobID. :param jobDesc a toil.job.JobDescription :param job_environment: a collection of job-specific environment variables to be set on the worker. :return: a unique jobID that can be used to reference the newly issued job """<line_sep><raise>NotImplementedError()<block_end>@abstractmethod<def_stmt>killBatchJobs self jobIDs:List[int]<arrow><none><block_start>""" Kills the given job IDs. After returning, the killed jobs will not appear in the results of getRunningBatchJobIDs. The killed job will not be returned from getUpdatedBatchJob. :param jobIDs: list of IDs of jobs to kill """<line_sep><raise>NotImplementedError()<block_end># FIXME: Return value should be a set (then also fix the tests) @abstractmethod<def_stmt>getIssuedBatchJobIDs self<arrow>List[int]<block_start>""" Gets all currently issued jobs :return: A list of jobs (as jobIDs) currently issued (may be running, or may be waiting to be run). Despite the result being a list, the ordering should not be depended upon. """<line_sep><raise>NotImplementedError()<block_end>@abstractmethod<def_stmt>getRunningBatchJobIDs self<arrow>Dict[int float]<block_start>""" Gets a map of jobs as jobIDs that are currently running (not just waiting) and how long they have been running, in seconds. :return: dictionary with currently running jobID keys and how many seconds they have been running as the value """<line_sep><raise>NotImplementedError()<block_end>@abstractmethod<def_stmt>getUpdatedBatchJob self maxWait:int<arrow>Optional[UpdatedBatchJobInfo]<block_start>""" Returns information about job that has updated its status (i.e. ceased running, either successfully or with an error). Each such job will be returned exactly once. Does not return info for jobs killed by killBatchJobs, although they may cause None to be returned earlier than maxWait. :param maxWait: the number of seconds to block, waiting for a result :return: If a result is available, returns UpdatedBatchJobInfo. Otherwise it returns None. wallTime is the number of seconds (a strictly positive float) in wall-clock time the job ran for, or None if this batch system does not support tracking wall time. """<line_sep><raise>NotImplementedError()<block_end><def_stmt>getSchedulingStatusMessage self<arrow>Optional[str]<block_start>""" Get a log message fragment for the user about anything that might be going wrong in the batch system, if available. If no useful message is available, return None. This can be used to report what resource is the limiting factor when scheduling jobs, for example. If the leader thinks the workflow is stuck, the message can be displayed to the user to help them diagnose why it might be stuck. :return: User-directed message about scheduling state. 
"""<line_sep># Default implementation returns None. # Override to provide scheduling status information. <return><none><block_end>@abstractmethod<def_stmt>shutdown self<arrow><none><block_start>""" Called at the completion of a toil invocation. Should cleanly terminate all worker threads. """<line_sep><raise>NotImplementedError()<block_end><def_stmt>setEnv self name:str value:Optional[str]=<none><arrow><none><block_start>""" Set an environment variable for the worker process before it is launched. The worker process will typically inherit the environment of the machine it is running on but this method makes it possible to override specific variables in that inherited environment before the worker is launched. Note that this mechanism is different to the one used by the worker internally to set up the environment of a job. A call to this method affects all jobs issued after this method returns. Note to implementors: This means that you would typically need to copy the variables before enqueuing a job. If no value is provided it will be looked up from the current environment. """<line_sep><raise>NotImplementedError()<block_end>@classmethod<def_stmt>add_options cls parser:Union[ArgumentParser _ArgumentGroup]<arrow><none><block_start>""" If this batch system provides any command line options, add them to the given parser. """<line_sep><pass><block_end>OptionType=TypeVar('OptionType')<line_sep>@classmethod<def_stmt>setOptions cls setOption:Callable[[str Optional[Callable[[Any] OptionType]] Optional[Callable[[OptionType] <none>]] Optional[OptionType] Optional[List[str]]] <none>]<arrow><none><block_start>""" Process command line or configuration options relevant to this batch system. :param setOption: A function with signature setOption(option_name, parsing_function=None, check_function=None, default=None, env=None) returning nothing, used to update run configuration as a side effect. """<line_sep># TODO: change type to a Protocol to express kwarg names, or else use a # different interface (generator?) <pass><block_end><def_stmt>getWorkerContexts self<arrow>List[ContextManager[Any]]<block_start>""" Get a list of picklable context manager objects to wrap worker work in, in order. Can be used to ask the Toil worker to do things in-process (such as configuring environment variables, hot-deploying user scripts, or cleaning up a node) that would otherwise require a wrapping "executor" process. """<line_sep><return>[]<block_end><block_end><class_stmt>BatchSystemSupport(AbstractBatchSystem)<block_start>""" Partial implementation of AbstractBatchSystem, support methods. """<def_stmt>__init__ self config:Config maxCores:float maxMemory:int maxDisk:int<arrow><none><block_start>""" Initializes initial state of the object :param toil.common.Config config: object is setup by the toilSetup script and has configuration parameters for the jobtree. You can add code to that script to get parameters for your batch system. 
:param float maxCores: the maximum number of cores the batch system can request for any one job :param int maxMemory: the maximum amount of memory the batch system can request for any one job, in bytes :param int maxDisk: the maximum amount of disk space the batch system can request for any one job, in bytes """<line_sep>super().__init__()<line_sep>self.config=config<line_sep>self.maxCores=maxCores<line_sep>self.maxMemory=maxMemory<line_sep>self.maxDisk=maxDisk<line_sep>self.environment:Dict[str str]={}<line_sep>self.workerCleanupInfo=WorkerCleanupInfo(workDir=self.config.workDir workflowID=self.config.workflowID cleanWorkDir=self.config.cleanWorkDir)<block_end><def_stmt>checkResourceRequest self memory:int cores:float disk:int job_name:str='' detail:str=''<arrow><none><block_start>""" Check resource request is not greater than that available or allowed. :param int memory: amount of memory being requested, in bytes :param float cores: number of cores being requested :param int disk: amount of disk space being requested, in bytes :param str job_name: Name of the job being checked, for generating a useful error report. :param str detail: Batch-system-specific message to include in the error. :raise InsufficientSystemResources: raised when a resource is requested in an amount greater than allowed """<line_sep>batch_system=self.__class__.__name__<or>'this batch system'<for_stmt>resource,requested,available [('cores' cores self.maxCores) ('memory' memory self.maxMemory) ('disk' disk self.maxDisk)]<block_start><assert_stmt>requested<is><not><none><if_stmt>requested<g>available<block_start>unit='bytes of '<if>resource<in>('disk' 'memory')<else>''<line_sep>R=f'The job {job_name} is r'<if>job_name<else>'R'<if_stmt>resource<eq>'disk'<block_start>msg=(f'{R}equesting {requested} {unit}{resource} for temporary space, '<concat>f'more than the maximum of {available} {unit}{resource} of free space on '<concat>f'{self.config.workDir} that {batch_system} was configured with, or enforced '<concat>f'by --max{resource.capitalize()}. Try setting/changing the toil option '<concat>f'"--workDir" or changing the base temporary directory by setting TMPDIR.')<block_end><else_stmt><block_start>msg=(f'{R}equesting {requested} {unit}{resource}, more than the maximum of '<concat>f'{available} {unit}{resource} that {batch_system} was configured with, '<concat>f'or enforced by --max{resource.capitalize()}.')<block_end><if_stmt>detail<block_start>msg<augadd>detail<block_end><raise>InsufficientSystemResources(msg)<block_end><block_end><block_end><def_stmt>setEnv self name:str value:Optional[str]=<none><arrow><none><block_start>""" Set an environment variable for the worker process before it is launched. The worker process will typically inherit the environment of the machine it is running on but this method makes it possible to override specific variables in that inherited environment before the worker is launched. Note that this mechanism is different to the one used by the worker internally to set up the environment of a job. A call to this method affects all jobs issued after this method returns. Note to implementors: This means that you would typically need to copy the variables before enqueuing a job. If no value is provided it will be looked up from the current environment. :param str name: the environment variable to be set on the worker. :param str value: if given, the environment variable given by name will be set to this value. 
if None, the variable's current value will be used as the value on the worker :raise RuntimeError: if value is None and the name cannot be found in the environment """<if_stmt>value<is><none><block_start><try_stmt><block_start>value=os.environ[name]<block_end><except_stmt>KeyError<block_start><raise>RuntimeError(f"{name} does not exist in current environment")<block_end><block_end>self.environment[name]=value<block_end><def_stmt>formatStdOutErrPath self toil_job_id:int cluster_job_id:str std:str<arrow>str<block_start>""" Format path for batch system standard output/error and other files generated by the batch system itself. Files will be written to the Toil work directory (which may be on a shared file system) with names containing both the Toil and batch system job IDs, for ease of debugging job failures. :param: int toil_job_id : The unique id that Toil gives a job. :param: cluster_job_id : What the cluster, for example, GridEngine, uses as its internal job id. :param: string std : The provenance of the stream (for example: 'err' for 'stderr' or 'out' for 'stdout') :rtype: string : Formatted filename; however if self.config.noStdOutErr is true, returns '/dev/null' or equivalent. """<if_stmt>self.config.noStdOutErr<block_start><return>os.devnull<block_end>fileName:str=f'toil_{self.config.workflowID}.{toil_job_id}.{cluster_job_id}.{std}.log'<line_sep>workDir:str=Toil.getToilWorkDir(self.config.workDir)<line_sep><return>os.path.join(workDir fileName)<block_end>@staticmethod<def_stmt>workerCleanup info:WorkerCleanupInfo<arrow><none><block_start>""" Cleans up the worker node on batch system shutdown. Also see :meth:`supportsWorkerCleanup`. :param WorkerCleanupInfo info: A named tuple consisting of all the relevant information for cleaning up the worker. """<assert_stmt>isinstance(info WorkerCleanupInfo)<line_sep>workflowDir=Toil.getLocalWorkflowDir(info.workflowID info.workDir)<line_sep>DeferredFunctionManager.cleanupWorker(workflowDir)<line_sep>workflowDirContents=os.listdir(workflowDir)<line_sep>AbstractFileStore.shutdownFileStore(workflowDir info.workflowID)<if_stmt>(info.cleanWorkDir<eq>'always'<or>info.cleanWorkDir<in>('onSuccess' 'onError')<and>workflowDirContents<in>([] [cacheDirName(info.workflowID)]))<block_start>shutil.rmtree(workflowDir ignore_errors=<true>)<block_end><block_end><block_end><class_stmt>NodeInfo<block_start>""" The coresUsed attribute is a floating point value between 0 (all cores idle) and 1 (all cores busy), reflecting the CPU load of the node. The memoryUsed attribute is a floating point value between 0 (no memory used) and 1 (all memory used), reflecting the memory pressure on the node. The coresTotal and memoryTotal attributes are the node's resources, not just the used resources The requestedCores and requestedMemory attributes are all the resources that Toil Jobs have reserved on the node, regardless of whether the resources are actually being used by the Jobs. The workers attribute is an integer reflecting the number of workers currently active workers on the node. 
"""<def_stmt>__init__ self coresUsed:float memoryUsed:float coresTotal:float memoryTotal:int requestedCores:float requestedMemory:int workers:int<arrow><none><block_start>self.coresUsed=coresUsed<line_sep>self.memoryUsed=memoryUsed<line_sep>self.coresTotal=coresTotal<line_sep>self.memoryTotal=memoryTotal<line_sep>self.requestedCores=requestedCores<line_sep>self.requestedMemory=requestedMemory<line_sep>self.workers=workers<block_end><block_end><class_stmt>AbstractScalableBatchSystem(AbstractBatchSystem)<block_start>""" A batch system that supports a variable number of worker nodes. Used by :class:`toil. provisioners.clusterScaler.ClusterScaler` to scale the number of worker nodes in the cluster up or down depending on overall load. """<line_sep>@abstractmethod<def_stmt>getNodes self preemptable:Optional[bool]=<none><arrow>Dict[str NodeInfo]<block_start>""" Returns a dictionary mapping node identifiers of preemptable or non-preemptable nodes to NodeInfo objects, one for each node. :param preemptable: If True (False) only (non-)preemptable nodes will be returned. If None, all nodes will be returned. """<line_sep><raise>NotImplementedError()<block_end>@abstractmethod<def_stmt>nodeInUse self nodeIP:str<arrow>bool<block_start>""" Can be used to determine if a worker node is running any tasks. If the node is doesn't exist, this function should simply return False. :param nodeIP: The worker nodes private IP address :return: True if the worker node has been issued any tasks, else False """<line_sep><raise>NotImplementedError()<block_end># TODO: May be unused! @abstractmethod@contextmanager<def_stmt>nodeFiltering self filter:Optional[Callable[[NodeInfo] bool]]<arrow>Iterator[<none>]<block_start>""" Used to prevent races in autoscaling where 1) nodes have reported to the autoscaler as having no jobs 2) scaler decides to terminate these nodes. In parallel the batch system assigns jobs to the same nodes 3) scaler terminates nodes, resulting in job failures for all jobs on that node. Call this method prior to node termination to ensure that nodes being considered for termination are not assigned new jobs. Call the method again passing None as the filter to disable the filtering after node termination is done. :param method: This will be used as a filter on nodes considered when assigning new jobs. After this context manager exits the filter should be removed """<line_sep><raise>NotImplementedError()<block_end>@abstractmethod<def_stmt>ignoreNode self nodeAddress:str<arrow><none><block_start>""" Stop sending jobs to this node. Used in autoscaling when the autoscaler is ready to terminate a node, but jobs are still running. This allows the node to be terminated after the current jobs have finished. :param nodeAddress: IP address of node to ignore. """<line_sep><raise>NotImplementedError()<block_end>@abstractmethod<def_stmt>unignoreNode self nodeAddress:str<arrow><none><block_start>""" Stop ignoring this address, presumably after a node with this address has been terminated. This allows for the possibility of a new node having the same address as a terminated one. """<line_sep><raise>NotImplementedError()<block_end><block_end><class_stmt>InsufficientSystemResources(Exception)<block_start><pass><block_end>
<import_from_stmt>.gripper_model GripperModel<import_from_stmt>.gripper_factory gripper_factory<import_from_stmt>.gripper_tester GripperTester<import_from_stmt>.panda_gripper PandaGripper<import_from_stmt>.rethink_gripper RethinkGripper<import_from_stmt>.robotiq_85_gripper Robotiq85Gripper<import_from_stmt>.robotiq_three_finger_gripper RobotiqThreeFingerGripper RobotiqThreeFingerDexterousGripper<import_from_stmt>.jaco_three_finger_gripper JacoThreeFingerGripper JacoThreeFingerDexterousGripper<import_from_stmt>.robotiq_140_gripper Robotiq140Gripper<import_from_stmt>.wiping_gripper WipingGripper<import_from_stmt>.null_gripper NullGripper<line_sep>GRIPPER_MAPPING={"RethinkGripper":RethinkGripper "PandaGripper":PandaGripper "JacoThreeFingerGripper":JacoThreeFingerGripper "JacoThreeFingerDexterousGripper":JacoThreeFingerDexterousGripper "WipingGripper":WipingGripper "Robotiq85Gripper":Robotiq85Gripper "Robotiq140Gripper":Robotiq140Gripper "RobotiqThreeFingerGripper":RobotiqThreeFingerGripper "RobotiqThreeFingerDexterousGripper":RobotiqThreeFingerDexterousGripper <none>:NullGripper }<line_sep>ALL_GRIPPERS=GRIPPER_MAPPING.keys()<line_sep>
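# Editor-added illustration (hedged): looking a gripper class up in the registry
# above. The idn keyword passed to the constructor is an assumption about the
# GripperModel API and may differ between robosuite versions.
def make_gripper(name, idn=0):
    """Instantiate a gripper model by registry name; None maps to NullGripper."""
    if name not in GRIPPER_MAPPING:
        raise ValueError(
            "Unknown gripper {!r}; valid names: {}".format(name, list(ALL_GRIPPERS))
        )
    return GRIPPER_MAPPING[name](idn=idn)


# gripper = make_gripper("PandaGripper")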
# Copyright (c) 2014, Fundacion Dr. <NAME> # All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. <import_from_future_stmt> absolute_import<import_stmt>unittest<import_from_stmt>barf.arch ARCH_X86_MODE_32<import_from_stmt>barf.arch ARCH_X86_MODE_64<import_from_stmt>barf.arch.x86.parser X86Parser<class_stmt>X86Parser32BitsTests(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>self._parser=X86Parser(ARCH_X86_MODE_32)<block_end><def_stmt>test_two_oprnd_reg_reg self<block_start>asm=self._parser.parse("add eax, ebx")<line_sep>self.assertEqual(str(asm) "add eax, ebx")<block_end><def_stmt>test_two_oprnd_reg_imm self<block_start>asm=self._parser.parse("add eax, 0x12345678")<line_sep>self.assertEqual(str(asm) "add eax, 0x12345678")<block_end><def_stmt>test_two_oprnd_reg_mem self<block_start>asm=self._parser.parse("add eax, [ebx + edx * 4 + 0x10]")<line_sep>self.assertEqual(str(asm) "add eax, [ebx+edx*4+0x10]")<block_end><def_stmt>test_two_oprnd_mem_reg self<block_start>asm=self._parser.parse("add [ebx + edx * 4 + 0x10], eax")<line_sep>self.assertEqual(str(asm) "add [ebx+edx*4+0x10], eax")<block_end><def_stmt>test_one_oprnd_reg self<block_start>asm=self._parser.parse("inc eax")<line_sep>self.assertEqual(str(asm) "inc eax")<block_end><def_stmt>test_one_oprnd_imm self<block_start>asm=self._parser.parse("jmp 0x12345678")<line_sep>self.assertEqual(str(asm) "jmp 0x12345678")<block_end><def_stmt>test_one_oprnd_mem self<block_start>asm=self._parser.parse("inc dword ptr [ebx+edx*4+0x10]")<line_sep>self.assertEqual(str(asm) "inc dword ptr [ebx+edx*4+0x10]")<block_end><def_stmt>test_zero_oprnd self<block_start>asm=self._parser.parse("nop")<line_sep>self.assertEqual(str(asm) "nop")<block_end># Misc # ======================================================================== # <def_stmt>test_misc_1 self<block_start>asm=self._parser.parse("mov dword ptr [-0x21524111], ecx")<line_sep>self.assertEqual(str(asm) "mov dword ptr [-0x21524111], ecx")<line_sep>self.assertNotEqual(str(asm) "mov dword ptr [0xdeadbeef], ecx")<block_end><def_stmt>test_misc_2 self<block_start>asm=self._parser.parse("fucompi st(1)")<line_sep>self.assertEqual(str(asm) "fucompi st1")<block_end><block_end><class_stmt>X86Parser64BitsTests(unittest.TestCase)<block_start><def_stmt>setUp 
self<block_start>self._parser=X86Parser(ARCH_X86_MODE_64)<block_end><def_stmt>test_64_two_oprnd_reg_reg self<block_start>asm=self._parser.parse("add rax, rbx")<line_sep>self.assertEqual(str(asm) "add rax, rbx")<block_end><def_stmt>test_64_two_oprnd_reg_reg_2 self<block_start>asm=self._parser.parse("add rax, r8")<line_sep>self.assertEqual(str(asm) "add rax, r8")<block_end><def_stmt>test_64_two_oprnd_reg_mem self<block_start>asm=self._parser.parse("add rax, [rbx + r15 * 4 + 0x10]")<line_sep>self.assertEqual(str(asm) "add rax, [rbx+r15*4+0x10]")<block_end># Misc # ======================================================================== # <def_stmt>test_misc_offset_1 self<block_start>asm=self._parser.parse("add byte ptr [rax+0xffffff89], cl")<line_sep>self.assertEqual(str(asm) "add byte ptr [rax+0xffffff89], cl")<block_end><block_end><def_stmt>main <block_start>unittest.main()<block_end><if_stmt>__name__<eq>'__main__'<block_start>main()<block_end>
"""用户, 组, 及相关认证数据库模型"""<import_from_stmt>.group Group<import_from_stmt>.user User<import_from_stmt>.user UserIndex<import_from_stmt>.auth Authentication<import_from_stmt>.accesspoint AccessPoint<line_sep>
<import_stmt>json<import_stmt>os<import_stmt>responses<import_from_stmt>django.urls reverse<import_from_stmt>.. TestAdminMixin TestLociMixin<class_stmt>BaseTestAdmin(TestAdminMixin TestLociMixin)<block_start>geocode_url='https://geocode.arcgis.com/arcgis/rest/services/World/GeocodeServer/'<def_stmt>test_location_list self<block_start>self._login_as_admin()<line_sep>self._create_location(name='test-admin-location-1')<line_sep>url=reverse('{0}_location_changelist'.format(self.url_prefix))<line_sep>r=self.client.get(url)<line_sep>self.assertContains(r 'test-admin-location-1')<block_end><def_stmt>test_floorplan_list self<block_start>self._login_as_admin()<line_sep>self._create_floorplan()<line_sep>self._create_location()<line_sep>url=reverse('{0}_floorplan_changelist'.format(self.url_prefix))<line_sep>r=self.client.get(url)<line_sep>self.assertContains(r '1st floor')<block_end><def_stmt>test_location_json_view self<block_start>self._login_as_admin()<line_sep>loc=self._create_location()<line_sep>r=self.client.get(reverse('admin:django_loci_location_json' args=[loc.pk]))<line_sep>expected={'name':loc.name 'address':loc.address 'type':loc.type 'is_mobile':loc.is_mobile 'geometry':json.loads(loc.geometry.json) }<line_sep>self.assertDictEqual(r.json() expected)<block_end><def_stmt>test_location_floorplan_json_view self<block_start>self._login_as_admin()<line_sep>fl=self._create_floorplan()<line_sep>r=self.client.get(reverse('admin:django_loci_location_floorplans_json' args=[fl.location.pk]))<line_sep>expected={'choices':[{'id':str(fl.pk) 'str':str(fl) 'floor':fl.floor 'image':fl.image.url 'image_width':fl.image.width 'image_height':fl.image.height }]}<line_sep>self.assertDictEqual(r.json() expected)<block_end><def_stmt>test_location_change_image_removed self<block_start>self._login_as_admin()<line_sep>loc=self._create_location(name='test-admin-location-1' type='indoor')<line_sep>fl=self._create_floorplan(location=loc)<line_sep># remove floorplan image os.remove(fl.image.path)<line_sep>url=reverse('{0}_location_change'.format(self.url_prefix) args=[loc.pk])<line_sep>r=self.client.get(url)<line_sep>self.assertContains(r 'test-admin-location-1')<block_end><def_stmt>test_floorplan_change_image_removed self<block_start>self._login_as_admin()<line_sep>loc=self._create_location(name='test-admin-location-1' type='indoor')<line_sep>fl=self._create_floorplan(location=loc)<line_sep># remove floorplan image os.remove(fl.image.path)<line_sep>url=reverse('{0}_floorplan_change'.format(self.url_prefix) args=[fl.pk])<line_sep>r=self.client.get(url)<line_sep>self.assertContains(r 'test-admin-location-1')<block_end><def_stmt>test_is_mobile_location_json_view self<block_start>self._login_as_admin()<line_sep>loc=self._create_location(is_mobile=<true> geometry=<none>)<line_sep>response=self.client.get(reverse('admin:django_loci_location_json' args=[loc.pk]))<line_sep>self.assertEqual(response.status_code 200)<line_sep>content=json.loads(response.content)<line_sep>self.assertEqual(content['geometry'] <none>)<line_sep>loc1=self._create_location(name='location2' address='loc2 add' type='outdoor')<line_sep>response1=self.client.get(reverse('admin:django_loci_location_json' args=[loc1.pk]))<line_sep>self.assertEqual(response1.status_code 200)<line_sep>content1=json.loads(response1.content)<line_sep>expected={'name':'location2' 'address':'loc2 add' 'type':'outdoor' 'is_mobile':<false> 'geometry':{'type':'Point' 'coordinates':[12.512124 41.898903]} }<line_sep>self.assertEqual(content1 
expected)<block_end>@responses.activate<def_stmt>test_geocode self<block_start>self._login_as_admin()<line_sep>address='Red Square'<line_sep>url='{0}?address={1}'.format(reverse('admin:django_loci_location_geocode_api') address)<line_sep># Mock HTTP request to the URL to work offline responses.add(responses.GET f'{self.geocode_url}findAddressCandidates?singleLine=Red+Square&f=json&maxLocations=1' body=self._load_content('base/static/test-geocode.json') content_type='application/json' )<line_sep>response=self.client.get(url)<line_sep>response_lat=round(response.json()['lat'])<line_sep>response_lng=round(response.json()['lng'])<line_sep>self.assertEqual(response.status_code 200)<line_sep>self.assertEqual(response_lat 56)<line_sep>self.assertEqual(response_lng 38)<block_end><def_stmt>test_geocode_no_address self<block_start>self._login_as_admin()<line_sep>url=reverse('admin:django_loci_location_geocode_api')<line_sep>response=self.client.get(url)<line_sep>expected={'error':'Address parameter not defined'}<line_sep>self.assertEqual(response.status_code 400)<line_sep>self.assertEqual(response.json() expected)<block_end>@responses.activate<def_stmt>test_geocode_invalid_address self<block_start>self._login_as_admin()<line_sep>invalid_address='thisaddressisnotvalid123abc'<line_sep>url='{0}?address={1}'.format(reverse('admin:django_loci_location_geocode_api') invalid_address)<line_sep>responses.add(responses.GET f'{self.geocode_url}findAddressCandidates?singleLine=thisaddressisnotvalid123abc'<concat>'&f=json&maxLocations=1' body=self._load_content('base/static/test-geocode-invalid-address.json') content_type='application/json' )<line_sep>response=self.client.get(url)<line_sep>expected={'error':'Not found location with given name'}<line_sep>self.assertEqual(response.status_code 404)<line_sep>self.assertEqual(response.json() expected)<block_end>@responses.activate<def_stmt>test_reverse_geocode self<block_start>self._login_as_admin()<line_sep>lat=52<line_sep>lng=21<line_sep>url='{0}?lat={1}&lng={2}'.format(reverse('admin:django_loci_location_reverse_geocode_api') lat lng)<line_sep># Mock HTTP request to the URL to work offline responses.add(responses.GET f'{self.geocode_url}reverseGeocode?location=21.0%2C52.0&f=json&outSR=4326' body=self._load_content('base/static/test-reverse-geocode.json') content_type='application/json' )<line_sep>response=self.client.get(url)<line_sep>self.assertEqual(response.status_code 200)<line_sep>self.assertContains(response 'POL')<block_end>@responses.activate<def_stmt>test_reverse_location_with_no_address self<block_start>self._login_as_admin()<line_sep>lat=-30<line_sep>lng=-30<line_sep>url='{0}?lat={1}&lng={2}'.format(reverse('admin:django_loci_location_reverse_geocode_api') lat lng)<line_sep>responses.add(responses.GET f'{self.geocode_url}reverseGeocode?location=-30.0%2C-30.0&f=json&outSR=4326' body=self._load_content('base/static/test-reverse-location-with-no-address.json') content_type='application/json' )<line_sep>response=self.client.get(url)<line_sep>response_address=response.json()['address']<line_sep>self.assertEqual(response.status_code 404)<line_sep>self.assertEqual(response_address '')<block_end><def_stmt>test_reverse_geocode_no_coords self<block_start>self._login_as_admin()<line_sep>url=reverse('admin:django_loci_location_reverse_geocode_api')<line_sep>response=self.client.get(url)<line_sep>expected={'error':'lat or lng parameter not defined'}<line_sep>self.assertEqual(response.status_code 400)<line_sep>self.assertEqual(response.json() 
expected)<block_end><block_end>
<import_from_stmt>django.core.exceptions NON_FIELD_ERRORS<import_from_stmt>rest_framework status viewsets serializers<import_from_stmt>rest_framework.decorators list_route<import_from_stmt>rest_framework.response Response<import_from_stmt>rest_framework.serializers ModelSerializer<import_from_stmt>jet_django.filters.model_aggregate AggregateFilter<import_from_stmt>jet_django.filters.model_group GroupFilter<import_from_stmt>jet_django.pagination CustomPageNumberPagination<import_from_stmt>jet_django.permissions HasProjectPermissions ModifyNotInDemo<import_from_stmt>jet_django.serializers.reorder reorder_serializer_factory<class_stmt>AggregateSerializer(serializers.Serializer)<block_start>y_func=serializers.IntegerField()<def_stmt>__init__ self *args **kwargs<block_start><if_stmt>'y_func_serializer'<in>kwargs<block_start>self.fields['y_func']=kwargs.pop('y_func_serializer')<block_end>super().__init__(*args **kwargs)<block_end><block_end><class_stmt>GroupSerializer(serializers.Serializer)<block_start>group=serializers.CharField()<line_sep>y_func=serializers.IntegerField()<def_stmt>__init__ self *args **kwargs<block_start><if_stmt>'group_serializer'<in>kwargs<block_start>self.fields['group']=kwargs.pop('group_serializer')<block_end><if_stmt>'y_func_serializer'<in>kwargs<block_start>self.fields['y_func']=kwargs.pop('y_func_serializer')<block_end>super().__init__(*args **kwargs)<block_end><block_end><def_stmt>model_viewset_factory build_model build_filter_class build_serializer_class build_detail_serializer_class build_queryset build_actions ordering_field<block_start>ReorderSerializer=reorder_serializer_factory(build_queryset ordering_field)<class_stmt>Viewset(viewsets.ModelViewSet)<block_start>model=build_model<line_sep>queryset=build_queryset<line_sep>pagination_class=CustomPageNumberPagination<line_sep>filter_class=build_filter_class<line_sep>authentication_classes=()<line_sep>permission_classes=(HasProjectPermissions ModifyNotInDemo)<def_stmt>get_serializer_class self<block_start><if_stmt>self.action<eq>'aggregate'<block_start><return>AggregateSerializer<block_end><elif_stmt>self.action<eq>'group'<block_start><return>GroupSerializer<block_end><elif_stmt>self.action<eq>'retrieve'<block_start><return>build_detail_serializer_class<block_end><else_stmt><block_start><return>build_serializer_class<block_end><block_end>@list_route(methods=['get'])<def_stmt>aggregate self request<block_start>queryset=self.filter_queryset(self.get_queryset())<line_sep>y_func=request.GET['_y_func'].lower()<line_sep>y_column=request.GET.get('_y_column' 'id')<line_sep>y_field=self.model._meta.get_field(y_column)<line_sep>y_serializer_class,y_serializer_kwargs=ModelSerializer().build_standard_field(y_column y_field)<line_sep>y_serializer=y_serializer_class(**y_serializer_kwargs)<line_sep>queryset=AggregateFilter().filter(queryset {'y_func':y_func 'y_column':y_column})<line_sep>serializer=self.get_serializer(queryset y_func_serializer=y_serializer)<line_sep><return>Response(serializer.data)<block_end>@list_route(methods=['get'])<def_stmt>group self request<block_start>queryset=self.filter_queryset(self.get_queryset())<line_sep>x_column=request.GET['_x_column']<line_sep>x_lookup_name=request.GET.get('_x_lookup')<line_sep>y_func=request.GET['_y_func'].lower()<line_sep>y_column=request.GET.get('_y_column' 
'id')<line_sep>x_field=self.model._meta.get_field(x_column)<line_sep>x_lookup=x_field.class_lookups.get(x_lookup_name)<line_sep>y_field=self.model._meta.get_field(y_column)<if_stmt>x_lookup<block_start>x_field=x_lookup('none').output_field<block_end>x_serializer_class,x_serializer_kwargs=ModelSerializer().build_standard_field(x_column x_field)<line_sep>x_serializer=x_serializer_class(**x_serializer_kwargs)<line_sep>y_serializer_class,y_serializer_kwargs=ModelSerializer().build_standard_field(y_column y_field)<line_sep>y_serializer=y_serializer_class(**y_serializer_kwargs)<line_sep>queryset=GroupFilter().filter(queryset {'x_column':x_column 'x_lookup':x_lookup 'y_func':y_func 'y_column':y_column})<line_sep>serializer=self.get_serializer(queryset many=<true> group_serializer=x_serializer y_func_serializer=y_serializer)<line_sep><return>Response(serializer.data)<block_end><def_stmt>get_serializer self *args **kwargs<block_start>""" Return the serializer instance that should be used for validating and deserializing input, and for serializing output. """<line_sep>serializer_class=self.get_serializer_class()<line_sep>kwargs['context']=self.get_serializer_context()<line_sep><return>serializer_class(*args **kwargs)<block_end>@list_route(methods=['post'])<def_stmt>reorder self request<block_start>serializer=ReorderSerializer(data=request.data)<line_sep>serializer.is_valid(raise_exception=<true>)<line_sep>serializer.save()<line_sep><return>Response(serializer.data)<block_end>@list_route(methods=['post'])<def_stmt>reset_order self request<block_start>i=1<for_stmt>instance build_queryset<block_start>setattr(instance ordering_field i)<line_sep>instance.save()<line_sep>i<augadd>1<block_end><return>Response({})<block_end><block_end><for_stmt>action build_actions<block_start><def_stmt>route self request<block_start>form=action(data=request.data)<if_stmt><not>form.is_valid()<block_start><return>Response(form.errors status=status.HTTP_400_BAD_REQUEST)<block_end>queryset=form.filer_queryset(self.get_queryset())<try_stmt><block_start>result=form.save(queryset)<block_end><except_stmt>Exception<as>e<block_start><return>Response({NON_FIELD_ERRORS:str(e)} status=status.HTTP_400_BAD_REQUEST)<block_end><return>Response({'action':form._meta.name 'result':result})<block_end>decorator=list_route(methods=['post'])<line_sep>route=decorator(route)<line_sep>setattr(Viewset action._meta.name route)<block_end><return>Viewset<block_end>
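# Editor-added note (hedged): the two list routes above are driven entirely by
# underscore-prefixed query parameters. The /models/book/ prefix below is a
# placeholder; the real prefix depends on how the generated Viewset is registered
# with a router.
#
#   GET /models/book/aggregate/?_y_func=count&_y_column=id
#   GET /models/book/group/?_x_column=published_date&_x_lookup=year&_y_func=sum&_y_column=pages
#
# _x_lookup is resolved against the x column field's class_lookups, so the lookup
# name must exist for that field type (for example, year on a DateField).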
<import_from_stmt>openems.openems *<line_sep># A simple simulation # # FDTD Simulation Setting # F=FDTD()<line_sep>F.add(Exc(typ='Sinus' f0=100000))<line_sep>F.add(BoundaryCond(['PMC' 'PMC' 'PEC' 'PEC' 'MUR' 'MUR']))<line_sep># # CSX (Geometry setting) # C=CSX()<line_sep># The Box is added as a property C.add(Excitation('excitation') p=Box(P1=[-10 -10 0] P2=[10 10 0] Pr=0))<line_sep>C.add(DumpBox('Et') p=Box(P1=[-10 0 -10] P2=[10 0 30] Pr=0))<line_sep>C.add(RectilinearGrid(np.arange(-10 11 1) np.arange(-10 11 1) np.arange(-10 11 1)))<line_sep>C.add(Polyhedron())<line_sep>S=OpenEMS(F C)<line_sep>S.save(filename='RectWaveguide.xml')<line_sep>#gnd = Matter('gnd') #sphere = Matter('sphere') #patch = Matter('patch') #substrate = Matter('substrate',typ='Ma',Epsilon="3.38",Kappa="0.00046") #cdgsht = Matter('copper',typ='Cs',conductivity="56e6",thickness="40e-6") #b1 = Box(P1=[0,0,0],P2=[100,100,200],Pr=0) #b2 = Box(P1=[0,0,0],P2=[10,20,30],Pr=10) #b4 = Box(P1=[-10,0,-10],P2=[10,0,30],Pr=0) #s1 = Sphere(P=[0,0,0],R=100,Pr=50) #dump = DumpBox() #C.add(gnd) #C.add(patch) #C.add(substrate) #C.add(sphere) #C.add(cdgsht) #C.add(exc) #C.add(dump) #C.set('gnd',b1) #C.set('gnd',b2) #C.set('sphere',s1) #C.set('copper',b1) #C.set('copper',b2) #C.set('Et',b4) #C.save(filename='structure.xml') ##C.AddBox(prop='ConductingSheet',name='copper',P1=[0,-50,200],P2=[1000,50,200],Pri=10) ##C.AddCylinder(prop='Metal',name='cyl0',P1=[0,0,0],P2=[0,0,100],Rad=50,Pri=10) #
<import_stmt>clr<line_sep>clr.AddReference('RevitAPI')<import_from_stmt>Autodesk.Revit.DB *<def_stmt>GetViewTemplate view<block_start><if_stmt><not>view<block_start><return><none><block_end><elif_stmt>hasattr(view "ViewTemplateId")<block_start><if_stmt>view.ViewTemplateId.IntegerValue<eq>-1<block_start><return><none><block_end><else_stmt><block_start><return>view.Document.GetElement(view.ViewTemplateId)<block_end><block_end><else_stmt><block_start><return><none><block_end><block_end>views=UnwrapElement(IN[0])<if_stmt>isinstance(IN[0] list)<block_start>OUT=[GetViewTemplate(x)<for>x views]<block_end><else_stmt><block_start>OUT=GetViewTemplate(views)<block_end>
<import_stmt>numpy<as>np<import_from_stmt>igibson.external.pybullet_tools.utils aabb_union get_aabb get_all_links<import_from_stmt>igibson.object_states.object_state_base CachingEnabledObjectState<class_stmt>AABB(CachingEnabledObjectState)<block_start><def_stmt>_compute_value self<block_start>body_id=self.obj.get_body_id()<line_sep>all_links=get_all_links(body_id)<line_sep>aabbs=[get_aabb(body_id link=link)<for>link all_links]<line_sep>aabb_low,aabb_hi=aabb_union(aabbs)<if_stmt><not>hasattr(self.obj "category")<or>self.obj.category<ne>"floors"<or>self.obj.room_floor<is><none><block_start><return>np.array(aabb_low) np.array(aabb_hi)<block_end># TODO: remove after split floors # room_floor will be set to the correct RoomFloor beforehand room_instance=self.obj.room_floor.room_instance<line_sep># Get the x-y values from the room segmentation map room_aabb_low,room_aabb_hi=self.obj.room_floor.scene.get_aabb_by_room_instance(room_instance)<if_stmt>room_aabb_low<is><none><block_start><return>np.array(aabb_low) np.array(aabb_hi)<block_end># Use the z values from pybullet room_aabb_low[2]=aabb_low[2]<line_sep>room_aabb_hi[2]=aabb_hi[2]<line_sep><return>np.array(room_aabb_low) np.array(room_aabb_hi)<block_end><def_stmt>_set_value self new_value<block_start><raise>NotImplementedError("AABB state currently does not support setting.")<block_end># Nothing needs to be done to save/load AABB since it will happen due to pose caching. <def_stmt>_dump self<block_start><return><none><block_end><def_stmt>load self data<block_start><return><block_end><block_end>
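# Editor-added helper sketch (not part of iGibson): how the (lower, upper) corners
# computed above are typically consumed. The obj.states[AABB].get_value() call in
# the commented usage assumes the usual CachingEnabledObjectState wiring and is
# illustrative only.
import numpy as np


def aabb_center_and_extent(aabb_low, aabb_hi):
    """Return (center, per-axis extent) for an axis-aligned bounding box."""
    aabb_low = np.asarray(aabb_low)
    aabb_hi = np.asarray(aabb_hi)
    return (aabb_low + aabb_hi) / 2.0, aabb_hi - aabb_low


# low, hi = obj.states[AABB].get_value()
# center, extent = aabb_center_and_extent(low, hi)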
#-*- coding:utf-8 -*- # &Author AnFany # 引入方法 <import_stmt>Kmeans_AnFany<as>K_Af# AnFany <import_stmt>Kmeans_Sklearn<as>K_Sk# Sklearn <import_stmt>matplotlib.pyplot<as>plt<import_from_stmt>pylab mpl# 作图显示中文 mpl.rcParams['font.sans-serif']=['FangSong']# 设置中文字体新宋体 mpl.rcParams['axes.unicode_minus']=<false><import_stmt>numpy<as>np<line_sep># 利用sklearn生成数据集 <import_from_stmt>sklearn.datasets make_blobs<line_sep>X,Y=make_blobs(n_samples=600 centers=6 n_features=2)<line_sep># 绘制散点图 <def_stmt>fig_scatter exdata eydata titl='训练数据散点图' co=['r' 'g' 'k' 'b' 'y' 'm'] marker=['o' '^' 'H' 'v' 'd' '>']<block_start>typeclass=sorted(list(set(eydata)))<for_stmt>ii range(len(typeclass))<block_start>datax=exdata[eydata<eq>typeclass[ii]]<line_sep>plt.scatter(datax[: 0] datax[: -1] c=co[ii] s=50 marker=marker[ii])<block_end>plt.title(titl)<line_sep>#plt.legend(['%d类'%i for i in typeclass], bbox_to_anchor=(1.2, 0.9)) plt.xlabel('特征1')<line_sep>plt.ylabel('特征2')<block_end># 调用不同的方法 # AnFany kresult=K_Af.op_kmeans(X countcen=6)<line_sep># Sklearn sk=K_Sk.KMeans(init='k-means++' n_clusters=6 n_init=10)<line_sep>train=sk.fit(X)<line_sep>result=sk.predict(X)<line_sep>skru=K_Sk.trans(result)<line_sep>#绘制算法后的类别的散点图 <def_stmt>sca Xdata Center signdict co=['r' 'g' 'y' 'b' 'c' 'm'] marker=['o' '^' 'H' 's' 'd' '*'] titl='AnFany 结果'<block_start>du=1<for_stmt>jj signdict<block_start>xdata=Xdata[signdict[jj]]<line_sep>plt.scatter(xdata[: 0] xdata[: -1] c=co[jj] s=50 marker=marker[jj] label='%d类'%jj)# 绘制样本散点图 <block_end><for_stmt>ss Center<block_start><if_stmt>du<block_start>plt.scatter(ss[0] ss[1] c='k' s=100 marker='8' label='类别中心')#绘制类别中心点 du=0<block_end><else_stmt><block_start>plt.scatter(ss[0] ss[1] c='k' s=100 marker='8')<block_end><block_end># 绘制类别中心点 plt.legend(bbox_to_anchor=(1.2 1))<line_sep>plt.title(titl)<line_sep>plt.xlabel('特征1')<line_sep>plt.ylabel('特征2')<block_end># 定义欧几里得距离 <def_stmt>dis sample center<block_start>cen=np.array([center])<line_sep>sample=np.array(sample)<if_stmt>len(sample)<ne>0<block_start>usb=np.sum((sample-cen)<power>2 axis=1)<power>0.5<line_sep><return>usb<block_end><else_stmt><block_start><return>0<block_end><block_end># 计算最终的分类结果的成本值 <def_stmt>Cost Xdata typedict<block_start>center={}<for_stmt>kk typedict<block_start>center[kk]=np.mean(Xdata[typedict[kk]] axis=0)# 均值 <block_end>cio=0<for_stmt>cc typedict<block_start>cio<augadd>np.sum(dis(Xdata[typedict[cc]] center[cc]))<block_end><return>cio<block_end># 最终的结果展示 plt.subplot(2 2 1)<line_sep>fig_scatter(X Y)<line_sep>plt.subplot(2 2 2)<line_sep>sca(X kresult[0] kresult[2])<line_sep>plt.subplot(2 2 3)<line_sep>sca(X train.cluster_centers_ skru titl='Sklearn 结果')<line_sep>plt.subplot(2 2 4)<line_sep>plt.axis('off')<line_sep>plt.text(0.3 0.6 'AnFany 最终的分类成本值为:%.5f'%Cost(X kresult[2]))<line_sep>plt.text(0.3 0.3 'Sklearn 最终的分类成本值为:%.5f'%Cost(X skru))<line_sep>plt.show()<line_sep>
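# Editor-added sanity check (hedged): the helper functions above can be exercised
# on hand-made data without running either clustering implementation. The toy
# arrays below are illustrative only.
_toy_X = np.array([[0.0, 0.0], [0.0, 1.0], [10.0, 10.0], [10.0, 11.0]])
_toy_clusters = {0: [0, 1], 1: [2, 3]}  # cluster id -> row indices of _toy_X
assert np.allclose(dis(_toy_X[:2], [0.0, 0.5]), [0.5, 0.5])
print('toy cost:', Cost(_toy_X, _toy_clusters))  # 4 points, 0.5 each -> 2.0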
# ***************************************************************************** # Copyright (c) 2020, Intel Corporation All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; # OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR # OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, # EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ***************************************************************************** """ | This file contains SDC utility functions related to typing compilation phase """<import_stmt>numpy<import_stmt>numba<import_stmt>sdc<import_from_stmt>numba types<import_from_stmt>numba.core.errors TypingError<import_from_stmt>numba.np numpy_support<import_from_stmt>sdc.datatypes.indexes *<import_from_stmt>sdc.str_arr_type string_array_type StringArrayType<import_from_stmt>sdc.datatypes.categorical.types Categorical<line_sep>sdc_old_index_types=(types.Array StringArrayType )<line_sep>sdc_pandas_index_types=(EmptyIndexType PositionalIndexType RangeIndexType Int64IndexType MultiIndexType )+sdc_old_index_types<line_sep>sdc_indexes_range_like=(PositionalIndexType RangeIndexType )<line_sep># TO-DO: support caching of data allocated for range indexes at request for .values sdc_indexes_wo_values_cache=(EmptyIndexType PositionalIndexType RangeIndexType )<line_sep>sdc_pandas_df_column_types=(types.Array StringArrayType Categorical )<class_stmt>TypeChecker<block_start>""" Validate object type and raise TypingError if the type is invalid, e.g.: Method nsmallest(). 
The object n given: bool expected: int """<line_sep>msg_template='{} The object {}\n given: {}\n expected: {}'<def_stmt>__init__ self func_name<block_start>""" Parameters ---------- func_name: :obj:`str` name of the function where types checking """<line_sep>self.func_name=func_name<block_end><def_stmt>raise_exc self data expected_types name=''<block_start>""" Raise exception with unified message Parameters ---------- data: :obj:`any` real type of the data expected_types: :obj:`str` expected types inserting directly to the exception name: :obj:`str` name of the parameter """<line_sep>msg=self.msg_template.format(self.func_name name data expected_types)<line_sep><raise>TypingError(msg)<block_end><def_stmt>check self data accepted_type name=''<block_start>""" Check data type belongs to specified type Parameters ---------- data: :obj:`any` real type of the data accepted_type: :obj:`type` accepted type name: :obj:`str` name of the parameter """<if_stmt><not>isinstance(data accepted_type)<block_start>self.raise_exc(data accepted_type.__name__ name=name)<block_end><block_end><block_end><class_stmt>SDCLimitation(Exception)<block_start>"""Exception to be raised in case of SDC limitation"""<line_sep><pass><block_end><def_stmt>kwsparams2list params<block_start>"""Convert parameters dict to a list of string of a format 'key=value'"""<line_sep><return>['{}={}'.format(k v)<for>k,v params.items()]<block_end><def_stmt>sigparams2list param_names defaults<block_start>"""Creates a list of strings of a format 'key=value' from parameter names and default values"""<line_sep><return>[(f'{param}'<if>param<not><in>defaults<else>f'{param}={defaults[param]}')<for>param param_names]<block_end><def_stmt>has_literal_value var value<block_start>"""Used during typing to check that variable var is a Numba literal value equal to value"""<if_stmt><not>isinstance(var types.Literal)<block_start><return><false><block_end><if_stmt>value<is><none><block_start><return>isinstance(var types.NoneType)<or>var.literal_value<is>value<block_end><elif_stmt>isinstance(value type(bool))<block_start><return>var.literal_value<is>value<block_end><else_stmt><block_start><return>var.literal_value<eq>value<block_end><block_end><def_stmt>has_python_value var value<block_start>"""Used during typing to check that variable var was resolved as Python type and has specific value"""<if_stmt><not>isinstance(var type(value))<block_start><return><false><block_end><if_stmt>value<is><none><or>isinstance(value type(bool))<block_start><return>var<is>value<block_end><else_stmt><block_start><return>var<eq>value<block_end><block_end><def_stmt>is_default var value<block_start><return>has_literal_value(var value)<or>has_python_value(var value)<or>isinstance(var types.Omitted)<block_end><def_stmt>check_is_numeric_array type_var<block_start>"""Used during typing to check that type_var is a numeric numpy arrays"""<line_sep><return>check_is_array_of_dtype(type_var types.Number)<block_end><def_stmt>check_index_is_numeric ty_series<block_start>"""Used during typing to check that series has numeric index"""<line_sep><return>isinstance(ty_series.index.dtype types.Number)<block_end><def_stmt>check_types_comparable ty_left ty_right<block_start>"""Used during typing to check that specified types can be compared"""<if_stmt>hasattr(ty_left 'dtype')<block_start>ty_left=ty_left.dtype<block_end><if_stmt>hasattr(ty_right 'dtype')<block_start>ty_right=ty_right.dtype<block_end># add the rest of supported types here <if_stmt>isinstance(ty_left 
types.Number)<block_start><return>isinstance(ty_right types.Number)<block_end><if_stmt>isinstance(ty_left types.UnicodeType)<block_start><return>isinstance(ty_right types.UnicodeType)<block_end><if_stmt>isinstance(ty_left types.Boolean)<block_start><return>isinstance(ty_right types.Boolean)<block_end><if_stmt>isinstance(ty_left (types.Tuple types.UniTuple))# FIXME: just for now to unblock compilation <block_start><return>ty_left<eq>ty_right<block_end><return><false><block_end><def_stmt>check_arrays_comparable ty_left ty_right<block_start>"""Used during typing to check that underlying arrays of specified types can be compared"""<line_sep><return>((ty_left<eq>string_array_type<and>ty_right<eq>string_array_type)<or>(check_is_numeric_array(ty_left)<and>check_is_numeric_array(ty_right)))<block_end><def_stmt>check_is_array_of_dtype type_var dtype<block_start>"""Used during typing to check that type_var is a numeric numpy array of specific dtype"""<line_sep><return>isinstance(type_var types.Array)<and>isinstance(type_var.dtype dtype)<block_end><def_stmt>find_common_dtype_from_numpy_dtypes array_types scalar_types<block_start>"""Used to find common numba dtype for a sequences of numba dtypes each representing some numpy dtype"""<line_sep>np_array_dtypes=[numpy_support.as_dtype(dtype)<for>dtype array_types]<line_sep>np_scalar_dtypes=[numpy_support.as_dtype(dtype)<for>dtype scalar_types]<line_sep>np_common_dtype=numpy.find_common_type(np_array_dtypes np_scalar_dtypes)<line_sep>numba_common_dtype=numpy_support.from_dtype(np_common_dtype)<line_sep><return>numba_common_dtype<block_end><def_stmt>find_index_common_dtype left right<block_start>"""Used to find common dtype for indexes of two series and verify if index dtypes are equal"""<line_sep>left_index_dtype=left.dtype<line_sep>right_index_dtype=right.dtype<line_sep>index_dtypes_match=left_index_dtype<eq>right_index_dtype<if_stmt><not>index_dtypes_match<block_start>numba_index_common_dtype=find_common_dtype_from_numpy_dtypes([left_index_dtype right_index_dtype] [])<block_end><else_stmt><block_start>numba_index_common_dtype=left_index_dtype<block_end><return>index_dtypes_match numba_index_common_dtype<block_end><def_stmt>gen_impl_generator codegen impl_name<block_start>"""Generate generator of an implementation"""<def_stmt>_df_impl_generator *args **kwargs<block_start>func_text,global_vars=codegen(*args **kwargs)<line_sep>loc_vars={}<line_sep>exec(func_text global_vars loc_vars)<line_sep>_impl=loc_vars[impl_name]<line_sep><return>_impl<block_end><return>_df_impl_generator<block_end><def_stmt>check_signed_integer ty<block_start><return>isinstance(ty types.Integer)<and>ty.signed<block_end><def_stmt>_check_dtype_param_type dtype<block_start>""" Returns True is dtype is a valid type for dtype parameter and False otherwise. Used in RangeIndex ctor and other methods that take dtype parameter. """<line_sep>valid_dtype_types=(types.NoneType types.Omitted types.UnicodeType types.NumberClass)<line_sep><return>isinstance(dtype valid_dtype_types)<or>dtype<is><none><block_end>
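# Editor-added illustration (hedged): how TypeChecker above is typically used in
# the type-checking prologue of an overload. The function and parameter names are
# placeholders, not real SDC overloads.
def _check_nsmallest_args(n, keep):
    ty_checker = TypeChecker('Method nsmallest().')
    ty_checker.check(n, types.Integer, name='n')
    if not isinstance(keep, (types.Omitted, types.StringLiteral)):
        ty_checker.raise_exc(keep, 'str', name='keep')


# _check_nsmallest_args(types.int64, types.StringLiteral('first'))   # passes
# _check_nsmallest_args(types.boolean, types.StringLiteral('first')) # raises TypingError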
<import_stmt>FWCore.ParameterSet.Config<as>cms<import_from_stmt>DQMServices.Core.DQMEDHarvester DQMEDHarvester<line_sep>l1EmulatorErrorFlagClient=DQMEDHarvester("L1EmulatorErrorFlagClient" # # for each L1 system, give: # - SystemLabel: system label # - HwValLabel: system label as used in hardware validation package # (the package producing the ErrorFlag histogram) # - SystemMask: system mask: if 1, the system is masked in the summary plot # - SystemFolder: the folder where the ErrorFlag histogram is looked for # # the position in the parameter set gives, in reverse order, the position in the reportSummaryMap # in the emulator column (left column) L1Systems=cms.VPSet(cms.PSet(SystemLabel=cms.string("ECAL") HwValLabel=cms.string("ETP") SystemMask=cms.uint32(1) SystemFolder=cms.string("")) cms.PSet(SystemLabel=cms.string("HCAL") HwValLabel=cms.string("HTP") SystemMask=cms.uint32(1) SystemFolder=cms.string("")) cms.PSet(SystemLabel=cms.string("RCT") HwValLabel=cms.string("RCT") SystemMask=cms.uint32(0) SystemFolder=cms.string("")) cms.PSet(SystemLabel=cms.string("Stage1Layer2") HwValLabel=cms.string("Stage1Layer2") SystemMask=cms.uint32(0) SystemFolder=cms.string("")) cms.PSet(SystemLabel=cms.string("DTTF") HwValLabel=cms.string("DTF") SystemMask=cms.uint32(0) SystemFolder=cms.string("")) cms.PSet(SystemLabel=cms.string("DTTPG") HwValLabel=cms.string("DTP") SystemMask=cms.uint32(1) SystemFolder=cms.string("")) cms.PSet(SystemLabel=cms.string("CSCTF") HwValLabel=cms.string("CTF") SystemMask=cms.uint32(1) SystemFolder=cms.string("")) cms.PSet(SystemLabel=cms.string("CSCTPG") HwValLabel=cms.string("CTP") SystemMask=cms.uint32(1) SystemFolder=cms.string("")) cms.PSet(SystemLabel=cms.string("RPC") HwValLabel=cms.string("RPC") SystemMask=cms.uint32(0) SystemFolder=cms.string("")) cms.PSet(SystemLabel=cms.string("GMT") HwValLabel=cms.string("GMT") SystemMask=cms.uint32(0) SystemFolder=cms.string("")) cms.PSet(SystemLabel=cms.string("GT") HwValLabel=cms.string("GT") SystemMask=cms.uint32(1) SystemFolder=cms.string("L1TEMU/Stage1GTexpert"))))<line_sep>
<def_stmt>main <block_start>n=111<line_sep>gen=(n<times>7<for>x range(10))<if_stmt>777<in>gen<block_start>print("Yes!")<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>main()<block_end>
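# Editor-added follow-up (hedged): the check above prints "Yes!" because membership
# tests consume a generator lazily and stop at the first match; with n fixed at 111
# every element is 111 * 7 == 777, so the very first item matches. The snippet
# below shows the partial consumption that `in` leaves behind.
def demo_partial_consumption():
    gen = (i * 7 for i in range(1, 4))  # yields 7, 14, 21
    print(14 in gen)                    # True: consumes 7 and 14, then stops
    print(list(gen))                    # [21]: only the tail is left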
<import_from_stmt>.banks callback_view go_to_bank_gateway<import_from_stmt>.samples sample_payment_view sample_result_view<line_sep>
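# Editor-added sketch (hedged): one way these views might be mounted in a project
# urls.py, assuming they are plain view callables. The route strings and names are
# placeholders, not the package's documented URL layout.
from django.urls import path

urlpatterns = [
    path('callback/', callback_view, name='bank-callback'),
    path('go-to-gateway/', go_to_bank_gateway, name='go-to-bank-gateway'),
    path('sample/payment/', sample_payment_view, name='sample-payment'),
    path('sample/result/', sample_result_view, name='sample-result'),
]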
<import_stmt>sys<class_stmt>RPCType(object)<block_start>CloseRPC=0<line_sep>DetachRPC=1<line_sep>AddWindowRPC=2<line_sep>DeleteWindowRPC=3<line_sep>SetWindowLayoutRPC=4<line_sep>SetActiveWindowRPC=5<line_sep>ClearWindowRPC=6<line_sep>ClearAllWindowsRPC=7<line_sep>OpenDatabaseRPC=8<line_sep>CloseDatabaseRPC=9<line_sep>ActivateDatabaseRPC=10<line_sep>CheckForNewStatesRPC=11<line_sep>CreateDatabaseCorrelationRPC=12<line_sep>AlterDatabaseCorrelationRPC=13<line_sep>DeleteDatabaseCorrelationRPC=14<line_sep>ReOpenDatabaseRPC=15<line_sep>ReplaceDatabaseRPC=16<line_sep>OverlayDatabaseRPC=17<line_sep>OpenComputeEngineRPC=18<line_sep>CloseComputeEngineRPC=19<line_sep>AnimationSetNFramesRPC=20<line_sep>AnimationPlayRPC=21<line_sep>AnimationReversePlayRPC=22<line_sep>AnimationStopRPC=23<line_sep>TimeSliderNextStateRPC=24<line_sep>TimeSliderPreviousStateRPC=25<line_sep>SetTimeSliderStateRPC=26<line_sep>SetActiveTimeSliderRPC=27<line_sep>AddPlotRPC=28<line_sep>SetPlotFrameRangeRPC=29<line_sep>DeletePlotKeyframeRPC=30<line_sep>MovePlotKeyframeRPC=31<line_sep>DeleteActivePlotsRPC=32<line_sep>HideActivePlotsRPC=33<line_sep>DrawPlotsRPC=34<line_sep>DisableRedrawRPC=35<line_sep>RedrawRPC=36<line_sep>SetActivePlotsRPC=37<line_sep>ChangeActivePlotsVarRPC=38<line_sep>AddOperatorRPC=39<line_sep>AddInitializedOperatorRPC=40<line_sep>PromoteOperatorRPC=41<line_sep>DemoteOperatorRPC=42<line_sep>RemoveOperatorRPC=43<line_sep>RemoveLastOperatorRPC=44<line_sep>RemoveAllOperatorsRPC=45<line_sep>SaveWindowRPC=46<line_sep>SetDefaultPlotOptionsRPC=47<line_sep>SetPlotOptionsRPC=48<line_sep>SetDefaultOperatorOptionsRPC=49<line_sep>SetOperatorOptionsRPC=50<line_sep>WriteConfigFileRPC=51<line_sep>ConnectToMetaDataServerRPC=52<line_sep>IconifyAllWindowsRPC=53<line_sep>DeIconifyAllWindowsRPC=54<line_sep>ShowAllWindowsRPC=55<line_sep>HideAllWindowsRPC=56<line_sep>UpdateColorTableRPC=57<line_sep>SetAnnotationAttributesRPC=58<line_sep>SetDefaultAnnotationAttributesRPC=59<line_sep>ResetAnnotationAttributesRPC=60<line_sep>SetKeyframeAttributesRPC=61<line_sep>SetPlotSILRestrictionRPC=62<line_sep>SetViewAxisArrayRPC=63<line_sep>SetViewCurveRPC=64<line_sep>SetView2DRPC=65<line_sep>SetView3DRPC=66<line_sep>ResetPlotOptionsRPC=67<line_sep>ResetOperatorOptionsRPC=68<line_sep>SetAppearanceRPC=69<line_sep>ProcessExpressionsRPC=70<line_sep>SetLightListRPC=71<line_sep>SetDefaultLightListRPC=72<line_sep>ResetLightListRPC=73<line_sep>SetAnimationAttributesRPC=74<line_sep>SetWindowAreaRPC=75<line_sep>PrintWindowRPC=76<line_sep>ResetViewRPC=77<line_sep>RecenterViewRPC=78<line_sep>ToggleAllowPopupRPC=79<line_sep>ToggleMaintainViewModeRPC=80<line_sep>ToggleBoundingBoxModeRPC=81<line_sep>ToggleCameraViewModeRPC=82<line_sep>TogglePerspectiveViewRPC=83<line_sep>ToggleSpinModeRPC=84<line_sep>ToggleLockTimeRPC=85<line_sep>ToggleLockToolsRPC=86<line_sep>ToggleLockViewModeRPC=87<line_sep>ToggleFullFrameRPC=88<line_sep>UndoViewRPC=89<line_sep>RedoViewRPC=90<line_sep>InvertBackgroundRPC=91<line_sep>ClearPickPointsRPC=92<line_sep>SetWindowModeRPC=93<line_sep>EnableToolRPC=94<line_sep>SetToolUpdateModeRPC=95<line_sep>CopyViewToWindowRPC=96<line_sep>CopyLightingToWindowRPC=97<line_sep>CopyAnnotationsToWindowRPC=98<line_sep>CopyPlotsToWindowRPC=99<line_sep>ClearCacheRPC=100<line_sep>ClearCacheForAllEnginesRPC=101<line_sep>SetViewExtentsTypeRPC=102<line_sep>ClearRefLinesRPC=103<line_sep>SetRenderingAttributesRPC=104<line_sep>QueryRPC=105<line_sep>CloneWindowRPC=106<line_sep>SetMaterialAttributesRPC=107<line_sep>SetDefaultMaterialAttributesRPC=108<line_sep>ResetMat
erialAttributesRPC=109<line_sep>SetPlotDatabaseStateRPC=110<line_sep>DeletePlotDatabaseKeyframeRPC=111<line_sep>MovePlotDatabaseKeyframeRPC=112<line_sep>ClearViewKeyframesRPC=113<line_sep>DeleteViewKeyframeRPC=114<line_sep>MoveViewKeyframeRPC=115<line_sep>SetViewKeyframeRPC=116<line_sep>OpenMDServerRPC=117<line_sep>EnableToolbarRPC=118<line_sep>HideToolbarsRPC=119<line_sep>HideToolbarsForAllWindowsRPC=120<line_sep>ShowToolbarsRPC=121<line_sep>ShowToolbarsForAllWindowsRPC=122<line_sep>SetToolbarIconSizeRPC=123<line_sep>SaveViewRPC=124<line_sep>SetGlobalLineoutAttributesRPC=125<line_sep>SetPickAttributesRPC=126<line_sep>ExportColorTableRPC=127<line_sep>ExportEntireStateRPC=128<line_sep>ImportEntireStateRPC=129<line_sep>ImportEntireStateWithDifferentSourcesRPC=130<line_sep>ResetPickAttributesRPC=131<line_sep>AddAnnotationObjectRPC=132<line_sep>HideActiveAnnotationObjectsRPC=133<line_sep>DeleteActiveAnnotationObjectsRPC=134<line_sep>RaiseActiveAnnotationObjectsRPC=135<line_sep>LowerActiveAnnotationObjectsRPC=136<line_sep>SetAnnotationObjectOptionsRPC=137<line_sep>SetDefaultAnnotationObjectListRPC=138<line_sep>ResetAnnotationObjectListRPC=139<line_sep>ResetPickLetterRPC=140<line_sep>SetDefaultPickAttributesRPC=141<line_sep>ChooseCenterOfRotationRPC=142<line_sep>SetCenterOfRotationRPC=143<line_sep>SetQueryOverTimeAttributesRPC=144<line_sep>SetDefaultQueryOverTimeAttributesRPC=145<line_sep>ResetQueryOverTimeAttributesRPC=146<line_sep>ResetLineoutColorRPC=147<line_sep>SetInteractorAttributesRPC=148<line_sep>SetDefaultInteractorAttributesRPC=149<line_sep>ResetInteractorAttributesRPC=150<line_sep>GetProcInfoRPC=151<line_sep>SendSimulationCommandRPC=152<line_sep>UpdateDBPluginInfoRPC=153<line_sep>ExportDBRPC=154<line_sep>SetTryHarderCyclesTimesRPC=155<line_sep>OpenClientRPC=156<line_sep>OpenGUIClientRPC=157<line_sep>OpenCLIClientRPC=158<line_sep>SuppressQueryOutputRPC=159<line_sep>SetQueryFloatFormatRPC=160<line_sep>SetMeshManagementAttributesRPC=161<line_sep>SetDefaultMeshManagementAttributesRPC=162<line_sep>ResetMeshManagementAttributesRPC=163<line_sep>ResizeWindowRPC=164<line_sep>MoveWindowRPC=165<line_sep>MoveAndResizeWindowRPC=166<line_sep>SetStateLoggingRPC=167<line_sep>ConstructDataBinningRPC=168<line_sep>RequestMetaDataRPC=169<line_sep>SetTreatAllDBsAsTimeVaryingRPC=170<line_sep>SetCreateMeshQualityExpressionsRPC=171<line_sep>SetCreateTimeDerivativeExpressionsRPC=172<line_sep>SetCreateVectorMagnitudeExpressionsRPC=173<line_sep>CopyActivePlotsRPC=174<line_sep>SetPlotFollowsTimeRPC=175<line_sep>TurnOffAllLocksRPC=176<line_sep>SetDefaultFileOpenOptionsRPC=177<line_sep>SetSuppressMessagesRPC=178<line_sep>ApplyNamedSelectionRPC=179<line_sep>CreateNamedSelectionRPC=180<line_sep>DeleteNamedSelectionRPC=181<line_sep>LoadNamedSelectionRPC=182<line_sep>SaveNamedSelectionRPC=183<line_sep>SetNamedSelectionAutoApplyRPC=184<line_sep>UpdateNamedSelectionRPC=185<line_sep>InitializeNamedSelectionVariablesRPC=186<line_sep>MenuQuitRPC=187<line_sep>SetPlotDescriptionRPC=188<line_sep>MovePlotOrderTowardFirstRPC=189<line_sep>MovePlotOrderTowardLastRPC=190<line_sep>SetPlotOrderToFirstRPC=191<line_sep>SetPlotOrderToLastRPC=192<line_sep>RenamePickLabelRPC=193<line_sep>GetQueryParametersRPC=194<line_sep>DDTConnectRPC=195<line_sep>DDTFocusRPC=196<line_sep>ReleaseToDDTRPC=197<line_sep>MaxRPC=198<block_end>
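# Editor-added helper (hedged): RPCType above is a plain class of int constants
# rather than an enum.Enum, so a reverse lookup has to scan the class namespace.
def rpc_name(value):
    """Return the constant name for a numeric RPC code, e.g. 28 -> 'AddPlotRPC'."""
    for name, val in vars(RPCType).items():
        if not name.startswith('_') and val == value:
            return name
    return None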
<import_stmt>abc<import_from_stmt>typing Dict Callable<import_stmt>tensorflow<as>tf<import_from_stmt>flink_ml_framework.context Context<import_from_stmt>flink_ml_framework.java_file *<import_from_stmt>..runner tf_helper io_helper<import_from_stmt>..runner.output_writer DirectOutputWriter<try_stmt><block_start><import_from_stmt>flink_ml_tensorflow.tensorflow_context TFContext<block_end><except_stmt><block_start><import_from_stmt>flink_ml_tensorflow2.tensorflow_context TFContext<block_end># noinspection PyUnresolvedReferences <import_from_stmt>tensorflow_io.core.python.ops core_ops<line_sep>__all__=['TF1_TYPE' 'TF2_TYPE']<line_sep>TF1_TYPE='tf1'<line_sep>TF2_TYPE='tf2'<class_stmt>BaseEntry(abc.ABC)<block_start><def_stmt>__init__ self func_name engine_type<block_start>self.func_name=func_name<line_sep>self.engine_type=engine_type<block_end>@staticmethod<def_stmt>get_func_by_name func_name<block_start>""" Get function by the func name :param func_name: func name :return: function """<if_stmt>'.'<not><in>func_name<block_start><if_stmt>func_name<in>globals()<block_start><return>globals()[func_name]<block_end><else_stmt><block_start><raise>RuntimeError('cannot find function[{}]'.format(func_name))<block_end><block_end><else_stmt><block_start>module_name,func_name=func_name.rsplit('.' 1)<import_stmt>importlib<line_sep># load the module, will raise ImportError if module cannot be loaded m=importlib.import_module(module_name)<line_sep># get the class, will raise AttributeError if class cannot be found c=getattr(m func_name)<line_sep><return>c<block_end><block_end>@abc.abstractmethod<def_stmt>construct_args self **kwargs<block_start><pass><block_end><def_stmt>is_batch self<block_start><return><true><block_end><def_stmt>post_process self **kwargs<block_start><pass><block_end><def_stmt>entry_func self context:Context<block_start>tf_context=TFContext(context)<line_sep>properties=tf_context.properties<line_sep>print('properties' properties flush=<true>)<line_sep># intra_op_parallelism is set by akdl, because there is a bug in TensorFlow 1.x # See: https://stackoverflow.com/questions/34426268/restricting-number-of-cores-used intra_op_parallelism=int(properties['ALINK:intra_op_parallelism'])<if_stmt>self.engine_type<eq>TF1_TYPE<block_start>tf_helper.set_intra_op_parallelism(intra_op_parallelism_threads=intra_op_parallelism)<block_end><elif_stmt>self.engine_type<eq>TF2_TYPE<block_start>tf.config.threading.set_intra_op_parallelism_threads(intra_op_parallelism)<block_end>num_workers=int(properties['ALINK:num_workers'])<line_sep>work_dir=properties['ALINK:work_dir']<line_sep>cluster,task_type,task_index=tf_context.export_estimator_cluster()<if_stmt>self.is_batch()<block_start>java_queue_file=JavaFile(context.from_java() context.to_java())<line_sep>dataset_file=os.path.join(work_dir 'dataset.tfrecords')<line_sep>dataset,dataset_length=io_helper.convert_java_queue_file_to_repeatable_dataset(java_queue_file dataset_file)<line_sep>print("number of records: "+str(dataset_length) flush=<true>)<line_sep>dataset_fn:Callable[[] tf.data.TFRecordDataset]=<lambda>:tf.data.TFRecordDataset(dataset_file)<block_end><else_stmt><block_start>dataset_fn:Callable[[] tf.data.TFRecordDataset]=<lambda>:tf_context.flink_stream_dataset()<line_sep>dataset=<none><line_sep>dataset_file=<none><line_sep>dataset_length=<none><block_end>saved_model_dir=os.path.join(work_dir 'savedmodel')<line_sep>user_params:Dict=json.loads(properties['ALINK:user_defined_params'])<for_stmt>i range(1 
1024)<block_start>key="ALINK:bc_"+str(i)<if_stmt>key<in>properties<block_start>user_params[key]=context.properties[key]<block_end><block_end>key="ALINK:model_dir"<if_stmt>key<in>properties<block_start>user_params[key]=properties[key]<block_end>output_writer=DirectOutputWriter(tf_context.from_java() tf_context.to_java())<line_sep>locals_copy=locals().copy()<line_sep>locals_copy.pop("self")<line_sep>print("locals_copy = " locals_copy flush=<true>)<line_sep>args=self.construct_args(**locals_copy)<line_sep>func=self.get_func_by_name(self.func_name)<line_sep>func(args)<line_sep>print("task_type = {}, task_index = {}: done tf_user_main".format(task_type task_index) flush=<true>)<line_sep>local_vars=locals().copy()<line_sep>local_vars.pop('self')<line_sep>self.post_process(**local_vars)<line_sep>print("task_type = {}, task_index = {}: exit".format(task_type task_index) flush=<true>)<line_sep>output_writer.close()<block_end><block_end>
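get_func_by_name above resolves a dotted path with importlib, and a bare name only against the entry module's own globals(); a minimal sketch of the dotted form, using os.path.join purely as a convenient importable target:

import os

join = BaseEntry.get_func_by_name('os.path.join')
assert join('work_dir', 'savedmodel') == os.path.join('work_dir', 'savedmodel')

# Bare names (no dot) are looked up in the entry module's globals(), so user
# training entry points are normally passed as 'my_package.my_module.my_main'.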
# All credit to https://stackoverflow.com/questions/46571448/tkinter-and-a-html-file - thanks DELICA - https://stackoverflow.com/users/7027346/delica <import_from_stmt>cefpython3 cefpython<as>cef<import_stmt>ctypes<try_stmt><block_start><import_stmt>tkinter<as>tk<import_from_stmt>tkinter messagebox<block_end><except_stmt>ImportError<block_start><import_stmt>Tkinter<as>tk<block_end><import_stmt>sys<import_stmt>platform<import_stmt>logging<as>_logging<line_sep># Fix for PyCharm hints warnings WindowUtils=cef.WindowUtils()<line_sep># Platforms WINDOWS=(platform.system()<eq>"Windows")<line_sep>LINUX=(platform.system()<eq>"Linux")<line_sep>MAC=(platform.system()<eq>"Darwin")<line_sep># Globals logger=_logging.getLogger("tkinter_.py")<line_sep>url="localhost:8050/"<class_stmt>MainFrame(tk.Frame)<block_start><def_stmt>__init__ self root<block_start>self.closing=<false><line_sep>self.browser=<none><line_sep># Root root.geometry("900x640")<line_sep>tk.Grid.rowconfigure(root 0 weight=1)<line_sep>tk.Grid.columnconfigure(root 0 weight=1)<line_sep># MainFrame tk.Frame.__init__(self root)<line_sep>self.master.title('SimBA Dashboard')<line_sep>self.master.protocol("WM_DELETE_WINDOW" self.on_close)<line_sep>self.bind("<Configure>" self.on_configure)<line_sep>self.bind("<FocusIn>" self.on_focus_in)<line_sep>self.bind("<FocusOut>" self.on_focus_out)<line_sep>self.focus_set()<line_sep># Pack MainFrame self.pack(fill=tk.BOTH expand=tk.YES)<block_end><def_stmt>embed_browser self<block_start>window_info=cef.WindowInfo()<line_sep>rect=[0 0 self.winfo_width() self.winfo_height()]<line_sep>window_info.SetAsChild(self.get_window_handle() rect)<line_sep>self.browser=cef.CreateBrowserSync(window_info url=url)<line_sep>#todo <assert_stmt>self.browser<line_sep>self.browser.SetClientHandler(LoadHandler(self))<line_sep>self.browser.SetClientHandler(FocusHandler(self))<line_sep>self.message_loop_work()<block_end><def_stmt>get_window_handle self<block_start><if_stmt>self.winfo_id()<g>0<block_start><return>self.winfo_id()<block_end><else_stmt><block_start><raise>Exception("Couldn't obtain window handle")<block_end><block_end><def_stmt>message_loop_work self<block_start>cef.MessageLoopWork()<line_sep>self.after(10 self.message_loop_work)<block_end><def_stmt>on_configure self event<block_start>width=event.width<line_sep>height=event.height<if_stmt>self.browser<block_start><if_stmt>WINDOWS<block_start>ctypes.windll.user32.SetWindowPos(self.browser.GetWindowHandle() 0 0 0 width height 0x0002)<block_end><elif_stmt>LINUX<block_start>self.browser.SetBounds(0 0 width height)<block_end>self.browser.NotifyMoveOrResizeStarted()<block_end><if_stmt><not>self.browser<block_start>self.embed_browser()<block_end><block_end><def_stmt>on_focus_in self _<block_start>logger.debug("BrowserFrame.on_focus_in")<if_stmt>self.browser<block_start>self.browser.SetFocus(<true>)<line_sep>self.focus_set()<block_end><block_end><def_stmt>on_focus_out self _<block_start>logger.debug("BrowserFrame.on_focus_out")<if_stmt>self.browser<block_start>self.browser.SetFocus(<false>)<block_end><block_end><def_stmt>on_close self<block_start><if_stmt>self.browser<block_start>self.browser.CloseBrowser(<true>)<line_sep>self.clear_browser_references()<block_end>self.destroy()<line_sep>self.master.destroy()<block_end><def_stmt>get_browser self<block_start><if_stmt>self.browser<block_start><return>self.browser<block_end><return><none><block_end><def_stmt>clear_browser_references 
self<block_start>self.browser=<none><block_end><block_end><class_stmt>LoadHandler(object)<block_start><def_stmt>__init__ self browser_frame<block_start>self.browser_frame=browser_frame<block_end><block_end><class_stmt>FocusHandler(object)<block_start><def_stmt>__init__ self browser<block_start>self.browser=browser<block_end><def_stmt>OnTakeFocus self next_component **_<block_start>logger.debug("FocusHandler.OnTakeFocus, next={next}".format(next=next_component))<block_end><def_stmt>OnSetFocus self source **_<block_start>logger.debug("FocusHandler.OnSetFocus, source={source}".format(source=source))<line_sep><return><false><block_end><def_stmt>OnGotFocus self **_<block_start>"""Fix CEF focus issues (#255). Call browser frame's focus_set to get rid of type cursor in url entry widget."""<line_sep>logger.debug("FocusHandler.OnGotFocus")<line_sep>self.browser.focus_set()<block_end><block_end># if __name__ == '__main__': logger.setLevel(_logging.INFO)<line_sep>stream_handler=_logging.StreamHandler()<line_sep>formatter=_logging.Formatter("[%(filename)s] %(message)s")<line_sep>stream_handler.setFormatter(formatter)<line_sep>logger.addHandler(stream_handler)<line_sep>logger.info("CEF Python {ver}".format(ver=cef.__version__))<line_sep>logger.info("Python {ver} {arch}".format(ver=platform.python_version() arch=platform.architecture()[0]))<line_sep>logger.info("Tk {ver}".format(ver=tk.Tcl().eval('info patchlevel')))<assert_stmt>cef.__version__<ge>"55.3" "CEF Python v55.3+ required to run this"<line_sep>sys.excepthook=cef.ExceptHook# To shutdown all CEF processes on error root=tk.Tk()<line_sep>app=MainFrame(root)<def_stmt>on_closing <block_start><if_stmt>messagebox.askokcancel("Quit" "Do you want to quit?")<block_start>root.destroy()<block_end><block_end>root.protocol("WM_DELETE_WINDOW" on_closing)<line_sep># Tk must be initialized before CEF otherwise fatal error (Issue #306) cef.Initialize()<line_sep>root.mainloop()<line_sep># app.mainloop() cef.Shutdown()<line_sep>
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ .. module: security_monkey.watchers.vpc.vpn :platform: Unix .. version:: $$VERSION$$ .. moduleauthor:: <NAME> <<EMAIL>> @alex.cline """<import_from_stmt>cloudaux.aws.ec2 describe_vpn_connections<import_from_stmt>security_monkey.cloudaux_watcher CloudAuxWatcher<import_from_stmt>security_monkey.watcher ChangeItem<line_sep>DATETIME_FORMAT='%Y-%m-%dT%H:%M:%SZ'<class_stmt>VPN(CloudAuxWatcher)<block_start>index='vpn'<line_sep>i_am_singular='VPN Connection'<line_sep>i_am_plural='VPN Connections'<def_stmt>__init__ self *args **kwargs<block_start>super(VPN self).__init__(*args **kwargs)<line_sep>self.honor_ephemerals=<true><line_sep>self.ephemeral_paths=['VgwTelemetry$*$LastStatusChange' 'VgwTelemetry$*$Status' 'VgwTelemetry$*$StatusMessage' ]<block_end><def_stmt>get_name_from_list_output self item<block_start><if_stmt>item.get("Tags")<block_start><for_stmt>tag item["Tags"]<block_start><if_stmt>tag["Key"]<eq>"Name"<block_start><return>"{} ({})".format(tag["Value"] item["VpnConnectionId"])<block_end><block_end><block_end><return>item["VpnConnectionId"]<block_end><def_stmt>list_method self **kwargs<block_start><return>describe_vpn_connections(**kwargs)<block_end><def_stmt>get_method self item **kwargs# Remove the CustomerGatewayConfiguration -- it's not necessary as all the details are present anyway: <block_start>item.pop("CustomerGatewayConfiguration" <none>)<line_sep># Set the ARN: item["Arn"]="arn:aws:ec2:{region}:{account}:vpn-connection/{id}".format(region=kwargs["region"] account=kwargs["account_number"] id=item["VpnConnectionId"])<line_sep># Cast the datetimes to something JSON serializable (ISO 8601 string): <for_stmt>vgw item.get("VgwTelemetry" [])<block_start><if_stmt>vgw.get("LastStatusChange")<block_start>vgw["LastStatusChange"]=vgw["LastStatusChange"].strftime(DATETIME_FORMAT)<block_end><block_end><return>item<block_end><block_end><class_stmt>VPNItem(ChangeItem)<block_start><def_stmt>__init__ self region=<none> account=<none> name=<none> arn=<none> config=<none> source_watcher=<none><block_start>super(VPNItem self).__init__(index=VPN.index region=region account=account name=name arn=arn new_config=config<if>config<else>{} source_watcher=source_watcher)<block_end><block_end>
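A tiny sketch of the naming rule get_name_from_list_output implements: a Name tag wins, otherwise the raw VpnConnectionId is returned. The instance is created with __new__ only because the method needs no watcher state; this is illustrative, not how security_monkey instantiates watchers.

watcher = VPN.__new__(VPN)  # bypass __init__; the method below is state-free

tagged = {"VpnConnectionId": "vpn-0abc123",
          "Tags": [{"Key": "Name", "Value": "office-to-aws"}]}
untagged = {"VpnConnectionId": "vpn-0abc123"}

print(watcher.get_name_from_list_output(tagged))    # -> office-to-aws (vpn-0abc123)
print(watcher.get_name_from_list_output(untagged))  # -> vpn-0abc123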
<import_stmt>os<import_stmt>glob<import_stmt>cv2<import_stmt>numpy<as>np<import_stmt>torch<import_from_stmt>torchvision.transforms transforms<import_from_stmt>natsort natsorted<import_from_stmt>models resmasking_dropout1<import_from_stmt>utils.datasets.fer2013dataset EMOTION_DICT<import_from_stmt>barez show<line_sep>transform=transforms.Compose([transforms.ToPILImage() transforms.ToTensor() ])<def_stmt>activations_mask tensor<block_start>tensor=torch.squeeze(tensor 0)<line_sep>tensor=torch.mean(tensor 0)<line_sep>tensor=tensor.detach().cpu().numpy()<line_sep>tensor=np.maximum(tensor 0)<line_sep>tensor=cv2.resize(tensor (224 224))<line_sep>tensor=tensor-np.min(tensor)<line_sep>tensor=tensor/np.max(tensor)<line_sep>heatmap=cv2.applyColorMap(np.uint8(255<times>tensor) cv2.COLORMAP_JET)<line_sep><return>heatmap<block_end>model=resmasking_dropout1(3 7)<line_sep># state = torch.load('./saved/checkpoints/resmasking_dropout1_rot30_2019Nov17_14.33') state=torch.load("./saved/checkpoints/Z_resmasking_dropout1_rot30_2019Nov30_13.32")<line_sep>model.load_state_dict(state["net"])<line_sep>model.cuda()<line_sep>model.eval()<for_stmt>image_path natsorted(glob.glob("/home/z/research/bkemo/images/**/*.png" recursive=<true>))<block_start>image_name=os.path.basename(image_path)<line_sep>print(image_name)<line_sep># image_path = '/home/z/research/bkemo/images/disgust/0.0_dc10a3_1976_0.png' image=cv2.imread(image_path)<line_sep>image=cv2.resize(image (224 224))<line_sep>tensor=transform(image)<line_sep>tensor=torch.unsqueeze(tensor 0)<line_sep>tensor=tensor.cuda()<line_sep># output = model(tensor) x=model.conv1(tensor)# 112 x=model.bn1(x)<line_sep>x=model.relu(x)<line_sep>x=model.maxpool(x)# 56 x=model.layer1(x)# 56 m=model.mask1(x)<line_sep>x=x<times>(1+m)<line_sep>x=model.layer2(x)# 28 m=model.mask2(x)<line_sep>x=x<times>(1+m)<line_sep>x=model.layer3(x)# 14 heat_1=activations_mask(x)<line_sep>m=model.mask3(x)<line_sep>x=x<times>(1+m)<line_sep># heat_2 = activations_mask(m) x=model.layer4(x)# 7 m=model.mask4(x)<line_sep>x=x<times>(1+m)<line_sep>x=model.avgpool(x)<line_sep>x=torch.flatten(x 1)<line_sep>output=model.fc(x)<line_sep># print(np.sum(heat_1 - heat_2)) # show(np.concatenate((image, heat_1, heat_2), axis=1)) cv2.imwrite("./masking_provements/{}".format(image_name) np.concatenate((image heat_1) axis=1) )<line_sep># np.concatenate((image, heat_1, heat_2), axis=1)) # output = output.cpu().numpy() # print(EMOTION_DICT[torch.argmax(output, 1).item()]) <block_end>
""" wrapper for ccmake command line tool """<import_stmt>subprocess<line_sep>name='ccmake'<line_sep>platforms=['linux' 'osx']<line_sep>optional=<true><line_sep>not_found="required for 'fips config' functionality"<line_sep>#------------------------------------------------------------------------------- <def_stmt>check_exists fips_dir<block_start>"""test if ccmake is in the path :returns: True if ccmake is in the path """<try_stmt><block_start>out=subprocess.check_output(['ccmake' '--version'])<line_sep><return><true><block_end><except_stmt>(OSError subprocess.CalledProcessError)<block_start><return><false><block_end><block_end>#------------------------------------------------------------------------------- <def_stmt>run build_dir<block_start>"""run ccmake to configure cmake project :param build_dir: directory where ccmake should run :returns: True if ccmake returns successful """<line_sep>res=subprocess.call('ccmake .' cwd=build_dir shell=<true>)<line_sep><return>res<eq>0<block_end>
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. <import_stmt>itertools<import_from_stmt>heat.api.openstack.v1 util<import_from_stmt>heat.api.openstack.v1.views views_common<import_from_stmt>heat.rpc api<as>rpc_api<line_sep>_collection_name='stacks'<line_sep>basic_keys=(rpc_api.STACK_ID rpc_api.STACK_NAME rpc_api.STACK_DESCRIPTION rpc_api.STACK_STATUS rpc_api.STACK_STATUS_DATA rpc_api.STACK_CREATION_TIME rpc_api.STACK_DELETION_TIME rpc_api.STACK_UPDATED_TIME rpc_api.STACK_OWNER rpc_api.STACK_PARENT rpc_api.STACK_USER_PROJECT_ID rpc_api.STACK_TAGS )<def_stmt>format_stack req stack keys=<none> include_project=<false><block_start><def_stmt>transform key value<block_start><if_stmt>keys<and>key<not><in>keys<block_start><return><block_end><if_stmt>key<eq>rpc_api.STACK_ID<block_start><yield>('id' value['stack_id'])<line_sep><yield>('links' [util.make_link(req value)])<if_stmt>include_project<block_start><yield>('project' value['tenant'])<block_end><block_end><elif_stmt>key<eq>rpc_api.STACK_ACTION<block_start><return><block_end><elif_stmt>(key<eq>rpc_api.STACK_STATUS<and>rpc_api.STACK_ACTION<in>stack)# To avoid breaking API compatibility, we join RES_ACTION # and RES_STATUS, so the API format doesn't expose the # internal split of state into action/status <block_start><yield>(key '_'.join((stack[rpc_api.STACK_ACTION] value)))<block_end><else_stmt># TODO(zaneb): ensure parameters can be formatted for XML # elif key == rpc_api.STACK_PARAMETERS: # return key, json.dumps(value) <block_start><yield>(key value)<block_end><block_end><return>dict(itertools.chain.from_iterable(transform(k v)<for>k,v stack.items()))<block_end><def_stmt>collection req stacks count=<none> include_project=<false><block_start>keys=basic_keys<line_sep>formatted_stacks=[format_stack(req s keys include_project)<for>s stacks]<line_sep>result={'stacks':formatted_stacks}<line_sep>links=views_common.get_collection_links(req formatted_stacks)<if_stmt>links<block_start>result['links']=links<block_end><if_stmt>count<is><not><none><block_start>result['count']=count<block_end><return>result<block_end>
<import_from_stmt>sys version_info<if_stmt>version_info[0]<le>2<and>version_info[1]<le>4<block_start><def_stmt>all iterable<block_start><for_stmt>element iterable<block_start><if_stmt><not>element<block_start><return><false><block_end><block_end><return><true><block_end><block_end><else_stmt><block_start>all=all<block_end>
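A quick check of the backported semantics, which match the built-in all() added in Python 2.5, including the vacuous truth of an empty iterable:

assert all([1, True, 'x'])
assert not all([1, 0, 'x'])
assert all([])                          # empty iterable is vacuously true
assert all(n > 0 for n in (1, 2, 3))    # generators work with both versions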
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_from_stmt>ppcls.data.preprocess.ops.autoaugment ImageNetPolicy<as>RawImageNetPolicy<import_from_stmt>ppcls.data.preprocess.ops.randaugment RandAugment<as>RawRandAugment<import_from_stmt>ppcls.data.preprocess.ops.timm_autoaugment RawTimmAutoAugment<import_from_stmt>ppcls.data.preprocess.ops.cutout Cutout<import_from_stmt>ppcls.data.preprocess.ops.hide_and_seek HideAndSeek<import_from_stmt>ppcls.data.preprocess.ops.random_erasing RandomErasing<import_from_stmt>ppcls.data.preprocess.ops.grid GridMask<import_from_stmt>ppcls.data.preprocess.ops.operators DecodeImage<import_from_stmt>ppcls.data.preprocess.ops.operators ResizeImage<import_from_stmt>ppcls.data.preprocess.ops.operators CropImage<import_from_stmt>ppcls.data.preprocess.ops.operators RandCropImage<import_from_stmt>ppcls.data.preprocess.ops.operators RandFlipImage<import_from_stmt>ppcls.data.preprocess.ops.operators NormalizeImage<import_from_stmt>ppcls.data.preprocess.ops.operators ToCHWImage<import_from_stmt>ppcls.data.preprocess.ops.operators AugMix<import_from_stmt>ppcls.data.preprocess.batch_ops.batch_operators MixupOperator CutmixOperator OpSampler FmixOperator<import_stmt>numpy<as>np<import_from_stmt>PIL Image<def_stmt>transform data ops=[]<block_start>""" transform """<for_stmt>op ops<block_start>data=op(data)<block_end><return>data<block_end><class_stmt>AutoAugment(RawImageNetPolicy)<block_start>""" ImageNetPolicy wrapper to auto fit different img types """<def_stmt>__init__ self *args **kwargs<block_start>super().__init__(*args **kwargs)<block_end><def_stmt>__call__ self img<block_start><if_stmt><not>isinstance(img Image.Image)<block_start>img=np.ascontiguousarray(img)<line_sep>img=Image.fromarray(img)<block_end>img=super().__call__(img)<if_stmt>isinstance(img Image.Image)<block_start>img=np.asarray(img)<block_end><return>img<block_end><block_end><class_stmt>RandAugment(RawRandAugment)<block_start>""" RandAugment wrapper to auto fit different img types """<def_stmt>__init__ self *args **kwargs<block_start>super().__init__(*args **kwargs)<block_end><def_stmt>__call__ self img<block_start><if_stmt><not>isinstance(img Image.Image)<block_start>img=np.ascontiguousarray(img)<line_sep>img=Image.fromarray(img)<block_end>img=super().__call__(img)<if_stmt>isinstance(img Image.Image)<block_start>img=np.asarray(img)<block_end><return>img<block_end><block_end><class_stmt>TimmAutoAugment(RawTimmAutoAugment)<block_start>""" TimmAutoAugment wrapper to auto fit different img tyeps. """<def_stmt>__init__ self *args **kwargs<block_start>super().__init__(*args **kwargs)<block_end><def_stmt>__call__ self img<block_start><if_stmt><not>isinstance(img Image.Image)<block_start>img=np.ascontiguousarray(img)<line_sep>img=Image.fromarray(img)<block_end>img=super().__call__(img)<if_stmt>isinstance(img Image.Image)<block_start>img=np.asarray(img)<block_end><return>img<block_end><block_end>
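A minimal sketch of running one of these wrappers through transform(); the random uint8 image stands in for whatever DecodeImage would normally produce, and the single-operator pipeline is only illustrative.

import numpy as np

img = np.random.randint(0, 256, size=(224, 224, 3), dtype=np.uint8)

# AutoAugment accepts numpy arrays or PIL images and always hands back numpy,
# so it can be mixed freely with the other operators in an ops list.
out = transform(img, ops=[AutoAugment()])
print(type(out), out.shape)   # expected: <class 'numpy.ndarray'> (224, 224, 3)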
""" The constants used in FLV files and their meanings. """<line_sep># Tag type (TAG_TYPE_AUDIO TAG_TYPE_VIDEO TAG_TYPE_SCRIPT)=(8 9 18)<line_sep># Sound format (SOUND_FORMAT_PCM_PLATFORM_ENDIAN SOUND_FORMAT_ADPCM SOUND_FORMAT_MP3 SOUND_FORMAT_PCM_LITTLE_ENDIAN SOUND_FORMAT_NELLYMOSER_16KHZ SOUND_FORMAT_NELLYMOSER_8KHZ SOUND_FORMAT_NELLYMOSER SOUND_FORMAT_G711_A_LAW SOUND_FORMAT_G711_MU_LAW)=range(9)<line_sep>(SOUND_FORMAT_AAC SOUND_FORMAT_SPEEX)=range(10 12)<line_sep>(SOUND_FORMAT_MP3_8KHZ SOUND_FORMAT_DEVICE_SPECIFIC)=range(14 16)<line_sep>sound_format_to_string={SOUND_FORMAT_PCM_PLATFORM_ENDIAN:"Linear PCM, platform endian" SOUND_FORMAT_ADPCM:"ADPCM" SOUND_FORMAT_MP3:"MP3" SOUND_FORMAT_PCM_LITTLE_ENDIAN:"Linear PCM, little endian" SOUND_FORMAT_NELLYMOSER_16KHZ:"Nellymoser 16-kHz mono" SOUND_FORMAT_NELLYMOSER_8KHZ:"Nellymoser 8-kHz mono" SOUND_FORMAT_NELLYMOSER:"Nellymoser" SOUND_FORMAT_G711_A_LAW:"G.711 A-law logarithmic PCM" SOUND_FORMAT_G711_MU_LAW:"G.711 mu-law logarithmic PCM" SOUND_FORMAT_AAC:"AAC" SOUND_FORMAT_SPEEX:"Speex" SOUND_FORMAT_MP3_8KHZ:"MP3 8-kHz" SOUND_FORMAT_DEVICE_SPECIFIC:"Device-specific sound"}<line_sep># Sound rate (SOUND_RATE_5_5_KHZ SOUND_RATE_11_KHZ SOUND_RATE_22_KHZ SOUND_RATE_44_KHZ)=range(4)<line_sep>sound_rate_to_string={SOUND_RATE_5_5_KHZ:"5.5-kHz" SOUND_RATE_11_KHZ:"11-kHz" SOUND_RATE_22_KHZ:"22-kHz" SOUND_RATE_44_KHZ:"44-kHz"}<line_sep># Sound size (SOUND_SIZE_8_BIT SOUND_SIZE_16_BIT)=range(2)<line_sep>sound_size_to_string={SOUND_SIZE_8_BIT:"snd8Bit" SOUND_SIZE_16_BIT:"snd16Bit"}<line_sep># Sound type (SOUND_TYPE_MONO SOUND_TYPE_STEREO)=range(2)<line_sep>sound_type_to_string={SOUND_TYPE_MONO:"sndMono" SOUND_TYPE_STEREO:"sndStereo"}<line_sep># AAC packet type (AAC_PACKET_TYPE_SEQUENCE_HEADER AAC_PACKET_TYPE_RAW)=range(2)<line_sep>aac_packet_type_to_string={AAC_PACKET_TYPE_SEQUENCE_HEADER:"sequence header" AAC_PACKET_TYPE_RAW:"raw"}<line_sep># Codec ID (CODEC_ID_JPEG CODEC_ID_H263 CODEC_ID_SCREEN_VIDEO CODEC_ID_VP6 CODEC_ID_VP6_WITH_ALPHA CODEC_ID_SCREEN_VIDEO_V2 CODEC_ID_H264)=range(1 8)<line_sep>codec_id_to_string={CODEC_ID_JPEG:"JPEG" CODEC_ID_H263:"Sorenson H.263" CODEC_ID_SCREEN_VIDEO:"Screen video" CODEC_ID_VP6:"On2 VP6" CODEC_ID_VP6_WITH_ALPHA:"On2 VP6 with alpha channel" CODEC_ID_SCREEN_VIDEO_V2:"Screen video version 2" CODEC_ID_H264:"H.264"}<line_sep># Frame type (FRAME_TYPE_KEYFRAME FRAME_TYPE_INTERFRAME FRAME_TYPE_DISPOSABLE_INTERFRAME FRAME_TYPE_GENERATED_KEYFRAME FRAME_TYPE_INFO_FRAME)=range(1 6)<line_sep>frame_type_to_string={FRAME_TYPE_KEYFRAME:"keyframe" FRAME_TYPE_INTERFRAME:"interframe" FRAME_TYPE_DISPOSABLE_INTERFRAME:"disposable interframe" FRAME_TYPE_GENERATED_KEYFRAME:"generated keyframe" FRAME_TYPE_INFO_FRAME:"video info/command frame"}<line_sep># H.264 packet type (H264_PACKET_TYPE_SEQUENCE_HEADER H264_PACKET_TYPE_NALU H264_PACKET_TYPE_END_OF_SEQUENCE)=range(3)<line_sep>h264_packet_type_to_string={H264_PACKET_TYPE_SEQUENCE_HEADER:"sequence header" H264_PACKET_TYPE_NALU:"NAL unit" H264_PACKET_TYPE_END_OF_SEQUENCE:"sequence end"}<line_sep># Value type (VALUE_TYPE_NUMBER VALUE_TYPE_BOOLEAN VALUE_TYPE_STRING VALUE_TYPE_OBJECT VALUE_TYPE_MOVIECLIP VALUE_TYPE_NULL VALUE_TYPE_UNDEFINED VALUE_TYPE_REFERENCE VALUE_TYPE_ECMA_ARRAY)=range(9)<line_sep>(VALUE_TYPE_STRICT_ARRAY VALUE_TYPE_DATE VALUE_TYPE_LONGSTRING)=range(10 13)<line_sep>value_type_to_string={VALUE_TYPE_NUMBER:'Number' VALUE_TYPE_BOOLEAN:'Boolean' VALUE_TYPE_STRING:'String' VALUE_TYPE_OBJECT:'Object' VALUE_TYPE_MOVIECLIP:'MovieClip' VALUE_TYPE_NULL:'Null' 
VALUE_TYPE_UNDEFINED:'Undefined' VALUE_TYPE_REFERENCE:'Reference' VALUE_TYPE_ECMA_ARRAY:'ECMA Array' VALUE_TYPE_STRICT_ARRAY:'Strict Array' VALUE_TYPE_DATE:'Date' VALUE_TYPE_LONGSTRING:'Longstring'}<line_sep>
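A short sketch of how a tag parser might render these constants for display; the import name flv_constants is an assumption for illustration.

import flv_constants as flv  # assumed import name for the constants module above

def describe_video_tag(codec_id, frame_type):
    return '%s (%s)' % (flv.codec_id_to_string[codec_id],
                        flv.frame_type_to_string[frame_type])

print(describe_video_tag(flv.CODEC_ID_H264, flv.FRAME_TYPE_KEYFRAME))
# -> H.264 (keyframe)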
# - Generated by tools/entrypoint_compiler.py: do not edit by hand """ Trainers.LightGbmBinaryClassifier """<import_stmt>numbers<import_from_stmt>..utils.entrypoints EntryPoint<import_from_stmt>..utils.utils try_set unlist<def_stmt>trainers_lightgbmbinaryclassifier training_data predictor_model=<none> number_of_iterations=100 learning_rate=<none> number_of_leaves=<none> minimum_example_count_per_leaf=<none> feature_column_name='Features' booster=<none> label_column_name='Label' example_weight_column_name=<none> row_group_column_name=<none> normalize_features='Auto' caching='Auto' unbalanced_sets=<false> weight_of_positive_examples=1.0 sigmoid=0.5 evaluation_metric='Logloss' maximum_bin_count_per_feature=255 verbose=<false> silent=<true> number_of_threads=<none> early_stopping_round=0 batch_size=1048576 use_categorical_split=<none> handle_missing_value=<true> use_zero_as_missing_value=<false> minimum_example_count_per_group=100 maximum_categorical_split_point_count=32 categorical_smoothing=10.0 l2_categorical_regularization=10.0 seed=<none> parallel_trainer=<none> **params<block_start>""" **Description** Train a LightGBM binary classification model. :param number_of_iterations: Number of iterations. (inputs). :param training_data: The data to be used for training (inputs). :param learning_rate: Shrinkage rate for trees, used to prevent over-fitting. Range: (0,1]. (inputs). :param number_of_leaves: Maximum leaves for trees. (inputs). :param minimum_example_count_per_leaf: Minimum number of instances needed in a child. (inputs). :param feature_column_name: Column to use for features (inputs). :param booster: Which booster to use, can be gbtree, gblinear or dart. gbtree and dart use tree based model while gblinear uses linear function. (inputs). :param label_column_name: Column to use for labels (inputs). :param example_weight_column_name: Column to use for example weight (inputs). :param row_group_column_name: Column to use for example groupId (inputs). :param normalize_features: Normalize option for the feature column (inputs). :param caching: Whether trainer should cache input training data (inputs). :param unbalanced_sets: Use for binary classification when training data is not balanced. (inputs). :param weight_of_positive_examples: Control the balance of positive and negative weights, useful for unbalanced classes. A typical value to consider: sum(negative cases) / sum(positive cases). (inputs). :param sigmoid: Parameter for the sigmoid function. (inputs). :param evaluation_metric: Evaluation metrics. (inputs). :param maximum_bin_count_per_feature: Maximum number of bucket bin for features. (inputs). :param verbose: Verbose (inputs). :param silent: Printing running messages. (inputs). :param number_of_threads: Number of parallel threads used to run LightGBM. (inputs). :param early_stopping_round: Rounds of early stopping, 0 will disable it. (inputs). :param batch_size: Number of entries in a batch when loading data. (inputs). :param use_categorical_split: Enable categorical split or not. (inputs). :param handle_missing_value: Enable special handling of missing value or not. (inputs). :param use_zero_as_missing_value: Enable usage of zero (0) as missing value. (inputs). :param minimum_example_count_per_group: Minimum number of instances per categorical group. (inputs). :param maximum_categorical_split_point_count: Max number of categorical thresholds. (inputs). :param categorical_smoothing: Lapalace smooth term in categorical feature spilt. Avoid the bias of small categories. (inputs). 
:param l2_categorical_regularization: L2 Regularization for categorical split. (inputs). :param seed: Sets the random seed for LightGBM to use. (inputs). :param parallel_trainer: Parallel LightGBM Learning Algorithm (inputs). :param predictor_model: The trained model (outputs). """<line_sep>entrypoint_name='Trainers.LightGbmBinaryClassifier'<line_sep>inputs={}<line_sep>outputs={}<if_stmt>number_of_iterations<is><not><none><block_start>inputs['NumberOfIterations']=try_set(obj=number_of_iterations none_acceptable=<true> is_of_type=numbers.Real)<block_end><if_stmt>training_data<is><not><none><block_start>inputs['TrainingData']=try_set(obj=training_data none_acceptable=<false> is_of_type=str)<block_end><if_stmt>learning_rate<is><not><none><block_start>inputs['LearningRate']=try_set(obj=learning_rate none_acceptable=<true> is_of_type=numbers.Real)<block_end><if_stmt>number_of_leaves<is><not><none><block_start>inputs['NumberOfLeaves']=try_set(obj=number_of_leaves none_acceptable=<true> is_of_type=numbers.Real)<block_end><if_stmt>minimum_example_count_per_leaf<is><not><none><block_start>inputs['MinimumExampleCountPerLeaf']=try_set(obj=minimum_example_count_per_leaf none_acceptable=<true> is_of_type=numbers.Real)<block_end><if_stmt>feature_column_name<is><not><none><block_start>inputs['FeatureColumnName']=try_set(obj=feature_column_name none_acceptable=<true> is_of_type=str is_column=<true>)<block_end><if_stmt>booster<is><not><none><block_start>inputs['Booster']=try_set(obj=booster none_acceptable=<true> is_of_type=dict)<block_end><if_stmt>label_column_name<is><not><none><block_start>inputs['LabelColumnName']=try_set(obj=label_column_name none_acceptable=<true> is_of_type=str is_column=<true>)<block_end><if_stmt>example_weight_column_name<is><not><none><block_start>inputs['ExampleWeightColumnName']=try_set(obj=example_weight_column_name none_acceptable=<true> is_of_type=str is_column=<true>)<block_end><if_stmt>row_group_column_name<is><not><none><block_start>inputs['RowGroupColumnName']=try_set(obj=row_group_column_name none_acceptable=<true> is_of_type=str is_column=<true>)<block_end><if_stmt>normalize_features<is><not><none><block_start>inputs['NormalizeFeatures']=try_set(obj=normalize_features none_acceptable=<true> is_of_type=str values=['No' 'Warn' 'Auto' 'Yes'])<block_end><if_stmt>caching<is><not><none><block_start>inputs['Caching']=try_set(obj=caching none_acceptable=<true> is_of_type=str values=['Auto' 'Memory' 'None'])<block_end><if_stmt>unbalanced_sets<is><not><none><block_start>inputs['UnbalancedSets']=try_set(obj=unbalanced_sets none_acceptable=<true> is_of_type=bool)<block_end><if_stmt>weight_of_positive_examples<is><not><none><block_start>inputs['WeightOfPositiveExamples']=try_set(obj=weight_of_positive_examples none_acceptable=<true> is_of_type=numbers.Real)<block_end><if_stmt>sigmoid<is><not><none><block_start>inputs['Sigmoid']=try_set(obj=sigmoid none_acceptable=<true> is_of_type=numbers.Real)<block_end><if_stmt>evaluation_metric<is><not><none><block_start>inputs['EvaluationMetric']=try_set(obj=evaluation_metric none_acceptable=<true> is_of_type=str values=['None' 'Default' 'Logloss' 'Error' 'AreaUnderCurve'])<block_end><if_stmt>maximum_bin_count_per_feature<is><not><none><block_start>inputs['MaximumBinCountPerFeature']=try_set(obj=maximum_bin_count_per_feature none_acceptable=<true> is_of_type=numbers.Real)<block_end><if_stmt>verbose<is><not><none><block_start>inputs['Verbose']=try_set(obj=verbose none_acceptable=<true> 
is_of_type=bool)<block_end><if_stmt>silent<is><not><none><block_start>inputs['Silent']=try_set(obj=silent none_acceptable=<true> is_of_type=bool)<block_end><if_stmt>number_of_threads<is><not><none><block_start>inputs['NumberOfThreads']=try_set(obj=number_of_threads none_acceptable=<true> is_of_type=numbers.Real)<block_end><if_stmt>early_stopping_round<is><not><none><block_start>inputs['EarlyStoppingRound']=try_set(obj=early_stopping_round none_acceptable=<true> is_of_type=numbers.Real)<block_end><if_stmt>batch_size<is><not><none><block_start>inputs['BatchSize']=try_set(obj=batch_size none_acceptable=<true> is_of_type=numbers.Real)<block_end><if_stmt>use_categorical_split<is><not><none><block_start>inputs['UseCategoricalSplit']=try_set(obj=use_categorical_split none_acceptable=<true> is_of_type=bool)<block_end><if_stmt>handle_missing_value<is><not><none><block_start>inputs['HandleMissingValue']=try_set(obj=handle_missing_value none_acceptable=<true> is_of_type=bool)<block_end><if_stmt>use_zero_as_missing_value<is><not><none><block_start>inputs['UseZeroAsMissingValue']=try_set(obj=use_zero_as_missing_value none_acceptable=<true> is_of_type=bool)<block_end><if_stmt>minimum_example_count_per_group<is><not><none><block_start>inputs['MinimumExampleCountPerGroup']=try_set(obj=minimum_example_count_per_group none_acceptable=<true> is_of_type=numbers.Real valid_range={'Inf':0 'Max':2147483647})<block_end><if_stmt>maximum_categorical_split_point_count<is><not><none><block_start>inputs['MaximumCategoricalSplitPointCount']=try_set(obj=maximum_categorical_split_point_count none_acceptable=<true> is_of_type=numbers.Real valid_range={'Inf':0 'Max':2147483647})<block_end><if_stmt>categorical_smoothing<is><not><none><block_start>inputs['CategoricalSmoothing']=try_set(obj=categorical_smoothing none_acceptable=<true> is_of_type=numbers.Real valid_range={'Min':0.0})<block_end><if_stmt>l2_categorical_regularization<is><not><none><block_start>inputs['L2CategoricalRegularization']=try_set(obj=l2_categorical_regularization none_acceptable=<true> is_of_type=numbers.Real valid_range={'Min':0.0})<block_end><if_stmt>seed<is><not><none><block_start>inputs['Seed']=try_set(obj=seed none_acceptable=<true> is_of_type=numbers.Real)<block_end><if_stmt>parallel_trainer<is><not><none><block_start>inputs['ParallelTrainer']=try_set(obj=parallel_trainer none_acceptable=<true> is_of_type=dict)<block_end><if_stmt>predictor_model<is><not><none><block_start>outputs['PredictorModel']=try_set(obj=predictor_model none_acceptable=<false> is_of_type=str)<block_end>input_variables={x<for>x unlist(inputs.values())<if>isinstance(x str)<and>x.startswith("$")}<line_sep>output_variables={x<for>x unlist(outputs.values())<if>isinstance(x str)<and>x.startswith("$")}<line_sep>entrypoint=EntryPoint(name=entrypoint_name inputs=inputs outputs=outputs input_variables=input_variables output_variables=output_variables)<line_sep><return>entrypoint<block_end>
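A hedged sketch of calling the generated helper; the $-prefixed strings follow the variable convention the function itself scans for when it builds input_variables/output_variables, while the concrete hyper-parameters are arbitrary.

node = trainers_lightgbmbinaryclassifier(
    training_data='$training_data',
    predictor_model='$predictor_model',
    number_of_iterations=200,
    learning_rate=0.1,
    number_of_leaves=31,
    unbalanced_sets=True)
# `node` is an EntryPoint for 'Trainers.LightGbmBinaryClassifier' whose input
# variables are {'$training_data'} and output variables {'$predictor_model'}.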
# -*- coding: utf-8 -*- <import_from_stmt>gengine.app.tests.base BaseDBTest<import_from_stmt>gengine.app.tests.helpers create_user update_user delete_user get_or_create_language<import_from_stmt>gengine.metadata DBSession<import_from_stmt>gengine.app.model AuthUser<class_stmt>TestUserCreation(BaseDBTest)<block_start><def_stmt>test_user_creation self<block_start>lang=get_or_create_language("en")<line_sep>user=create_user(lat=12.1 lng=12.2 #country = "RO", #region = "Transylvania", #city = "Cluj-Napoca", timezone="Europe/Bukarest" language="en" additional_public_data={"first_name":"Rudolf" "last_name":"<NAME>"})<line_sep>self.assertTrue(user.lat<eq>12.1)<line_sep>self.assertTrue(user.lng<eq>12.2)<line_sep>#self.assertTrue(user.country == "RO") #self.assertTrue(user.region == "Transylvania") #self.assertTrue(user.city == "Cluj-Napoca") self.assertTrue(user.timezone<eq>"Europe/Bukarest")<line_sep>self.assertTrue(user.language_id<eq>lang.id)<line_sep>self.assertTrue(user.additional_public_data["first_name"]<eq>"Rudolf")<line_sep>self.assertTrue(user.additional_public_data["last_name"]<eq>"<NAME>")<block_end><def_stmt>test_user_updation self<block_start>lang=get_or_create_language("en")<line_sep>user=create_user()<line_sep>user=update_user(user_id=user.id lat=14.2 lng=16.3 #country = "EN", #region = "Transylvania", #city = "Cluj-Napoca", timezone="Europe/Bukarest" language="en" additional_public_data={"first_name":"Rudolf" "last_name":"<NAME>"})<line_sep># Correct cases self.assertTrue(user.lat<eq>14.2)<line_sep>self.assertTrue(user.lng<eq>16.3)<line_sep>#self.assertTrue(user.country == "EN") #self.assertTrue(user.region == "Transylvania") #self.assertTrue(user.city == "Cluj-Napoca") self.assertTrue(user.timezone<eq>"Europe/Bukarest")<line_sep>self.assertTrue(user.language_id<eq>lang.id)<block_end><def_stmt>test_user_deletion self<block_start>user1=create_user()<line_sep># Create Second user user2=create_user(lat=85.59 lng=65.75 #country="DE", #region="Niedersachsen", #city="Osnabrück", timezone="Europe/Berlin" language="de" additional_public_data={"first_name":"Michael" "last_name":"Clarke"} friends=[1])<line_sep>remaining_users=delete_user(user_id=user1.id)<line_sep># Correct cases self.assertNotIn(user1.id remaining_users)<line_sep>self.assertEqual(user2.id remaining_users[0].id)<block_end><def_stmt>test_verify_password self<block_start>auth_user=AuthUser()<line_sep>auth_user.password="<PASSWORD>"<line_sep>auth_user.active=<true><line_sep>auth_user.email="<EMAIL>"<line_sep>DBSession.add(auth_user)<line_sep>iscorrect=auth_user.verify_password("<PASSWORD>")<line_sep>self.assertEqual(iscorrect <true>)<block_end><def_stmt>test_create_token self<block_start>user=create_user()<line_sep>auth_user=AuthUser()<line_sep>auth_user.user_id=user.id<line_sep>auth_user.password="<PASSWORD>"<line_sep>auth_user.active=<true><line_sep>auth_user.email="<EMAIL>"<line_sep>DBSession.add(auth_user)<if_stmt>auth_user.verify_password("<PASSWORD>")<block_start>token=auth_user.get_or_create_token()<block_end>self.assertNotEqual(token <none>)<block_end><block_end>
<import_stmt>logging<import_from_stmt>collections namedtuple<import_from_stmt>. export<line_sep>log=logging.getLogger(__name__)<line_sep>NO_QUERY=0<line_sep>PARSED_QUERY=1<line_sep>RAW_QUERY=2<line_sep>SpecialCommand=namedtuple('SpecialCommand' ['handler' 'command' 'shortcut' 'description' 'arg_type' 'hidden' 'case_sensitive'])<line_sep>COMMANDS={}<line_sep>@export<class_stmt>CommandNotFound(Exception)<block_start><pass><block_end>@export<def_stmt>parse_special_command sql<block_start>command,_,arg=sql.partition(' ')<line_sep>verbose='+'<in>command<line_sep>command=command.strip().replace('+' '')<line_sep><return>(command verbose arg.strip())<block_end>@export<def_stmt>special_command command shortcut description arg_type=PARSED_QUERY hidden=<false> case_sensitive=<false> aliases=()<block_start><def_stmt>wrapper wrapped<block_start>register_special_command(wrapped command shortcut description arg_type hidden case_sensitive aliases)<line_sep><return>wrapped<block_end><return>wrapper<block_end>@export<def_stmt>register_special_command handler command shortcut description arg_type=PARSED_QUERY hidden=<false> case_sensitive=<false> aliases=()<block_start>cmd=command.lower()<if><not>case_sensitive<else>command<line_sep>COMMANDS[cmd]=SpecialCommand(handler command shortcut description arg_type hidden case_sensitive)<for_stmt>alias aliases<block_start>cmd=alias.lower()<if><not>case_sensitive<else>alias<line_sep>COMMANDS[cmd]=SpecialCommand(handler command shortcut description arg_type case_sensitive=case_sensitive hidden=<true>)<block_end><block_end>@export<def_stmt>execute cur sql<block_start>"""Execute a special command and return the results. If the special command is not supported a KeyError will be raised. """<line_sep>command,verbose,arg=parse_special_command(sql)<if_stmt>(command<not><in>COMMANDS)<and>(command.lower()<not><in>COMMANDS)<block_start><raise>CommandNotFound<block_end><try_stmt><block_start>special_cmd=COMMANDS[command]<block_end><except_stmt>KeyError<block_start>special_cmd=COMMANDS[command.lower()]<if_stmt>special_cmd.case_sensitive<block_start><raise>CommandNotFound('Command not found: %s'%command)<block_end><block_end># "help <SQL KEYWORD> is a special case. We want built-in help, not # mycli help here. <if_stmt>command<eq>'help'<and>arg<block_start><return>show_keyword_help(cur=cur arg=arg)<block_end><if_stmt>special_cmd.arg_type<eq>NO_QUERY<block_start><return>special_cmd.handler()<block_end><elif_stmt>special_cmd.arg_type<eq>PARSED_QUERY<block_start><return>special_cmd.handler(cur=cur arg=arg verbose=verbose)<block_end><elif_stmt>special_cmd.arg_type<eq>RAW_QUERY<block_start><return>special_cmd.handler(cur=cur query=sql)<block_end><block_end>@special_command('help' '\\?' 'Show this help.' arg_type=NO_QUERY aliases=('\\?' '?'))<def_stmt>show_help # All the parameters are ignored. <block_start>headers=['Command' 'Shortcut' 'Description']<line_sep>result=[]<for_stmt>_,value sorted(COMMANDS.items())<block_start><if_stmt><not>value.hidden<block_start>result.append((value.command value.shortcut value.description))<block_end><block_end><return>[(<none> result headers <none>)]<block_end><def_stmt>show_keyword_help cur arg<block_start>""" Call the built-in "show <command>", to display help for an SQL keyword. 
:param cur: cursor :param arg: string :return: list """<line_sep>keyword=arg.strip('"').strip("'")<line_sep>query="help '{0}'".format(keyword)<line_sep>log.debug(query)<line_sep>cur.execute(query)<if_stmt>cur.description<and>cur.rowcount<g>0<block_start>headers=[x[0]<for>x cur.description]<line_sep><return>[(<none> cur headers '')]<block_end><else_stmt><block_start><return>[(<none> <none> <none> 'No help found for {0}.'.format(keyword))]<block_end><block_end>@special_command('exit' '\\q' 'Exit.' arg_type=NO_QUERY aliases=('\\q' ))@special_command('quit' '\\q' 'Quit.' arg_type=NO_QUERY)<def_stmt>quit *_args<block_start><raise>EOFError<block_end>@special_command('\\e' '\\e' 'Edit command with editor (uses $EDITOR).' arg_type=NO_QUERY case_sensitive=<true>)@special_command('\\clip' '\\clip' 'Copy query to the system clipboard.' arg_type=NO_QUERY case_sensitive=<true>)@special_command('\\G' '\\G' 'Display current query results vertically.' arg_type=NO_QUERY case_sensitive=<true>)<def_stmt>stub <block_start><raise>NotImplementedError<block_end>
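A minimal sketch of extending the registry above; the `version` command and its output string are invented for illustration.

@special_command('version', '\\V', 'Show a made-up client version.',
                 arg_type=NO_QUERY, aliases=('\\V',))
def show_version(*_args):
    return [(None, None, None, 'example-client 1.0.0')]

# Dispatch goes through execute(); cur may be None because arg_type is NO_QUERY.
result = execute(cur=None, sql='version')
print(result[0][3])   # -> example-client 1.0.0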
<import_stmt>torch<import_from_stmt>torch nn<import_from_stmt>torch.nn functional<as>F<import_from_stmt>torchdrug layers<class_stmt>ConditionalFlow(nn.Module)<block_start>""" Conditional flow transformation from `Masked Autoregressive Flow for Density Estimation`_. .. _Masked Autoregressive Flow for Density Estimation: https://arxiv.org/pdf/1705.07057.pdf Parameters: input_dim (int): input & output dimension condition_dim (int): condition dimension hidden_dims (list of int, optional): hidden dimensions activation (str or function, optional): activation function """<def_stmt>__init__ self input_dim condition_dim hidden_dims=<none> activation="relu"<block_start>super(ConditionalFlow self).__init__()<line_sep>self.input_dim=input_dim<line_sep>self.output_dim=input_dim<if_stmt>hidden_dims<is><none><block_start>hidden_dims=[]<block_end>self.mlp=layers.MLP(condition_dim list(hidden_dims)+[input_dim<times>2] activation)<line_sep>self.rescale=nn.Parameter(torch.zeros(1))<block_end><def_stmt>forward self input condition<block_start>""" Transform data into latent representations. Parameters: input (Tensor): input representations condition (Tensor): conditional representations Returns: (Tensor, Tensor): latent representations, log-likelihood of the transformation """<line_sep>scale,bias=self.mlp(condition).chunk(2 dim=-1)<line_sep>scale=(F.tanh(scale)<times>self.rescale)<line_sep>output=(input+bias)<times>scale.exp()<line_sep>log_det=scale<line_sep><return>output log_det<block_end><def_stmt>reverse self latent condition<block_start>""" Transform latent representations into data. Parameters: latent (Tensor): latent representations condition (Tensor): conditional representations Returns: (Tensor, Tensor): input representations, log-likelihood of the transformation """<line_sep>scale,bias=self.mlp(condition).chunk(2 dim=-1)<line_sep>scale=(F.tanh(scale)<times>self.rescale)<line_sep>output=latent/scale.exp()-bias<line_sep>log_det=scale<line_sep><return>output log_det<block_end><block_end>
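A small round-trip sketch under arbitrary dimensions; it only checks that reverse() inverts forward() for the same condition tensor.

import torch

flow = ConditionalFlow(input_dim=8, condition_dim=16, hidden_dims=[32])
x = torch.randn(4, 8)
cond = torch.randn(4, 16)

z, log_det = flow(x, cond)         # data -> latent; log_det has shape (4, 8)
x_back, _ = flow.reverse(z, cond)  # latent -> data
print(torch.allclose(x, x_back, atol=1e-5))   # expected: True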
<import_stmt>logging<as>log<class_stmt>Log<block_start><def_stmt>__init__ self level<block_start>self.level=level<line_sep>log.basicConfig(format='%(asctime)s - %(pathname)s[line:%(lineno)d] - %(levelname)s: %(message)s' level=level)<line_sep>self.log=log<block_end><def_stmt>info self msg<block_start>self.log.info(msg)<block_end><def_stmt>debug self msg<block_start>self.log.debug(msg)<block_end><def_stmt>warn self msg<block_start>self.log.warning(msg)<block_end><def_stmt>error self msg<block_start>self.log.error(msg)<block_end><block_end>
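A short usage sketch of the wrapper above; logging.INFO is just one reasonable level to pass in.

import logging

logger = Log(logging.INFO)
logger.info('pipeline started')
logger.debug('this record is filtered out at INFO level')
logger.error('failed to connect: timeout')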
# # Copyright (c) 2019 Intel Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # <import_from_stmt>click.testing CliRunner<import_from_stmt>cli_text_consts ModelExportCmdTexts<as>Texts<import_from_stmt>commands.model.common workflow_description<import_from_stmt>commands.model.export export<import_from_stmt>platform_resources.workflow ArgoWorkflow QUEUED_PHASE<line_sep>FEM_NAME="EXPORT_1"<line_sep>SEM_NAME="EXPORT_2"<line_sep>FEM_PARAMETERS="PARAMS_1"<line_sep>SEM_PARAMETERS="PARAMS_2"<line_sep>FEM_START_DATE='2000-01-01'<line_sep>FEM_NAMESPACE='test-namespace'<line_sep>TEST_AGROWORKFLOW=ArgoWorkflow(name=FEM_NAME started_at=FEM_START_DATE finished_at=<none> namespace=FEM_NAMESPACE phase=<none>)<line_sep>TWO_MODEL_OUTPUT=[workflow_description(name=FEM_NAME parameters=FEM_PARAMETERS) workflow_description(name=SEM_NAME parameters=SEM_PARAMETERS)]<def_stmt>setup_mocks mocker<block_start>mocker.patch('commands.model.export.get_kubectl_current_context_namespace' return_value='fake-namespace')<line_sep>mocker.patch('platform_resources.workflow.ArgoWorkflow.from_yaml' return_value=mocker.MagicMock())<line_sep>mocker.patch('platform_resources.workflow.ArgoWorkflow.get' return_value=TEST_AGROWORKFLOW)<line_sep>mocker.patch('os.listdir' return_value=['openvino.yaml' 'tensorflow.yaml' 'some_other_file'])<line_sep>mocker.patch('commands.model.export.NAUTAConfigMap' return_value=mocker.MagicMock(registry='fake-addr'))<line_sep>mocker.patch('commands.model.export.Config')<line_sep>mocker.patch('os.path.isdir' return_value=<true>)<block_end><def_stmt>test_export mocker<block_start>setup_mocks(mocker)<line_sep>result=CliRunner().invoke(export ["/fake/path" "openvino"])<assert_stmt>result.exit_code<eq>0<assert_stmt>"Successfully created export workflow"<in>result.output<assert_stmt>QUEUED_PHASE<in>result.output<assert_stmt>FEM_NAME<in>result.output<assert_stmt>FEM_START_DATE<in>result.output<assert_stmt>FEM_NAMESPACE<in>result.output<block_end><def_stmt>test_export_inexistent_format mocker<block_start>setup_mocks(mocker)<line_sep>result=CliRunner().invoke(export ["/fake/path" "bad"])<assert_stmt>result.exit_code<eq>2<assert_stmt>"Format: bad does not exist. 
Choose from:"<in>result.output<block_end><def_stmt>test_export_failure mocker<block_start>setup_mocks(mocker)<line_sep>mocker.patch('platform_resources.workflow.ArgoWorkflow.from_yaml' return_value=mocker.MagicMock(create=<lambda>:RuntimeError))<line_sep>result=CliRunner().invoke(export ["/fake/path" "openvino"])<assert_stmt>result.exit_code<eq>1<assert_stmt>"Failed to create export workflow"<in>result.output<block_end><def_stmt>test_export_list mocker<block_start>mocker.patch("commands.model.export.get_list_of_workflows" return_value=TWO_MODEL_OUTPUT)<line_sep>result=CliRunner().invoke(export ["formats"])<assert_stmt>FEM_NAME<in>result.output<assert_stmt>SEM_NAME<in>result.output<assert_stmt>FEM_PARAMETERS<in>result.output<assert_stmt>SEM_PARAMETERS<in>result.output<block_end><def_stmt>test_export_list_error mocker<block_start>mocker.patch("commands.model.export.get_list_of_workflows" side_effect=RuntimeError)<line_sep>result=CliRunner().invoke(export ["formats"])<assert_stmt>Texts.EXPORT_LIST_ERROR_MSG<in>result.output<block_end><def_stmt>test_export_missing_format mocker<block_start>setup_mocks(mocker)<line_sep>result=CliRunner().invoke(export ["wrong-option"])<assert_stmt>Texts.MISSING_EXPORT_FORMAT.format(formats=["openvino" "tensorflow"])<in>result.output<block_end>
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) <import_from_stmt>spack *<class_stmt>PyMdanalysis(PythonPackage)<block_start>"""MDAnalysis is a Python toolkit to analyze molecular dynamics trajectories generated by a wide range of popular simulation packages including DL_Poly, CHARMM, Amber, NAMD, LAMMPS, and Gromacs. (See the lists of supported trajectory formats and topology formats.)"""<line_sep>homepage="https://www.mdanalysis.org"<line_sep>pypi="MDAnalysis/MDAnalysis-0.19.2.tar.gz"<line_sep>version('1.0.0' sha256='f45a024aca45e390ff1c45ca90beb2180b78881be377e2a1aa9cd6c109bcfa81')<line_sep>version('0.20.1' sha256='d04b71b193b9716d2597ffb9938b93f43487fa535da1bb5c1f2baccf356d7df9')<line_sep>version('0.19.2' sha256='c5395bbafa5efca2e1aee4715d26129844140c47cb8301da0293106cb969de7d')<line_sep>version('0.19.1' sha256='ff1d694f8598c0833ec340de6a6adb3b5e62b92d0fa94ee6401718ba972db3cc')<line_sep>version('0.19.0' sha256='248e3b37fc6150e31c609cc18a3927c32aee37b76d29cbfedf635e7e1aa982cf')<line_sep>version('0.18.0' sha256='a08acea1755112411e7db55e3f282e164b47a59e15794b38744cce6c596f252a')<line_sep>version('0.17.0' sha256='9bd61760334698cc7b8a57ad26456451e926e9c9e66722594ad8816561348cde')<line_sep>version('0.16.2' sha256='407d9a9ff1ab8a5e47973714d06fabff220f8d08a28792dee93e88e70e995b0a')<line_sep>version('0.16.1' sha256='3dc8f5d639ab3a0d152cbd7259ae9372ec8a9bac0f8cb7d3b80ce5adc1e3ee57')<line_sep>version('0.16.0' sha256='c4824fa1fddd336daa39371436187ebb023366885fb250c2827ed7fce2546bd4')<line_sep>version('0.15.0' sha256='9088786048b47339cba1f8a586977bbb3bb04ae1bcd0462b59e45bda37e25533')<line_sep>variant('analysis' default=<true> description='Enable analysis packages: matplotlib, scipy, seaborn')<line_sep>variant('amber' default=<false> description='Support AMBER netcdf format.')<line_sep>depends_on('python@2.7:' type=('build' 'run'))<line_sep>depends_on('py-setuptools' type='build')<line_sep>depends_on('py-cython@0.16:' type='build')<line_sep>depends_on('py-six@1.4.0:' type=('build' 'run'))<line_sep>depends_on('py-networkx@1.0:' type=('build' 'run'))<line_sep>depends_on('py-gsd@1.4.0:' when='@0.17.0:' type=('build' 'run'))<line_sep>depends_on('py-mmtf-python@1.0.0:' when='@0.16.0:' type=('build' 'run'))<line_sep>depends_on('py-mock' when='@0.18.0:' type=('build' 'run'))<line_sep>depends_on('py-tqdm@4.43.0:' when='@1.0.0:' type=('build' 'run'))<line_sep>depends_on('py-joblib' when='@0.16.0:0.20.1' type=('build' 'run'))<line_sep>depends_on('py-joblib@0.12:' when='@1.0.0:' type=('build' 'run'))<line_sep>depends_on('py-numpy@1.5.0:' when='@:0.15.0' type=('build' 'run'))<line_sep>depends_on('py-numpy@1.10.4:' when='@0.16.0:0.19.2' type=('build' 'run'))<line_sep>depends_on('py-numpy@1.13.3:' when='@0.20.1:' type=('build' 'run'))<line_sep>depends_on('py-biopython@1.59:' when='@:0.17.0' type=('build' 'run'))<line_sep>depends_on('py-biopython@1.71:' when='@0.18.0:' type=('build' 'run'))<line_sep>depends_on('py-griddataformats@0.3.2:' when='@:0.16.2' type=('build' 'run'))<line_sep>depends_on('py-griddataformats@0.4:' when='@0.17.0:' type=('build' 'run'))<line_sep>depends_on('py-matplotlib' when='@:0.15.0+analysis' type=('build' 'run'))<line_sep>depends_on('py-matplotlib@1.5.1:' when='@0.16.0:0.16.1+analysis' type=('build' 'run'))<line_sep>depends_on('py-matplotlib@1.5.1:' when='@0.16.2:' type=('build' 'run'))<line_sep>depends_on('py-scipy' when='@:0.16.1+analysis' 
type=('build' 'run'))<line_sep>depends_on('py-scipy' when='@0.16.2:0.17.0' type=('build' 'run'))<line_sep>depends_on('py-scipy@1.0.0:' when='@0.18.0:' type=('build' 'run'))<line_sep>depends_on('py-scikit-learn' when='@0.16.0:+analysis' type=('build' 'run'))<line_sep>depends_on('py-seaborn' when='+analysis' type=('build' 'run'))<line_sep>depends_on('py-netcdf4@1.0:' when='+amber' type=('build' 'run'))<line_sep>depends_on('hdf5' when='+amber' type=('run'))<block_end>
<import_stmt>cherrypy<import_from_stmt>cherrypy.test helper<class_stmt>SessionAuthenticateTest(helper.CPWebCase)<block_start><def_stmt>setup_server <block_start><def_stmt>check username password# Dummy check_username_and_password function <block_start><if_stmt>username<ne>'test'<or>password<ne>'password'<block_start><return>'Wrong login/password'<block_end><block_end><def_stmt>augment_params # A simple tool to add some things to request.params # This is to check to make sure that session_auth can handle request # params (ticket #780) <block_start>cherrypy.request.params["test"]="test"<block_end>cherrypy.tools.augment_params=cherrypy.Tool('before_handler' augment_params <none> priority=30)<class_stmt>Test<block_start>_cp_config={'tools.sessions.on':<true> 'tools.session_auth.on':<true> 'tools.session_auth.check_username_and_password':check 'tools.augment_params.on':<true> }<def_stmt>index self **kwargs<block_start><return>"Hi %s, you are logged in"%cherrypy.request.login<block_end>index.exposed=<true><block_end>cherrypy.tree.mount(Test())<block_end>setup_server=staticmethod(setup_server)<def_stmt>testSessionAuthenticate self# request a page and check for login form <block_start>self.getPage('/')<line_sep>self.assertInBody('<form method="post" action="do_login">')<line_sep># setup credentials login_body='username=test&password=password&from_page=/'<line_sep># attempt a login self.getPage('/do_login' method='POST' body=login_body)<line_sep>self.assertStatus((302 303))<line_sep># get the page now that we are logged in self.getPage('/' self.cookies)<line_sep>self.assertBody('Hi test, you are logged in')<line_sep># do a logout self.getPage('/do_logout' self.cookies method='POST')<line_sep>self.assertStatus((302 303))<line_sep># verify we are logged out self.getPage('/' self.cookies)<line_sep>self.assertInBody('<form method="post" action="do_login">')<block_end><block_end>
# Copyright 2020 The TensorFlow Quantum Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Module functions for tfq.differentiators.*"""<import_from_stmt>tensorflow_quantum.python.differentiators.adjoint Adjoint <import_from_stmt>tensorflow_quantum.python.differentiators.linear_combination ForwardDifference CentralDifference LinearCombination <import_from_stmt>tensorflow_quantum.python.differentiators.parameter_shift ParameterShift <import_from_stmt>tensorflow_quantum.python.differentiators.differentiator Differentiator <line_sep>
# (C) Datadog, Inc. 2019-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) <import_stmt>pytest<import_from_stmt>mock MagicMock<import_from_stmt>requests HTTPError<import_from_stmt>datadog_checks.base AgentCheck<import_from_stmt>datadog_checks.dev.http MockResponse<import_from_stmt>.common HARBOR_COMPONENTS HARBOR_VERSION VERSION_1_5 VERSION_1_6 VERSION_1_8<line_sep>@pytest.mark.usefixtures("patch_requests")<def_stmt>test_check_health aggregator harbor_check harbor_api<block_start>base_tags=['tag1:val1' 'tag2']<line_sep>harbor_check._check_health(harbor_api base_tags)<if_stmt>harbor_api.harbor_version<ge>VERSION_1_8<block_start>components=HARBOR_COMPONENTS<for_stmt>c components<block_start>aggregator.assert_service_check('harbor.status' AgentCheck.OK tags=base_tags+['component:{}'.format(c)])<block_end><block_end><elif_stmt>harbor_api.harbor_version<ge>VERSION_1_6<block_start>aggregator.assert_service_check('harbor.status' AgentCheck.OK tags=base_tags+['component:chartmuseum'])<line_sep>aggregator.assert_service_check('harbor.status' AgentCheck.OK tags=base_tags)<block_end><elif_stmt>harbor_api.harbor_version<ge>VERSION_1_5<block_start>aggregator.assert_service_check('harbor.status' AgentCheck.OK tags=base_tags)<block_end><else_stmt><block_start>aggregator.assert_service_check('harbor.status' AgentCheck.UNKNOWN tags=base_tags)<block_end><block_end>@pytest.mark.usefixtures("patch_requests")<def_stmt>test_check_registries_health aggregator harbor_check harbor_api<block_start>tags=['tag1:val1' 'tag2']<line_sep>harbor_check._check_registries_health(harbor_api tags)<line_sep>tags.append('registry:demo')<line_sep>aggregator.assert_service_check('harbor.registry.status' AgentCheck.OK tags=tags)<block_end>@pytest.mark.usefixtures("patch_requests")<def_stmt>test_submit_project_metrics aggregator harbor_check harbor_api<block_start>tags=['tag1:val1' 'tag2']<line_sep>harbor_check._submit_project_metrics(harbor_api tags)<line_sep>aggregator.assert_metric('harbor.projects.count' 2 tags=tags)<block_end>@pytest.mark.usefixtures("patch_requests")<def_stmt>test_submit_disk_metrics aggregator harbor_check harbor_api<block_start>tags=['tag1:val1' 'tag2']<line_sep>harbor_check._submit_disk_metrics(harbor_api tags)<line_sep>aggregator.assert_metric('harbor.disk.free' 5e5 tags=tags)<line_sep>aggregator.assert_metric('harbor.disk.total' 1e6 tags=tags)<block_end>@pytest.mark.usefixtures("patch_requests")@pytest.mark.skipif(HARBOR_VERSION<l>VERSION_1_5 reason="The registry.read_only metric is submitted for Harbor 1.5+")<def_stmt>test_submit_read_only_status aggregator harbor_check harbor_api<block_start>tags=['tag1:val1' 'tag2']<line_sep>harbor_check._submit_read_only_status(harbor_api tags)<line_sep>aggregator.assert_metric('harbor.registry.read_only' 0 tags=tags)<block_end><def_stmt>test_api__make_get_request harbor_api<block_start>harbor_api.http=MagicMock()<line_sep>harbor_api.http.get=MagicMock(return_value=MockResponse(json_data={'json':<true>}))<assert_stmt>harbor_api._make_get_request('{base_url}/api/path')<eq>{"json":<true>}<line_sep>harbor_api.http.get=MagicMock(return_value=MockResponse(status_code=500))<with_stmt>pytest.raises(HTTPError)<block_start>harbor_api._make_get_request('{base_url}/api/path')<block_end><block_end><def_stmt>test_api__make_paginated_get_request harbor_api<block_start>expected_result=[{'item':i}<for>i range(20)]<line_sep>paginated_result=[[expected_result[i] expected_result[i+1]]<for>i range(0 len(expected_result)-1 
2)]<line_sep>values=[]<for_stmt>r paginated_result<block_start>values.append(MockResponse(json_data=r headers={'link':'Link: <unused_url>; rel=next; type="text/plain"'}))<block_end>values[-1].headers.pop('link')<line_sep>harbor_api.http=MagicMock()<line_sep>harbor_api.http.get=MagicMock(side_effect=values)<assert_stmt>harbor_api._make_paginated_get_request('{base_url}/api/path')<eq>expected_result<block_end><def_stmt>test_api__make_post_request harbor_api<block_start>harbor_api.http=MagicMock()<line_sep>harbor_api.http.post=MagicMock(return_value=MockResponse(json_data={'json':<true>}))<assert_stmt>harbor_api._make_post_request('{base_url}/api/path')<eq>{"json":<true>}<line_sep>harbor_api.http.post=MagicMock(return_value=MockResponse(status_code=500))<with_stmt>pytest.raises(HTTPError)<block_start>harbor_api._make_post_request('{base_url}/api/path')<block_end><block_end>
# Copyright 2012 Nebula, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. <import_from_stmt>vsm_dashboard.api swift<import_from_stmt>.utils TestDataContainer<def_stmt>data TEST<block_start>TEST.containers=TestDataContainer()<line_sep>TEST.objects=TestDataContainer()<line_sep>container_1=swift.Container(dict(name=u"container_one\u6346"))<line_sep>container_2=swift.Container(dict(name=u"container_two\u6346"))<line_sep>TEST.containers.add(container_1 container_2)<line_sep>object_dict={"name":u"test_object\u6346" "content_type":u"text/plain" "bytes":128 "last_modified":<none> "hash":u"object_hash"}<line_sep>obj_dicts=[object_dict]<line_sep>obj_data="Fake Data"<for_stmt>obj_dict obj_dicts<block_start>swift_object=swift.StorageObject(obj_dict container_1.name data=obj_data)<line_sep>TEST.objects.add(swift_object)<block_end><block_end>
<import_stmt>torch<import_stmt>sys<import_stmt>os<line_sep>sys.path.append(os.getcwd())<import_from_stmt>utils.helper_modules Sequential2<import_from_stmt>unimodals.common_models Linear MLP MaxOut_MLP<import_from_stmt>datasets.imdb.get_data get_dataloader<import_from_stmt>fusions.common_fusions Concat<import_from_stmt>objective_functions.objectives_for_supervised_learning MFM_objective<import_from_stmt>objective_functions.recon sigmloss1d<import_from_stmt>training_structures.Supervised_Learning train test<line_sep>filename="best_mfm.pt"<line_sep>traindata,validdata,testdata=get_dataloader("../video/multimodal_imdb.hdf5" "../video/mmimdb" vgg=<true> batch_size=128)<line_sep>classes=23<line_sep>n_latent=512<line_sep>fuse=Sequential2(Concat() MLP(2<times>n_latent n_latent n_latent<floordiv>2)).cuda()<line_sep>encoders=[MaxOut_MLP(512 512 300 n_latent <false>).cuda() MaxOut_MLP(512 1024 4096 n_latent <false>).cuda()]<line_sep>head=Linear(n_latent<floordiv>2 classes).cuda()<line_sep>decoders=[MLP(n_latent 600 300).cuda() MLP(n_latent 2048 4096).cuda()]<line_sep>intermediates=[MLP(n_latent n_latent<floordiv>2 n_latent<floordiv>2).cuda() MLP(n_latent n_latent<floordiv>2 n_latent<floordiv>2).cuda()]<line_sep>recon_loss=MFM_objective(2.0 [sigmloss1d sigmloss1d] [1.0 1.0] criterion=torch.nn.BCEWithLogitsLoss())<line_sep>train(encoders fuse head traindata validdata 1000 decoders+intermediates early_stop=<true> task="multilabel" objective_args_dict={"decoders":decoders "intermediates":intermediates} save=filename optimtype=torch.optim.AdamW lr=5e-3 weight_decay=0.01 objective=recon_loss)<line_sep>print("Testing:")<line_sep>model=torch.load(filename).cuda()<line_sep>test(model testdata method_name="MFM" dataset="imdb" criterion=torch.nn.BCEWithLogitsLoss() task="multilabel")<line_sep>
# <import_stmt>os<line_sep>TEST_DIR=os.path.abspath(os.path.dirname(__file__))<line_sep>
<import_stmt>os<import_stmt>numpy<as>np<import_stmt>torch<import_stmt>argparse<import_from_stmt>hparams create_hparams<import_from_stmt>model lcm<import_from_stmt>train load_model<import_from_stmt>torch.utils.data DataLoader<import_from_stmt>reader TextMelIDLoader TextMelIDCollate id2sp<import_from_stmt>inference_utils plot_data<line_sep>parser=argparse.ArgumentParser()<line_sep>parser.add_argument('-c' '--checkpoint_path' type=str help='directory to save checkpoints')<line_sep>parser.add_argument('--hparams' type=str required=<false> help='comma separated name=value pairs')<line_sep>args=parser.parse_args()<line_sep>checkpoint_path=args.checkpoint_path<line_sep>hparams=create_hparams(args.hparams)<line_sep>model=load_model(hparams)<line_sep>model.load_state_dict(torch.load(checkpoint_path)['state_dict'] strict=<false>)<line_sep>_=model.eval()<def_stmt>gen_embedding speaker<block_start>training_list=hparams.training_list<line_sep>train_set_A=TextMelIDLoader(training_list hparams.mel_mean_std hparams.speaker_A hparams.speaker_B shuffle=<false> pids=[speaker])<line_sep>collate_fn=TextMelIDCollate(lcm(hparams.n_frames_per_step_encoder hparams.n_frames_per_step_decoder))<line_sep>train_loader_A=DataLoader(train_set_A num_workers=1 shuffle=<false> sampler=<none> batch_size=1 pin_memory=<false> drop_last=<true> collate_fn=collate_fn)<with_stmt>torch.no_grad()<block_start>speaker_embeddings=[]<for_stmt>i,batch enumerate(train_loader_A)#print i <block_start>x,y=model.parse_batch(batch)<line_sep>text_input_padded,mel_padded,text_lengths,mel_lengths,speaker_id=x<line_sep>speaker_id,speaker_embedding=model.speaker_encoder.inference(mel_padded)<line_sep>speaker_embedding=speaker_embedding.data.cpu().numpy()<line_sep>speaker_embeddings.append(speaker_embedding)<block_end>speaker_embeddings=np.vstack(speaker_embeddings)<block_end>print(speaker_embeddings.shape)<if_stmt><not>os.path.exists('outdir/embeddings')<block_start>os.makedirs('outdir/embeddings')<block_end>np.save('outdir/embeddings/%s.npy'%speaker speaker_embeddings)<line_sep>plot_data([speaker_embeddings] 'outdir/embeddings/%s.pdf'%speaker)<block_end>print('Generating embedding of %s ...'%hparams.speaker_A)<line_sep>gen_embedding(hparams.speaker_A)<line_sep>print('Generating embedding of %s ...'%hparams.speaker_B)<line_sep>gen_embedding(hparams.speaker_B)<line_sep>
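The script above only writes the embeddings to disk; nothing in it reads them back. As a small hypothetical follow-up (the speaker id in the path is an assumption standing in for hparams.speaker_A), the saved arrays could be inspected like this:

import numpy as np

emb = np.load('outdir/embeddings/p225.npy')   # one row per utterance of the speaker
print(emb.shape)                              # (num_utterances, embedding_dim)
centroid = emb.mean(axis=0)                   # a single averaged embedding for the speaker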
<def_stmt>task_pos_args <block_start><def_stmt>show_params param1 pos<block_start>print('param1 is: {0}'.format(param1))<for_stmt>index,pos_arg enumerate(pos)<block_start>print('positional-{0}: {1}'.format(index pos_arg))<block_end><block_end><return>{'actions':[(show_params )] 'params':[{'name':'param1' 'short':'p' 'default':'default value'} ] 'pos_arg':'pos' 'verbosity':2 }<block_end>
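This is a dodo.py-style task definition for doit: the pos_arg entry routes any extra command-line arguments into the pos parameter, and verbosity 2 makes the prints visible. A hypothetical invocation (assuming doit derives the task name pos_args by stripping the task_ prefix, as it normally does) would look like:

#   $ doit pos_args -p 4 foo bar
#   param1 is: 4
#   positional-0: foo
#   positional-1: bar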
# -*- coding: utf-8 -*- '''Chemical Engineering Design Library (ChEDL). Utilities for process modeling. Copyright (C) 2020, <NAME> <<EMAIL>> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.'''<import_stmt>pytest<import_from_stmt>fluids.core C2K<import_stmt>thermo<import_from_stmt>chemicals.utils *<import_from_stmt>thermo *<import_from_stmt>fluids.numerics *<import_from_stmt>math *<import_stmt>json<import_stmt>os<import_stmt>numpy<as>np<def_stmt>test_C2_C5_PR <block_start>T,P=300 3e6<line_sep>constants=ChemicalConstantsPackage(Tcs=[305.32 469.7] Pcs=[4872000.0 3370000.0] omegas=[0.098 0.251] Tms=[90.3 143.15] Tbs=[184.55 309.21] CASs=['74-84-0' '109-66-0'] names=['ethane' 'pentane'] MWs=[30.06904 72.14878])<line_sep>HeatCapacityGases=[HeatCapacityGas(poly_fit=(50.0 1000.0 [7.115386645067898e-21 -3.2034776773408394e-17 5.957592282542187e-14 -5.91169369931607e-11 3.391209091071677e-08 -1.158730780040934e-05 0.002409311277400987 -0.18906638711444712 37.94602410497228])) HeatCapacityGas(poly_fit=(200.0 1000.0 [7.537198394065234e-22 -4.946850205122326e-18 1.4223747507170372e-14 -2.3451318313798008e-11 2.4271676873997662e-08 -1.6055220805830093e-05 0.006379734000450042 -1.0360272314628292 141.84695243411866]))]<line_sep>correlations=PropertyCorrelationsPackage(constants HeatCapacityGases=HeatCapacityGases)<line_sep>zs=ws_to_zs(MWs=constants.MWs ws=[.5 .5])<line_sep>eos_kwargs={'Pcs':constants.Pcs 'Tcs':constants.Tcs 'omegas':constants.omegas}<line_sep>gas=CEOSGas(PRMIX eos_kwargs HeatCapacityGases=HeatCapacityGases T=T P=P zs=zs)<line_sep>liq=CEOSLiquid(PRMIX eos_kwargs HeatCapacityGases=HeatCapacityGases T=T P=P zs=zs)<line_sep>flasher=FlashVL(constants correlations liquid=liq gas=gas)<line_sep># Check there are two phases near the dew point. don't bother checking the composition most of the time. # When this test was written, case is still valid for a dP of 0.00000001 Pa # Issue here was that (sum_criteria < 1e-7) was the check in the stability test result interpretation # Fixed it by decreasing the tolerance 10x (1e-8) res=flasher.flash(P=5475649.470049857+15 T=123.3+273.15 zs=zs)<line_sep>assert_close1d(res.betas [0.9999995457838572 4.5421614280893863e-07] rtol=1e-4)<line_sep>assert_close1d(res.gas.zs [0.7058337751720506 0.29416622482794935] rtol=1e-4)<line_sep>assert_close1d(res.liquid0.zs [0.49517964670906095 0.504820353290939] rtol=1e-4)<line_sep># # In this case, the tolerance had to be decreased 10x more - to 1e-9! 
Triggered at a dP of 0.5 res=flasher.flash(P=5475649.470049857+0.5 T=123.3+273.15 zs=zs)<line_sep>assert_close1d(res.betas [0.999999984859061 1.5140938947055815e-08] rtol=1e-4)<line_sep>assert_close1d(res.gas.zs [0.7058336826506021 0.29416631734939785])<line_sep>assert_close1d(res.liquid0.zs [0.4951780663825745 0.5048219336174254])<line_sep># # This one is too close to the border - the VF from SS is less than 0, # # but if the tolerance is increased, it is positive (and should be) res=flasher.flash(P=5475649.470049857+0.001 T=123.3+273.15 zs=zs)<line_sep>assert_close1d(res.betas [0.9999999999697144 3.028555184414472e-11] rtol=3e-3)<line_sep>assert_close1d(res.gas.zs [0.7058336794959247 0.29416632050407526])<line_sep>assert_close1d(res.liquid0.zs [0.49517801199759515 0.5048219880024049])<line_sep># This one is presently identified as a LL... just check the number of phases <assert_stmt>flasher.flash(zs=zs P=6.615e6 T=386).phase_count<eq>2<block_end><def_stmt>test_flash_TP_K_composition_idependent_unhappiness <block_start>constants=ChemicalConstantsPackage(Tcs=[508.1 536.2 512.5] Pcs=[4700000.0 5330000.0 8084000.0] omegas=[0.309 0.21600000000000003 0.5589999999999999] MWs=[58.07914 119.37764000000001 32.04186] CASs=['67-64-1' '67-66-3' '67-56-1'] names=['acetone' 'chloroform' 'methanol'])<line_sep>HeatCapacityGases=[HeatCapacityGas(poly_fit=(200.0 1000.0 [-1.3320002425347943e-21 6.4063345232664645e-18 -1.251025808150141e-14 1.2265314167534311e-11 -5.535306305509636e-09 -4.32538332013644e-08 0.0010438724775716248 -0.19650919978971002 63.84239495676709])) HeatCapacityGas(poly_fit=(200.0 1000.0 [1.5389278550737367e-21 -8.289631533963465e-18 1.9149760160518977e-14 -2.470836671137373e-11 1.9355882067011222e-08 -9.265600540761629e-06 0.0024825718663005762 -0.21617464276832307 48.149539665907696])) HeatCapacityGas(poly_fit=(50.0 1000.0 [2.3511458696647882e-21 -9.223721411371584e-18 1.3574178156001128e-14 -8.311274917169928e-12 4.601738891380102e-10 1.78316202142183e-06 -0.0007052056417063217 0.13263597297874355 28.44324970462924]))]<line_sep>VolumeLiquids=[VolumeLiquid(poly_fit=(178.51 498.1 [6.564241965071999e-23 -1.6568522275506375e-19 1.800261692081815e-16 -1.0988731296761538e-13 4.118691518070104e-11 -9.701938804617744e-09 1.4022905458596618e-06 -0.00011362923883050033 0.0040109650220160956])) VolumeLiquid(poly_fit=(209.63 509.5799999999999 [2.034047306563089e-23 -5.45567626310959e-20 6.331811062990084e-17 -4.149759318710192e-14 1.6788970104955462e-11 -4.291900093120011e-09 6.769385838271721e-07 -6.0166473220815445e-05 0.0023740769479069054])) VolumeLiquid(poly_fit=(175.7 502.5 [3.5725079384600736e-23 -9.031033742820083e-20 9.819637959370411e-17 -5.993173551565636e-14 2.2442465416964825e-11 -5.27776114586072e-09 7.610461006178106e-07 -6.148574498547711e-05 0.00216398089328537])) ]<line_sep>VaporPressures=[VaporPressure(exp_poly_fit=(178.51 508.09000000000003 [-1.3233111115238975e-19 4.2217134794609376e-16 -5.861832547132719e-13 4.6488594950801467e-10 -2.3199079844570237e-07 7.548290741523459e-05 -0.015966705328994194 2.093003523977292 -125.39006100979816])) VaporPressure(exp_poly_fit=(207.15 536.4 [-8.714046553871422e-20 2.910491615051279e-16 -4.2588796020294357e-13 3.580003116042944e-10 -1.902612144361103e-07 6.614096470077095e-05 -0.01494801055978542 2.079082613726621 -130.24643185169472])) VaporPressure(exp_poly_fit=(175.7 512.49 [-1.446088049406911e-19 4.565038519454878e-16 -6.278051259204248e-13 4.935674274379539e-10 -2.443464113936029e-07 7.893819658700523e-05 -0.016615779444332356 
2.1842496316772264 -134.19766175812708]))]<line_sep>liquid=GibbsExcessLiquid(VaporPressures=VaporPressures VolumeLiquids=VolumeLiquids HeatCapacityGases=HeatCapacityGases use_Poynting=<true> use_phis_sat=<false>)<line_sep>correlations=PropertyCorrelationsPackage(constants=constants skip_missing=<true> HeatCapacityGases=HeatCapacityGases VolumeLiquids=VolumeLiquids VaporPressures=VaporPressures)<line_sep>T,P=350.0 1e6<line_sep>zs=[0.2 0.0 0.8]<line_sep>eos_kwargs={'Pcs':constants.Pcs 'Tcs':constants.Tcs 'omegas':constants.omegas}<line_sep>gas=IdealGas(HeatCapacityGases=HeatCapacityGases T=T P=P zs=zs)<line_sep>flashN=FlashVLN(constants correlations liquids=[liquid] gas=gas)<line_sep># Low - all K under zero res=flashN.flash(T=T P=P zs=zs)<line_sep>assert_close(res.rho_mass() 733.1047159397776)<assert_stmt>1<eq>res.phase_count<assert_stmt>res.liquid0<is><not><none><line_sep># High - all K above zero res=flashN.flash(T=430 P=1e4 zs=zs)<assert_stmt>1<eq>res.phase_count<assert_stmt>res.gas<is><not><none><line_sep>assert_close(res.rho_mass() 0.10418751067559757)<line_sep># One K value is under 1, rest are above - but that component has mole frac of zero res=flashN.flash(T=420 P=1e4 zs=zs)<assert_stmt>1<eq>res.phase_count<assert_stmt>res.gas<is><not><none><line_sep># phis_at for liquids was broken, breaking this calculation res=flashN.flash(T=285.5 P=1e4 zs=zs)<line_sep>assert_close1d(res.betas [0.21860038882559643 0.7813996111744036])<assert_stmt>res.phase_count<eq>2<line_sep># Two cases RR was working on Ks less than 1, and coming up with a made up VF # Need to check Ks first res=flashN.flash(T=300.0000 P=900000.0000 zs=[0.5 0.1 0.4 0.0] )<assert_stmt>1<eq>res.phase_count<assert_stmt>res.gas<is><none><line_sep>res=flashN.flash(T=300.0000 P=900000.0000 zs=[.5 0 0 .5])<assert_stmt>1<eq>res.phase_count<assert_stmt>res.gas<is><none><block_end><def_stmt>test_flash_combustion_products <block_start>P=1e5<line_sep>T=794.5305048838037<line_sep>zs=[0.5939849621247668 0.112781954982051 0.0676691730155464 0.2255639098776358]<line_sep>constants=ChemicalConstantsPackage(atomss=[{'N':2} {'C':1 'O':2} {'O':2} {'H':2 'O':1}] CASs=['7727-37-9' '124-38-9' '7782-44-7' '7732-18-5'] MWs=[28.0134 44.0095 31.9988 18.01528] names=['nitrogen' 'carbon dioxide' 'oxygen' 'water'] omegas=[0.04 0.2252 0.021 0.344] Pcs=[3394387.5 7376460.0 5042945.25 22048320.0] Tbs=[77.355 194.67 90.18799999999999 373.124] Tcs=[126.2 304.2 154.58 647.14] Tms=[63.15 216.65 54.36 273.15])<line_sep>correlations=PropertyCorrelationsPackage(constants=constants skip_missing=<true> HeatCapacityGases=[HeatCapacityGas(poly_fit=(50.0 1000.0 [-6.496329615255804e-23 2.1505678500404716e-19 -2.2204849352453665e-16 1.7454757436517406e-14 9.796496485269412e-11 -4.7671178529502835e-08 8.384926355629239e-06 -0.0005955479316119903 29.114778709934264])) HeatCapacityGas(poly_fit=(50.0 1000.0 [-3.1115474168865828e-21 1.39156078498805e-17 -2.5430881416264243e-14 2.4175307893014295e-11 -1.2437314771044867e-08 3.1251954264658904e-06 -0.00021220221928610925 0.000884685506352987 29.266811602924644])) HeatCapacityGas(poly_fit=(50.0 1000.0 [7.682842888382947e-22 -3.3797331490434755e-18 6.036320672021355e-15 -5.560319277907492e-12 2.7591871443240986e-09 -7.058034933954475e-07 9.350023770249747e-05 -0.005794412013028436 29.229215579932934])) HeatCapacityGas(poly_fit=(50.0 1000.0 [5.543665000518528e-22 -2.403756749600872e-18 4.2166477594350336e-15 -3.7965208514613565e-12 1.823547122838406e-09 -4.3747690853614695e-07 5.437938301211039e-05 -0.003220061088723078 
33.32731489750759]))])<line_sep>kijs=[[0.0 -0.0122 -0.0159 0.0] [-0.0122 0.0 0.0 0.0952] [-0.0159 0.0 0.0 0.0] [0.0 0.0952 0.0 0.0]]<line_sep>eos_kwargs={'Pcs':constants.Pcs 'Tcs':constants.Tcs 'omegas':constants.omegas 'kijs':kijs}<line_sep>gas=CEOSGas(PRMIX eos_kwargs HeatCapacityGases=correlations.HeatCapacityGases T=T P=P zs=zs)<line_sep>liq=CEOSLiquid(PRMIX eos_kwargs HeatCapacityGases=correlations.HeatCapacityGases T=T P=P zs=zs)<line_sep>flasher=FlashVL(constants correlations liquid=liq gas=gas)<line_sep>res=flasher.flash(T=T P=P zs=zs)<assert_stmt>res.gas<assert_stmt>res.phase<eq>'V'<block_end><def_stmt>test_bubble_T_PR_VL # Last point at 8e6 Pa not yet found. <block_start>constants=ChemicalConstantsPackage(CASs=['124-38-9' '110-54-3'] MWs=[44.0095 86.17536] names=['carbon dioxide' 'hexane'] omegas=[0.2252 0.2975] Pcs=[7376460.0 3025000.0] Tbs=[194.67 341.87] Tcs=[304.2 507.6] Tms=[216.65 178.075])<line_sep>correlations=PropertyCorrelationsPackage(constants=constants skip_missing=<true> HeatCapacityGases=[HeatCapacityGas(poly_fit=(50.0 1000.0 [-3.1115474168865828e-21 1.39156078498805e-17 -2.5430881416264243e-14 2.4175307893014295e-11 -1.2437314771044867e-08 3.1251954264658904e-06 -0.00021220221928610925 0.000884685506352987 29.266811602924644])) HeatCapacityGas(poly_fit=(200.0 1000.0 [1.3740654453881647e-21 -8.344496203280677e-18 2.2354782954548568e-14 -3.4659555330048226e-11 3.410703030634579e-08 -2.1693611029230923e-05 0.008373280796376588 -1.356180511425385 175.67091124888998]))])<line_sep>zs=[.5 .5]<line_sep>T=300.0<line_sep>P=1e6<line_sep>eos_kwargs={'Pcs':constants.Pcs 'Tcs':constants.Tcs 'omegas':constants.omegas}<line_sep>gas=CEOSGas(PRMIX eos_kwargs HeatCapacityGases=correlations.HeatCapacityGases T=T P=P zs=zs)<line_sep>liq=CEOSLiquid(PRMIX eos_kwargs HeatCapacityGases=correlations.HeatCapacityGases T=T P=P zs=zs)<line_sep>flasher=FlashVL(constants correlations liquid=liq gas=gas)<line_sep>res=flasher.flash(P=7.93e6 VF=0 zs=zs)<line_sep>assert_close(res.T 419.0621213529388 rtol=1e-6)<block_end><def_stmt>test_PR_four_bubble_dew_cases_VL <block_start>zs=[.5 .5]<line_sep>T=300.0<line_sep>P=1E6<line_sep>constants=ChemicalConstantsPackage(CASs=['98-01-1' '98-00-0'] MWs=[96.08406000000001 98.09994] names=['2-furaldehyde' 'furfuryl alcohol'] omegas=[0.4522 0.7340000000000001] Pcs=[5510000.0 5350000.0] Tbs=[434.65 441.15] Tcs=[670.0 632.0] Tms=[235.9 250.35])<line_sep>correlations=PropertyCorrelationsPackage(constants=constants skip_missing=<true> HeatCapacityGases=[HeatCapacityGas(poly_fit=(298 1000 [4.245751608816354e-21 -2.470461837781697e-17 6.221823690784335e-14 -8.847967216702641e-11 7.749899297737877e-08 -4.250059888737765e-05 0.013882452355067994 -2.1404621487165327 185.84988012691903])) HeatCapacityGas(poly_fit=(250.35 632.0 [-9.534610090167143e-20 3.4583416772306854e-16 -5.304513883184021e-13 4.410937690059558e-10 -2.0905505018557675e-07 5.20661895325169e-05 -0.004134468659764938 -0.3746374641720497 114.90130267531933]))])<line_sep>eos_kwargs={'Pcs':constants.Pcs 'Tcs':constants.Tcs 'omegas':constants.omegas}<line_sep>gas=CEOSGas(PRMIX eos_kwargs HeatCapacityGases=correlations.HeatCapacityGases T=T P=P zs=zs)<line_sep>liq=CEOSLiquid(PRMIX eos_kwargs HeatCapacityGases=correlations.HeatCapacityGases T=T P=P zs=zs)<line_sep>flasher=FlashVL(constants correlations liquid=liq gas=gas)<line_sep>assert_close(flasher.flash(P=1e6 VF=0 zs=zs).T 539.1838522423529 rtol=1e-6)<line_sep>assert_close(flasher.flash(P=1e6 VF=1 zs=zs).T 540.2081697501809 
rtol=1e-6)<line_sep>assert_close(flasher.flash(T=600.0 VF=0 zs=zs).P 2766476.7473238464 rtol=1e-6)<line_sep>assert_close(flasher.flash(T=600.0 VF=1 zs=zs).P 2702616.6490743402 rtol=1e-6)<block_end><def_stmt>test_C1_C10_PT_flash_VL <block_start>IDs=['methane' 'C2' 'C3' 'C4' 'C5' 'C6' 'C7' 'C8' 'C9' 'C10']<line_sep>zs=[.1]<times>10<line_sep>T=300.0<line_sep>P=1E5<line_sep>constants=ChemicalConstantsPackage(CASs=['74-82-8' '74-84-0' '74-98-6' '106-97-8' '109-66-0' '110-54-3' '142-82-5' '111-65-9' '111-84-2' '124-18-5'] MWs=[16.04246 30.06904 44.09562 58.1222 72.14878 86.17536 100.20194000000001 114.22852 128.2551 142.28168] names=['methane' 'ethane' 'propane' 'butane' 'pentane' 'hexane' 'heptane' 'octane' 'nonane' 'decane'] omegas=[0.008 0.098 0.152 0.193 0.251 0.2975 0.3457 0.39399999999999996 0.444 0.49] Pcs=[4599000.0 4872000.0 4248000.0 3796000.0 3370000.0 3025000.0 2740000.0 2490000.0 2290000.0 2110000.0] Tbs=[111.65 184.55 231.04 272.65 309.21 341.87 371.53 398.77 423.95 447.25] Tcs=[190.56400000000002 305.32 369.83 425.12 469.7 507.6 540.2 568.7 594.6 611.7] Tms=[90.75 90.3 85.5 135.05 143.15 178.075 182.15 216.3 219.9 243.225])<line_sep>correlations=PropertyCorrelationsPackage(constants=constants skip_missing=<true> HeatCapacityGases=[HeatCapacityGas(poly_fit=(50.0 1000.0 [6.7703235945157e-22 -2.496905487234175e-18 3.141019468969792e-15 -8.82689677472949e-13 -1.3709202525543862e-09 1.232839237674241e-06 -0.0002832018460361874 0.022944239587055416 32.67333514157593])) HeatCapacityGas(poly_fit=(50.0 1000.0 [7.115386645067898e-21 -3.2034776773408394e-17 5.957592282542187e-14 -5.91169369931607e-11 3.391209091071677e-08 -1.158730780040934e-05 0.002409311277400987 -0.18906638711444712 37.94602410497228])) HeatCapacityGas(poly_fit=(50.0 1000.0 [7.008452174279456e-22 -1.7927920989992578e-18 1.1218415948991092e-17 4.23924157032547e-12 -5.279987063309569e-09 2.5119646468572195e-06 -0.0004080663744697597 0.1659704314379956 26.107282495650367])) HeatCapacityGas(poly_fit=(200.0 1000.0 [-2.608494166540452e-21 1.3127902917979555e-17 -2.7500977814441112e-14 3.0563338307642794e-11 -1.866070373718589e-08 5.4505831355984375e-06 -0.00024022110003950325 0.04007078628096955 55.70646822218319])) HeatCapacityGas(poly_fit=(200.0 1000.0 [7.537198394065234e-22 -4.946850205122326e-18 1.4223747507170372e-14 -2.3451318313798008e-11 2.4271676873997662e-08 -1.6055220805830093e-05 0.006379734000450042 -1.0360272314628292 141.84695243411866])) HeatCapacityGas(poly_fit=(200.0 1000.0 [1.3740654453881647e-21 -8.344496203280677e-18 2.2354782954548568e-14 -3.4659555330048226e-11 3.410703030634579e-08 -2.1693611029230923e-05 0.008373280796376588 -1.356180511425385 175.67091124888998])) HeatCapacityGas(poly_fit=(200.0 1000.0 [-1.4046935863496273e-21 5.8024177500786575e-18 -7.977871529098155e-15 7.331444047402207e-13 9.954400606484495e-09 -1.2112107913343475e-05 0.0062964696142858104 -1.0843106737278825 173.87692850911935])) HeatCapacityGas(poly_fit=(200.0 1000.0 [-1.069661592422583e-22 -1.2992882995593864e-18 8.808066659263286e-15 -2.1690080247294972e-11 2.8519221306107026e-08 -2.187775092823544e-05 0.009432620102532702 -1.5719488702446165 217.60587499269303])) HeatCapacityGas(poly_fit=(200.0 1000.0 [6.513870466670624e-22 -5.318305817618858e-18 1.8015815307749625e-14 -3.370046452151828e-11 3.840755097595374e-08 -2.7203677889897072e-05 0.011224516822410626 -1.842793858054514 247.3628627781443])) HeatCapacityGas(poly_fit=(200.0 1000.0 [-1.702672546011891e-21 6.6751002084997075e-18 -7.624102919104147e-15 -4.071140876082743e-12 
1.863822577724324e-08 -1.9741705032236747e-05 0.009781408958916831 -1.6762677829939379 252.8975930305735]))])<line_sep>eos_kwargs={'Pcs':constants.Pcs 'Tcs':constants.Tcs 'omegas':constants.omegas}<line_sep>gas=CEOSGas(PRMIX eos_kwargs HeatCapacityGases=correlations.HeatCapacityGases T=T P=P zs=zs)<line_sep>liq=CEOSLiquid(PRMIX eos_kwargs HeatCapacityGases=correlations.HeatCapacityGases T=T P=P zs=zs)<line_sep>flasher=FlashVL(constants correlations liquid=liq gas=gas)<line_sep>res=flasher.flash(T=T P=P zs=zs)<line_sep>assert_close(res.VF 0.3933480634014041 rtol=1e-5)<block_end><def_stmt>test_combustion_products <block_start><import_from_stmt>chemicals.combustion fuel_air_spec_solver<line_sep>IDs=['methane' 'carbon dioxide' 'ethane' 'propane' 'isobutane' 'butane' '2-methylbutane' 'pentane' 'hexane' 'nitrogen' 'oxygen' 'water']<line_sep>T=C2K(15)<line_sep>P=1e5<line_sep>zs_fuel=[0.9652228316853225 0.0059558310220860665 0.018185509193506685 0.004595963476244076 0.0009769695915451998 0.001006970610302194 0.000472984762445398 0.0003239924667435125 0.0006639799746946288 0.002594967217109564 0.0 0.0]<line_sep>zs_fuel=normalize(zs_fuel)<line_sep>zs_air=[0.0]<times>9+[0.79 0.21]+[0.0]<line_sep>constants,properties=ChemicalConstantsPackage.from_IDs(IDs)<line_sep>combustion=fuel_air_spec_solver(zs_air=zs_air zs_fuel=zs_fuel CASs=constants.CASs atomss=constants.atomss n_fuel=1.0 O2_excess=0.1)<line_sep>zs=combustion['zs_out']<line_sep>eos_kwargs={'Pcs':constants.Pcs 'Tcs':constants.Tcs 'omegas':constants.omegas}<line_sep>gas=CEOSGas(PRMIX eos_kwargs T=T P=P zs=zs HeatCapacityGases=properties.HeatCapacityGases)<line_sep>liquid=CEOSLiquid(PRMIX eos_kwargs T=T P=P zs=zs HeatCapacityGases=properties.HeatCapacityGases)<line_sep>flasher=FlashVL(constants properties liquid=liquid gas=gas)<line_sep>res=flasher.flash(T=400.0 P=1e5 zs=zs)<assert_stmt>res.phase_count<eq>1<assert_stmt>res.gas<is><not><none><block_end><def_stmt>test_furfuryl_alcohol_high_TP # Legacy bug, don't even remember what the original issue was <block_start>constants=ChemicalConstantsPackage(MWs=[98.09994 18.01528] Tcs=[632.0 647.14] Pcs=[5350000.0 22048320.0] omegas=[0.734 0.344] names=['furfuryl alcohol' 'water'] CASs=['98-00-0' '7732-18-5'])<line_sep>correlations=PropertyCorrelationsPackage(constants=constants skip_missing=<true> HeatCapacityGases=[HeatCapacityGas(load_data=<false> poly_fit=(250.35 632.0 [-9.534610090167143e-20 3.4583416772306854e-16 -5.304513883184021e-13 4.410937690059558e-10 -2.0905505018557675e-07 5.20661895325169e-05 -0.004134468659764938 -0.3746374641720497 114.90130267531933])) HeatCapacityGas(load_data=<false> poly_fit=(50.0 1000.0 [5.543665000518528e-22 -2.403756749600872e-18 4.2166477594350336e-15 -3.7965208514613565e-12 1.823547122838406e-09 -4.3747690853614695e-07 5.437938301211039e-05 -0.003220061088723078 33.32731489750759]))])<line_sep>eos_kwargs=dict(Tcs=constants.Tcs Pcs=constants.Pcs omegas=constants.omegas)<line_sep>zs=[0.4444445555555555 1-0.4444445555555555]<line_sep>T,P=5774.577777777778 220483199.99999997<line_sep>gas=CEOSGas(eos_class=PRMIX eos_kwargs=eos_kwargs T=T P=P zs=zs HeatCapacityGases=correlations.HeatCapacityGases)<line_sep>liquid=CEOSLiquid(eos_class=PRMIX eos_kwargs=eos_kwargs T=T P=P zs=zs HeatCapacityGases=correlations.HeatCapacityGases)<line_sep>flasher=FlashVL(constants correlations liquid=liquid gas=gas)<line_sep>assert_close(flasher.flash(T=T P=P zs=zs).rho_mass() 227.52709151903954)<block_end><def_stmt>test_flash_GibbsExcessLiquid_ideal_Psat # Binary water-ethanol 
<block_start>T=230.0<line_sep>P=1e5<line_sep>zs=[.4 .6]<line_sep>MWs=[18.01528 46.06844]<line_sep>Tcs=[647.086 514.7]<line_sep>Pcs=[22048320.0 6137000.0]<line_sep>omegas=[0.344 0.635]<line_sep>VaporPressures=[VaporPressure(extrapolation='DIPPR101_ABC|DIPPR101_ABC' exp_poly_fit=(273.17 647.086 [-2.8478502840358144e-21 1.7295186670575222e-17 -4.034229148562168e-14 5.0588958391215855e-11 -3.861625996277003e-08 1.886271475957639e-05 -0.005928371869421494 1.1494956887882308 -96.74302379151317])) VaporPressure(extrapolation='DIPPR101_ABC|DIPPR101_ABC' exp_poly_fit=(159.11 514.7 [-2.3617526481119e-19 7.318686894378096e-16 -9.835941684445551e-13 7.518263303343784e-10 -3.598426432676194e-07 0.00011171481063640762 -0.022458952185007635 2.802615041941912 -166.43524219017118]))]<line_sep>HeatCapacityGases=[HeatCapacityGas(poly_fit=(50.0 1000.0 [5.543665000518528e-22 -2.403756749600872e-18 4.2166477594350336e-15 -3.7965208514613565e-12 1.823547122838406e-09 -4.3747690853614695e-07 5.437938301211039e-05 -0.003220061088723078 33.32731489750759])) HeatCapacityGas(poly_fit=(50.0 1000.0 [-1.162767978165682e-20 5.4975285700787494e-17 -1.0861242757337942e-13 1.1582703354362728e-10 -7.160627710867427e-08 2.5392014654765875e-05 -0.004732593693568646 0.5072291035198603 20.037826650765965]))]<line_sep>VolumeLiquids=[VolumeLiquid(poly_fit=(273.17 637.096 [9.00307261049824e-24 -3.097008950027417e-20 4.608271228765265e-17 -3.8726692841874345e-14 2.0099220218891486e-11 -6.596204729785676e-09 1.3368112879131157e-06 -0.00015298762503607717 0.007589247005014652]) Psat=VaporPressures[0] Tc=Tcs[0] Pc=Pcs[0] omega=omegas[0]) VolumeLiquid(poly_fit=(159.11 504.71000000000004 [5.388587987308587e-23 -1.331077476340645e-19 1.4083880805283782e-16 -8.327187308842775e-14 3.006387047487587e-11 -6.781931902982022e-09 9.331209920256822e-07 -7.153268618320437e-05 0.0023871634205665524]) Psat=VaporPressures[1] Tc=Tcs[1] Pc=Pcs[1] omega=omegas[1])]<line_sep>EnthalpyVaporizations=[EnthalpyVaporization(Tc=647.14 poly_fit_ln_tau=(273.17 647.095 647.14 [0.010220675607316746 0.5442323619614213 11.013674729940819 110.72478547661254 591.3170172192005 1716.4863395285283 4063.5975524922624 17960.502354189244 53916.28280689388])) EnthalpyVaporization(Tc=514.0 poly_fit_ln_tau=(159.11 513.9999486 514.0 [-0.002197958699297133 -0.1583773493009195 -4.716256555877727 -74.79765793302774 -675.8449382004112 -3387.5058752252276 -7531.327682252346 5111.75264050548 50774.16034043739]))]<line_sep>constants=ChemicalConstantsPackage(Tcs=Tcs Pcs=Pcs omegas=omegas MWs=MWs CASs=['7732-18-5' '64-17-5'])<line_sep>correlations=PropertyCorrelationsPackage(constants HeatCapacityGases=HeatCapacityGases EnthalpyVaporizations=EnthalpyVaporizations VolumeLiquids=VolumeLiquids VaporPressures=VaporPressures skip_missing=<true>)<line_sep>liquid=GibbsExcessLiquid(VaporPressures=VaporPressures HeatCapacityGases=HeatCapacityGases VolumeLiquids=VolumeLiquids EnthalpyVaporizations=EnthalpyVaporizations caloric_basis='Psat' equilibrium_basis='Psat' T=T P=P zs=zs)<line_sep>gas=IdealGas(T=T P=P zs=zs HeatCapacityGases=HeatCapacityGases)<line_sep>flasher=FlashVL(constants correlations liquid=liquid gas=gas)<line_sep># All points were missing because G_dep was missing res=flasher.flash(T=300 P=1e5 zs=zs)<assert_stmt>res.liquid_count<eq>1<line_sep># Failing when two K values were under 1e-10 res=flasher.flash(T=100 P=1e5 zs=zs)<assert_stmt>res.phase_count<eq>1<assert_stmt>res.liquid_count<eq>1<line_sep># Wilson guessess are hard zeros res=flasher.flash(T=5 P=1e5 
zs=zs)<assert_stmt>res.phase_count<eq>1<assert_stmt>res.liquid_count<eq>1<line_sep># Wilson guesses inf, nan, and all zero res=flasher.flash(T=6.2 P=5e4 zs=zs)<assert_stmt>res.phase_count<eq>1<assert_stmt>res.liquid_count<eq>1<line_sep># One (but not both) fugacity became zero res=flasher.flash(T=8.4 P=1e-5 zs=zs)<assert_stmt>res.phase_count<eq>1<assert_stmt>res.liquid_count<eq>1<line_sep># Vapor fraction flashes <for_stmt>VF_value (0.0 1e-5 .3 .5 .7 1-1e-5 1.0)<block_start>VF=flasher.flash(T=T VF=VF_value zs=zs)<line_sep>check=flasher.flash(T=T P=VF.P zs=zs)<line_sep>assert_close(VF.VF check.VF rtol=1e-9)<block_end># Not exactly sure where the numerical challenge is occuring, but this is to be expected. # The tolerance decays at very small numbers <for_stmt>VF_value (1e-7 1e-8 1-1e-7 1-1e-8)<block_start>VF=flasher.flash(T=T VF=VF_value zs=zs)<line_sep>check=flasher.flash(T=T P=VF.P zs=zs)<line_sep>assert_close(VF.VF check.VF rtol=1e-5)<block_end><block_end><def_stmt>test_flash_GibbsExcessLiquid_ideal_PsatPoynting # Binary water-ethanol <block_start>T=230.0<line_sep>P=1e5<line_sep>zs=[.4 .6]<line_sep>MWs=[18.01528 46.06844]<line_sep>Tcs=[647.086 514.7]<line_sep>Pcs=[22048320.0 6137000.0]<line_sep>omegas=[0.344 0.635]<line_sep>VaporPressures=[VaporPressure(exp_poly_fit=(273.17 647.086 [-2.8478502840358144e-21 1.7295186670575222e-17 -4.034229148562168e-14 5.0588958391215855e-11 -3.861625996277003e-08 1.886271475957639e-05 -0.005928371869421494 1.1494956887882308 -96.74302379151317])) VaporPressure(exp_poly_fit=(159.11 514.7 [-2.3617526481119e-19 7.318686894378096e-16 -9.835941684445551e-13 7.518263303343784e-10 -3.598426432676194e-07 0.00011171481063640762 -0.022458952185007635 2.802615041941912 -166.43524219017118]))]<line_sep>HeatCapacityGases=[HeatCapacityGas(poly_fit=(50.0 1000.0 [5.543665000518528e-22 -2.403756749600872e-18 4.2166477594350336e-15 -3.7965208514613565e-12 1.823547122838406e-09 -4.3747690853614695e-07 5.437938301211039e-05 -0.003220061088723078 33.32731489750759])) HeatCapacityGas(poly_fit=(50.0 1000.0 [-1.162767978165682e-20 5.4975285700787494e-17 -1.0861242757337942e-13 1.1582703354362728e-10 -7.160627710867427e-08 2.5392014654765875e-05 -0.004732593693568646 0.5072291035198603 20.037826650765965]))]<line_sep>VolumeLiquids=[VolumeLiquid(poly_fit=(273.17 637.096 [9.00307261049824e-24 -3.097008950027417e-20 4.608271228765265e-17 -3.8726692841874345e-14 2.0099220218891486e-11 -6.596204729785676e-09 1.3368112879131157e-06 -0.00015298762503607717 0.007589247005014652]) Psat=VaporPressures[0] Tc=Tcs[0] Pc=Pcs[0] omega=omegas[0]) VolumeLiquid(poly_fit=(159.11 504.71000000000004 [5.388587987308587e-23 -1.331077476340645e-19 1.4083880805283782e-16 -8.327187308842775e-14 3.006387047487587e-11 -6.781931902982022e-09 9.331209920256822e-07 -7.153268618320437e-05 0.0023871634205665524]) Psat=VaporPressures[1] Tc=Tcs[1] Pc=Pcs[1] omega=omegas[1])]<line_sep>EnthalpyVaporizations=[EnthalpyVaporization(Tc=647.14 poly_fit_ln_tau=(273.17 647.095 647.14 [0.010220675607316746 0.5442323619614213 11.013674729940819 110.72478547661254 591.3170172192005 1716.4863395285283 4063.5975524922624 17960.502354189244 53916.28280689388])) EnthalpyVaporization(Tc=514.0 poly_fit_ln_tau=(159.11 513.9999486 514.0 [-0.002197958699297133 -0.1583773493009195 -4.716256555877727 -74.79765793302774 -675.8449382004112 -3387.5058752252276 -7531.327682252346 5111.75264050548 50774.16034043739]))]<line_sep>constants=ChemicalConstantsPackage(Tcs=Tcs Pcs=Pcs omegas=omegas MWs=MWs CASs=['7732-18-5' 
'64-17-5'])<line_sep>correlations=PropertyCorrelationsPackage(constants HeatCapacityGases=HeatCapacityGases EnthalpyVaporizations=EnthalpyVaporizations VolumeLiquids=VolumeLiquids VaporPressures=VaporPressures skip_missing=<true>)<line_sep>eoss=[PR(Tc=Tcs[0] Pc=Pcs[0] omega=omegas[0] T=T P=P) PR(Tc=Tcs[1] Pc=Pcs[1] omega=omegas[1] T=T P=P)]<line_sep>liquid=GibbsExcessLiquid(VaporPressures=VaporPressures HeatCapacityGases=HeatCapacityGases VolumeLiquids=VolumeLiquids EnthalpyVaporizations=EnthalpyVaporizations caloric_basis='PhiSat' equilibrium_basis='PhiSat' eos_pure_instances=eoss T=T P=P zs=zs)<line_sep>gas=IdealGas(T=T P=P zs=zs HeatCapacityGases=HeatCapacityGases)<line_sep>flasher=FlashVL(constants correlations liquid=liquid gas=gas)<line_sep># This was failing in pypy for a while instead of CPython res=flasher.flash(T=15 P=1e5 zs=zs)<assert_stmt>res.phase_count<eq>1<assert_stmt>res.liquid_count<eq>1<block_end>
# -*- mode:python -*- <import_stmt>flask<import_stmt>json<import_stmt>logging<import_from_stmt>datetime datetime<import_stmt>inflection<import_from_stmt>functools wraps<import_from_stmt>flask request url_for<import_from_stmt>werkzeug.exceptions HTTPException<import_from_stmt>.client.api.model *<import_from_stmt>. database<import_from_stmt>. helpers<import_from_stmt>.application db<line_sep>mgr=database.DatabaseManager(db)<line_sep>log=logging.getLogger(__name__)<line_sep>api=flask.Blueprint('api' __name__)<line_sep># ============================================================================= # API Helpers # ============================================================================= <def_stmt>route_api application *args **kwargs<block_start><def_stmt>decorator fn<block_start>@application.route(*args **kwargs)@wraps(fn)<def_stmt>wrapper *args **kwargs<block_start>headers=<none><line_sep>status_code=200<try_stmt><block_start>value=fn(*args **kwargs)<block_end><except_stmt>HTTPException<as>e<block_start><raise>helpers.set_exception_response(e)<block_end><if_stmt>isinstance(value tuple)<block_start><if_stmt>len(value)<g>2<block_start>headers=value[2]<block_end>status_code=value[1]<line_sep>value=value[0]<block_end><return>helpers.jsonify(value status_code headers)<block_end><return>fn<block_end><return>decorator<block_end><def_stmt>_dashboard_sort_column <block_start>"""Return a SQLAlchemy column descriptor to sort results by, based on the 'sort' and 'order' request parameters. """<line_sep>columns={'created':database.DashboardRecord.creation_date 'modified':database.DashboardRecord.last_modified_date 'category':database.DashboardRecord.category 'id':database.DashboardRecord.id 'title':database.DashboardRecord.title}<line_sep>colname=helpers.get_param('sort' 'created')<line_sep>order=helpers.get_param('order')<line_sep>column=database.DashboardRecord.creation_date<if_stmt>colname<in>columns<block_start>column=columns[colname]<block_end><if_stmt>order<eq>'desc'<or>order<eq>u'desc'<block_start><return>column.desc()<block_end><else_stmt><block_start><return>column.asc()<block_end><block_end><def_stmt>_set_dashboard_hrefs dash<block_start>"""Add the various ReSTful hrefs to an outgoing dashboard representation. dash should be the dictionary for of the dashboard, not the model object. """<line_sep>id=dash['id']<line_sep>dash['href']=url_for('api.dashboard_get' id=id)<line_sep>dash['definition_href']=url_for('api.dashboard_get_definition' id=id)<line_sep>dash['view_href']=url_for('ui.dashboard_with_slug' id=id slug=inflection.parameterize(dash['title']))<if_stmt>'definition'<in>dash<block_start>definition=dash['definition']<line_sep>definition['href']=url_for('api.dashboard_get_definition' id=id)<block_end><return>dash<block_end><def_stmt>_dashboards_response dashboards<block_start>"""Return a Flask response object for a list of dashboards in API format. dashboards must be a list of dashboard model objects, which will be converted to their JSON representation. """<if_stmt><not>isinstance(dashboards list)<block_start>dashboards=[dashboards]<block_end>include_definition=helpers.get_param_boolean('definition' <false>)<line_sep><return>[_set_dashboard_hrefs(d.to_json(include_definition=include_definition))<for>d dashboards]<block_end><def_stmt>_set_tag_hrefs tag<block_start>"""Add ReSTful href attributes to a tag's dictionary representation. 
"""<line_sep>id=tag['id']<line_sep>tag['href']=url_for('api.tag_get' id=id)<line_sep><return>tag<block_end><def_stmt>_tags_response tags<block_start>"""Return a Flask response object for a list of tags in API format. tags must be a list of tag model objects, which will be converted to their JSON representation. """<if_stmt><not>isinstance(tags list)<block_start>tags=[tags]<block_end><return>[_set_tag_hrefs(t.to_json())<for>t tags]<block_end># ============================================================================= # Dashboards # ============================================================================= @route_api(api '/dashboard/')<def_stmt>dashboard_list <block_start>"""Listing for all dashboards. Returns just the metadata, not the definitions. """<line_sep>imported_from=request.args.get('imported_from')<if_stmt>imported_from<block_start>query=database.DashboardRecord.query.filter_by(imported_from=imported_from).order_by(_dashboard_sort_column())<block_end><else_stmt><block_start>query=database.DashboardRecord.query.order_by(_dashboard_sort_column())<block_end>dashboards=[d<for>d query.all()]<line_sep><return>_dashboards_response(dashboards)<block_end>@route_api(api '/dashboard/tagged/<tag>')<def_stmt>dashboard_list_tagged tag<block_start>"""Listing for a set of dashboards with a tag applied. Returns just the metadata, not the definitions. """<line_sep>tag=database.TagRecord.query.filter_by(name=tag).first()<if_stmt><not>tag<block_start><return>_dashboards_response([])<block_end>dashboards=[d<for>d tag.dashboards.order_by(_dashboard_sort_column())<if>tag]<line_sep><return>_dashboards_response(dashboards)<block_end>@route_api(api '/dashboard/category/<category>')<def_stmt>dashboard_list_dashboards_in_category category<block_start>"""Listing for a set of dashboards in a specified category. Returns just the metadata, not the definitions. """<line_sep>dashboards=[d<for>d database.DashboardRecord.query.filter_by(category=category).order_by(_dashboard_sort_column())]<line_sep><return>_dashboards_response(dashboards)<block_end>@route_api(api '/dashboard/category/')<def_stmt>dashboard_list_all_dashboard_categories <block_start>result=db.session.query(database.DashboardRecord.category db.func.count(database.DashboardRecord.category)).group_by(database.DashboardRecord.category).all()<line_sep>categories=[]<for_stmt>(name count) result<block_start>categories.append({'name':name 'count':count })<block_end><return>categories<block_end>@route_api(api '/dashboard/<id>')<def_stmt>dashboard_get id<block_start>"""Get the metadata for a single dashboard. """<line_sep>dashboard=database.DashboardRecord.query.get_or_404(id)<line_sep>rendering=helpers.get_param('rendering' <false>)<line_sep>include_definition=helpers.get_param_boolean('definition' <false>)<line_sep>dash=_set_dashboard_hrefs(dashboard.to_json(rendering<or>include_definition))<if_stmt>rendering<block_start>dash['preferences']=helpers.get_preferences()<block_end><return>dash<block_end>@route_api(api '/dashboard/<id>/for-rendering')<def_stmt>dashboard_get_for_rendering id<block_start>"""Get a dashboard with its definition, and current settings necessary for rendering. """<line_sep>dashboard=database.DashboardRecord.query.get_or_404(id)<line_sep>dash=_set_dashboard_hrefs(dashboard.to_json(<true>))<line_sep><return>{'dashboard':dash 'preferences':helpers.get_preferences()}<block_end>@route_api(api '/dashboard/' methods=['POST'])<def_stmt>dashboard_create <block_start>"""Create a new dashboard with an empty definition. 
"""<line_sep>dashboard=database.DashboardRecord.from_json(request.json)<if_stmt><not>dashboard.title<block_start><return>{'error_message':"Missing required field 'title'"} 400<block_end><if_stmt>'definition'<in>request.json<block_start>dashboard.definition=database.DefinitionRecord(dumps(request.json['definition']))<block_end><else_stmt><block_start>dashboard.definition=database.DefinitionRecord(dumps(DashboardDefinition()))<block_end>mgr.store_dashboard(dashboard)<line_sep>href=url_for('api.dashboard_get' id=dashboard.id)<line_sep><return>{'dashboard_href':href 'view_href':url_for('ui.dashboard_with_slug' id=dashboard.id slug=inflection.parameterize(dashboard.title))} 201 {'Location':href}<block_end>@route_api(api '/dashboard/<id>' methods=['PUT'])<def_stmt>dashboard_update id<block_start>"""Update the metadata for an existing dashboard. """<line_sep>body=request.json<line_sep>dashboard=database.DashboardRecord.query.get_or_404(id)<line_sep>dashboard.merge_from_json(body)<line_sep>mgr.store_dashboard(dashboard)<line_sep># TODO - return similar to create, above <return>{}<block_end>@route_api(api '/dashboard/<id>' methods=['DELETE'])<def_stmt>dashboard_delete id<block_start>"""Delete a dashboard. Use with caution. """<line_sep>dashboard=database.DashboardRecord.query.get_or_404(id)<line_sep>db.session.delete(dashboard)<line_sep>db.session.commit()<line_sep><return>{} 204<block_end>@route_api(api '/dashboard/<id>/definition')<def_stmt>dashboard_get_definition id<block_start>"""Fetch the definition for a dashboard. This returns the representation to use when modifiying a dashboard. """<line_sep>dashboard=database.DashboardRecord.query.filter_by(id=id)[0]<line_sep>definition=database.DashboardRecord.query.get_or_404(id).definition.to_json()<line_sep>definition['href']=url_for('api.dashboard_get_definition' id=id)<line_sep>definition['dashboard_href']=url_for('api.dashboard_get' id=id)<line_sep><return>definition<block_end>@route_api(api '/dashboard/<id>/definition' methods=['PUT'])<def_stmt>dashboard_update_definition id<block_start>"""Update the definition of the dashboard. This should use the representation returned by /api/dashboard/<id>/definition, and should NOT have any embedded variables expanded, nor should it have complete graphite URLs in the queries. """<line_sep>dashboard=database.DashboardRecord.query.get_or_404(id)<line_sep># Validate the payload definition=DashboardDefinition.from_json(json.loads(request.data.decode('utf-8')))<if_stmt>dashboard.definition<block_start>dashboard.definition.definition=dumps(definition)<block_end><else_stmt><block_start>dashboard.definition=database.DashboardRecordDef(request.data)<block_end>mgr.store_dashboard(dashboard)<line_sep><return>{}<block_end># ============================================================================= # Tags # ============================================================================= @route_api(api '/tag/')<def_stmt>tag_list <block_start>"""Listing for all tags. 
"""<line_sep>tags=db.session.query(database.TagRecord).all()<line_sep><return>_tags_response(tags)<block_end>@route_api(api '/tag/<id>')<def_stmt>tag_get id<block_start>tag=database.TagRecord.query.get_or_404(id)<line_sep><return>_tags_response(tag)<block_end># ============================================================================= # Miscellany # ============================================================================= @route_api(api '/preferences/')<def_stmt>preferences_get <block_start><return>helpers.get_preferences()<block_end>@route_api(api '/preferences/' methods=['PUT'])<def_stmt>preferences_put <block_start>helpers.set_preferences(request.json)<line_sep><return>helpers.get_preferences()<block_end>
<import_from_stmt>dataclasses dataclass field<import_from_stmt>datetime date datetime time timezone<import_from_stmt>pathlib Path<import_from_stmt>typing Any Dict Optional Union<import_stmt>ciso8601<import_stmt>pytest<import_from_stmt>mashumaro DataClassDictMixin<import_from_stmt>mashumaro.exceptions UnserializableField<import_from_stmt>mashumaro.types SerializationStrategy<import_from_stmt>.entities MutableString MyList ThirdPartyType TypedDictRequiredKeys <def_stmt>test_ciso8601_datetime_parser <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:datetime=field(metadata={"deserialize":"ciso8601"})<block_end>should_be=DataClass(x=datetime(2021 1 2 3 4 5 tzinfo=timezone.utc))<line_sep>instance=DataClass.from_dict({"x":"2021-01-02T03:04:05Z"})<assert_stmt>instance<eq>should_be<block_end><def_stmt>test_ciso8601_date_parser <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:date=field(metadata={"deserialize":"ciso8601"})<block_end>should_be=DataClass(x=date(2021 1 2))<line_sep>instance=DataClass.from_dict({"x":"2021-01-02T03:04:05Z"})<assert_stmt>instance<eq>should_be<block_end><def_stmt>test_ciso8601_time_parser <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:time=field(metadata={"deserialize":"ciso8601"})<block_end>should_be=DataClass(x=time(3 4 5))<line_sep>instance=DataClass.from_dict({"x":"2021-01-02T03:04:05Z"})<assert_stmt>instance<eq>should_be<block_end><def_stmt>test_pendulum_datetime_parser <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:datetime=field(metadata={"deserialize":"pendulum"})<block_end>should_be=DataClass(x=datetime(2008 12 29 7 tzinfo=timezone.utc))<line_sep>instance=DataClass.from_dict({"x":"2009-W01 0700"})<assert_stmt>instance<eq>should_be<block_end><def_stmt>test_pendulum_date_parser <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:date=field(metadata={"deserialize":"pendulum"})<block_end>should_be=DataClass(x=date(2008 12 29))<line_sep>instance=DataClass.from_dict({"x":"2009-W01"})<assert_stmt>instance<eq>should_be<block_end><def_stmt>test_pendulum_time_parser <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:time=field(metadata={"deserialize":"pendulum"})<block_end>should_be=DataClass(x=time(3 4 5))<line_sep>instance=DataClass.from_dict({"x":"2009-W01 030405"})<assert_stmt>instance<eq>should_be<block_end><def_stmt>test_unsupported_datetime_parser_engine <block_start><with_stmt>pytest.raises(UnserializableField)<block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:datetime=field(metadata={"deserialize":"unsupported"})<block_end><block_end><block_end><def_stmt>test_global_function_datetime_parser <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:datetime=field(metadata={"deserialize":ciso8601.parse_datetime_as_naive})<block_end>should_be=DataClass(x=datetime(2021 1 2 3 4 5))<line_sep>instance=DataClass.from_dict({"x":"2021-01-02T03:04:05+03:00"})<assert_stmt>instance<eq>should_be<block_end><def_stmt>test_local_function_datetime_parser <block_start><def_stmt>parse_dt s<block_start><return>ciso8601.parse_datetime_as_naive(s)<block_end>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:datetime=field(metadata={"deserialize":parse_dt})<block_end>should_be=DataClass(x=datetime(2021 1 2 3 4 
5))<line_sep>instance=DataClass.from_dict({"x":"2021-01-02T03:04:05+03:00"})<assert_stmt>instance<eq>should_be<block_end><def_stmt>test_class_method_datetime_parser <block_start><class_stmt>DateTimeParser<block_start>@classmethod<def_stmt>parse_dt cls s:str<arrow>datetime<block_start><return>datetime.fromisoformat(s)<block_end><block_end>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:datetime=field(metadata={"deserialize":DateTimeParser.parse_dt})<block_end>should_be=DataClass(x=datetime(2021 1 2 3 4 5))<line_sep>instance=DataClass.from_dict({"x":"2021-01-02T03:04:05"})<assert_stmt>instance<eq>should_be<block_end><def_stmt>test_class_instance_method_datetime_parser <block_start><class_stmt>DateTimeParser<block_start><def_stmt>__call__ self s:str<arrow>datetime<block_start><return>datetime.fromisoformat(s)<block_end><block_end>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:datetime=field(metadata={"deserialize":DateTimeParser()})<block_end>should_be=DataClass(x=datetime(2021 1 2 3 4 5))<line_sep>instance=DataClass.from_dict({"x":"2021-01-02T03:04:05"})<assert_stmt>instance<eq>should_be<block_end><def_stmt>test_callable_class_instance_datetime_parser <block_start><class_stmt>CallableDateTimeParser<block_start><def_stmt>__call__ self s<block_start><return>ciso8601.parse_datetime(s)<block_end><block_end>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:datetime=field(metadata={"deserialize":CallableDateTimeParser()})<block_end>should_be=DataClass(x=datetime(2021 1 2 3 4 5 tzinfo=timezone.utc))<line_sep>instance=DataClass.from_dict({"x":"2021-01-02T03:04:05Z"})<assert_stmt>instance<eq>should_be<block_end><def_stmt>test_lambda_datetime_parser <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:datetime=field(metadata={"deserialize":<lambda>s:ciso8601.parse_datetime(s)})<block_end>should_be=DataClass(x=datetime(2021 1 2 3 4 5 tzinfo=timezone.utc))<line_sep>instance=DataClass.from_dict({"x":"2021-01-02T03:04:05Z"})<assert_stmt>instance<eq>should_be<block_end><def_stmt>test_derived_dataclass_metadata_deserialize_option <block_start>@dataclass<class_stmt>A<block_start>x:datetime=field(metadata={"deserialize":ciso8601.parse_datetime})<block_end>@dataclass<class_stmt>B(A DataClassDictMixin)<block_start>y:datetime=field(metadata={"deserialize":ciso8601.parse_datetime})<block_end>should_be=B(x=datetime(2021 1 2 3 4 5 tzinfo=timezone.utc) y=datetime(2021 1 2 3 4 5 tzinfo=timezone.utc) )<line_sep>instance=B.from_dict({"x":"2021-01-02T03:04:05Z" "y":"2021-01-02T03:04:05Z"})<assert_stmt>instance<eq>should_be<block_end><def_stmt>test_bytearray_overridden <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:bytearray=field(metadata={"deserialize":<lambda>s:s.upper().encode()})<block_end>should_be=DataClass(x=bytearray(b"ABC"))<line_sep>instance=DataClass.from_dict({"x":"abc"})<assert_stmt>instance<eq>should_be<block_end><def_stmt>test_path_like_overridden <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:Path=field(metadata={"deserialize":<lambda>s:Path(str(s).upper())})<block_end>should_be=DataClass(x=Path("/ABC"))<line_sep>instance=DataClass.from_dict({"x":"/abc"})<assert_stmt>instance<eq>should_be<block_end><def_stmt>test_datetime_serialize_option <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:datetime=field(metadata={"serialize":<lambda>v:v.strftime("%Y-%m-%d %H:%M:%S")})<block_end>should_be={"x":"2021-01-02 
03:04:05"}<line_sep>instance=DataClass(x=datetime(2021 1 2 3 4 5 tzinfo=timezone.utc))<assert_stmt>instance.to_dict()<eq>should_be<block_end><def_stmt>test_third_party_type_overridden <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:ThirdPartyType=field(metadata={"deserialize":<lambda>v:ThirdPartyType(v) "serialize":<lambda>v:v.value })<block_end>should_be=DataClass(x=ThirdPartyType(123))<line_sep>instance=DataClass.from_dict({"x":123})<assert_stmt>instance<eq>should_be<assert_stmt>instance.to_dict()<eq>{"x":123}<block_end><def_stmt>test_serializable_type_overridden <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:MutableString=field(metadata={"deserialize":<lambda>s:MutableString(s.upper()) "serialize":<lambda>v:str(v).lower() })<block_end>should_be=DataClass(x=MutableString("ABC"))<line_sep>instance=DataClass.from_dict({"x":"abc"})<assert_stmt>instance<eq>should_be<assert_stmt>instance.to_dict()<eq>{"x":"abc"}<block_end><def_stmt>test_optional_overridden <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:Optional[ThirdPartyType]=field(metadata={"deserialize":<lambda>v:ThirdPartyType(v) "serialize":<lambda>v:v.value })<block_end>instance=DataClass.from_dict({"x":123})<assert_stmt>instance<assert_stmt>instance.x.value<eq>123<line_sep>dct=instance.to_dict()<assert_stmt>dct["x"]<eq>123<block_end><def_stmt>test_union_overridden <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:Union[int str float ThirdPartyType]=field(metadata={"deserialize":<lambda>v:ThirdPartyType(v) "serialize":<lambda>v:v.value })<block_end>instance=DataClass.from_dict({"x":1})<assert_stmt>instance<eq>DataClass(x=ThirdPartyType(value=1))<assert_stmt>instance.to_dict()<eq>{"x":1}<block_end><def_stmt>test_serialization_strategy <block_start><class_stmt>TestSerializationStrategy(SerializationStrategy)<block_start><def_stmt>serialize self value<block_start><return>[value]<block_end><def_stmt>deserialize self value<block_start><return>value[0]<block_end><block_end>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:int=field(metadata={"serialization_strategy":TestSerializationStrategy()})<block_end>instance=DataClass(x=123)<assert_stmt>DataClass.from_dict({"x":[123]})<eq>instance<assert_stmt>instance.to_dict()<eq>{"x":[123]}<block_end><def_stmt>test_collection_derived_custom_class <block_start>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:MyList=field(metadata={"serialize":<lambda>v:v "deserialize":<lambda>v:v})<block_end>instance=DataClass(x=[1 2 3])<assert_stmt>DataClass.from_dict({"x":[1 2 3]})<eq>instance<assert_stmt>instance.to_dict()<eq>{"x":[1 2 3]}<block_end><def_stmt>test_dataclass_with_typed_dict_overridden <block_start><def_stmt>serialize_x x:TypedDictRequiredKeys<arrow>Dict[str Any]<block_start><return>{"int":int(x["int"]) "float":float(x["float"])}<block_end><def_stmt>deserialize_x x:Dict[str Any]<arrow>TypedDictRequiredKeys<block_start><return>TypedDictRequiredKeys(int=x["int"] float=x["float"])<block_end>@dataclass<class_stmt>DataClass(DataClassDictMixin)<block_start>x:TypedDictRequiredKeys=field(metadata={"serialize":serialize_x "deserialize":deserialize_x})<block_end>obj=DataClass(x=TypedDictRequiredKeys(int=1 float=2.0))<line_sep>data={"x":{"int":1 "float":2.0}}<assert_stmt>DataClass.from_dict(data)<eq>obj<assert_stmt>obj.to_dict()<eq>data<block_end>
<import_stmt>ast<import_stmt>re<import_stmt>sys<if_stmt>sys.version_info<l>(2 7)<block_start><import_stmt>unittest2<as>unittest<block_end><else_stmt><block_start><import_stmt>unittest<block_end><import_stmt>astunparse<import_from_stmt>tests.common AstunparseCommonTestCase<class_stmt>DumpTestCase(AstunparseCommonTestCase unittest.TestCase)<block_start><def_stmt>assertASTEqual self dump1 dump2# undo the pretty-printing <block_start>dump1=re.sub(r"(?<=[\(\[])\n\s+" "" dump1)<line_sep>dump1=re.sub(r"\n\s+" " " dump1)<line_sep>self.assertEqual(dump1 dump2)<block_end><def_stmt>check_roundtrip self code1 filename="internal" mode="exec"<block_start>ast_=compile(str(code1) filename mode ast.PyCF_ONLY_AST)<line_sep>dump1=astunparse.dump(ast_)<line_sep>dump2=ast.dump(ast_)<line_sep>self.assertASTEqual(dump1 dump2)<block_end><block_end>
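# Hedged usage sketch of the round trip checked above: astunparse.dump
# pretty-prints an AST with indentation, while ast.dump emits the same
# structure on a single line; assertASTEqual strips the indentation before
# comparing the two.
import ast
import astunparse

tree = compile("x = 1 + 2", "<internal>", "exec", ast.PyCF_ONLY_AST)
print(astunparse.dump(tree))   # multi-line, indented dump
print(ast.dump(tree))          # compact single-line dump of the same tree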
# pylint: skip-file <import_from_stmt>athena_glue_service_logs.catalog_manager BaseCatalogManager<def_stmt>test_class_init mocker<block_start>mocker.patch.multiple(BaseCatalogManager __abstractmethods__=set())<line_sep>base_catalog=BaseCatalogManager('us-west-2' 'dbname' 'tablename' 's3://somewhere')<assert_stmt>base_catalog.database_name<eq>'dbname'<assert_stmt>base_catalog.s3_location<eq>'s3://somewhere'<assert_stmt>base_catalog.table_name<eq>'tablename'<block_end><def_stmt>test_init_with_partitions mocker<block_start>mocker.patch.multiple(BaseCatalogManager __abstractmethods__=set())<line_sep>mocker.patch('athena_glue_service_logs.catalog_manager.BaseCatalogManager.does_database_exist' return_value=<true>)<line_sep>mocker.patch('athena_glue_service_logs.catalog_manager.BaseCatalogManager.create_database')<line_sep>mocker.patch('athena_glue_service_logs.catalog_manager.BaseCatalogManager.create_table')<line_sep>mocker.patch('athena_glue_service_logs.catalog_manager.BaseCatalogManager.create_partitions')<line_sep>base_catalog=BaseCatalogManager('us-west-2' 'dbname' 'tablename' 's3://somewhere')<line_sep>base_catalog.initialize_with_partitions(['a' 'b' 'c'])<assert_stmt>BaseCatalogManager.create_database.call_count<eq>0<line_sep>BaseCatalogManager.create_table.assert_called_once()<line_sep>BaseCatalogManager.create_partitions.assert_called_once_with(partition_list=['a' 'b' 'c'])<line_sep>mocker.patch('athena_glue_service_logs.catalog_manager.BaseCatalogManager.does_database_exist' return_value=<false>)<line_sep>base_catalog.initialize_with_partitions(['a' 'b' 'c'])<assert_stmt>BaseCatalogManager.create_database.call_count<eq>1<block_end>
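# The tests above rely on a pytest-mock trick: patching __abstractmethods__ to
# an empty set lets the abstract BaseCatalogManager be instantiated directly.
# A minimal stand-alone illustration of the same idea using only the standard
# library (AbstractThing is illustrative):
from abc import ABC, abstractmethod
from unittest import mock


class AbstractThing(ABC):
    @abstractmethod
    def build(self):
        ...


with mock.patch.multiple(AbstractThing, __abstractmethods__=set()):
    thing = AbstractThing()   # would normally raise TypeError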
""" CutBlur Copyright 2020-present NAVER corp. MIT license """<import_stmt>os<import_stmt>glob<import_stmt>data<class_stmt>BenchmarkSR(data.BaseDataset)<block_start><def_stmt>__init__ self phase opt<block_start>root=opt.dataset_root<line_sep>self.scale=opt.scale<line_sep>dir_HQ,dir_LQ=self.get_subdir()<line_sep>self.HQ_paths=sorted(glob.glob(os.path.join(root dir_HQ "*.png")))<line_sep>self.LQ_paths=sorted(glob.glob(os.path.join(root dir_LQ "*.png")))<line_sep>super().__init__(phase opt)<block_end><def_stmt>get_subdir self<block_start>dir_HQ="HR"<line_sep>dir_LQ="X{}".format(self.scale)<line_sep><return>dir_HQ dir_LQ<block_end><block_end><class_stmt>BenchmarkDN(BenchmarkSR)<block_start><def_stmt>__init__ self phase opt<block_start>self.sigma=opt.sigma<line_sep>super().__init__(phase opt)<block_end><def_stmt>get_subdir self<block_start>dir_HQ="HQ"<line_sep>dir_LQ="{}".format(self.sigma)<line_sep><return>dir_HQ dir_LQ<block_end><block_end><class_stmt>BenchmarkJPEG(BenchmarkSR)<block_start><def_stmt>__init__ self phase opt<block_start>self.quality=opt.quality<line_sep>super().__init__(phase opt)<block_end><def_stmt>get_subdir self<block_start>dir_HQ="HQ"<line_sep>dir_LQ="{}".format(self.quality)<line_sep><return>dir_HQ dir_LQ<block_end><block_end>
""" $lic$ Copyright (C) 2016-2020 by Tsinghua University and The Board of Trustees of Stanford University This program is free software: you can redistribute it and/or modify it under the terms of the Modified BSD-3 License as published by the Open Source Initiative. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the BSD-3 License for more details. You should have received a copy of the Modified BSD-3 License along with this program. If not, see <https://opensource.org/licenses/BSD-3-Clause>. """<import_stmt>unittest<import_from_stmt>nn_dataflow.core Network<import_from_stmt>nn_dataflow.core Layer InputLayer ConvLayer FCLayer PoolingLayer EltwiseLayer<class_stmt>TestNetwork(unittest.TestCase)<block_start>''' Tests for Network. '''<line_sep># pylint: disable=too-many-public-methods <def_stmt>setUp self<block_start>''' Set up. '''<line_sep>self.network=Network('test_net')<line_sep>self.network.set_input_layer(InputLayer(3 224))<line_sep>self.network.add('c1' ConvLayer(3 64 224 3))<line_sep>self.network.add('p1' PoolingLayer(64 7 32))<line_sep>self.network.add('f1' FCLayer(64 1000 7))<block_end><def_stmt>test_set_input_layer self<block_start>''' Modifier set_input_layer. '''<line_sep>network=Network('test_net')<line_sep>network.set_input_layer(InputLayer(3 24))<line_sep>self.assertIsInstance(network.input_layer() InputLayer)<line_sep>self.assertEqual(network.input_layer().nofm 3)<line_sep>self.assertEqual(network.input_layer().hofm 24)<line_sep>self.assertEqual(network.input_layer().wofm 24)<line_sep>self.assertEqual(len(network) 0)<block_end><def_stmt>test_set_input_layer_type self<block_start>''' Modifier set_input_layer type. '''<line_sep>network=Network('test_net')<with_stmt>self.assertRaisesRegex(TypeError 'Network: .*input_layer.*')<block_start>network.set_input_layer(Layer(3 24))<block_end><with_stmt>self.assertRaisesRegex(TypeError 'Network: .*input_layer.*')<block_start>network.set_input_layer(ConvLayer(3 8 24 3))<block_end><block_end><def_stmt>test_set_input_layer_duplicate self<block_start>''' Modifier set_input_layer duplicate. '''<line_sep>network=Network('test_net')<line_sep>network.set_input_layer(InputLayer(3 24))<with_stmt>self.assertRaisesRegex(KeyError 'Network: .*input.*')<block_start>network.set_input_layer(InputLayer(3 24))<block_end><block_end><def_stmt>test_add self<block_start>''' Modifier add. '''<line_sep>self.assertEqual(len(self.network) 3)<line_sep>self.network.add('f2' FCLayer(64 2000 7) prevs='p1')<line_sep>self.network.add('f3' FCLayer(3000 1000) prevs=('f1' 'f2'))<line_sep>self.network.add('e4' EltwiseLayer(1000 1 2) prevs=('f1' 'f3'))<line_sep>self.network.add('f4' FCLayer(1000 1000) prevs='e4')<line_sep>self.assertEqual(len(self.network) 7)<block_end><def_stmt>test_add_same_key self<block_start>''' Modifier add same key. '''<line_sep>network=Network('test_net')<line_sep>network.set_input_layer(InputLayer(3 224))<line_sep>network.add('c1' ConvLayer(3 64 224 3))<with_stmt>self.assertRaisesRegex(KeyError 'Network: .*c1.*')<block_start>network.add('c1' ConvLayer(64 128 224 3))<block_end><block_end><def_stmt>test_add_no_input self<block_start>''' Modifier add no input. '''<line_sep>network=Network('test_net')<with_stmt>self.assertRaisesRegex(RuntimeError 'Network: .*input.*')<block_start>network.add('c1' ConvLayer(3 64 224 3))<block_end><block_end><def_stmt>test_add_no_prev self<block_start>''' Modifier add no prevs. 
'''<line_sep>network=Network('test_net')<line_sep>network.set_input_layer(InputLayer(3 224))<line_sep>network.add('c1' ConvLayer(3 64 224 3))<with_stmt>self.assertRaisesRegex(KeyError 'Network: .*prev.*p1.*')<block_start>network.add('p1' PoolingLayer(64 7 32) prevs='p1')<block_end><block_end><def_stmt>test_add_invalid_type self<block_start>''' Modifier add invalid type. '''<line_sep>network=Network('test_net')<line_sep>network.set_input_layer(InputLayer(3 224))<with_stmt>self.assertRaisesRegex(TypeError 'Network: .*Layer.*')<block_start>network.add('c1' (3 64 224 3))<block_end><block_end><def_stmt>test_add_unmatch_prev self<block_start>''' Modifier add unmatch prevs. '''<line_sep>network=Network('test_net')<line_sep>network.set_input_layer(InputLayer(3 224))<line_sep>network.add('c1' ConvLayer(3 64 224 3))<with_stmt>self.assertRaisesRegex(ValueError 'Network: .*c1.*p1.*mismatch fmap.*')<block_start>network.add('p1' PoolingLayer(64 7 2))<block_end>self.assertEqual(len(network) 1)<with_stmt>self.assertRaisesRegex(ValueError 'Network: .*c1.*c2.*mismatch fmap.*')<block_start>network.add('c2' ConvLayer(64 128 220 3))<block_end>self.assertEqual(len(network) 1)<with_stmt>self.assertRaisesRegex(ValueError 'Network: .*c1.*prev.*p1.*')<block_start>network.add('p1' PoolingLayer(32 7 32))<block_end>self.assertEqual(len(network) 1)<with_stmt>self.assertRaisesRegex(ValueError 'Network: .*c1.*prev.*c2.*')<block_start>network.add('c2' ConvLayer(32 128 224 3))<block_end>self.assertEqual(len(network) 1)<line_sep>network.add('c2' ConvLayer(64 128 224 3))<with_stmt>self.assertRaisesRegex(ValueError r'Network: .*c1 | c2.*prev.*p1.*')<block_start>network.add('p1' PoolingLayer(128 7 32) prevs=('c1' 'c2'))<block_end>self.assertEqual(len(network) 2)<block_end><def_stmt>test_add_ext self<block_start>''' Modifier add_ext. '''<line_sep>self.assertEqual(len(self.network) 3)<line_sep>self.network.add_ext('e0' InputLayer(3 24))<line_sep>self.assertIsInstance(self.network['e0'] InputLayer)<line_sep>self.assertEqual(self.network['e0'].nofm 3)<line_sep>self.assertEqual(self.network['e0'].hofm 24)<line_sep>self.assertEqual(self.network['e0'].wofm 24)<line_sep>self.network.add_ext('e1' InputLayer(5 (16 20)))<line_sep>self.assertIsInstance(self.network['e1'] InputLayer)<line_sep>self.assertEqual(self.network['e1'].nofm 5)<line_sep>self.assertEqual(self.network['e1'].hofm 16)<line_sep>self.assertEqual(self.network['e1'].wofm 20)<line_sep>self.assertEqual(len(self.network) 3)<block_end><def_stmt>test_add_ext_same_key self<block_start>''' Modifier add_ext same key. '''<line_sep>network=Network('test_net')<line_sep>network.add_ext('e0' InputLayer(3 24))<with_stmt>self.assertRaisesRegex(KeyError 'Network: .*ext.*')<block_start>network.add_ext('e0' InputLayer(3 24))<block_end><block_end><def_stmt>test_add_ext_invalid_type self<block_start>''' Modifier add_ext invalid type. '''<line_sep>network=Network('test_net')<with_stmt>self.assertRaisesRegex(TypeError 'Network: .*external layer.*')<block_start>network.add_ext('e0' Layer(3 24))<block_end><with_stmt>self.assertRaisesRegex(TypeError 'Network: .*external layer.*')<block_start>network.add_ext('e0' ConvLayer(3 8 24 3))<block_end><block_end><def_stmt>test_prevs self<block_start>''' Get prevs. 
'''<line_sep>self.network.add('f2' FCLayer(64 2000 7) prevs='p1')<line_sep>self.network.add('f3' FCLayer(3000 1000) prevs=('f1' 'f2'))<line_sep>prevs=self.network.prevs('f1')<line_sep>self.assertTupleEqual(prevs ('p1' ))<line_sep>prevs=self.network.prevs('f2')<line_sep>self.assertTupleEqual(prevs ('p1' ))<line_sep>prevs=self.network.prevs('f3')<line_sep>self.assertTupleEqual(prevs ('f1' 'f2'))<block_end><def_stmt>test_prevs_first self<block_start>''' Get prevs first layer. '''<line_sep>self.network.add('c2' ConvLayer(3 3 224 1) prevs=self.network.INPUT_LAYER_KEY)<line_sep>prevs=self.network.prevs('c1')<line_sep>self.assertTupleEqual(prevs (<none> ))<line_sep>prevs=self.network.prevs('c2')<line_sep>self.assertTupleEqual(prevs (<none> ))<block_end><def_stmt>test_prevs_input self<block_start>''' Get prevs input layer. '''<with_stmt>self.assertRaisesRegex(ValueError 'Network: .*input.*')<block_start>_=self.network.prevs(self.network.INPUT_LAYER_KEY)<block_end><block_end><def_stmt>test_prevs_ext_next self<block_start>''' Get prevs next layer of an external layer. '''<line_sep>self.network.add_ext('e0' InputLayer(3 224))<line_sep>self.network.add('n' ConvLayer(6 3 224 1) prevs=(self.network.INPUT_LAYER_KEY 'e0'))<line_sep>prevs=self.network.prevs('n')<line_sep>self.assertTupleEqual(prevs (<none> 'e0'))<block_end><def_stmt>test_prevs_ext self<block_start>''' Get prevs external layer. '''<line_sep>self.network.add_ext('e0' InputLayer(3 3))<with_stmt>self.assertRaisesRegex(ValueError 'Network: .*ext.*')<block_start>_=self.network.prevs('e0')<block_end><block_end><def_stmt>test_nexts self<block_start>''' Get nexts. '''<line_sep>self.network.add('f2' FCLayer(64 2000 7) prevs='p1')<line_sep>self.network.add('f3' FCLayer(3000 1000) prevs=('f1' 'f2'))<line_sep>self.network.add('e4' EltwiseLayer(1000 1 2) prevs=('f1' 'f3'))<line_sep>self.network.add('f4' FCLayer(1000 1000) prevs='e4')<line_sep>nexts=self.network.nexts('p1')<line_sep>self.assertTupleEqual(nexts ('f1' 'f2'))<line_sep>nexts=self.network.nexts('f1')<line_sep>self.assertTupleEqual(nexts ('f3' 'e4'))<line_sep>nexts=self.network.nexts('f2')<line_sep>self.assertTupleEqual(nexts ('f3' ))<line_sep>nexts=self.network.nexts('f3')<line_sep>self.assertTupleEqual(nexts ('e4' ))<block_end><def_stmt>test_nexts_last self<block_start>''' Get nexts first layer. '''<line_sep>nexts=self.network.nexts('f1')<line_sep>self.assertTupleEqual(nexts (<none> ))<line_sep>self.network.add('f2' FCLayer(64 2000 7) prevs='p1')<line_sep>nexts=self.network.nexts('f1')<line_sep>self.assertTupleEqual(nexts (<none> ))<line_sep>nexts=self.network.nexts('f2')<line_sep>self.assertTupleEqual(nexts (<none> ))<block_end><def_stmt>test_nexts_input self<block_start>''' Get nexts input layer. '''<line_sep>nexts=self.network.nexts(self.network.INPUT_LAYER_KEY)<line_sep>self.assertTupleEqual(nexts ('c1' ))<line_sep>self.network.add('c2' ConvLayer(3 3 224 1) prevs=self.network.INPUT_LAYER_KEY)<line_sep>self.network.add('c3' ConvLayer(6 4 224 1) prevs=(self.network.INPUT_LAYER_KEY 'c2'))<line_sep>nexts=self.network.nexts(self.network.INPUT_LAYER_KEY)<line_sep>self.assertTupleEqual(nexts ('c1' 'c2' 'c3'))<block_end><def_stmt>test_firsts self<block_start>''' Get firsts. 
'''<line_sep>firsts=self.network.firsts()<line_sep>self.assertTupleEqual(firsts ('c1' ))<line_sep>self.network.add('c2' ConvLayer(3 3 224 1) prevs=self.network.INPUT_LAYER_KEY)<line_sep>self.network.add('c3' ConvLayer(6 4 224 1) prevs=(self.network.INPUT_LAYER_KEY 'c2'))<line_sep>firsts=self.network.firsts()<line_sep>self.assertTupleEqual(firsts ('c1' 'c2'))<line_sep>self.assertIn('c1' firsts)<line_sep>self.assertNotIn('c3' firsts)<block_end><def_stmt>test_firsts_ext self<block_start>''' Get firsts with external layers. '''<line_sep>self.network.add_ext('e0' InputLayer(3 224))<line_sep>self.network.add('c2' ConvLayer(3 3 224 1) prevs=('e0' ))<line_sep>self.network.add('c3' ConvLayer(67 3 224 1) prevs=('e0' 'c1'))<line_sep>self.network.add('c4' ConvLayer(6 3 224 1) prevs=(self.network.INPUT_LAYER_KEY 'e0' ))<line_sep>firsts=self.network.firsts()<line_sep>self.assertIn('c2' firsts)<line_sep>self.assertNotIn('c3' firsts)<line_sep>self.assertIn('c4' firsts)<block_end><def_stmt>test_lasts self<block_start>''' Get lasts. '''<line_sep>lasts=self.network.lasts()<line_sep>self.assertTupleEqual(lasts ('f1' ))<line_sep>self.network.add('f2' FCLayer(64 2000 7) prevs='p1')<line_sep>lasts=self.network.lasts()<line_sep>self.assertTupleEqual(lasts ('f1' 'f2'))<block_end><def_stmt>test_ext_layers self<block_start>''' Get external layers. '''<line_sep>self.assertTupleEqual(self.network.ext_layers() tuple())<line_sep>self.network.add_ext('e0' InputLayer(3 224))<line_sep>self.assertTupleEqual(self.network.ext_layers() ('e0' ))<line_sep>self.network.add_ext('e1' InputLayer(3 224))<line_sep>self.assertTupleEqual(self.network.ext_layers() ('e0' 'e1'))<block_end><def_stmt>test_contains self<block_start>''' Whether contains. '''<line_sep>self.assertIn('c1' self.network)<line_sep>self.assertIn('p1' self.network)<line_sep>self.assertIn('f1' self.network)<line_sep>self.assertNotIn('f2' self.network)<line_sep>self.network.add('f2' FCLayer(64 2000 7) prevs='p1')<line_sep>self.assertIn('f2' self.network)<block_end><def_stmt>test_len self<block_start>''' Accessor len. '''<line_sep>self.assertEqual(len(self.network) 3)<line_sep>network=Network('test_net')<line_sep>self.assertEqual(len(network) 0)<line_sep>network.set_input_layer(InputLayer(3 224))<line_sep>self.assertEqual(len(network) 0)<line_sep>network.add('c1' ConvLayer(3 4 224 1))<line_sep>self.assertEqual(len(network) 1)<line_sep>self.network.add('f2' FCLayer(64 2000 7) prevs='p1')<line_sep>self.assertEqual(len(self.network) 4)<line_sep>self.network.add('f3' FCLayer(3000 1000) prevs=('f1' 'f2'))<line_sep>self.assertEqual(len(self.network) 5)<line_sep>self.network.add('e4' EltwiseLayer(1000 1 2) prevs=('f1' 'f3'))<line_sep>self.assertEqual(len(self.network) 6)<line_sep>self.network.add('f4' FCLayer(1000 1000) prevs='e4')<line_sep>self.assertEqual(len(self.network) 7)<block_end><def_stmt>test_iter self<block_start>''' Accessor iter. '''<line_sep>num=0<for_stmt>layer self.network<block_start>self.assertIn(layer self.network)<line_sep>self.assertIsInstance(self.network[layer] Layer)<line_sep>num<augadd>1<block_end>self.assertEqual(len(self.network) num)<line_sep>network=Network('test_net')<line_sep>network.set_input_layer(InputLayer(3 224))<with_stmt>self.assertRaises(StopIteration)<block_start>_=next(iter(network))<block_end><block_end><def_stmt>test_contains_ext self<block_start>''' Whether contains external layer. 
'''<line_sep>self.assertNotIn('e0' self.network)<line_sep>self.network.add_ext('e0' InputLayer(3 224))<line_sep>self.assertIn('e0' self.network)<block_end><def_stmt>test_len_ext self<block_start>''' Accessor len external layer. '''<line_sep>self.assertEqual(len(self.network) 3)<line_sep>self.network.add_ext('e0' InputLayer(3 224))<line_sep>self.assertEqual(len(self.network) 3)<block_end><def_stmt>test_iter_ext self<block_start>''' Accessor iter external layer. '''<line_sep>self.network.add_ext('e0' InputLayer(3 224))<for_stmt>layer self.network<block_start>self.assertNotEqual(layer 'e0')<block_end><block_end><def_stmt>test_getitem self<block_start>''' Accessor getitem. '''<line_sep>self.assertIsInstance(self.network['c1'] ConvLayer)<line_sep>self.assertIsInstance(self.network['p1'] PoolingLayer)<line_sep>self.assertIsInstance(self.network['f1'] FCLayer)<block_end><def_stmt>test_getitem_error self<block_start>''' Accessor getitem. '''<with_stmt>self.assertRaisesRegex(KeyError 'Network: .*c2.*')<block_start>_=self.network['c2']<block_end><block_end><def_stmt>test_str self<block_start>''' Accessor str. '''<line_sep>string=str(self.network)<for_stmt>layer self.network<block_start>self.assertIn(layer string)<block_end><block_end><block_end>
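# Compact usage sketch of the Network API exercised above: layers form a named
# DAG, prevs()/nexts() expose its edges, and external layers added with
# add_ext() are not counted by len().
from nn_dataflow.core import Network
from nn_dataflow.core import InputLayer, ConvLayer, FCLayer, PoolingLayer

net = Network('sketch_net')
net.set_input_layer(InputLayer(3, 224))
net.add('c1', ConvLayer(3, 64, 224, 3))
net.add('p1', PoolingLayer(64, 7, 32))
net.add('f1', FCLayer(64, 1000, 7))
net.add('f2', FCLayer(64, 2000, 7), prevs='p1')

assert net.prevs('f2') == ('p1',)
assert net.nexts('p1') == ('f1', 'f2')
assert net.firsts() == ('c1',)
assert net.lasts() == ('f1', 'f2')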
<import_from_stmt>sympy Derivative<as>D Eq exp sin Function Symbol symbols cos log <import_from_stmt>sympy.core S<import_from_stmt>sympy.solvers.pde pde_separate pde_separate_add pde_separate_mul pdsolve classify_pde checkpdesol <import_from_stmt>sympy.testing.pytest raises<line_sep>a,b,c,x,y=symbols('a b c x y')<def_stmt>test_pde_separate_add <block_start>x,y,z,t=symbols("x,y,z,t")<line_sep>F,T,X,Y,Z,u=map(Function 'FTXYZu')<line_sep>eq=Eq(D(u(x t) x) D(u(x t) t)<times>exp(u(x t)))<line_sep>res=pde_separate_add(eq u(x t) [X(x) T(t)])<assert_stmt>res<eq>[D(X(x) x)<times>exp(-X(x)) D(T(t) t)<times>exp(T(t))]<block_end><def_stmt>test_pde_separate <block_start>x,y,z,t=symbols("x,y,z,t")<line_sep>F,T,X,Y,Z,u=map(Function 'FTXYZu')<line_sep>eq=Eq(D(u(x t) x) D(u(x t) t)<times>exp(u(x t)))<line_sep>raises(ValueError <lambda>:pde_separate(eq u(x t) [X(x) T(t)] 'div'))<block_end><def_stmt>test_pde_separate_mul <block_start>x,y,z,t=symbols("x,y,z,t")<line_sep>c=Symbol("C" real=<true>)<line_sep>Phi=Function('Phi')<line_sep>F,R,T,X,Y,Z,u=map(Function 'FRTXYZu')<line_sep>r,theta,z=symbols('r,theta,z')<line_sep># Something simple :) eq=Eq(D(F(x y z) x)+D(F(x y z) y)+D(F(x y z) z) 0)<line_sep># Duplicate arguments in functions raises(ValueError <lambda>:pde_separate_mul(eq F(x y z) [X(x) u(z z)]))<line_sep># Wrong number of arguments raises(ValueError <lambda>:pde_separate_mul(eq F(x y z) [X(x) Y(y)]))<line_sep># Wrong variables: [x, y] -> [x, z] raises(ValueError <lambda>:pde_separate_mul(eq F(x y z) [X(t) Y(x y)]))<assert_stmt>pde_separate_mul(eq F(x y z) [Y(y) u(x z)])<eq>[D(Y(y) y)/Y(y) -D(u(x z) x)/u(x z)-D(u(x z) z)/u(x z)]<assert_stmt>pde_separate_mul(eq F(x y z) [X(x) Y(y) Z(z)])<eq>[D(X(x) x)/X(x) -D(Z(z) z)/Z(z)-D(Y(y) y)/Y(y)]<line_sep># wave equation wave=Eq(D(u(x t) t t) c<power>2<times>D(u(x t) x x))<line_sep>res=pde_separate_mul(wave u(x t) [X(x) T(t)])<assert_stmt>res<eq>[D(X(x) x x)/X(x) D(T(t) t t)/(c<power>2<times>T(t))]<line_sep># Laplace equation in cylindrical coords eq=Eq(1/r<times>D(Phi(r theta z) r)+D(Phi(r theta z) r 2)+1/r<power>2<times>D(Phi(r theta z) theta 2)+D(Phi(r theta z) z 2) 0)<line_sep># Separate z res=pde_separate_mul(eq Phi(r theta z) [Z(z) u(theta r)])<assert_stmt>res<eq>[D(Z(z) z z)/Z(z) -D(u(theta r) r r)/u(theta r)-D(u(theta r) r)/(r<times>u(theta r))-D(u(theta r) theta theta)/(r<power>2<times>u(theta r))]<line_sep># Lets use the result to create a new equation... eq=Eq(res[1] c)<line_sep># ...and separate theta... res=pde_separate_mul(eq u(theta r) [T(theta) R(r)])<assert_stmt>res<eq>[D(T(theta) theta theta)/T(theta) -r<times>D(R(r) r)/R(r)-r<power>2<times>D(R(r) r r)/R(r)-c<times>r<power>2]<line_sep># ...or r... res=pde_separate_mul(eq u(theta r) [R(r) T(theta)])<assert_stmt>res<eq>[r<times>D(R(r) r)/R(r)+r<power>2<times>D(R(r) r r)/R(r)+c<times>r<power>2 -D(T(theta) theta theta)/T(theta)]<block_end><def_stmt>test_issue_11726 <block_start>x,t=symbols("x t")<line_sep>f=symbols("f" cls=Function)<line_sep>X,T=symbols("X T" cls=Function)<line_sep>u=f(x t)<line_sep>eq=u.diff(x 2)-u.diff(t 2)<line_sep>res=pde_separate(eq u [T(x) X(t)])<assert_stmt>res<eq>[D(T(x) x x)/T(x) D(X(t) t t)/X(t)]<block_end><def_stmt>test_pde_classify # When more number of hints are added, add tests for classifying here. 
<block_start>f=Function('f')<line_sep>eq1=a<times>f(x y)+b<times>f(x y).diff(x)+c<times>f(x y).diff(y)<line_sep>eq2=3<times>f(x y)+2<times>f(x y).diff(x)+f(x y).diff(y)<line_sep>eq3=a<times>f(x y)+b<times>f(x y).diff(x)+2<times>f(x y).diff(y)<line_sep>eq4=x<times>f(x y)+f(x y).diff(x)+3<times>f(x y).diff(y)<line_sep>eq5=x<power>2<times>f(x y)+x<times>f(x y).diff(x)+x<times>y<times>f(x y).diff(y)<line_sep>eq6=y<times>x<power>2<times>f(x y)+y<times>f(x y).diff(x)+f(x y).diff(y)<for_stmt>eq [eq1 eq2 eq3]<block_start><assert_stmt>classify_pde(eq)<eq>('1st_linear_constant_coeff_homogeneous' )<block_end><for_stmt>eq [eq4 eq5 eq6]<block_start><assert_stmt>classify_pde(eq)<eq>('1st_linear_variable_coeff' )<block_end><block_end><def_stmt>test_checkpdesol <block_start>f,F=map(Function ['f' 'F'])<line_sep>eq1=a<times>f(x y)+b<times>f(x y).diff(x)+c<times>f(x y).diff(y)<line_sep>eq2=3<times>f(x y)+2<times>f(x y).diff(x)+f(x y).diff(y)<line_sep>eq3=a<times>f(x y)+b<times>f(x y).diff(x)+2<times>f(x y).diff(y)<for_stmt>eq [eq1 eq2 eq3]<block_start><assert_stmt>checkpdesol(eq pdsolve(eq))[0]<block_end>eq4=x<times>f(x y)+f(x y).diff(x)+3<times>f(x y).diff(y)<line_sep>eq5=2<times>f(x y)+1<times>f(x y).diff(x)+3<times>f(x y).diff(y)<line_sep>eq6=f(x y)+1<times>f(x y).diff(x)+3<times>f(x y).diff(y)<assert_stmt>checkpdesol(eq4 [pdsolve(eq5) pdsolve(eq6)])<eq>[(<false> (x-2)<times>F(3<times>x-y)<times>exp(-x/S(5)-3<times>y/S(5))) (<false> (x-1)<times>F(3<times>x-y)<times>exp(-x/S(10)-3<times>y/S(10)))]<for_stmt>eq [eq4 eq5 eq6]<block_start><assert_stmt>checkpdesol(eq pdsolve(eq))[0]<block_end>sol=pdsolve(eq4)<line_sep>sol4=Eq(sol.lhs-sol.rhs 0)<line_sep>raises(NotImplementedError <lambda>:checkpdesol(eq4 sol4 solve_for_func=<false>))<block_end><def_stmt>test_solvefun <block_start>f,F,G,H=map(Function ['f' 'F' 'G' 'H'])<line_sep>eq1=f(x y)+f(x y).diff(x)+f(x y).diff(y)<assert_stmt>pdsolve(eq1)<eq>Eq(f(x y) F(x-y)<times>exp(-x/2-y/2))<assert_stmt>pdsolve(eq1 solvefun=G)<eq>Eq(f(x y) G(x-y)<times>exp(-x/2-y/2))<assert_stmt>pdsolve(eq1 solvefun=H)<eq>Eq(f(x y) H(x-y)<times>exp(-x/2-y/2))<block_end><def_stmt>test_pde_1st_linear_constant_coeff_homogeneous <block_start>f,F=map(Function ['f' 'F'])<line_sep>u=f(x y)<line_sep>eq=2<times>u+u.diff(x)+u.diff(y)<assert_stmt>classify_pde(eq)<eq>('1st_linear_constant_coeff_homogeneous' )<line_sep>sol=pdsolve(eq)<assert_stmt>sol<eq>Eq(u F(x-y)<times>exp(-x-y))<assert_stmt>checkpdesol(eq sol)[0]<line_sep>eq=4+(3<times>u.diff(x)/u)+(2<times>u.diff(y)/u)<assert_stmt>classify_pde(eq)<eq>('1st_linear_constant_coeff_homogeneous' )<line_sep>sol=pdsolve(eq)<assert_stmt>sol<eq>Eq(u F(2<times>x-3<times>y)<times>exp(-S(12)<times>x/13-S(8)<times>y/13))<assert_stmt>checkpdesol(eq sol)[0]<line_sep>eq=u+(6<times>u.diff(x))+(7<times>u.diff(y))<assert_stmt>classify_pde(eq)<eq>('1st_linear_constant_coeff_homogeneous' )<line_sep>sol=pdsolve(eq)<assert_stmt>sol<eq>Eq(u F(7<times>x-6<times>y)<times>exp(-6<times>x/S(85)-7<times>y/S(85)))<assert_stmt>checkpdesol(eq sol)[0]<line_sep>eq=a<times>u+b<times>u.diff(x)+c<times>u.diff(y)<line_sep>sol=pdsolve(eq)<assert_stmt>checkpdesol(eq sol)[0]<block_end><def_stmt>test_pde_1st_linear_constant_coeff <block_start>f,F=map(Function ['f' 'F'])<line_sep>u=f(x y)<line_sep>eq=-2<times>u.diff(x)+4<times>u.diff(y)+5<times>u-exp(x+3<times>y)<line_sep>sol=pdsolve(eq)<assert_stmt>sol<eq>Eq(f(x y) (F(4<times>x+2<times>y)<times>exp(x/2)+exp(x+4<times>y)/15)<times>exp(-y))<assert_stmt>classify_pde(eq)<eq>('1st_linear_constant_coeff' 
'1st_linear_constant_coeff_Integral')<assert_stmt>checkpdesol(eq sol)[0]<line_sep>eq=(u.diff(x)/u)+(u.diff(y)/u)+1-(exp(x+y)/u)<line_sep>sol=pdsolve(eq)<assert_stmt>sol<eq>Eq(f(x y) F(x-y)<times>exp(-x/2-y/2)+exp(x+y)/3)<assert_stmt>classify_pde(eq)<eq>('1st_linear_constant_coeff' '1st_linear_constant_coeff_Integral')<assert_stmt>checkpdesol(eq sol)[0]<line_sep>eq=2<times>u+-u.diff(x)+3<times>u.diff(y)+sin(x)<line_sep>sol=pdsolve(eq)<assert_stmt>sol<eq>Eq(f(x y) F(3<times>x+y)<times>exp(x/5-3<times>y/5)-2<times>sin(x)/5-cos(x)/5)<assert_stmt>classify_pde(eq)<eq>('1st_linear_constant_coeff' '1st_linear_constant_coeff_Integral')<assert_stmt>checkpdesol(eq sol)[0]<line_sep>eq=u+u.diff(x)+u.diff(y)+x<times>y<line_sep>sol=pdsolve(eq)<assert_stmt>sol.expand()<eq>Eq(f(x y) x+y+(x-y)<power>2/4-(x+y)<power>2/4+F(x-y)<times>exp(-x/2-y/2)-2).expand()<assert_stmt>classify_pde(eq)<eq>('1st_linear_constant_coeff' '1st_linear_constant_coeff_Integral')<assert_stmt>checkpdesol(eq sol)[0]<line_sep>eq=u+u.diff(x)+u.diff(y)+log(x)<assert_stmt>classify_pde(eq)<eq>('1st_linear_constant_coeff' '1st_linear_constant_coeff_Integral')<block_end><def_stmt>test_pdsolve_all <block_start>f,F=map(Function ['f' 'F'])<line_sep>u=f(x y)<line_sep>eq=u+u.diff(x)+u.diff(y)+x<power>2<times>y<line_sep>sol=pdsolve(eq hint='all')<line_sep>keys=['1st_linear_constant_coeff' '1st_linear_constant_coeff_Integral' 'default' 'order']<assert_stmt>sorted(sol.keys())<eq>keys<assert_stmt>sol['order']<eq>1<assert_stmt>sol['default']<eq>'1st_linear_constant_coeff'<assert_stmt>sol['1st_linear_constant_coeff'].expand()<eq>Eq(f(x y) -x<power>2<times>y+x<power>2+2<times>x<times>y-4<times>x-2<times>y+F(x-y)<times>exp(-x/2-y/2)+6).expand()<block_end><def_stmt>test_pdsolve_variable_coeff <block_start>f,F=map(Function ['f' 'F'])<line_sep>u=f(x y)<line_sep>eq=x<times>(u.diff(x))-y<times>(u.diff(y))+y<power>2<times>u-y<power>2<line_sep>sol=pdsolve(eq hint="1st_linear_variable_coeff")<assert_stmt>sol<eq>Eq(u F(x<times>y)<times>exp(y<power>2/2)+1)<assert_stmt>checkpdesol(eq sol)[0]<line_sep>eq=x<power>2<times>u+x<times>u.diff(x)+x<times>y<times>u.diff(y)<line_sep>sol=pdsolve(eq hint='1st_linear_variable_coeff')<assert_stmt>sol<eq>Eq(u F(y<times>exp(-x))<times>exp(-x<power>2/2))<assert_stmt>checkpdesol(eq sol)[0]<line_sep>eq=y<times>x<power>2<times>u+y<times>u.diff(x)+u.diff(y)<line_sep>sol=pdsolve(eq hint='1st_linear_variable_coeff')<assert_stmt>sol<eq>Eq(u F(-2<times>x+y<power>2)<times>exp(-x<power>3/3))<assert_stmt>checkpdesol(eq sol)[0]<line_sep>eq=exp(x)<power>2<times>(u.diff(x))+y<line_sep>sol=pdsolve(eq hint='1st_linear_variable_coeff')<assert_stmt>sol<eq>Eq(u y<times>exp(-2<times>x)/2+F(y))<assert_stmt>checkpdesol(eq sol)[0]<line_sep>eq=exp(2<times>x)<times>(u.diff(y))+y<times>u-u<line_sep>sol=pdsolve(eq hint='1st_linear_variable_coeff')<assert_stmt>sol<eq>Eq(u F(x)<times>exp(-y<times>(y-2)<times>exp(-2<times>x)/2))<block_end>
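# Worked example mirroring the cases above: a multiplicative separation of the
# wave equation, and a first-order linear constant-coefficient PDE solved and
# verified with checkpdesol.
from sympy import Eq, Function, symbols
from sympy import Derivative as D
from sympy.solvers.pde import pde_separate_mul, pdsolve, checkpdesol

x, y, t = symbols('x y t')
c = symbols('c', real=True)
u, X, T, f = map(Function, 'uXTf')

# u_tt = c**2 * u_xx splits into an ODE in x and an ODE in t:
wave = Eq(D(u(x, t), t, t), c**2 * D(u(x, t), x, x))
res = pde_separate_mul(wave, u(x, t), [X(x), T(t)])
# res == [D(X(x), x, x)/X(x), D(T(t), t, t)/(c**2*T(t))]

# 2*u + u_x + u_y = 0 has the general solution F(x - y)*exp(-x - y):
eq = 2*f(x, y) + f(x, y).diff(x) + f(x, y).diff(y)
sol = pdsolve(eq)
assert checkpdesol(eq, sol)[0]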
""" Author: <NAME> """<import_stmt>numpy<as>np<import_stmt>pandas<as>pd<import_from_stmt>datetime datetime<class_stmt>TrackerFeeder(object)<block_start>""" Feeder for the trackers of the FinanceHub database. """<def_stmt>__init__ self db_connect<block_start>""" Feeder construction :param db_connect: sql connection engine from sqlalchemy """<line_sep>self.conn=db_connect.connection<block_end><def_stmt>fetch self fh_ticker<block_start>""" grabs trackers from the FH database :param fh_ticker: str or list with the tickers from the database trackers :return: pandas DataFrame with tickers on the columns """<assert_stmt>type(fh_ticker)<is>str<or>type(fh_ticker)<is>list<or>type(fh_ticker)<is>dict "'tickers' must be a string, list or dict"<line_sep>sql_query='SELECT time_stamp, fh_ticker, value FROM "trackers" WHERE '<if_stmt>type(fh_ticker)<is>str<block_start>sql_query=sql_query+"fh_ticker IN ('"+fh_ticker+"')"<block_end><elif_stmt>type(fh_ticker)<is>list<block_start>sql_query=sql_query+"fh_ticker IN ('"+"', '".join(fh_ticker)+"')"<block_end><elif_stmt>type(fh_ticker)<is>dict<block_start>sql_query=sql_query+"fh_ticker IN ('"+"', '".join(list(fh_ticker.keys()))+"')"<block_end>df=pd.read_sql(sql=sql_query con=self.conn)<line_sep>df=df.pivot(index='time_stamp' columns='fh_ticker' values='value')<if_stmt>type(fh_ticker)<is>dict<block_start>df=df.rename(fh_ticker axis=1)<block_end>df.index=pd.to_datetime(df.index)<line_sep>df=df.dropna(how='all')<line_sep>df=df.sort_index()<line_sep><return>df<block_end><def_stmt>fetch_metadata self<block_start>""" Returns the full metadata table of the FH trackers, which is useful to do custom filters and look at what is in the database. :return: pandas Dataframe """<line_sep>sql_query='SELECT * FROM "trackers_description"'<line_sep>df=pd.read_sql(sql=sql_query con=self.conn)<line_sep><return>df<block_end><def_stmt>filter_fetch self filter_dict ret='series'<block_start>""" Grabs the trackers from the FH database that satisfy the criteria given by 'filter_dict'. :param filter_dict: dict. Keys must be column names from the metadata table. Values must be either str or list of str :param ret: If 'series', returns the a dataframe with the tracker series that staistfy the conditions. If 'tickers', returns a list of the tickers that staistfy the conditions. :return: list or pandas DataFrame """<assert_stmt>type(filter_dict)<is>dict "'filter_dict' must be a dict"<assert_stmt>len(filter_dict)<g>0 "'filter_dict' is empty"<assert_stmt>ret.lower()<in>['series' 'tickers'] "'ret' must be either 'series' or 'ticker'"<line_sep>desc_query='SELECT fh_ticker FROM trackers_description WHERE '<for_stmt>col filter_dict.keys()<block_start><if_stmt>type(filter_dict[col])<is>list<block_start>desc_query=desc_query+col+" IN ('"+"', '".join(filter_dict[col])+"')"<block_end><else_stmt><block_start>desc_query=desc_query+col+f" IN ('{filter_dict[col]}')"<block_end>desc_query=desc_query+' and '<block_end>desc_query=desc_query[:-5]<line_sep>df=pd.read_sql(sql=desc_query con=self.conn)<line_sep>tickers=df.values.flatten().tolist()<if_stmt>ret<eq>'tickers'<block_start><return>tickers<block_end>df=self.fetch(tickers)<line_sep><return>df<block_end><def_stmt>filter_parameters self<block_start>""" Grabs the possible columns and their respective unique values from the metadata table. :return: dict. Keys are the column names, values are list of unique values of the column. 
"""<line_sep>df=self.fetch_metadata()<line_sep>param_dict={}<for_stmt>col df.columns<block_start>param_dict[col]=df[col].unique().tolist()<block_end><return>param_dict<block_end><def_stmt>fetch_everything self<block_start>sql_query='SELECT time_stamp, fh_ticker, value FROM "trackers"'<line_sep>df=pd.read_sql(sql=sql_query con=self.conn)<line_sep>df=df.pivot(index='time_stamp' columns='fh_ticker' values='value')<line_sep>df.index=pd.to_datetime(df.index)<line_sep>df=df.dropna(how='all')<line_sep>df=df.sort_index()<line_sep><return>df<block_end><block_end><class_stmt>FocusFeeder(object)<block_start><def_stmt>__init__ self db_connect<block_start>""" Feeder construction :param db_connect: sql connection engine from sqlalchemy """<line_sep>self.conn=db_connect.connection<block_end><def_stmt>fetch self index='ipca' frequency='yearly' prediction_scope=<none> dt_ini=<none> dt_end=<none><block_start>""" Grabs data from the data base and pivots the results into a dataframe. To assure consistency The function can only take one index at a time and one frequency at a time. Only'prediction_scope' can be a list. If no prediction scope is passed, all available prediction scopes are returned. :param index: String containing the name of the index. :param frequency: String. 'yearly', 'monthly' or 'quarterly' (availability depends on the index) :param prediction_scope: string, float or list. Years that the forecasts are for. :param dt_ini: string. Initial date for the series :param dt_end: string. End date for the series :return: pandas DataFrame with the pivoted data. """<line_sep># Error Checking self._basic_assertions(index frequency prediction_scope)<line_sep># Handle formats index,frequency,prediction_scope,dt_ini,dt_end,pivot=self._map_inputs(index frequency prediction_scope dt_ini dt_end)<line_sep># build sql query sql_query=self._build_sql_query(index frequency prediction_scope dt_ini dt_end)<line_sep># get data df=pd.read_sql(sql=sql_query con=self.conn)<line_sep>df=df.drop_duplicates()<line_sep># pivoting df=df.pivot(index='date' columns=pivot values='value')<line_sep>df.index=pd.to_datetime(df.index)<line_sep><return>df<block_end><def_stmt>years_ahead self index='IPCA' years=1 dt_ini=<none> dt_end=<none><block_start>""" The metric atribute is set to 'mean' by default because further projections change smoothly """<line_sep># Error checking self._basic_assertions_years_ahead(index years)<line_sep># Handle formats index,dt_ini,dt_end=self._map_inputs_years_ahead(index dt_ini dt_end)<line_sep># grabs the index for all available years for each date df=self.fetch(index=index frequency='yearly' prediction_scope=<none> dt_ini=dt_ini dt_end=dt_end)<line_sep># creates the new dataframe df_weighted=pd.DataFrame(index=df.index)<line_sep>df_weighted[index+' '+str(years)+' year ahead']=np.nan<line_sep># days until year end df_weighted['D2YE']=((df_weighted.index+pd.offsets.YearEnd())-pd.to_datetime(df_weighted.index.tolist())).days<for_stmt>ind df_weighted.index<block_start><if_stmt>ind.day<eq>31<and>ind.month<eq>12<block_start>df_weighted.loc[ind 'D2YE']=0<block_end><block_end># loops on each date <for_stmt>date df_weighted.index<block_start>df_weighted.loc[date index+' '+str(years)+' year ahead']=(df.loc[date str(date.year+years-1)]<times>df_weighted.loc[date 'D2YE']+df.loc[date str(date.year+years)]<times>(365-df_weighted.loc[date 'D2YE']))/365<block_end>df=df_weighted[[index+' '+str(years)+' year 
ahead']].interpolate()<line_sep>df.index=pd.to_datetime(df.index)<line_sep><return>df<block_end>@staticmethod<def_stmt>_basic_assertions index frequency prediction_scope<block_start>"""Check basic assertions"""<assert_stmt>type(index)<is>str 'index must be a string'<assert_stmt>type(frequency)<is>str 'frequency must be a string'<block_end>@staticmethod<def_stmt>_map_inputs index frequency prediction_scope dt_ini dt_end<block_start>"""Handle formats of the inputs"""<line_sep># index <if_stmt>type(index)<is>str<block_start>index=index.lower()<block_end><elif_stmt>type(index)<is>list<block_start>index=[x.lower()<for>x index]<block_end># frequency frequency=frequency.lower()<line_sep># prediction_scope <if_stmt>type(prediction_scope)<is>str<block_start>prediction_scope=prediction_scope.lower()<block_end><elif_stmt>type(prediction_scope)<is>list<block_start>prediction_scope=[str(x).lower()<for>x prediction_scope]<block_end><elif_stmt>prediction_scope<is><none><block_start>prediction_scope=<none><block_end><else_stmt><block_start>prediction_scope=str(prediction_scope).lower()<block_end># dates <if_stmt>dt_ini<is><none><block_start>dt_ini='1900-01-01'<block_end><if_stmt>dt_end<is><none><block_start>dt_end=datetime.now().strftime('%Y-%m-%d')<block_end># pivot variable (while we have no metrics, its always the prediction scope) pivot='prediction_scope'<line_sep><return>index frequency prediction_scope dt_ini dt_end pivot<block_end>@staticmethod<def_stmt>_build_sql_query index frequency prediction_scope dt_ini dt_end<block_start>sql_query='SELECT DATE, VALUE, PREDICTION_SCOPE FROM "focus_survey" WHERE '<line_sep># index (must not be None) <if_stmt>type(index)<is>str<block_start>sql_query=sql_query+"lower(INDEX) IN ('"+index+"')"<block_end><elif_stmt>type(index)<is>list<block_start>sql_query=sql_query+"lower(INDEX) IN ('"+"', '".join(index)+"')"<block_end># frequency <if_stmt>type(frequency)<is>str<block_start>sql_query=sql_query+" AND lower(FREQUENCY) IN ('"+frequency+"')"<block_end><elif_stmt>type(frequency)<is>list<block_start>sql_query=sql_query+" AND lower(FREQUENCY) IN ('"+"', '".join(frequency)+"')"<block_end># prediction scope <if_stmt>type(prediction_scope)<is>str<block_start>sql_query=sql_query+" AND lower(PREDICTION_SCOPE) IN ('"+prediction_scope+"')"<block_end><elif_stmt>type(prediction_scope)<is>list<block_start>sql_query=sql_query+" AND lower(PREDICTION_SCOPE) IN ('"+"', '".join(prediction_scope)+"')"<block_end>sql_query=sql_query+" AND DATE BETWEEN '"+dt_ini+"' AND '"+dt_end+"'"<line_sep>sql_query=sql_query+' ORDER BY DATE;'<line_sep><return>sql_query<block_end>@staticmethod<def_stmt>_basic_assertions_years_ahead index years<block_start>"""Check basic assertions"""<assert_stmt>type(index)<is>str 'index must be a string'<assert_stmt>(type(years)<is>int)<and>(years<le>4) 'number of years must be an intger between 1 and 4'<block_end>@staticmethod<def_stmt>_map_inputs_years_ahead index dt_ini dt_end<block_start>"""Handles the format of the inputs of the years_ahead method"""<line_sep>index=index.lower()<line_sep># dates <if_stmt>dt_ini<is><none><block_start>dt_ini='1900-01-01'<block_end><if_stmt>dt_end<is><none><block_start>dt_end=datetime.now().strftime('%Y-%m-%d')<block_end><return>index dt_ini dt_end<block_end><block_end>
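# Hedged usage sketch for the feeders defined above.  The constructors only
# need an object exposing a ".connection" attribute that pandas.read_sql can
# use, so a small wrapper around a SQLAlchemy connection is used here; the
# connection string and example arguments are placeholders.
from types import SimpleNamespace
from sqlalchemy import create_engine

engine = create_engine("postgresql://user:password@localhost:5432/financehub")
db_connect = SimpleNamespace(connection=engine.connect())

tf = TrackerFeeder(db_connect)
metadata = tf.fetch_metadata()        # full trackers_description table
params = tf.filter_parameters()       # available filter columns and values

ff = FocusFeeder(db_connect)
ipca_1y = ff.years_ahead(index='IPCA', years=1, dt_ini='2015-01-01')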
<import_from_stmt>diagrams Node<class_stmt>_Outscale(Node)<block_start>_provider="outscale"<line_sep>_icon_dir="resources/outscale"<line_sep>fontcolor="#ffffff"<block_end>
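# Hedged sketch of how this provider base class is typically extended in the
# diagrams package: a category class fixes _type and its icon directory, and
# concrete node classes only name an icon file.  The Compute/compute.png names
# below are illustrative, not necessarily the real outscale resources.
class _Compute(_Outscale):
    _type = "compute"
    _icon_dir = "resources/outscale/compute"


class Compute(_Compute):
    _icon = "compute.png"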
<import_from_stmt>overrides overrides<import_from_stmt>..masked_layer MaskedLayer<class_stmt>OutputMask(MaskedLayer)<block_start>""" This Layer is purely for debugging. You can wrap this on a layer's output to get the mask output by that layer as a model output, for easier visualization of what the model is actually doing. Don't try to use this in an actual model. """<line_sep>@overrides<def_stmt>compute_mask self inputs mask=<none><block_start><return><none><block_end>@overrides<def_stmt>call self inputs mask=<none># pylint: disable=unused-argument <block_start><return>mask<block_end><block_end>
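# Hedged usage sketch following the docstring above, assuming a Keras-style
# functional model: wiring OutputMask onto a masked tensor exposes the mask
# produced by the embedding layer as an ordinary model output for inspection.
from keras.layers import Embedding, Input
from keras.models import Model

word_ids = Input(shape=(10,), dtype='int32')
embedded = Embedding(input_dim=100, output_dim=8, mask_zero=True)(word_ids)
mask = OutputMask()(embedded)
debug_model = Model(inputs=word_ids, outputs=mask)   # predicts the mask itself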
<class_stmt>RegipyException(Exception)<block_start>""" This is the parent exception for all regipy exceptions """<line_sep><pass><block_end><class_stmt>RegipyGeneralException(RegipyException)<block_start>""" General exception """<line_sep><pass><block_end><class_stmt>RegistryValueNotFoundException(RegipyException)<block_start><pass><block_end><class_stmt>NoRegistrySubkeysException(RegipyException)<block_start><pass><block_end><class_stmt>NoRegistryValuesException(RegipyException)<block_start><pass><block_end><class_stmt>RegistryKeyNotFoundException(RegipyException)<block_start><pass><block_end><class_stmt>UnidentifiedHiveException(RegipyException)<block_start><pass><block_end><class_stmt>RegistryRecoveryException(RegipyException)<block_start><pass><block_end><class_stmt>RegistryParsingException(RegipyException)<block_start>""" Raised when there is a parsing error, most probably a corrupted hive """<line_sep><pass><block_end><class_stmt>NtSidDecodingException(RegipyException)<block_start>""" Raised when the binary Windows NT SID representation can not be decoded """<block_end>
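# Hedged sketch of where these exceptions typically surface when reading a
# hive with regipy (the hive path and key path below are placeholders):
from regipy.registry import RegistryHive

hive = RegistryHive('/tmp/NTUSER.DAT')
try:
    key = hive.get_key('\\Software\\DoesNotExist')
except RegistryKeyNotFoundException:
    key = None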
""" If you find this code useful, please cite our paper: <NAME>, <NAME>, and <NAME>. "HistoGAN: Controlling Colors of GAN-Generated and Real Images via Color Histograms." In CVPR, 2021. @inproceedings{afifi2021histogan, title={Histo{GAN}: Controlling Colors of {GAN}-Generated and Real Images via Color Histograms}, author={<NAME> Brubaker, <NAME>. and Brown, <NAME>.}, booktitle={CVPR}, year={2021} } """<import_from_stmt>tqdm tqdm<import_from_stmt>histoGAN Trainer NanException<import_from_stmt>histogram_classes.RGBuvHistBlock RGBuvHistBlock<import_from_stmt>datetime datetime<import_stmt>torch<import_stmt>argparse<import_from_stmt>retry.api retry_call<import_stmt>os<import_from_stmt>PIL Image<import_from_stmt>torchvision transforms<import_stmt>numpy<as>np<line_sep>SCALE=1/np.sqrt(2.0)<def_stmt>train_from_folder data='./dataset/' results_dir='./results' models_dir='./models' name='test' new=<false> load_from=-1 image_size=128 network_capacity=16 transparent=<false> batch_size=2 gradient_accumulate_every=8 num_train_steps=150000 learning_rate=2e-4 num_workers=<none> save_every=1000 generate=<false> save_noise_latent=<false> target_noise_file=<none> target_latent_file=<none> num_image_tiles=8 trunc_psi=0.75 fp16=<false> fq_layers=[] fq_dict_size=256 attn_layers=[] hist_method='inverse-quadratic' hist_resizing='sampling' hist_sigma=0.02 hist_bin=64 hist_insz=150 alpha=2 target_hist=<none> aug_prob=0.0 dataset_aug_prob=0.0 aug_types=<none><block_start>model=Trainer(name results_dir models_dir batch_size=batch_size gradient_accumulate_every=gradient_accumulate_every image_size=image_size network_capacity=network_capacity transparent=transparent lr=learning_rate num_workers=num_workers save_every=save_every trunc_psi=trunc_psi fp16=fp16 fq_layers=fq_layers fq_dict_size=fq_dict_size attn_layers=attn_layers hist_insz=hist_insz hist_bin=hist_bin hist_sigma=hist_sigma hist_resizing=hist_resizing hist_method=hist_method aug_prob=aug_prob dataset_aug_prob=dataset_aug_prob aug_types=aug_types)<if_stmt><not>new<block_start>model.load(load_from)<block_end><else_stmt><block_start>model.clear()<block_end><if_stmt>generate<block_start>now=datetime.now()<line_sep>timestamp=now.strftime("%m-%d-%Y_%H-%M-%S")<if_stmt>save_noise_latent<and><not>os.path.exists('temp')<block_start>os.mkdir('./temp')<block_end><if_stmt>save_noise_latent<and><not>os.path.exists(f'./temp/{name}')<block_start>os.mkdir(f'./temp/{name}')<block_end><if_stmt>target_hist<is><none><block_start><raise>Exception('No target histogram or image is given')<block_end>extension=os.path.splitext(target_hist)[1]<if_stmt>extension<eq>'.npy'<block_start>hist=np.load(target_hist)<line_sep>h=torch.from_numpy(hist).to(device=torch.cuda.current_device())<if_stmt>num_image_tiles<g>1<block_start>num_image_tiles=num_image_tiles-num_image_tiles%2<for_stmt>i range(int(np.log2(num_image_tiles)))<block_start>h=torch.cat((h h) dim=0)<block_end><block_end>samples_name=('generated-'+f'{os.path.basename(os.path.splitext(target_hist)[0])}'<concat>f'-{timestamp}')<line_sep>model.evaluate(samples_name hist_batch=h num_image_tiles=num_image_tiles save_noise_latent=save_noise_latent load_noise_file=target_noise_file load_latent_file=target_latent_file)<line_sep>print(f'sample images generated at {results_dir}/{name}/{samples_name}')<block_end><elif_stmt>str.lower(extension)<eq>'.jpg'<or>str.lower(extension)<eq>'.png'<block_start>histblock=RGBuvHistBlock(insz=hist_insz h=hist_bin resizing=hist_resizing method=hist_method sigma=hist_sigma 
device=torch.cuda.current_device())<line_sep>transform=transforms.Compose([transforms.ToTensor()])<line_sep>img=Image.open(target_hist)<line_sep>img=torch.unsqueeze(transform(img) dim=0).to(device=torch.cuda.current_device())<line_sep>h=histblock(img)<if_stmt>num_image_tiles<g>1<block_start>num_image_tiles=num_image_tiles-num_image_tiles%2<for_stmt>i range(int(np.log2(num_image_tiles)))<block_start>h=torch.cat((h h) dim=0)<block_end><block_end>samples_name=('generated-'+f'{os.path.basename(os.path.splitext(target_hist)[0])}'<concat>f'-{timestamp}')<line_sep>model.evaluate(samples_name hist_batch=h num_image_tiles=num_image_tiles save_noise_latent=save_noise_latent load_noise_file=target_noise_file load_latent_file=target_latent_file)<line_sep>print(f'sample images generated at {results_dir}/{name}/{samples_name}')<block_end><elif_stmt>extension<eq>''<block_start>files=[os.path.join(target_hist f)<for>f os.listdir(target_hist)<if>os.path.isfile(os.path.join(target_hist f))]<line_sep>histblock=RGBuvHistBlock(insz=hist_insz h=hist_bin resizing=hist_resizing method=hist_method sigma=hist_sigma device=torch.cuda.current_device())<line_sep>transform=transforms.Compose([transforms.ToTensor()])<for_stmt>f files<block_start>extension=os.path.splitext(f)[1]<if_stmt>extension<eq>'.npy'<block_start>hist=np.load(f)<line_sep>h=torch.from_numpy(hist).to(device=torch.cuda.current_device())<block_end><elif_stmt>(extension<eq>str.lower(extension)<eq>'.jpg'<or>str.lower(extension)<eq>'.png')<block_start>img=Image.open(f)<line_sep>img=torch.unsqueeze(transform(img) dim=0).to(device=torch.cuda.current_device())<line_sep>h=histblock(img)<block_end><else_stmt><block_start>print(f'Warning: File extension of {f} is not supported.')<line_sep><continue><block_end><if_stmt>num_image_tiles<g>1<block_start>num_image_tiles=num_image_tiles-num_image_tiles%2<for_stmt>i range(int(np.log2(num_image_tiles)))<block_start>h=torch.cat((h h) dim=0)<block_end><block_end>samples_name=('generated-'+f'{os.path.basename(os.path.splitext(f)[0])}'<concat>f'-{timestamp}')<line_sep>model.evaluate(samples_name hist_batch=h num_image_tiles=num_image_tiles save_noise_latent=save_noise_latent load_noise_file=target_noise_file load_latent_file=target_latent_file)<line_sep>print(f'sample images generated at {results_dir}/{name}/'<concat>f'{samples_name}')<block_end><block_end><else_stmt><block_start>print('The file extension of target image is not supported.')<line_sep><raise>NotImplementedError<block_end><return><block_end>print('\nStart training....\n')<line_sep>print(f'Alpha = {alpha}')<line_sep>model.set_data_src(data)<for_stmt>_ tqdm(range(num_train_steps-model.steps) mininterval=10. 
desc=f'{name}<{data}>')<block_start>retry_call(model.train fargs=[alpha] tries=3 exceptions=NanException)<if_stmt>_%50<eq>0<block_start>model.print_log()<block_end><block_end><block_end><def_stmt>get_args <block_start>parser=argparse.ArgumentParser(description='Train/Test HistoGAN.')<line_sep>parser.add_argument('--data' dest='data' default='./dataset/')<line_sep>parser.add_argument('--results_dir' dest='results_dir' default='./results_HistoGAN')<line_sep>parser.add_argument('--models_dir' dest='models_dir' default='./models')<line_sep>parser.add_argument('--target_hist' dest='target_hist' default=<none>)<line_sep>parser.add_argument('--name' dest='name' default='histoGAN_model')<line_sep>parser.add_argument('--new' dest='new' default=<false>)<line_sep>parser.add_argument('--load_from' dest='load_from' default=-1)<line_sep>parser.add_argument('--image_size' dest='image_size' default=256 type=int)<line_sep>parser.add_argument('--network_capacity' dest='network_capacity' default=16 type=int)<line_sep>parser.add_argument('--transparent' dest='transparent' default=<false>)<line_sep>parser.add_argument('--batch_size' dest='batch_size' default=2 type=int)<line_sep>parser.add_argument('--gradient_accumulate_every' dest='gradient_accumulate_every' default=8 type=int)<line_sep>parser.add_argument('--num_train_steps' dest='num_train_steps' default=1500000 type=int)<line_sep>parser.add_argument('--learning_rate' dest='learning_rate' default=2e-4 type=float)<line_sep>parser.add_argument('--num_workers' dest='num_workers' default=<none>)<line_sep>parser.add_argument('--save_every' dest='save_every' default=5000 type=int)<line_sep>parser.add_argument('--generate' dest='generate' default=<false>)<line_sep>parser.add_argument('--save_noise_latent' dest='save_n_l' default=<false>)<line_sep>parser.add_argument('--target_noise_file' dest='target_n' default=<none>)<line_sep>parser.add_argument('--target_latent_file' dest='target_l' default=<none>)<line_sep>parser.add_argument('--num_image_tiles' dest='num_image_tiles' default=16 type=int)<line_sep>parser.add_argument('--trunc_psi' dest='trunc_psi' default=0.75 type=float)<line_sep>parser.add_argument('--fp 16' dest='fp16' default=<false>)<line_sep>parser.add_argument('--fq_layers' dest='fq_layers' default=[])<line_sep>parser.add_argument('--fq_dict_size' dest='fq_dict_size' default=256 type=int)<line_sep>parser.add_argument('--attn_layers' dest='attn_layers' default=[])<line_sep>parser.add_argument('--gpu' dest='gpu' default=0 type=int)<line_sep>parser.add_argument('--hist_bin' dest='hist_bin' default=64 type=int)<line_sep>parser.add_argument('--hist_insz' dest='hist_insz' default=150 type=int)<line_sep>parser.add_argument('--hist_method' dest='hist_method' default='inverse-quadratic')<line_sep>parser.add_argument('--hist_resizing' dest='hist_resizing' default='interpolation')<line_sep>parser.add_argument('--hist_sigma' dest='hist_sigma' default=0.02 type=float)<line_sep>parser.add_argument('--alpha' dest='alpha' default=2 type=float)<line_sep>parser.add_argument('--aug_prob' dest='aug_prob' default=0.0 type=float help='Probability of discriminator augmentation. It '<concat>'applies operations specified in --aug_types.')<line_sep>parser.add_argument('--dataset_aug_prob' dest='dataset_aug_prob' default=0.0 type=float help='Probability of dataset augmentation. 
It applies '<concat>'random cropping')<line_sep>parser.add_argument('--aug_types' dest='aug_types' default=['translation' 'cutout'] nargs='+' help='Options include: translation, cutout, and color')<line_sep><return>parser.parse_args()<block_end><if_stmt>__name__<eq>"__main__"<block_start>args=get_args()<line_sep>torch.cuda.set_device(args.gpu)<line_sep>train_from_folder(data=args.data results_dir=args.results_dir models_dir=args.models_dir name=args.name new=args.new load_from=args.load_from image_size=args.image_size network_capacity=args.network_capacity transparent=args.transparent batch_size=args.batch_size gradient_accumulate_every=args.gradient_accumulate_every num_train_steps=args.num_train_steps learning_rate=args.learning_rate num_workers=args.num_workers save_every=args.save_every generate=args.generate save_noise_latent=args.save_n_l target_noise_file=args.target_n target_latent_file=args.target_l num_image_tiles=args.num_image_tiles trunc_psi=args.trunc_psi fp16=args.fp16 fq_layers=args.fq_layers fq_dict_size=args.fq_dict_size attn_layers=args.attn_layers hist_method=args.hist_method hist_resizing=args.hist_resizing hist_sigma=args.hist_sigma hist_bin=args.hist_bin hist_insz=args.hist_insz target_hist=args.target_hist alpha=args.alpha aug_prob=args.aug_prob dataset_aug_prob=args.dataset_aug_prob aug_types=args.aug_types)<block_end>
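# Hedged usage sketch of the two modes wired up by get_args(), calling
# train_from_folder() directly instead of the CLI; the dataset path, model
# name and target image below are placeholders.
# Training: fit a HistoGAN on a folder of images.
train_from_folder(data='./dataset/faces', name='histoGAN_faces', new=True,
                  image_size=256, batch_size=2, gradient_accumulate_every=8,
                  alpha=2)
# Generation: load the saved model and sample images whose colors follow the
# histogram of a target image.
train_from_folder(name='histoGAN_faces', generate=True,
                  target_hist='./target_images/1.jpg', num_image_tiles=16)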
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. <import_stmt>timeit<import_stmt>itertools<import_stmt>argparse<import_stmt>os<class_stmt>OpArgMngr(object)<block_start>"""Operator argument manager for storing operator workloads."""<line_sep>args={}<line_sep>@staticmethod<def_stmt>add_workload funcname *args **kwargs<block_start><if_stmt>"_specifier"<not><in>kwargs<block_start>_specifier=funcname<block_end><else_stmt><block_start>_specifier=kwargs["_specififer"]<del_stmt>kwargs["_specififer"]<block_end><if_stmt>_specifier<in>OpArgMngr.args<block_start><raise>ValueError("duplicate {}".format(_specifier))<block_end>OpArgMngr.args[_specifier]={'args':args 'kwargs':kwargs 'funcname':funcname}<block_end><block_end><def_stmt>generate_workloads <block_start>array_pool={}<line_sep>shapes=[]<for_stmt>ndim range(4)<block_start>shapes.extend(list(itertools.product(range(4) repeat=ndim)))<block_end><for_stmt>shape shapes<block_start>name='x'.join(str(i)<for>i shape)<if_stmt>name<in>array_pool<block_start><raise>ValueError("duplicate array {}".format(name))<block_end>array_pool[name]=dnp.ones(shape)<block_end><return>array_pool<block_end><def_stmt>prepare_workloads <block_start>pool=generate_workloads()<line_sep>OpArgMngr.add_workload("zeros" (2 2))<line_sep>OpArgMngr.add_workload("full" (2 2) 10)<line_sep>OpArgMngr.add_workload("identity" 3)<line_sep>OpArgMngr.add_workload("ones" (2 2))<line_sep>OpArgMngr.add_workload("einsum" "ii" pool['2x2'] optimize=<false>)<line_sep>OpArgMngr.add_workload("unique" pool['1'] return_index=<true> return_inverse=<true> return_counts=<true> axis=-1)<line_sep>OpArgMngr.add_workload("dstack" (pool['2x1'] pool['2x1'] pool['2x1'] pool['2x1']))<line_sep>OpArgMngr.add_workload("polyval" dnp.arange(10) pool['2x2'])<line_sep>OpArgMngr.add_workload("ediff1d" pool['2x2'] pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("nan_to_num" pool['2x2'])<line_sep>OpArgMngr.add_workload("tri" 2 3 4)<line_sep>OpArgMngr.add_workload("tensordot" pool['2x2'] pool['2x2'] ((1 0) (0 1)))<line_sep>OpArgMngr.add_workload("cumsum" pool['3x2'] axis=0 out=pool['3x2'])<line_sep>OpArgMngr.add_workload("random.shuffle" pool['3'])<line_sep>OpArgMngr.add_workload("equal" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("not_equal" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("less" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("greater_equal" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("less_equal" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("maximum" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("minimum" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("sum" pool['2x2'] axis=0 keepdims=<true> out=pool['1x2'])<line_sep>OpArgMngr.add_workload("std" pool['2x2'] axis=0 ddof=0 keepdims=<true> 
out=pool['1x2'])<line_sep>OpArgMngr.add_workload("var" pool['2x2'] axis=0 ddof=1 keepdims=<true> out=pool['1x2'])<line_sep>OpArgMngr.add_workload("average" pool['2x2'] weights=pool['2'] axis=1 returned=<true>)<line_sep>OpArgMngr.add_workload("histogram" pool['2x2'] bins=10 range=(0.0 10.0))<line_sep>OpArgMngr.add_workload("add" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("cross" pool['2'] pool['2'])<line_sep>OpArgMngr.add_workload("linalg.eig" pool['3x3'])<line_sep>OpArgMngr.add_workload("linalg.eigh" pool['3x3'])<line_sep>OpArgMngr.add_workload("linalg.det" pool['3x3'])<line_sep>OpArgMngr.add_workload("linalg.slogdet" pool['3x3'])<line_sep>OpArgMngr.add_workload("linalg.matrix_rank" pool['3x3'] pool['1'] hermitian=<false>)<line_sep>OpArgMngr.add_workload("linalg.svd" pool['3x3'])<line_sep>OpArgMngr.add_workload("linalg.cholesky" pool['1x1'])<line_sep>OpArgMngr.add_workload("linalg.qr" pool['3x3'])<line_sep>OpArgMngr.add_workload("linalg.lstsq" pool['2x1'] pool['2'] rcond=<none>)<line_sep>OpArgMngr.add_workload("linalg.eigvals" pool['1x1'])<line_sep>OpArgMngr.add_workload("linalg.eigvalsh" pool['1x1'] UPLO='L')<line_sep>OpArgMngr.add_workload("linalg.inv" pool['1x1'])<line_sep>OpArgMngr.add_workload("linalg.pinv" pool['2x3x3'] pool['1'] hermitian=<false>)<line_sep>OpArgMngr.add_workload("linalg.solve" pool['1x1'] pool['1'])<line_sep>OpArgMngr.add_workload("linalg.tensorinv" pool['1x1'] ind=2)<line_sep>OpArgMngr.add_workload("linalg.norm" pool['3x3'])<line_sep>OpArgMngr.add_workload("linalg.tensorsolve" pool['1x1x1'] pool['1x1x1'] (2 0 1))<line_sep>OpArgMngr.add_workload("tile" pool['2x2'] 1)<line_sep>OpArgMngr.add_workload("trace" pool['2x2'])<line_sep>OpArgMngr.add_workload("transpose" pool['2x2'])<line_sep>OpArgMngr.add_workload("split" pool['3x3'] (0 1 2) axis=1)<line_sep>OpArgMngr.add_workload("vstack" (pool['3x3'] pool['3x3'] pool['3x3']))<line_sep>OpArgMngr.add_workload("argmax" pool['3x2'] axis=-1)<line_sep>OpArgMngr.add_workload("argmin" pool['3x2'] axis=-1)<line_sep>OpArgMngr.add_workload("atleast_1d" pool['2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("atleast_2d" pool['2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("atleast_3d" pool['2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("argsort" pool['3x2'] axis=-1)<line_sep>OpArgMngr.add_workload("sort" pool['3x2'] axis=-1)<line_sep>OpArgMngr.add_workload("indices" dimensions=(1 2 3))<line_sep>OpArgMngr.add_workload("subtract" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("multiply" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("mod" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("remainder" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("divide" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("true_divide" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("power" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("lcm" pool['2x2'].astype('int32') pool['2x2'].astype('int32'))<line_sep>OpArgMngr.add_workload("diff" pool['2x2'] n=1 axis=-1)<line_sep>OpArgMngr.add_workload("inner" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("random.multinomial" n=2 pvals=[1/6.]<times>6 size=(2 2))<line_sep>OpArgMngr.add_workload("random.rand" 3 2)<line_sep>OpArgMngr.add_workload("random.randn" 2 2)<line_sep>OpArgMngr.add_workload("nonzero" pool['2x2'])<line_sep>OpArgMngr.add_workload("tril" pool['2x2'] k=0)<line_sep>OpArgMngr.add_workload("random.choice" pool['2'] size=(2 2))<line_sep>OpArgMngr.add_workload("take" pool['2'] dnp.array([1 0] 
dtype='int64'))<line_sep>OpArgMngr.add_workload("clip" pool['2x2'] 0 1)<line_sep>OpArgMngr.add_workload("expand_dims" pool['2x2'] axis=0)<line_sep>OpArgMngr.add_workload("broadcast_to" pool['2x2'] (2 2 2))<line_sep>OpArgMngr.add_workload("full_like" pool['2x2'] 2)<line_sep>OpArgMngr.add_workload("zeros_like" pool['2x2'])<line_sep>OpArgMngr.add_workload("ones_like" pool['2x2'])<line_sep>OpArgMngr.add_workload("bitwise_and" pool['2x2'].astype(int) pool['2x2'].astype(int))<line_sep>OpArgMngr.add_workload("bitwise_xor" pool['2x2'].astype(int) pool['2x2'].astype(int))<line_sep>OpArgMngr.add_workload("bitwise_or" pool['2x2'].astype(int) pool['2x2'].astype(int))<line_sep>OpArgMngr.add_workload("copysign" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("arctan2" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("hypot" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("ldexp" pool['2x2'].astype(int) pool['2x2'].astype(int))<line_sep>OpArgMngr.add_workload("logical_and" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("logical_or" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("logical_xor" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("random.uniform" low=0 high=1 size=1)<line_sep>OpArgMngr.add_workload("random.exponential" scale=2 size=(2 2))<line_sep>OpArgMngr.add_workload("random.rayleigh" scale=2 size=(2 2))<line_sep>OpArgMngr.add_workload("random.weibull" a=2 size=(2 2))<line_sep>OpArgMngr.add_workload("random.pareto" a=2 size=(2 2))<line_sep>OpArgMngr.add_workload("random.power" a=2 size=(2 2))<line_sep>OpArgMngr.add_workload("random.logistic" loc=2 scale=2 size=(2 2))<line_sep>OpArgMngr.add_workload("random.gumbel" loc=2 scale=2 size=(2 2))<line_sep>OpArgMngr.add_workload("where" pool['2x3'] pool['2x3'] pool['2x1'])<line_sep>OpArgMngr.add_workload("may_share_memory" pool['2x3'][:0] pool['2x3'][:1])<line_sep>OpArgMngr.add_workload('squeeze' pool['2x2'] axis=<none>)<line_sep>OpArgMngr.add_workload("pad" pool['2x2'] pad_width=((1 2) (1 2)) mode="constant")<line_sep>OpArgMngr.add_workload("prod" pool['2x2'] axis=1 dtype="float64" keepdims=<false>)<line_sep>OpArgMngr.add_workload("around" pool['2x2'] decimals=0)<line_sep>OpArgMngr.add_workload("round" pool['2x2'] decimals=1)<line_sep>OpArgMngr.add_workload("repeat" pool['2x2'] repeats=1 axis=<none>)<line_sep>OpArgMngr.add_workload("diagflat" pool['2x2'] k=1)<line_sep>OpArgMngr.add_workload("diag" pool['2x2'] k=1)<line_sep>OpArgMngr.add_workload("diagonal" pool['2x2x2'] offset=-1 axis1=0 axis2=1)<line_sep>OpArgMngr.add_workload("diag_indices_from" pool['2x2'])<line_sep>OpArgMngr.add_workload("bincount" dnp.arange(3 dtype=int) pool['3'] minlength=4)<line_sep>OpArgMngr.add_workload("percentile" pool['2x2x2'] 80 axis=0 out=pool['2x2'] interpolation='midpoint')<line_sep>OpArgMngr.add_workload("quantile" pool['2x2x2'] 0.8 axis=0 out=pool['2x2'] interpolation='midpoint')<line_sep>OpArgMngr.add_workload("all" pool['2x2x2'] axis=(0 1) out=dnp.array([<false> <false>] dtype=bool) keepdims=<false>)<line_sep>OpArgMngr.add_workload("any" pool['2x2x2'] axis=(0 1) out=dnp.array([<false> <false>] dtype=bool) keepdims=<false>)<line_sep>OpArgMngr.add_workload("roll" pool["2x2"] 1 axis=0)<line_sep>OpArgMngr.add_workload("rot90" pool["2x2"] 2)<line_sep>OpArgMngr.add_workload("column_stack" (pool['3x3'] pool['3x3'] pool['3x3']))<line_sep>OpArgMngr.add_workload("hstack" (pool['3x3'] pool['3x3'] pool['3x3']))<line_sep>OpArgMngr.add_workload("triu" pool['3x3'])<line_sep>OpArgMngr.add_workload("array_split" pool['2x2'] 
2 axis=1)<line_sep>OpArgMngr.add_workload("vsplit" pool['2x2'] 2)<line_sep>OpArgMngr.add_workload("hsplit" pool['2x2'] 2)<line_sep>OpArgMngr.add_workload("dsplit" pool['2x2x2'] 2)<line_sep>OpArgMngr.add_workload("arange" 10)<line_sep>OpArgMngr.add_workload("concatenate" (pool['1x2'] pool['1x2'] pool['1x2']) axis=0)<line_sep>OpArgMngr.add_workload("append" pool['2x2'] pool['1x2'] axis=0)<line_sep>OpArgMngr.add_workload("insert" pool['3x2'] 1 pool['1x1'] axis=0)<line_sep>OpArgMngr.add_workload("delete" pool['3x2'] 1 axis=0)<line_sep>OpArgMngr.add_workload("blackman" 12)<line_sep>OpArgMngr.add_workload("eye" 5)<line_sep>OpArgMngr.add_workload("hamming" 12)<line_sep>OpArgMngr.add_workload("hanning" 12)<line_sep>OpArgMngr.add_workload("linspace" 0 10 8 endpoint=<false>)<line_sep>OpArgMngr.add_workload("logspace" 2.0 3.0 num=4 base=2.0 dtype=onp.float32)<line_sep>OpArgMngr.add_workload("matmul" pool['2x2'] pool['2x2'])<line_sep>OpArgMngr.add_workload("mean" pool['2x2'] axis=0 keepdims=<true>)<line_sep>OpArgMngr.add_workload("random.gamma" 1 size=(2 3))<line_sep>OpArgMngr.add_workload("random.normal" 1 size=(2 3))<line_sep>OpArgMngr.add_workload("max" pool["2x2"] axis=0 out=pool['2'] keepdims=<false>)<line_sep>OpArgMngr.add_workload("min" pool["2x2"] axis=0 out=pool['2'] keepdims=<false>)<line_sep>OpArgMngr.add_workload("amax" pool["2x2"] axis=1 out=pool['2'] keepdims=<false>)<line_sep>OpArgMngr.add_workload("amin" pool["2x2"] axis=1 out=pool['2'] keepdims=<false>)<line_sep>unary_ops=['negative' 'reciprocal' 'abs' 'sign' 'rint' 'ceil' 'floor' 'bitwise_not' 'trunc' 'fix' 'square' 'sqrt' 'cbrt' 'exp' 'log' 'log10' 'log2' 'log1p' 'expm1' 'logical_not' 'isnan' 'isinf' 'isposinf' 'isneginf' 'isfinite' 'sin' 'cos' 'tan' 'arcsin' 'arccos' 'arctan' 'degrees' 'radians' 'sinh' 'cosh' 'tanh' 'arcsinh' 'arccosh' 'arctanh']<line_sep># 'rad2deg', 'deg2rad' cannot run without tvm <for_stmt>unary_op unary_ops<block_start><if_stmt>unary_op<eq>"bitwise_not"<block_start>OpArgMngr.add_workload(unary_op dnp.ones((2 2) dtype=int))<block_end><else_stmt><block_start>OpArgMngr.add_workload(unary_op pool['2x2'])<block_end><block_end><block_end><def_stmt>benchmark_helper f *args **kwargs<block_start>number=10000<line_sep><return>timeit.timeit(<lambda>:f(*args **kwargs) number=number)/number<block_end><def_stmt>get_op module funcname<block_start>funcname=funcname.split(".")<for_stmt>fname funcname<block_start>module=getattr(module fname)<block_end><return>module<block_end><def_stmt>run_benchmark packages<block_start>results={}<for_stmt>(k v) OpArgMngr.args.items()<block_start>result={}<for_stmt>(name package) packages.items()<block_start>print('{}.{} running...'.format(name k))<line_sep>op=get_op(package["module"] v["funcname"])<line_sep>args=[package["data"](arg)<for>arg v["args"]]<line_sep>kwargs={k:package["data"](v)<for>(k v) v["kwargs"].items()}<line_sep>benchmark=benchmark_helper(op *args **kwargs)<line_sep>result[name]=benchmark<block_end>results[k]=result<block_end><return>results<block_end><def_stmt>show_results results<block_start>print("{:>24}{:>24}{:>24}".format("name" "package" "time(us)"))<for_stmt>(specifier d) results.items()<block_start><for_stmt>(k v) d.items()<block_start>print("{:>24}{:>24}{:>24}".format(specifier k 
v<times>10<power>6))<block_end><block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>parser=argparse.ArgumentParser()<line_sep>parser.add_argument('ffi_type')<line_sep>parsed=parser.parse_args()<if_stmt>parsed.ffi_type<eq>"cython"<block_start>os.environ['MXNET_ENABLE_CYTHON']='1'<line_sep>os.environ['MXNET_ENFORCE_CYTHON']='1'<block_end><elif_stmt>parsed.ffi_type<eq>"ctypes"<block_start>os.environ['MXNET_ENABLE_CYTHON']='0'<block_end><else_stmt><block_start><raise>ValueError("unknown ffi_type {}".format(parsed.ffi_type))<block_end>os.environ["MXNET_ENGINE_TYPE"]="NaiveEngine"<import_stmt>mxnet<as>mx<import_stmt>numpy<as>onp<import_from_stmt>mxnet np<as>dnp<line_sep>mx.npx.set_np(dtype=<false>)<line_sep>packages={"onp":{"module":onp "data":<lambda>arr:arr.asnumpy()<if>isinstance(arr dnp.ndarray)<else>arr} "dnp":{"module":dnp "data":<lambda>arr:arr}}<line_sep>prepare_workloads()<line_sep>results=run_benchmark(packages)<line_sep>show_results(results)<block_end>
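As a usage sketch for the benchmark above, written in plain Python rather than the serialized form used in this dump: the file name benchmark_ffi.py is an assumption (only the positional ffi_type argument is real), so the script would be driven as `python benchmark_ffi.py ctypes` or `python benchmark_ffi.py cython`, and every registered workload reduces to a timeit average along these lines.

import timeit
from mxnet import np as dnp   # requires MXNet built with the NumPy-compatible interface

op = getattr(dnp.linalg, "norm")                       # what get_op(dnp, "linalg.norm") resolves to
avg = timeit.timeit(lambda: op(dnp.ones((3, 3))), number=1000) / 1000
print("average seconds per call:", avg)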
<import_stmt>sys<import_stmt>math<import_stmt>numpy<as>np<import_from_stmt>datetime datetime<import_stmt>torch<import_stmt>torch.nn<as>nn<import_stmt>torch.nn.functional<as>F<import_from_stmt>torch.autograd Variable<class_stmt>DecAtt(nn.Module)<block_start><def_stmt>__init__ self num_units num_classes embedding_size dropout device=0 training=<true> project_input=<true> use_intra_attention=<false> distance_biases=10 max_sentence_length=30<block_start>""" Create the model based on MLP networks. :param num_units: size of the networks :param num_classes: number of classes in the problem :param embedding_size: size of each word embedding :param use_intra_attention: whether to use intra-attention model :param training: whether to create training tensors (optimizer) :param project_input: whether to project input embeddings to a different dimensionality :param distance_biases: number of different distances with biases used in the intra-attention model """<line_sep>super().__init__()<line_sep>self.arch="DecAtt"<line_sep>self.num_units=num_units<line_sep>self.num_classes=num_classes<line_sep>self.project_input=project_input<line_sep>self.embedding_size=embedding_size<line_sep>self.distance_biases=distance_biases<line_sep>self.intra_attention=<false><line_sep>self.max_sentence_length=max_sentence_length<line_sep>self.device=device<line_sep>self.bias_embedding=nn.Embedding(max_sentence_length 1)<line_sep>self.linear_layer_project=nn.Linear(embedding_size num_units bias=<false>)<line_sep>#self.linear_layer_intra = nn.Sequential(nn.Linear(num_units, num_units), nn.ReLU(), nn.Linear(num_units, num_units), nn.ReLU()) self.linear_layer_attend=nn.Sequential(nn.Dropout(p=dropout) nn.Linear(num_units num_units) nn.ReLU() nn.Dropout(p=dropout) nn.Linear(num_units num_units) nn.ReLU())<line_sep>self.linear_layer_compare=nn.Sequential(nn.Dropout(p=dropout) nn.Linear(num_units<times>2 num_units) nn.ReLU() nn.Dropout(p=dropout) nn.Linear(num_units num_units) nn.ReLU())<line_sep>self.linear_layer_aggregate=nn.Sequential(nn.Dropout(p=dropout) nn.Linear(num_units<times>2 num_units) nn.ReLU() nn.Dropout(p=dropout) nn.Linear(num_units num_units) nn.ReLU() nn.Linear(num_units num_classes) nn.LogSoftmax())<line_sep>self.init_weight()<block_end><def_stmt>init_weight self<block_start>self.linear_layer_project.weight.data.normal_(0 0.01)<line_sep>self.linear_layer_attend[1].weight.data.normal_(0 0.01)<line_sep>self.linear_layer_attend[1].bias.data.fill_(0)<line_sep>self.linear_layer_attend[4].weight.data.normal_(0 0.01)<line_sep>self.linear_layer_attend[4].bias.data.fill_(0)<line_sep>self.linear_layer_compare[1].weight.data.normal_(0 0.01)<line_sep>self.linear_layer_compare[1].bias.data.fill_(0)<line_sep>self.linear_layer_compare[4].weight.data.normal_(0 0.01)<line_sep>self.linear_layer_compare[4].bias.data.fill_(0)<line_sep>self.linear_layer_aggregate[1].weight.data.normal_(0 0.01)<line_sep>self.linear_layer_aggregate[1].bias.data.fill_(0)<line_sep>self.linear_layer_aggregate[4].weight.data.normal_(0 0.01)<line_sep>self.linear_layer_aggregate[4].bias.data.fill_(0)<line_sep>#self.word_embedding.weight.data.copy_(torch.from_numpy(self.pretrained_emb)) <block_end><def_stmt>attention_softmax3d self raw_attentions<block_start>reshaped_attentions=raw_attentions.view(-1 raw_attentions.size(2))<line_sep>out=nn.functional.softmax(reshaped_attentions dim=1)<line_sep><return>out.view(raw_attentions.size(0) raw_attentions.size(1) raw_attentions.size(2))<block_end><def_stmt>_transformation_input self
embed_sent<block_start>embed_sent=self.linear_layer_project(embed_sent)<line_sep>result=embed_sent<if_stmt>self.intra_attention<block_start>f_intra=self.linear_layer_intra(embed_sent)<line_sep>f_intra_t=torch.transpose(f_intra 1 2)<line_sep>raw_attentions=torch.matmul(f_intra f_intra_t)<line_sep>time_steps=embed_sent.size(1)<line_sep>r=torch.arange(0 time_steps)<line_sep>r_matrix=r.view(1 -1).expand(time_steps time_steps)<line_sep>raw_index=r_matrix-r.view(-1 1)<line_sep>clipped_index=torch.clamp(raw_index 0 self.distance_biases-1)<line_sep>clipped_index=Variable(clipped_index.long())<if_stmt>torch.cuda.is_available()<block_start>clipped_index=clipped_index.to(self.device)<block_end>bias=self.bias_embedding(clipped_index)<line_sep>bias=torch.squeeze(bias)<line_sep>raw_attentions<augadd>bias<line_sep>attentions=self.attention_softmax3d(raw_attentions)<line_sep>attended=torch.matmul(attentions embed_sent)<line_sep>result=torch.cat([embed_sent attended] 2)<block_end><return>result<block_end><def_stmt>attend self sent1 sent2 lsize_list rsize_list<block_start>""" Compute inter-sentence attention. This is step 1 (attend) in the paper :param sent1: tensor in shape (batch, time_steps, num_units), the projected sentence 1 :param sent2: tensor in shape (batch, time_steps, num_units) :return: a tuple of 3-d tensors, alpha and beta. """<line_sep>repr1=self.linear_layer_attend(sent1)<line_sep>repr2=self.linear_layer_attend(sent2)<line_sep>repr2=torch.transpose(repr2 1 2)<line_sep>raw_attentions=torch.matmul(repr1 repr2)<line_sep>#self.mask = generate_mask(lsize_list, rsize_list) # masked = mask(self.raw_attentions, rsize_list) #masked = raw_attentions * self.mask att_sent1=self.attention_softmax3d(raw_attentions)<line_sep>beta=torch.matmul(att_sent1 sent2)#input2_soft raw_attentions_t=torch.transpose(raw_attentions 1 2).contiguous()<line_sep>#self.mask_t = torch.transpose(self.mask, 1, 2).contiguous() # masked = mask(raw_attentions_t, lsize_list) #masked = raw_attentions_t * self.mask_t att_sent2=self.attention_softmax3d(raw_attentions_t)<line_sep>alpha=torch.matmul(att_sent2 sent1)#input1_soft <return>alpha beta<block_end><def_stmt>compare self sentence soft_alignment<block_start>""" Apply a feed forward network to compare one sentence to its soft alignment with the other.
:param sentence: embedded and projected sentence, shape (batch, time_steps, num_units) :param soft_alignment: tensor with shape (batch, time_steps, num_units) :return: a tensor (batch, time_steps, num_units) """<line_sep>sent_alignment=torch.cat([sentence soft_alignment] 2)<line_sep>out=self.linear_layer_compare(sent_alignment)<line_sep>#out, (state, _) = self.lstm_compare(out) <return>out<block_end><def_stmt>aggregate self v1 v2<block_start>""" Aggregate the representations induced from both sentences and their representations :param v1: tensor with shape (batch, time_steps, num_units) :param v2: tensor with shape (batch, time_steps, num_units) :return: logits over classes, shape (batch, num_classes) """<line_sep>v1_sum=torch.sum(v1 1)<line_sep>v2_sum=torch.sum(v2 1)<line_sep>out=self.linear_layer_aggregate(torch.cat([v1_sum v2_sum] 1))<line_sep><return>out<block_end><def_stmt>forward self sent1 sent2 ext_feats=<none> word_to_doc_count=<none> raw_sent1=<none> raw_sent2=<none><block_start>lsize_list=[len(s.split(" "))<for>s raw_sent1]<line_sep>rsize_list=[len(s.split(" "))<for>s raw_sent2]<line_sep>sent1=sent1.permute(0 2 1)<line_sep>sent2=sent2.permute(0 2 1)<line_sep>sent1=self._transformation_input(sent1)<line_sep>sent2=self._transformation_input(sent2)<line_sep>alpha,beta=self.attend(sent1 sent2 lsize_list rsize_list)<line_sep>v1=self.compare(sent1 beta)<line_sep>v2=self.compare(sent2 alpha)<line_sep>logits=self.aggregate(v1 v2)<line_sep><return>logits<block_end><block_end>
<import_stmt>torch.nn<as>nn<import_stmt>math<import_stmt>torch.utils.model_zoo<as>model_zoo<import_stmt>torch.nn.functional<as>F<import_stmt>torch<import_stmt>numpy<as>np<import_stmt>cv2<import_stmt>pdb<def_stmt>sigmoid x<block_start><return>1/(1+math.exp(-x))<block_end><def_stmt>norm_angle angle<block_start>norm_angle=sigmoid(10<times>(abs(angle)/0.7853975-1))<line_sep><return>norm_angle<block_end><def_stmt>conv3x3 in_planes out_planes stride=1<block_start>"3x3 convolution with padding"<line_sep><return>nn.Conv2d(in_planes out_planes kernel_size=3 stride=stride padding=1 bias=<false>)<block_end><class_stmt>BasicBlock(nn.Module)<block_start>expansion=1<def_stmt>__init__ self inplanes planes stride=1 downsample=<none><block_start>super(BasicBlock self).__init__()<line_sep>self.conv1=conv3x3(inplanes planes stride)<line_sep>self.bn1=nn.BatchNorm2d(planes)<line_sep>self.relu=nn.ReLU()<line_sep>self.conv2=conv3x3(planes planes)<line_sep>self.bn2=nn.BatchNorm2d(planes)<line_sep>self.downsample=downsample<line_sep>self.stride=stride<block_end><def_stmt>forward self x<block_start>residual=x<line_sep>out=self.conv1(x)<line_sep>out=self.bn1(out)<line_sep>out=self.relu(out)<line_sep>out=self.conv2(out)<line_sep>out=self.bn2(out)<if_stmt>self.downsample<is><not><none><block_start>residual=self.downsample(x)<block_end>out<augadd>residual<line_sep>out=self.relu(out)<line_sep><return>out<block_end><block_end><class_stmt>Bottleneck(nn.Module)<block_start>expansion=4<def_stmt>__init__ self inplanes planes stride=1 downsample=<none><block_start>super(Bottleneck self).__init__()<line_sep>self.conv1=nn.Conv2d(inplanes planes kernel_size=1 bias=<false>)<line_sep>self.bn1=nn.BatchNorm2d(planes)<line_sep>self.conv2=nn.Conv2d(planes planes kernel_size=3 stride=stride padding=1 bias=<false>)<line_sep>self.bn2=nn.BatchNorm2d(planes)<line_sep>self.conv3=nn.Conv2d(planes planes<times>4 kernel_size=1 bias=<false>)<line_sep>self.bn3=nn.BatchNorm2d(planes<times>4)<line_sep>self.relu=nn.ReLU()<line_sep>self.downsample=downsample<line_sep>self.stride=stride<block_end><def_stmt>forward self x<block_start>residual=x<line_sep>out=self.conv1(x)<line_sep>out=self.bn1(out)<line_sep>out=self.relu(out)<line_sep>out=self.conv2(out)<line_sep>out=self.bn2(out)<line_sep>out=self.relu(out)<line_sep>out=self.conv3(out)<line_sep>out=self.bn3(out)<if_stmt>self.downsample<is><not><none><block_start>residual=self.downsample(x)<block_end>out=out+residual<line_sep>out=self.relu(out)<line_sep><return>out<block_end><block_end>###''' self-attention; relation-attention ''' <class_stmt>ResNet_AT(nn.Module)<block_start><def_stmt>__init__ self block layers num_classes=1000 end2end=<true> at_type=''<block_start>self.inplanes=64<line_sep>self.end2end=end2end<line_sep>super(ResNet_AT self).__init__()<line_sep>self.conv1=nn.Conv2d(3 64 kernel_size=7 stride=2 padding=3 bias=<false>)<line_sep>self.bn1=nn.BatchNorm2d(64)<line_sep>self.relu=nn.ReLU()<line_sep>self.maxpool=nn.MaxPool2d(kernel_size=3 stride=2 padding=1)<line_sep>self.layer1=self._make_layer(block 64 layers[0])<line_sep>self.layer2=self._make_layer(block 128 layers[1] stride=2)<line_sep>self.layer3=self._make_layer(block 256 layers[2] stride=2)<line_sep>self.layer4=self._make_layer(block 512 layers[3] stride=2)<line_sep>self.avgpool=nn.AdaptiveAvgPool2d(1)<line_sep>self.dropout=nn.Dropout(0.5)<line_sep>self.dropout2=nn.Dropout(0.6)<line_sep>self.alpha=nn.Sequential(nn.Linear(512 1) nn.Sigmoid())<line_sep>self.beta=nn.Sequential(nn.Linear(1024 1) 
nn.Sigmoid())<line_sep>self.pred_fc1=nn.Linear(512 7)<line_sep>self.pred_fc2=nn.Linear(1024 7)<line_sep>self.at_type=at_type<for_stmt>m self.modules()<block_start><if_stmt>isinstance(m nn.Conv2d)<block_start>n=m.kernel_size[0]<times>m.kernel_size[1]<times>m.out_channels<line_sep>m.weight.data.normal_(0 math.sqrt(2./n))<block_end><elif_stmt>isinstance(m nn.BatchNorm2d)<block_start>m.weight.data.fill_(1)<line_sep>m.bias.data.zero_()<block_end><block_end><block_end><def_stmt>_make_layer self block planes blocks stride=1<block_start>downsample=<none><if_stmt>stride<ne>1<or>self.inplanes<ne>planes<times>block.expansion<block_start>downsample=nn.Sequential(nn.Conv2d(self.inplanes planes<times>block.expansion kernel_size=1 stride=stride bias=<false>) nn.BatchNorm2d(planes<times>block.expansion) )<block_end>layers=[]<line_sep>layers.append(block(self.inplanes planes stride downsample))<line_sep>self.inplanes=planes<times>block.expansion<for_stmt>i range(1 blocks)<block_start>layers.append(block(self.inplanes planes))<block_end><return>nn.Sequential(*layers)<block_end><def_stmt>forward self x='' phrase='train' AT_level='first_level' vectors='' vm='' alphas_from1='' index_matrix=''<block_start>vs=[]<line_sep>alphas=[]<assert_stmt>phrase<eq>'train'<or>phrase<eq>'eval'<assert_stmt>AT_level<eq>'first_level'<or>AT_level<eq>'second_level'<or>AT_level<eq>'pred'<if_stmt>phrase<eq>'train'<block_start>num_pair=3<for_stmt>i range(num_pair)<block_start>f=x[: : : : i]# x[128,3,224,224] f=self.conv1(f)<line_sep>f=self.bn1(f)<line_sep>f=self.relu(f)<line_sep>f=self.maxpool(f)<line_sep>f=self.layer1(f)<line_sep>f=self.layer2(f)<line_sep>f=self.layer3(f)<line_sep>f=self.layer4(f)<line_sep>f=self.avgpool(f)<line_sep>f=f.squeeze(3).squeeze(2)# f[1, 512, 1, 1] ---> f[1, 512] # MN_MODEL(first Level) vs.append(f)<line_sep>alphas.append(self.alpha(self.dropout(f)))<block_end>vs_stack=torch.stack(vs dim=2)<line_sep>alphas_stack=torch.stack(alphas dim=2)<if_stmt>self.at_type<eq>'self-attention'<block_start>vm1=vs_stack.mul(alphas_stack).sum(2).div(alphas_stack.sum(2))<block_end><if_stmt>self.at_type<eq>'self_relation-attention'<block_start>vm1=vs_stack.mul(alphas_stack).sum(2).div(alphas_stack.sum(2))<line_sep>betas=[]<for_stmt>i range(len(vs))<block_start>vs[i]=torch.cat([vs[i] vm1] dim=1)<line_sep>betas.append(self.beta(self.dropout(vs[i])))<block_end>cascadeVs_stack=torch.stack(vs dim=2)<line_sep>betas_stack=torch.stack(betas dim=2)<line_sep>output=cascadeVs_stack.mul(betas_stack<times>alphas_stack).sum(2).div((betas_stack<times>alphas_stack).sum(2))<block_end><if_stmt>self.at_type<eq>'self-attention'<block_start>vm1=self.dropout(vm1)<line_sep>pred_score=self.pred_fc1(vm1)<block_end><if_stmt>self.at_type<eq>'self_relation-attention'<block_start>output=self.dropout2(output)<line_sep>pred_score=self.pred_fc2(output)<block_end><return>pred_score<block_end><if_stmt>phrase<eq>'eval'<block_start><if_stmt>AT_level<eq>'first_level'<block_start>f=self.conv1(x)<line_sep>f=self.bn1(f)<line_sep>f=self.relu(f)<line_sep>f=self.maxpool(f)<line_sep>f=self.layer1(f)<line_sep>f=self.layer2(f)<line_sep>f=self.layer3(f)<line_sep>f=self.layer4(f)<line_sep>f=self.avgpool(f)<line_sep>f=f.squeeze(3).squeeze(2)# f[1, 512, 1, 1] ---> f[1, 512] # MN_MODEL(first Level) alphas=self.alpha(self.dropout(f))<line_sep><return>f alphas<block_end><if_stmt>AT_level<eq>'second_level'<block_start><assert_stmt>self.at_type<eq>'self_relation-attention'<line_sep>vms=index_matrix.permute(1 0).mm(vm)# [381, 21783] -> [21783,381] * [381,512] --> [21783, 512] 
vs_cate=torch.cat([vectors vms] dim=1)<line_sep>betas=self.beta(self.dropout(vs_cate))<line_sep>''' keywords: mean_fc ; weight_sourcefc; sum_alpha; weightmean_sourcefc '''<line_sep>''' alpha * beta '''<line_sep>weight_catefc=vs_cate.mul(alphas_from1)# [21570,512] * [21570,1] --->[21570,512] alpha_beta=alphas_from1.mul(betas)<line_sep>sum_alphabetas=index_matrix.mm(alpha_beta)# [380,21570] * [21570,1] -> [380,1] weightmean_catefc=index_matrix.mm(weight_catefc).div(sum_alphabetas)<line_sep>weightmean_catefc=self.dropout2(weightmean_catefc)<line_sep>pred_score=self.pred_fc2(weightmean_catefc)<line_sep><return>pred_score<block_end><if_stmt>AT_level<eq>'pred'<block_start><if_stmt>self.at_type<eq>'self-attention'<block_start>pred_score=self.pred_fc1(self.dropout(vm))<block_end><return>pred_score<block_end><block_end><block_end><block_end>''' self-attention; relation-attention '''<def_stmt>resnet18_at pretrained=<false> **kwargs# Constructs a base ResNet-18 model. <block_start>model=ResNet_AT(BasicBlock [2 2 2 2] **kwargs)<line_sep><return>model<block_end>
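A small usage sketch for the resnet18_at factory above, in plain Python. The at_type string mirrors the branches in forward(); the 5-D input with three frames stacked on the last axis is an assumption read off the slicing x[:, :, :, :, i] in the training branch, not something stated elsewhere in the file.

import torch

model = resnet18_at(at_type='self-attention')        # or 'self_relation-attention'
frames = torch.randn(4, 3, 224, 224, 3)              # batch of 4 samples, 3 frames each on the last axis
scores = model(frames, phrase='train')               # first-level self-attention over the frames
print(scores.shape)                                  # torch.Size([4, 7]) — seven output classes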
<import_stmt>tensorflow<as>tf<import_from_stmt>tensorflow keras<import_from_stmt>tensorflow.keras backend<as>K<import_from_stmt>keras_cv_attention_models.aotnet AotNet<import_from_stmt>keras_cv_attention_models.download_and_load reload_model_weights<import_from_stmt>keras_cv_attention_models.attention_layers batchnorm_with_activation conv2d_no_bias<line_sep>PRETRAINED_DICT={"resnest101":{"imagenet":"63f9ebdcd32529cbc4b4fbbec3d1bb2f"} "resnest200":{"imagenet":"8e211dcb089b588e18d36ba7cdf92ef0"} "resnest269":{"imagenet":"4309ed1b0a8ae92f2b1143dc3512c5c7"} "resnest50":{"imagenet":"eee7b20a229821f730ab205b6afeb369"} }<def_stmt>rsoftmax inputs groups<block_start><if_stmt>groups<g>1<block_start>nn=tf.reshape(inputs [-1 1 groups inputs.shape[-1]<floordiv>groups])<line_sep># nn = tf.transpose(nn, [0, 2, 1, 3]) nn=tf.nn.softmax(nn axis=2)<line_sep>nn=tf.reshape(nn [-1 1 1 inputs.shape[-1]])<block_end><else_stmt><block_start>nn=keras.layers.Activation("sigmoid")(inputs)<block_end><return>nn<block_end><def_stmt>split_attention_conv2d inputs filters kernel_size=3 strides=1 downsample_first=<false> groups=2 activation="relu" name=""<block_start>h_axis,w_axis=[2 3]<if>K.image_data_format()<eq>"channels_first"<else>[1 2]<line_sep>in_channels=inputs.shape[-1]<line_sep>conv_strides=strides<if>downsample_first<else>1<if_stmt>groups<eq>1<block_start>logits=conv2d_no_bias(inputs filters kernel_size strides=conv_strides padding="same" name=name<and>name+"1_")<block_end><else_stmt># Using groups=2 is slow in `mixed_float16` policy # logits = conv2d_no_bias(inputs, filters * groups, kernel_size, padding="same", groups=groups, name=name and name + "1_") <block_start>logits=[]<line_sep>splitted_inputs=tf.split(inputs groups axis=-1)<for_stmt>ii range(groups)<block_start>conv_name=name<and>name+"1_g{}_".format(ii+1)<line_sep>logits.append(conv2d_no_bias(splitted_inputs[ii] filters kernel_size strides=conv_strides padding="same" name=conv_name))<block_end>logits=tf.concat(logits axis=-1)<block_end>logits=batchnorm_with_activation(logits activation=activation name=name<and>name+"1_")<if_stmt>groups<g>1<block_start>splited=tf.split(logits groups axis=-1)<line_sep>gap=tf.reduce_sum(splited axis=0)<block_end><else_stmt><block_start>gap=logits<block_end>gap=tf.reduce_mean(gap [h_axis w_axis] keepdims=<true>)<line_sep>reduction_factor=4<line_sep>inter_channels=max(in_channels<times>groups<floordiv>reduction_factor 32)<line_sep>atten=keras.layers.Conv2D(inter_channels kernel_size=1 name=name<and>name+"2_conv")(gap)<line_sep>atten=batchnorm_with_activation(atten activation=activation name=name<and>name+"2_")<line_sep>atten=keras.layers.Conv2D(filters<times>groups kernel_size=1 name=name<and>name+"3_conv")(atten)<line_sep>atten=rsoftmax(atten groups)<line_sep>out=keras.layers.Multiply()([atten logits])<if_stmt>groups<g>1<block_start>out=tf.split(out groups axis=-1)<line_sep>out=tf.reduce_sum(out axis=0)<block_end><if_stmt><not>downsample_first<and>strides<g>1<block_start>out=keras.layers.ZeroPadding2D(padding=1 name=name<and>name+"pool_pad")(out)<line_sep>out=keras.layers.AveragePooling2D(3 strides=2 name=name<and>name+"pool")(out)<block_end><return>out<block_end><def_stmt>ResNest input_shape=(224 224 3) stem_type="deep" attn_types="sa" bn_after_attn=<false> shortcut_type="avg" pretrained="imagenet" **kwargs<block_start>kwargs.pop("kwargs" <none>)<line_sep>model=AotNet(**locals() **kwargs)<line_sep>reload_model_weights(model pretrained_dict=PRETRAINED_DICT sub_release="resnest" 
pretrained=pretrained)<line_sep><return>model<block_end><def_stmt>ResNest50 input_shape=(224 224 3) num_classes=1000 activation="relu" classifier_activation="softmax" pretrained="imagenet" groups=2 **kwargs<block_start><return>ResNest(num_blocks=[3 4 6 3] stem_width=64 model_name="resnest50" **locals() **kwargs)<block_end><def_stmt>ResNest101 input_shape=(256 256 3) num_classes=1000 activation="relu" classifier_activation="softmax" pretrained="imagenet" groups=2 **kwargs<block_start><return>ResNest(num_blocks=[3 4 23 3] stem_width=128 model_name="resnest101" **locals() **kwargs)<block_end><def_stmt>ResNest200 input_shape=(320 320 3) num_classes=1000 activation="relu" classifier_activation="softmax" pretrained="imagenet" groups=2 **kwargs<block_start><return>ResNest(num_blocks=[3 24 36 3] stem_width=128 model_name="resnest200" **locals() **kwargs)<block_end><def_stmt>ResNest269 input_shape=(416 416 3) num_classes=1000 activation="relu" classifier_activation="softmax" pretrained="imagenet" groups=2 **kwargs<block_start><return>ResNest(num_blocks=[3 30 48 8] stem_width=128 model_name="resnest269" **locals() **kwargs)<block_end>
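A usage sketch for the ResNest factories above, again in plain Python; the keras_cv_attention_models.resnest module path is an assumption about how this file is packaged.

from keras_cv_attention_models import resnest

model = resnest.ResNest50(pretrained="imagenet")      # 224x224x3 input, 1000 classes
model.summary()

# Random-weight variant with a different head, relying on the kwargs passed through to AotNet:
small = resnest.ResNest50(num_classes=10, pretrained=None)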
<import_stmt>os<import_from_stmt>typing List<import_stmt>numpy<as>np<import_from_stmt>torchvision datasets<as>torchdata<import_from_stmt>continuum.datasets ImageFolderDataset<import_from_stmt>continuum download<import_from_stmt>continuum.tasks TaskType<class_stmt>DTD(ImageFolderDataset)<block_start>"""Describable Textures Dataset (DTD) Reference: * Describing Textures in the Wild <NAME> and <NAME> and <NAME> and <NAME> and <NAME> CVPR 2014 """<line_sep>url="https://www.robots.ox.ac.uk/~vgg/data/dtd/download/dtd-r1.0.1.tar.gz"<def_stmt>__init__ self data_path:str train:bool=<true> download:bool=<true> split:int=1<block_start>super().__init__(data_path=data_path train=train download=download data_type=TaskType.IMAGE_PATH)<if_stmt><not>(1<le>int(split)<le>10)<block_start><raise>ValueError(f"Available splits are [1, ..., 10], not {split}")<block_end>self.split=split<block_end><def_stmt>_download self<block_start>archive_path=os.path.join(self.data_path "dtd-r1.0.1.tar.gz")<if_stmt><not>os.path.exists(archive_path)<block_start>print("Downloading DTD dataset...")<line_sep>download.download(self.url self.data_path)<block_end><if_stmt><not>os.path.exists(os.path.join(self.data_path "dtd"))<block_start>print("Uncompressing images...")<line_sep>download.untar(archive_path)<block_end><block_end><def_stmt>get_data self<block_start>x,y,t=self._format(torchdata.ImageFolder(os.path.join(self.data_path "dtd" "images")).imgs)<if_stmt>self.train<block_start>index_files=[os.path.join(self.data_path "dtd" "labels" f"train{str(self.split)}.txt") os.path.join(self.data_path "dtd" "labels" f"val{str(self.split)}.txt")]<block_end><else_stmt><block_start>index_files=[os.path.join(self.data_path "dtd" "labels" f"test{str(self.split)}.txt")]<block_end>valid_paths=set()<for_stmt>index_file index_files<block_start><with_stmt>open(index_file)<as>f<block_start>valid_paths.update(map(<lambda>p:os.path.join(self.data_path "dtd" "images" p.strip()) f.readlines()))<block_end><block_end>valid_paths=np.array(list(valid_paths))<line_sep>indexes=np.isin(x valid_paths)<line_sep><return>x[indexes] y[indexes] <none><block_end><block_end>
<import_from_stmt>algorithms.maths.gcd gcd<import_from_stmt>typing List<def_stmt>solve_chinese_remainder num:List[int] rem:List[int]<block_start>""" Computes the smallest x that satisfies the Chinese remainder theorem for a system of equations. The system of equations has the form: x % num[0] = rem[0] x % num[1] = rem[1] ... x % num[k - 1] = rem[k - 1] Where k is the number of elements in num and rem, k > 0. All numbers in num need to be pairwise coprime, otherwise an exception is raised. Returns x: the smallest value for x that satisfies the system of equations. """<if_stmt><not>len(num)<eq>len(rem)<block_start><raise>Exception("num and rem should have equal length")<block_end><if_stmt><not>len(num)<g>0<block_start><raise>Exception("Lists num and rem need to contain at least one element")<block_end><for_stmt>n num<block_start><if_stmt><not>n<g>1<block_start><raise>Exception("All numbers in num need to be > 1")<block_end><block_end><if_stmt><not>_check_coprime(num)<block_start><raise>Exception("Not all pairs of numbers in num are coprime")<block_end>k=len(num)<line_sep>x=1<while_stmt><true><block_start>i=0<while_stmt>i<l>k<block_start><if_stmt>x%num[i]<ne>rem[i]<block_start><break><block_end>i<augadd>1<block_end><if_stmt>i<eq>k<block_start><return>x<block_end><else_stmt><block_start>x<augadd>1<block_end><block_end><block_end><def_stmt>_check_coprime l:List[int]<block_start><for_stmt>i range(len(l))<block_start><for_stmt>j range(len(l))<block_start><if_stmt>i<eq>j<block_start><continue><block_end><if_stmt>gcd(l[i] l[j])<ne>1<block_start><return><false><block_end><block_end><block_end><return><true><block_end>
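A worked example for solve_chinese_remainder, in plain Python: the classic system x ≡ 2 (mod 3), x ≡ 3 (mod 5), x ≡ 2 (mod 7) has 23 as its smallest solution, which the brute-force search above finds directly.

print(solve_chinese_remainder([3, 5, 7], [2, 3, 2]))   # 23: 23 % 3 == 2, 23 % 5 == 3, 23 % 7 == 2
print(solve_chinese_remainder([5, 7], [0, 3]))         # 10: 10 % 5 == 0 and 10 % 7 == 3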
<import_stmt>spacy<line_sep>nlp=spacy.load("ja_core_news_sm")<line_sep>text=("チックフィレイはジョージア州カレッジパークに本社を置く、"<concat>"チキンサンドを専門とするアメリカのファストフードレストランチェーンです。")<line_sep># Only perform tokenization doc=nlp(text)<line_sep>print([token.text<for>token doc])<line_sep>
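The sample sentence above describes Chick-fil-A, an American fast-food chain headquartered in College Park, Georgia. Since ja_core_news_sm ships more than a tokenizer, a small plain-Python extension of the example can also surface part-of-speech tags and named entities (a sketch; it assumes the default pipeline components of that model are enabled).

print([(token.text, token.pos_) for token in doc])    # token / coarse POS pairs
print([(ent.text, ent.label_) for ent in doc.ents])   # named entities, e.g. the place names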
<import_from_stmt>selenium.common.exceptions NoSuchElementException<import_from_stmt>selenium.webdriver.remote.webelement WebElement<class_stmt>Select(WebElement)<block_start>""" Implements logic to work with Web List UI elements """<line_sep>@property<def_stmt>is_multiple self<block_start>value=self.get_attribute('multiple')<line_sep><return>value<is><not><none><and><not>value<eq>'false'<block_end><def_stmt>select_option self option<block_start>""" Performs selection of provided item from Web List @params option - string item name """<line_sep>items_list=self.get_options()<for_stmt>item items_list<block_start><if_stmt>item.get_attribute("value")<eq>option<block_start>item.click()<line_sep><break><block_end><block_end><block_end><def_stmt>get_options self<block_start>""" Performs search for provided item in Web List """<line_sep><return>self.find_elements_by_tag_name('option')<block_end><def_stmt>get_attribute_selected self attribute<block_start>""" Performs search of selected item from Web List Return attribute of selected item @params attribute - string attribute name """<line_sep>items_list=self.get_options()<line_sep><return>next(iter([item.get_attribute(attribute)<for>item items_list<if>item.is_selected()]) <none>)<block_end><def_stmt>get_value_selected self<block_start>""" Performs search of selected item from Web List Return value of selected item """<line_sep><return>self.get_attribute_selected('value')<block_end><def_stmt>get_text_selected self<block_start>""" Performs search of selected item from Web List Return text of selected item """<line_sep><return>self.get_attribute_selected('text')<block_end><def_stmt>select_by_visible_text self text<block_start>""" Performs search of selected item from Web List @params text - string visible text """<line_sep>xpath='.//option[normalize-space(.) 
= {0}]'.format(self._escape_string(text))<line_sep>opts=self.find_elements_by_xpath(xpath)<line_sep>matched=<false><for_stmt>opt opts<block_start>self._set_selected(opt)<if_stmt><not>self.is_multiple<block_start><return><block_end>matched=<true><block_end># in case the target option isn't found by xpath # attempt to find it by direct comparison among options which contain at least the longest token from the text <if_stmt>len(opts)<eq>0<and>' '<in>text<block_start>sub_string_without_space=self._get_longest_token(text)<if_stmt>sub_string_without_space<eq>""<block_start>candidates=self.get_options()<block_end><else_stmt><block_start>xpath=".//option[contains(.,{0})]".format(self._escape_string(sub_string_without_space))<line_sep>candidates=self.find_elements_by_xpath(xpath)<block_end><for_stmt>candidate candidates<block_start><if_stmt>text<eq>candidate.text<block_start>self._set_selected(candidate)<if_stmt><not>self.is_multiple<block_start><return><block_end>matched=<true><block_end><block_end><block_end><if_stmt><not>matched<block_start><raise>NoSuchElementException("Could not locate element with visible text: "+str(text))<block_end><block_end>@staticmethod<def_stmt>_escape_string value<block_start><if_stmt>'"'<in>value<and>"'"<in>value<block_start>substrings=value.split('"')<line_sep>result=['concat(']<for_stmt>substring substrings<block_start>result.append('"{0}"'.format(substring))<line_sep>result.append(', \'"\', ')<block_end>result.pop()<if_stmt>value.endswith('"')<block_start>result.append(', \'"\'')<block_end><return>''.join(result)+')'<block_end><if_stmt>'"'<in>value<block_start><return>"'{0}'".format(value)<block_end><return>'"{0}"'.format(value)<block_end>@staticmethod<def_stmt>_get_longest_token value<block_start>items=value.split(' ')<line_sep>longest=''<for_stmt>item items<block_start><if_stmt>len(item)<g>len(longest)<block_start>longest=item<block_end><block_end><return>longest<block_end>@staticmethod<def_stmt>_set_selected option<block_start><if_stmt><not>option.is_selected()<block_start>option.click()<block_end><block_end><block_end>
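A small worked example, in plain Python, of the XPath quoting helper above, which is the subtle part of select_by_visible_text; being a staticmethod it can be exercised without a live WebDriver session.

print(Select._escape_string("Plain text"))            # "Plain text"
print(Select._escape_string("it's here"))             # "it's here"
print(Select._escape_string('say "hi"'))              # 'say "hi"'
print(Select._escape_string('a "mix" of \'both\''))   # concat("a ", '"', "mix", '"', " of 'both'")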
# Copyright 2019 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Util for log module. """<import_stmt>logging<line_sep>_LOGGER=logging.getLogger('MA')<def_stmt>_find_caller <block_start>""" Bind findCaller() method, which is used to find the stack frame of the caller so that we can note the source file name, line number and function name. """<line_sep><return>_LOGGER.findCaller()<block_end><class_stmt>LogUtil<block_start>""" Logging module. Raises: SyntaxError: If create this class. """<line_sep>_instance=<none><line_sep>_logger=<none><line_sep>_extra_fmt=' [%s] [%s] '<def_stmt>__init__ self<block_start><raise>SyntaxError('can not instance, please use get_instance.')<block_end>@staticmethod<def_stmt>get_instance <block_start>""" Get instance of class `LogUtil`. Returns: Object, instance of class `LogUtil`. """<if_stmt>LogUtil._instance<is><none><block_start>LogUtil._instance=object.__new__(LogUtil)<line_sep>LogUtil._logger=_LOGGER<line_sep>LogUtil._init_logger()<block_end><return>LogUtil._instance<block_end>@staticmethod<def_stmt>_init_logger <block_start>""" Initialize logger. """<line_sep>LogUtil._logger.setLevel(logging.WARNING)<line_sep>log_fmt='[%(levelname)s] %(name)s(%(process)d:%(thread)d,'<concat>'%(processName)s):%(asctime)s%(message)s'<line_sep>log_fmt=logging.Formatter(log_fmt)<line_sep># create console handler with a higher log level console_handler=logging.StreamHandler()<line_sep>console_handler.setFormatter(log_fmt)<line_sep># add the handlers to the logger LogUtil._logger.handlers=[]<line_sep>LogUtil._logger.addHandler(console_handler)<line_sep>LogUtil._logger.propagate=<false><block_end><def_stmt>set_level self level<block_start>""" Set the logging level of this logger, level must be an integer or a string. Supported levels are 'NOTSET'(integer: 0), 'ERROR'(integer: 1-40), 'WARNING'('WARN', integer: 1-30), 'INFO'(integer: 1-20) and 'DEBUG'(integer: 1-10). For example, if logger.set_level('WARNING') or logger.set_level(21), then logger.warn() and logger.error() in scripts would be printed while running, while logger.info() or logger.debug() would not be printed. Args: level (Union[int, str]): Level of logger. """<line_sep>self._logger.setLevel(level)<block_end><def_stmt>add_handler self handler<block_start>""" Add other handler supported by logging module. Args: handler (logging.Handler): Other handler supported by logging module. Raises: ValueError: If handler is not an instance of logging.Handler. """<if_stmt>isinstance(handler logging.Handler)<block_start>self._logger.addHandler(handler)<block_end><else_stmt><block_start><raise>ValueError('handler must be an instance of logging.Handler,'<concat>' but got {}'.format(type(handler)))<block_end><block_end><def_stmt>debug self tag msg *args<block_start>""" Log '[tag] msg % args' with severity 'DEBUG'. Args: tag (str): Logger tag. msg (str): Logger message. args (Any): Auxiliary value. 
"""<line_sep>caller_info=_find_caller()<line_sep>file_info=':'.join([caller_info[0] str(caller_info[1])])<line_sep>self._logger.debug(self._extra_fmt+msg file_info tag *args)<block_end><def_stmt>info self tag msg *args<block_start>""" Log '[tag] msg % args' with severity 'INFO'. Args: tag (str): Logger tag. msg (str): Logger message. args (Any): Auxiliary value. """<line_sep>caller_info=_find_caller()<line_sep>file_info=':'.join([caller_info[0] str(caller_info[1])])<line_sep>self._logger.info(self._extra_fmt+msg file_info tag *args)<block_end><def_stmt>warn self tag msg *args<block_start>""" Log '[tag] msg % args' with severity 'WARNING'. Args: tag (str): Logger tag. msg (str): Logger message. args (Any): Auxiliary value. """<line_sep>caller_info=_find_caller()<line_sep>file_info=':'.join([caller_info[0] str(caller_info[1])])<line_sep>self._logger.warning(self._extra_fmt+msg file_info tag *args)<block_end><def_stmt>error self tag msg *args<block_start>""" Log '[tag] msg % args' with severity 'ERROR'. Args: tag (str): Logger tag. msg (str): Logger message. args (Any): Auxiliary value. """<line_sep>caller_info=_find_caller()<line_sep>file_info=':'.join([caller_info[0] str(caller_info[1])])<line_sep>self._logger.error(self._extra_fmt+msg file_info tag *args)<block_end><block_end>
<import_from_future_stmt> absolute_import print_function unicode_literals<if_stmt>__name__<eq>"__main__"<block_start><import_from_stmt>.cli cli<line_sep>cli.wormhole()<block_end><else_stmt># raise ImportError('this module should not be imported') <block_start><pass><block_end>
<import_stmt>torch<import_stmt>torch.nn.functional<as>F<class_stmt>SelfAttnFunc(torch.autograd.Function)<block_start>@staticmethod<def_stmt>forward ctx use_time_mask is_training heads scale inputs input_weights output_weights input_biases output_biases mask is_additive_mask dropout_prob <block_start>use_biases_t=torch.tensor([input_biases<is><not><none>])<line_sep>heads_t=torch.tensor([heads])<line_sep>scale_t=torch.tensor([scale])<line_sep>dropout_prob_t=torch.tensor([dropout_prob])<line_sep>null_tensor=torch.tensor([])<line_sep>head_dim=inputs.size(2)<floordiv>heads<line_sep># Input Linear GEMM # input1: (activations) [seql_q, seqs, embed_dim(1024)] # input2: (weights) [embed_dim*3 (3072), embed_dim (1024)] (transpose [0,1]) # output: [seql_q, seqs, embed_dim*3] # GEMM: ( (seql_q*seqs) x embed_dim ) x ( embed_dim x embed_dim*3 ) = (seql_q*seqs x embed_dim*3) <if_stmt>use_biases_t[0]<block_start>input_lin_results=torch.addmm(input_biases inputs.view(inputs.size(0)<times>inputs.size(1) inputs.size(2)) input_weights.transpose(0 1) beta=1.0 alpha=1.0 )<block_end><else_stmt><block_start>input_lin_results=torch.mm(inputs.view(inputs.size(0)<times>inputs.size(1) inputs.size(2)) input_weights.transpose(0 1))<block_end>input_lin_results=input_lin_results.view(inputs.size(0) inputs.size(1) input_weights.size(0))<line_sep># Slice out q,k,v from one big Input Linear outuput (should only impact meta data, no copies!) # Sequences and heads are combined to make the batch of the Batched GEMM # input_lin_results: [seql_q, seqs, heads(16), 3, head_dim(64)] # input_lin_results: [seql_q, batches=seqs*heads, 3, head_dim] input_lin_results=input_lin_results.view(inputs.size(0) inputs.size(1)<times>heads 3 head_dim)<line_sep>queries=input_lin_results[: : 0 :]<line_sep>keys=input_lin_results[: : 1 :]<line_sep>values=input_lin_results[: : 2 :]<line_sep># Matmul1 Batched GEMMs # The output tensor is specified prior to the Batch GEMM because baddbmm requires its specification # baddbmm is used to apply the scale parameter via the Batched GEMM's alpha parameter instead of # a separate elementwise operation. 
# Input1: (Queries) [seql_q, seqs*heads, head_dim] tranpose(0,1) # Input2: (Keys) [seql_k, seqs*heads, head_dim] transpose(0,1) # output: [seqs*heads, seql_q, seql_k] # GEMM: Per batch: ( seql_q x head_dim ) x ( head_dim x seql_k ) = ( seql_q x seql_k ) matmul1_results=torch.empty((queries.size(1) queries.size(0) keys.size(0)) dtype=queries.dtype device=torch.device("cuda"))<line_sep>matmul1_results=torch.baddbmm(matmul1_results queries.transpose(0 1) keys.transpose(0 1).transpose(1 2) out=matmul1_results beta=0.0 alpha=scale_t[0] )<if_stmt>mask<is><not><none># Self Attention Time Mask <block_start><if_stmt>use_time_mask<block_start><assert_stmt>len(mask.size())<eq>2 "Timing mask is not 2D!"<assert_stmt>mask.size(0)<eq>mask.size(1) "Sequence length should match!"<line_sep>mask=mask.to(torch.bool)<line_sep>matmul1_results=matmul1_results.masked_fill_(mask float("-inf"))<block_end># Key Padding Mask <else_stmt><block_start>batches,seql_q,seql_k=matmul1_results.size()<line_sep>seqs=int(batches/heads)<line_sep>matmul1_results=matmul1_results.view(seqs heads seql_q seql_k)<if_stmt>is_additive_mask<block_start>matmul1_results=matmul1_results+mask.unsqueeze(1).unsqueeze(2)<block_end><else_stmt><block_start>mask=mask.to(torch.bool)<line_sep>matmul1_results=matmul1_results.masked_fill_(mask.unsqueeze(1).unsqueeze(2) float("-inf"))<block_end>matmul1_results=matmul1_results.view(seqs<times>heads seql_q seql_k)<block_end><block_end>softmax_results=F.softmax(matmul1_results dim=-1)<line_sep># Dropout - is not executed for inference <if_stmt>is_training<block_start>dropout_results,dropout_mask=torch._fused_dropout(softmax_results p=(1.0-dropout_prob_t[0]))<block_end><else_stmt><block_start>dropout_results=softmax_results<line_sep>dropout_mask=null_tensor<block_end># Matmul2 Batched GEMMs # The output tensor specification is needed here to specify the non-standard output. # Given that pytorch cannot currently perform autograd with an output tensor specified, # this requires a backward pass specified. 
# Input1: from_softmax [seqs*heads, seql_q, seql_k] # Input2: (values) [seql_v, seqs*heads, head_dim] transpose(0,1) # Output: [seql_q, seqs*heads, head_dim] transpose(0,1) # GEMM: Per batch: ( seql_q x seql_k ) x ( seql_k x head_dim ) = (seql_q x head_dim) matmul2_results=torch.empty((dropout_results.size(1) dropout_results.size(0) values.size(2)) dtype=dropout_results.dtype device=torch.device("cuda") ).transpose(1 0)<line_sep>matmul2_results=torch.bmm(dropout_results values.transpose(0 1) out=matmul2_results)<line_sep>matmul2_results=(matmul2_results.transpose(0 1).contiguous().view(inputs.size(0) inputs.size(1) inputs.size(2)))<line_sep># Output Linear GEMM # Input1: (activations) [seql_q, seqs, embed_dim=heads*head_dim] # Input2: (weights) [ embed_dim, embed_dim ] transpose(0,1) # Output: [ seql_q, seqs, embed_dim ] # GEMM: ( seql_q*seqs x embed_dim ) x ( embed_dim x embed_dim ) = ( seql_q*seqs x embed_dim ) <if_stmt>use_biases_t[0]<block_start>outputs=torch.addmm(output_biases matmul2_results.view(inputs.size(0)<times>inputs.size(1) inputs.size(2)) output_weights.transpose(0 1) beta=1.0 alpha=1.0 )<block_end><else_stmt><block_start>outputs=torch.mm(matmul2_results.view(inputs.size(0)<times>inputs.size(1) inputs.size(2)) output_weights.transpose(0 1))<block_end>outputs=outputs.view(inputs.size(0) inputs.size(1) output_weights.size(0))<line_sep>ctx.save_for_backward(use_biases_t heads_t scale_t matmul2_results dropout_results softmax_results input_lin_results inputs input_weights output_weights dropout_mask dropout_prob_t )<line_sep><return>outputs.detach()<block_end>@staticmethod<def_stmt>backward ctx output_grads<block_start>(use_biases_t heads_t scale_t matmul2_results dropout_results softmax_results input_lin_results inputs input_weights output_weights dropout_mask dropout_prob_t )=ctx.saved_tensors<line_sep>head_dim=inputs.size(2)<floordiv>heads_t[0]<line_sep># Slice out q,k,v from one big Input Linear outuput (should only impact meta data, no copies!) # Sequences and heads are combined to make the batch of the Batched GEMM # input_lin_results: [seql_q, seqs, heads(16), 3, head_dim(64)] # input_lin_results: [seql_q, batches=seqs*heads, 3, head_dim] input_lin_results=input_lin_results.view(inputs.size(0) inputs.size(1)<times>heads_t[0] 3 head_dim)<line_sep>queries=input_lin_results[: : 0 :]<line_sep>keys=input_lin_results[: : 1 :]<line_sep>values=input_lin_results[: : 2 :]<line_sep># Slice out q,k,v from one big set of gradients entering the input linear's bprop (should only impact meta data, no copies!) # The gradients are identical in size to the Input Linear outputs. # The tensor is declared before hand to properly slice out query, key, and value grads. 
input_lin_results_grads=torch.empty_like(input_lin_results)<line_sep>queries_grads=input_lin_results_grads[: : 0 :]<line_sep>keys_grads=input_lin_results_grads[: : 1 :]<line_sep>values_grads=input_lin_results_grads[: : 2 :]<line_sep># Output Linear GEMM - DGRAD # Input1: (data grads) [seql_q, seqs, embed_dim=heads*head_dim] # Input2: (weights) [ embed_dim, embed_dim ] # Output: [ seql_q, seqs, embed_dim ] # GEMM: ( seql_q*seqs x embed_dim ) x ( embed_dim x embed_dim ) = ( seql_q*seqs x embed_dim ) output_lin_grads=torch.mm(output_grads.view(output_grads.size(0)<times>output_grads.size(1) output_grads.size(2)) output_weights)<line_sep>output_lin_grads=output_lin_grads.view(output_grads.size(0) output_grads.size(1) output_weights.size(1))<line_sep># Output Linear GEMM - WGRAD # Input1: (data grads) [seql_q*seqs, embed_dim=heads*head_dim] transpose(0,1) # Input2: (activations) [seql_q*seqs, embed_dim ] # Output: [ seql_q, seqs, embed_dim ] # GEMM: ( embed_dim x seql_q*seqs ) x ( seql_q*seqs x embed_dim ) = ( embed_dim x embed_dim ) output_weight_grads=torch.mm(output_grads.view(output_grads.size(0)<times>output_grads.size(1) output_grads.size(2)).transpose(0 1) matmul2_results.view(matmul2_results.size(0)<times>matmul2_results.size(1) matmul2_results.size(2)) )<line_sep>output_lin_grads=output_lin_grads.view(inputs.size(0) inputs.size(1)<times>heads_t[0] head_dim).transpose(0 1)<if_stmt>use_biases_t[0]<block_start>output_bias_grads=torch.sum(output_grads.view(output_grads.size(0)<times>output_grads.size(1) output_grads.size(2)) 0)<block_end><else_stmt><block_start>output_bias_grads=<none><block_end># Matmul2 - DGRAD1 # Input1: (data grads) [seql_q, seqs*heads, head_dim] transpose(0,1) # Input2: (activations) [seql_k, seqs*heads, head_dim] transpose(0,1).transpose(1,2) # Output: [seqs*heads, seql_q, seql_k] # GEMM: Per batch: ( seql_q x head_dim ) x ( head_dim x seql_k ) = ( seql_q x seql_k ) matmul2_dgrad1=torch.bmm(output_lin_grads values.transpose(0 1).transpose(1 2))<line_sep># Matmul2 - DGRAD2 # Input1: (data grads) [seql_q, seqs*heads, head_dim] transpose(0,1) # Input2: (activations) [seql_k, seqs*heads, head_dim] transpose(0,1).transpose(1,2) # Output: [seqs*heads, seql_q, seql_k] # GEMM: Per batch: ( seql_q x head_dim ) x ( head_dim x seql_k ) = ( seql_q x seql_k ) values_grads=torch.bmm(dropout_results.transpose(1 2) output_lin_grads out=values_grads.transpose(0 1))<line_sep># Mask and Scaling for Dropout (not a publically documented op) dropout_grads=torch._masked_scale(matmul2_dgrad1 dropout_mask 1.0/(1.0-dropout_prob_t[0]))<line_sep># Softmax Grad (not a publically documented op) softmax_grads=torch._softmax_backward_data(dropout_grads softmax_results -1 softmax_results)<line_sep># Matmul1 - DGRAD1 # Input1: (data grads) [seqs*heads, seql_q, seql_k] # Input2: (activations) [seql_k, seqs*heads, head_dim] transpose(0,1) # Output: [seqs*heads, seql_q, head_dim] transpose(0,1) # GEMM: Per batch: ( seql_q x seql_k ) x ( seql_k x head_dim ) = ( seql_q x head_dim ) queries_grads=torch.baddbmm(queries_grads.transpose(0 1) softmax_grads keys.transpose(0 1) out=queries_grads.transpose(0 1) beta=0.0 alpha=scale_t[0] )<line_sep># Matmul1 - DGRAD2 # Input1: (data grads) [seqs*heads, seql_q, seql_k] transpose(1,2) # Input2: (activations) [seql_q, seqs*heads, head_dim] transpose(0,1) # Output: [seqs*heads, seql_k, head_dim] transpose(0,1) # GEMM: Per batch: ( seql_k x seql_q ) x ( seql_q x head_dim ) = ( seql_k x head_dim ) keys_grads=torch.baddbmm(keys_grads.transpose(0 1) 
softmax_grads.transpose(1 2) queries.transpose(0 1) out=keys_grads.transpose(0 1) beta=0.0 alpha=scale_t[0] )<line_sep># Input Linear GEMM - DGRAD # input1: (data grads) [seql_q, seqs, 3*embed_dim(3072)] # input2: (weights) [embed_dim*3 (3072), embed_dim (1024)] # output: [seql_q, seqs, embed_dim] # GEMM: ( (seql_q*seqs) x 3*embed_dim ) x ( 3*embed_dim x embed_dim ) = (seql_q*seqs x embed_dim) input_lin_results_grads=input_lin_results_grads.view(inputs.size(0)<times>inputs.size(1) heads_t[0]<times>3<times>head_dim)<line_sep>input_grads=torch.mm(input_lin_results_grads input_weights)<line_sep>input_grads=input_grads.view(inputs.size(0) inputs.size(1) inputs.size(2))<line_sep># Input Linear GEMM - WGRAD # input1: (data grads) [seql_q*seqs, 3*embed_dim(3072)] # input2: (activations) [seql_q*seqs, embed_dim(1024)] # output: [3*embed_dim, embed_dim] # GEMM: ( 3*embed_dim x seql_q*seqs ) x ( seql_q*seqs x embed_dim ) = (3*embed_dim x embed_dim) input_weight_grads=torch.mm(input_lin_results_grads.transpose(0 1) inputs.view(inputs.size(0)<times>inputs.size(1) inputs.size(2)))<if_stmt>use_biases_t[0]<block_start>input_bias_grads=torch.sum(input_lin_results_grads 0)<block_end><else_stmt><block_start>input_bias_grads=<none><block_end><return>(<none> <none> <none> <none> input_grads input_weight_grads output_weight_grads input_bias_grads output_bias_grads <none> <none> )<block_end><block_end>self_attn_func=SelfAttnFunc.apply<line_sep>
<import_from_stmt>django.contrib admin<import_from_stmt>django.urls path<import_from_stmt>.models BookLoan Library<import_from_stmt>.views CustomView<class_stmt>BookLoanInline(admin.StackedInline)<block_start>model=BookLoan<line_sep>extra=1<line_sep>readonly_fields=("id" "duration")<line_sep>fields=("book" "imprint" "status" "due_back" "borrower" "loan_start" "duration" )<block_end>@admin.register(BookLoan)<class_stmt>BookLoanAdmin(admin.ModelAdmin)<block_start>list_display=("book" "status" "borrower" "due_back" "id")<line_sep>list_filter=("status" "due_back")<line_sep>autocomplete_fields=("borrower" )<line_sep>search_fields=("book__title" )<line_sep>readonly_fields=("id" )<line_sep>fieldsets=((<none> {"fields":("book" "imprint" "id")}) ("Availability" {"fields":("status" "due_back" "duration" "borrower")}) )<def_stmt>get_urls self<block_start>""" Add in a custom view to demonstrate extending the admin URLs. """<line_sep>urls=super().get_urls()<line_sep><return>urls+[path("custom_view" CustomView.as_view() name="custom_view")]<block_end><def_stmt>response_change self request obj<block_start>ret=super().response_change(request obj)<if_stmt>"reserve"<in>request.POST<block_start>obj.status="r"<line_sep>obj.save()<block_end><return>ret<block_end><block_end>@admin.register(Library)<class_stmt>LibraryAdmin(admin.ModelAdmin)<block_start>list_display=("name" "address" "librarian")<block_end>
# Helper code to plot binary losses. # # <NAME> (http://eli.thegreenplace.net) # This code is in the public domain <import_from_future_stmt> print_function<import_stmt>matplotlib.pyplot<as>plt<import_stmt>numpy<as>np<if_stmt>__name__<eq>'__main__'<block_start>fig,ax=plt.subplots()<line_sep>fig.set_tight_layout(<true>)<line_sep>xs=np.linspace(-2 2 500)<line_sep># plot L0/1 loss ax.plot(xs np.where(xs<l>0 np.ones_like(xs) np.zeros_like(xs)) color='r' linewidth=2.0 label='$L_{01}$')<line_sep># plot square loss ax.plot(xs (xs-1)<power>2 linestyle='-.' label='$L_2$')<line_sep># plot hinge loss ax.plot(xs np.maximum(np.zeros_like(xs) 1-xs) color='g' linewidth=2.0 label='$L_h$')<line_sep>ax.grid(<true>)<line_sep>plt.ylim((-1 4))<line_sep>ax.legend()<line_sep>fig.savefig('loss.png' dpi=80)<line_sep>plt.show()<block_end>
<import_from_stmt>pytest_djangoapp configure_djangoapp_plugin<line_sep>pytest_plugins=configure_djangoapp_plugin(extend_INSTALLED_APPS=['django.contrib.sessions' 'django.contrib.messages' ] extend_MIDDLEWARE=['django.contrib.sessions.middleware.SessionMiddleware' 'django.contrib.messages.middleware.MessageMiddleware' ])<line_sep>
<import_stmt>enum<import_from_stmt>typing Dict List<import_from_stmt>odmantic.field Field<import_from_stmt>odmantic.model Model<class_stmt>TreeKind(str enum.Enum)<block_start>BIG="big"<line_sep>SMALL="small"<block_end><class_stmt>TreeModel(Model)<block_start>name:str=Field(primary_key=<true> default="<NAME> montagnes")<line_sep>average_size:float=Field(mongo_name="size")<line_sep>discovery_year:int<line_sep>kind:TreeKind<line_sep>genesis_continents:List[str]<line_sep>per_continent_density:Dict[str float]<block_end>
<import_stmt>subprocess os<line_sep>ue4_win=r"C:\Program Files\Epic Games\UE_4.16"<line_sep>ue4_linux="/home/qiuwch/workspace/UE416"<line_sep>ue4_mac='/Users/Shared/Epic Games/UE_4.16'<line_sep>win_uprojects=[r'C:\qiuwch\workspace\uprojects\UE4RealisticRendering\RealisticRendering.uproject' r'C:\qiuwch\workspace\uprojects\UE4ArchinteriorsVol2Scene1\ArchinteriorsVol2Scene1.uproject' r'C:\qiuwch\workspace\uprojects\UE4ArchinteriorsVol2Scene2\ArchinteriorsVol2Scene2.uproject' r'C:\qiuwch\workspace\uprojects\UE4ArchinteriorsVol2Scene3\ArchinteriorsVol2Scene3.uproject' r'C:\qiuwch\workspace\uprojects\UE4UrbanCity\UrbanCity.uproject' r'D:\workspace\uprojects\Matinee\Matinee.uproject' r'D:\workspace\uprojects\PhotorealisticCharacter\PhotorealisticCharacter2.uproject' ]<line_sep>linux_uprojects=[os.path.expanduser('~/workspace/uprojects/UE4RealisticRendering/RealisticRendering.uproject') os.path.expanduser('~/workspace/uprojects/UE4ArchinteriorsVol2Scene1/ArchinteriorsVol2Scene1.uproject') os.path.expanduser('~/workspace/uprojects/UE4ArchinteriorsVol2Scene2/ArchinteriorsVol2Scene2.uproject') os.path.expanduser('~/workspace/uprojects/UE4ArchinteriorsVol2Scene3/ArchinteriorsVol2Scene3.uproject') os.path.expanduser("~/workspace/uprojects/UE4UrbanCity/UrbanCity.uproject") ]<line_sep>mac_uprojects=[os.path.expanduser('~/workspace/UnrealEngine/Templates/FP_FirstPerson/FP_FirstPerson.uproject') os.path.expanduser('~/uprojects/RealisticRendering/RealisticRendering.uproject') os.path.expanduser('~/uprojects/UE4ArchinteriorsVol2Scene1/ArchinteriorsVol2Scene1.uproject') os.path.expanduser('~/uprojects/UE4ArchinteriorsVol2Scene2/ArchinteriorsVol2Scene2.uproject') os.path.expanduser('~/uprojects/UE4ArchinteriorsVol2Scene3/ArchinteriorsVol2Scene3.uproject') os.path.expanduser('~/uprojects/UE4UrbanCity/UrbanCity.uproject') ]<line_sep>uprojects=[]<for_stmt>uproject_path win_uprojects<block_start>uproject_name=os.path.basename(uproject_path).split('.')[0]<line_sep>uprojects.append(dict(uproject_path=uproject_path ue4_path=ue4_win log_file='log/win_%s.log'%uproject_name) )<block_end><for_stmt>uproject_path linux_uprojects<block_start>uproject_name=os.path.basename(uproject_path).split('.')[0]<line_sep>uprojects.append(dict(uproject_path=uproject_path ue4_path=ue4_linux log_file='log/linux_%s.log'%uproject_name) )<block_end><for_stmt>uproject_path mac_uprojects<block_start>uproject_name=os.path.basename(uproject_path).split('.')[0]<line_sep>uprojects.append(dict(uproject_path=uproject_path ue4_path=ue4_mac log_file='log/mac_%s.log'%uproject_name) )<block_end><if_stmt>__name__<eq>'__main__'<block_start><for_stmt>uproject uprojects<block_start>uproject_path=uproject['uproject_path']<if_stmt><not>os.path.isfile(uproject_path)<block_start>print("Can not find uproject file %s, skip this project"%uproject_path)<line_sep><continue><block_end>cmd=['python' 'build.py' '--UE4' uproject['ue4_path'] # '--output', uproject['output_folder'], uproject['uproject_path']]<line_sep>print(cmd)<line_sep>subprocess.call(cmd stdout=open(uproject['log_file'] 'w'))<with_stmt>open(uproject['log_file'])<as>f<block_start>lines=f.readlines()<line_sep>print(''.join(lines[-10:]))<block_end><block_end><block_end># Print the last few lines
# coding=utf-8 # Copyright 2021 RigL Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Lint as: python3 """Functions for pruning FLAX masked models."""<import_stmt>collections<import_from_stmt>typing Any Callable Mapping Optional Union<import_stmt>flax<import_stmt>jax.numpy<as>jnp<import_from_stmt>rigl.experimental.jax.pruning masked<def_stmt>weight_magnitude weights<block_start>"""Creates weight magnitude-based saliencies, given a weight matrix."""<line_sep><return>jnp.absolute(weights)<block_end><def_stmt>prune model pruning_rate saliency_fn=weight_magnitude mask=<none> compare_fn=jnp.greater<block_start>"""Returns a mask for a model where the params in each layer are pruned using a saliency function. Args: model: The model to create a pruning mask for. pruning_rate: The fraction of lowest magnitude saliency weights that are pruned. If a float, the same rate is used for all layers, otherwise if it is a mapping, it must contain a rate for all masked layers in the model. saliency_fn: A function that returns a float number used to rank the importance of individual weights in the layer. mask: If the model has an existing mask, the mask will be applied before pruning the model. compare_fn: A pairwise operator to compare saliency with threshold, and return True if the saliency indicates the value should not be masked. Returns: A pruned mask for the given model. """<if_stmt><not>mask<block_start>mask=masked.simple_mask(model jnp.ones masked.WEIGHT_PARAM_NAMES)<block_end><if_stmt><not>isinstance(pruning_rate collections.Mapping)<block_start>pruning_rate_dict={}<for_stmt>param_name,_ masked.iterate_mask(mask)# Get the layer name from the parameter's full name/path. <block_start>layer_name=param_name.split('/')[-2]<line_sep>pruning_rate_dict[layer_name]=pruning_rate<block_end>pruning_rate=pruning_rate_dict<block_end><for_stmt>param_path,param_mask masked.iterate_mask(mask)<block_start>split_param_path=param_path.split('/')<line_sep>layer_name=split_param_path[-2]<line_sep>param_name=split_param_path[-1]<line_sep># If we don't have a pruning rate for the given layer, don't mask it. <if_stmt>layer_name<in>pruning_rate<and>mask[layer_name][param_name]<is><not><none><block_start>param_value=model.params[layer_name][masked.MaskedModule.UNMASKED][param_name]<line_sep># Here any existing mask is first applied to weight matrix. # Note: need to check explicitly is not None for np array. <if_stmt>param_mask<is><not><none><block_start>saliencies=saliency_fn(param_mask<times>param_value)<block_end><else_stmt><block_start>saliencies=saliency_fn(param_value)<block_end># TODO: Use partition here (partial sort) instead of sort, # since it's O(N), not O(N log N), however JAX doesn't support it. sorted_param=jnp.sort(jnp.abs(saliencies.flatten()))<line_sep># Figure out the weight magnitude threshold. 
threshold_index=jnp.round(pruning_rate[layer_name]<times>sorted_param.size).astype(jnp.int32)<line_sep>threshold=sorted_param[threshold_index]<line_sep>mask[layer_name][param_name]=jnp.array(compare_fn(saliencies threshold) dtype=jnp.int32)<block_end><block_end><return>mask<block_end>
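A self-contained sketch of the magnitude-threshold step that `prune` performs per layer, written against a bare `jax.numpy` array instead of a masked FLAX model; the weight values and the 0.5 pruning rate are arbitrary assumptions for illustration.

```python
import jax.numpy as jnp

# Illustrative sketch of weight-magnitude pruning on a single weight matrix.
weights = jnp.array([[0.3, -0.05, 1.2], [-0.7, 0.01, 0.4]])
pruning_rate = 0.5

saliencies = jnp.absolute(weights)                 # same saliency as weight_magnitude()
sorted_saliencies = jnp.sort(saliencies.flatten())
threshold_index = jnp.round(pruning_rate * sorted_saliencies.size).astype(jnp.int32)
threshold = sorted_saliencies[threshold_index]

# Keep (mask == 1) only the weights whose saliency exceeds the threshold;
# entries at or below the threshold are zeroed out.
mask = jnp.array(jnp.greater(saliencies, threshold), dtype=jnp.int32)
```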
<import_stmt>pytest<import_from_stmt>fuzz_lightyear.datastore _ALL_POST_FUZZ_HOOKS_BY_OPERATION<import_from_stmt>fuzz_lightyear.datastore _ALL_POST_FUZZ_HOOKS_BY_TAG<import_from_stmt>fuzz_lightyear.datastore _RERUN_POST_FUZZ_HOOKS_BY_OPERATION<import_from_stmt>fuzz_lightyear.datastore _RERUN_POST_FUZZ_HOOKS_BY_TAG<import_from_stmt>fuzz_lightyear.datastore get_excluded_operations<import_from_stmt>fuzz_lightyear.datastore get_included_tags<import_from_stmt>fuzz_lightyear.datastore get_non_vulnerable_operations<import_from_stmt>fuzz_lightyear.datastore get_user_defined_mapping<import_from_stmt>fuzz_lightyear.plugins get_enabled_plugins<import_from_stmt>fuzz_lightyear.request get_victim_session_factory<import_from_stmt>fuzz_lightyear.supplements.abstraction get_abstraction<line_sep>@pytest.fixture(autouse=<true>)<def_stmt>clear_caches <block_start>get_abstraction.cache_clear()<line_sep>get_user_defined_mapping.cache_clear()<line_sep>get_enabled_plugins.cache_clear()<line_sep>get_victim_session_factory.cache_clear()<line_sep>get_excluded_operations.cache_clear()<line_sep>get_non_vulnerable_operations.cache_clear()<line_sep>get_included_tags.cache_clear()<line_sep>_ALL_POST_FUZZ_HOOKS_BY_OPERATION.clear()<line_sep>_ALL_POST_FUZZ_HOOKS_BY_TAG.clear()<line_sep>_RERUN_POST_FUZZ_HOOKS_BY_OPERATION.clear()<line_sep>_RERUN_POST_FUZZ_HOOKS_BY_TAG.clear()<block_end>@pytest.fixture(autouse=<true>)<def_stmt>ignore_hypothesis_non_interactive_example_warning <block_start>"""In theory we're not supposed to use hypothesis' strategy.example(), but fuzz-lightyear isn't using hypothesis in a normal way. """<import_stmt>warnings<import_from_stmt>hypothesis.errors NonInteractiveExampleWarning<line_sep>warnings.filterwarnings('ignore' category=NonInteractiveExampleWarning )<block_end>
<import_from_stmt>tests run_main_and_assert<line_sep>FAST_LOCAL_TEST_ARGS="--exp-name local_test --datasets mnist"<concat>" --network LeNet --num-tasks 5 --seed 1 --batch-size 32"<concat>" --nepochs 2 --num-workers 0 --stop-at-task 3"<def_stmt>test_finetuning_stop_at_task <block_start>args_line=FAST_LOCAL_TEST_ARGS<line_sep>args_line<augadd>" --approach finetuning"<line_sep>run_main_and_assert(args_line)<block_end>
# Author: <NAME> (@optider) # Github Profile: https://github.com/Optider/ # Problem Link: https://leetcode.com/problems/contains-duplicate/ <class_stmt>Solution<block_start><def_stmt>containsDuplicate self nums:List[int]<arrow>bool<block_start>count={}<for_stmt>n nums<block_start><if_stmt>count.get(n)<ne><none><block_start><return><true><block_end>count[n]=1<block_end><return><false><block_end><block_end>
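For comparison, a set-based variant of the same duplicate check; it is behaviourally identical to the dict-based solution above and is shown only as an illustrative alternative.

```python
from typing import List

class Solution:
    def containsDuplicate(self, nums: List[int]) -> bool:
        seen = set()
        for n in nums:
            if n in seen:     # value already encountered -> duplicate
                return True
            seen.add(n)
        return False
```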
<import_stmt>sys<import_stmt>logging<import_stmt>unittest<import_from_stmt>testfixtures LogCapture<import_from_stmt>twisted.python.failure Failure<import_from_stmt>scrapy.utils.log failure_to_exc_info TopLevelFormatter LogCounterHandler StreamLogger <import_from_stmt>scrapy.utils.test get_crawler<import_from_stmt>scrapy.extensions telnet<class_stmt>FailureToExcInfoTest(unittest.TestCase)<block_start><def_stmt>test_failure self<block_start><try_stmt><block_start>0/0<block_end><except_stmt>ZeroDivisionError<block_start>exc_info=sys.exc_info()<line_sep>failure=Failure()<block_end>self.assertTupleEqual(exc_info failure_to_exc_info(failure))<block_end><def_stmt>test_non_failure self<block_start>self.assertIsNone(failure_to_exc_info('test'))<block_end><block_end><class_stmt>TopLevelFormatterTest(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>self.handler=LogCapture()<line_sep>self.handler.addFilter(TopLevelFormatter(['test']))<block_end><def_stmt>test_top_level_logger self<block_start>logger=logging.getLogger('test')<with_stmt>self.handler<as>log<block_start>logger.warning('test log msg')<block_end>log.check(('test' 'WARNING' 'test log msg'))<block_end><def_stmt>test_children_logger self<block_start>logger=logging.getLogger('test.test1')<with_stmt>self.handler<as>log<block_start>logger.warning('test log msg')<block_end>log.check(('test' 'WARNING' 'test log msg'))<block_end><def_stmt>test_overlapping_name_logger self<block_start>logger=logging.getLogger('test2')<with_stmt>self.handler<as>log<block_start>logger.warning('test log msg')<block_end>log.check(('test2' 'WARNING' 'test log msg'))<block_end><def_stmt>test_different_name_logger self<block_start>logger=logging.getLogger('different')<with_stmt>self.handler<as>log<block_start>logger.warning('test log msg')<block_end>log.check(('different' 'WARNING' 'test log msg'))<block_end><block_end><class_stmt>LogCounterHandlerTest(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>settings={'LOG_LEVEL':'WARNING'}<if_stmt><not>telnet.TWISTED_CONCH_AVAILABLE# disable it to avoid the extra warning <block_start>settings['TELNETCONSOLE_ENABLED']=<false><block_end>self.logger=logging.getLogger('test')<line_sep>self.logger.setLevel(logging.NOTSET)<line_sep>self.logger.propagate=<false><line_sep>self.crawler=get_crawler(settings_dict=settings)<line_sep>self.handler=LogCounterHandler(self.crawler)<line_sep>self.logger.addHandler(self.handler)<block_end><def_stmt>tearDown self<block_start>self.logger.propagate=<true><line_sep>self.logger.removeHandler(self.handler)<block_end><def_stmt>test_init self<block_start>self.assertIsNone(self.crawler.stats.get_value('log_count/DEBUG'))<line_sep>self.assertIsNone(self.crawler.stats.get_value('log_count/INFO'))<line_sep>self.assertIsNone(self.crawler.stats.get_value('log_count/WARNING'))<line_sep>self.assertIsNone(self.crawler.stats.get_value('log_count/ERROR'))<line_sep>self.assertIsNone(self.crawler.stats.get_value('log_count/CRITICAL'))<block_end><def_stmt>test_accepted_level self<block_start>self.logger.error('test log msg')<line_sep>self.assertEqual(self.crawler.stats.get_value('log_count/ERROR') 1)<block_end><def_stmt>test_filtered_out_level self<block_start>self.logger.debug('test log msg')<line_sep>self.assertIsNone(self.crawler.stats.get_value('log_count/INFO'))<block_end><block_end><class_stmt>StreamLoggerTest(unittest.TestCase)<block_start><def_stmt>setUp 
self<block_start>self.stdout=sys.stdout<line_sep>logger=logging.getLogger('test')<line_sep>logger.setLevel(logging.WARNING)<line_sep>sys.stdout=StreamLogger(logger logging.ERROR)<block_end><def_stmt>tearDown self<block_start>sys.stdout=self.stdout<block_end><def_stmt>test_redirect self<block_start><with_stmt>LogCapture()<as>log<block_start>print('test log msg')<block_end>log.check(('test' 'ERROR' 'test log msg'))<block_end><block_end>
<import_from_stmt>django.conf.urls url<import_from_stmt>. views<line_sep>urlpatterns=[url(r'^$' views.home name='home') url(r'^piechart/' views.demo_piechart name='demo_piechart') url(r'^linechart/' views.demo_linechart name='demo_linechart') url(r'^linechart_without_date/' views.demo_linechart_without_date name='demo_linechart_without_date') url(r'^linewithfocuschart/' views.demo_linewithfocuschart name='demo_linewithfocuschart') url(r'^multibarchart/' views.demo_multibarchart name='demo_multibarchart') url(r'^stackedareachart/' views.demo_stackedareachart name='demo_stackedareachart') url(r'^multibarhorizontalchart/' views.demo_multibarhorizontalchart name='demo_multibarhorizontalchart') url(r'^lineplusbarchart/' views.demo_lineplusbarchart name='demo_lineplusbarchart') url(r'^cumulativelinechart/' views.demo_cumulativelinechart name='demo_cumulativelinechart') url(r'^discretebarchart/' views.demo_discretebarchart name='demo_discretebarchart') url(r'^discretebarchart_with_date/' views.demo_discretebarchart_with_date name='demo_discretebarchart_date') url(r'^scatterchart/' views.demo_scatterchart name='demo_scatterchart') url(r'^linechart_with_ampm/' views.demo_linechart_with_ampm name='demo_linechart_with_ampm') # url(r'^demoproject/', include('demoproject.foo.urls')), ]<line_sep>
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- <import_stmt>unittest<import_from_stmt>azext_devops.dev.common.format trim_for_display date_time_to_only_date<class_stmt>TestFormatMethods(unittest.TestCase)<block_start><def_stmt>test_trim_for_display self<block_start>input='Gallery extensions for Portal Extension'<line_sep>output=trim_for_display(input 20)<line_sep>self.assertEqual(output 'Gallery extensions f...')<line_sep>input='Aex platform'<line_sep>output=trim_for_display(input 20)<line_sep>self.assertEqual(output input)<line_sep>input=''<line_sep>output=trim_for_display(input 20)<line_sep>self.assertEqual(output input)<line_sep>input=<none><line_sep>output=trim_for_display(input 20)<line_sep>self.assertEqual(output input)<block_end><def_stmt>test_date_time_to_only_date self<block_start>input='2019-02-24T02:45:41.277000+00:00'<line_sep>output=date_time_to_only_date(input)<line_sep>self.assertEqual(output '2019-02-24')<line_sep>input='Aex platform'<line_sep>output=date_time_to_only_date(input)<line_sep>self.assertEqual(output input)<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>unittest.main()<block_end>
""" Test get_obj_value set_obj_value has_obj_value """<import_stmt>pytest<import_from_stmt>dayu_widgets utils<class_stmt>_HasNameAgeObject(object)<block_start><def_stmt>__init__ self name age<block_start>super(_HasNameAgeObject self).__init__()<line_sep>self.name=name<line_sep>self.age=age<block_end><block_end>@pytest.mark.parametrize('obj' ({'name':'xiaoming' 'age':18} _HasNameAgeObject('xiaoming' 18)))<class_stmt>TestObjValue(object)<block_start>"""Test get_obj_value has_obj_value set_obj_value collection."""<line_sep>@pytest.mark.parametrize('attr, default, result' (('name' 'hhh' 'xiaoming') ('age' 0 18) ('score' 0 0)))<def_stmt>test_get_obj_value self obj attr default result<block_start>"""Test get_obj_value with dict/object as arg. """<assert_stmt>utils.get_obj_value(obj attr default)<eq>result<block_end>@pytest.mark.parametrize('attr, result' (('name' <true>) ('age' <true>) ('sex' <false>) ))<def_stmt>test_has_obj_value self obj attr result<block_start>"""Test has_obj_value with dict/object as arg. """<assert_stmt>utils.has_obj_value(obj attr)<eq>result<block_end>@pytest.mark.parametrize('attr, value' (('name' 'xiaohua') ('age' 30) ('id' 80) ))<def_stmt>test_set_obj_value self obj attr value<block_start>"""Test set_obj_value with dict/object as arg. """<line_sep>utils.set_obj_value(obj attr value)<assert_stmt>utils.get_obj_value(obj attr)<eq>value<block_end><block_end>
<import_stmt>sys yaml test_appliance<def_stmt>main args=<none><block_start>collections=[]<import_stmt>test_yaml<line_sep>collections.append(test_yaml)<if_stmt>yaml.__with_libyaml__<block_start><import_stmt>test_yaml_ext<line_sep>collections.append(test_yaml_ext)<block_end><return>test_appliance.run(collections args)<block_end><if_stmt>__name__<eq>'__main__'<block_start>main()<block_end>
<import_stmt>datetime<import_from_stmt>abc ABC abstractmethod<import_stmt>pajbot<class_stmt>AccessToken(ABC)<block_start>SHOULD_REFRESH_THRESHOLD=0.9<line_sep>"""Fraction between 0 and 1 indicating what fraction/percentage of the specified full validity period should actually be utilized. E.g. if this is set to 0.9, the implementation will refresh the token once at least 90% of the full validity period (expires_in) is over."""<def_stmt>__init__ self access_token created_at expires_in token_type refresh_token scope<block_start>self.access_token=access_token<line_sep>self.created_at=created_at<line_sep># can both be None self.expires_in=expires_in<if_stmt>self.expires_in<is><not><none><block_start>self.expires_at=self.created_at+self.expires_in<block_end><else_stmt><block_start>self.expires_at=<none><block_end>self.token_type=token_type<line_sep># can be None self.refresh_token=refresh_token<line_sep># always a list, can be empty list self.scope=scope<block_end>@abstractmethod<def_stmt>can_refresh self<block_start><pass><block_end><def_stmt>should_refresh self<block_start>"""Returns True if less than 10% of the token's lifetime remains, False otherwise"""<if_stmt><not>self.can_refresh()<block_start><return><false><block_end># intended lifetime of the token <if_stmt>self.expires_at<is><not><none><block_start>expires_after=self.expires_at-self.created_at<block_end><else_stmt># this is a token that never expires # because we don't want any issues, refresh it anyways <block_start>expires_after=datetime.timedelta(hours=1)<block_end># how much time has passed since token creation token_age=pajbot.utils.now()-self.created_at<line_sep># maximum token age before token should be refreshed (90% of the total token lifetime) max_token_age=expires_after<times>self.SHOULD_REFRESH_THRESHOLD<line_sep># expired? <return>token_age<ge>max_token_age<block_end><def_stmt>jsonify self<block_start>"""serialize for storage"""<if_stmt>self.expires_in<is><none><block_start>expires_in_milliseconds=<none><block_end><else_stmt><block_start>expires_in_milliseconds=self.expires_in.total_seconds()<times>1000<block_end><return>{"access_token":self.access_token "created_at":self.created_at.timestamp()<times>1000 "expires_in":expires_in_milliseconds "token_type":self.token_type "refresh_token":self.refresh_token "scope":self.scope }<block_end>@classmethod<def_stmt>from_json cls json_data<block_start>"""deserialize json produced by jsonify()"""<if_stmt>json_data["expires_in"]<is><none><block_start>expires_in=<none><block_end><else_stmt><block_start>expires_in=datetime.timedelta(milliseconds=json_data["expires_in"])<block_end><return>cls(access_token=json_data["access_token"] created_at=pajbot.utils.datetime_from_utc_milliseconds(json_data["created_at"]) expires_in=expires_in token_type=json_data["token_type"] refresh_token=json_data["refresh_token"] scope=json_data["scope"] )<block_end>@classmethod<def_stmt>from_api_response cls response<block_start>"""Construct new object from twitch response json data"""<line_sep># expires_in is only missing for old Client-IDs to which twitch will respond with # infinitely-lived tokens (the "expires_in" field is absent in that case). 
expires_in_seconds=response.get("expires_in" <none>)<if_stmt>expires_in_seconds<is><none><block_start>expires_in=<none><block_end><else_stmt><block_start>expires_in=datetime.timedelta(seconds=expires_in_seconds)<block_end><return>cls(access_token=response["access_token"] created_at=pajbot.utils.now() expires_in=expires_in token_type=response["token_type"] refresh_token=response.get("refresh_token" <none>) scope=response.get("scope" []) )<block_end>@abstractmethod<def_stmt>refresh self api<block_start><pass><block_end><block_end><class_stmt>UserAccessToken(AccessToken)<block_start><def_stmt>can_refresh self<block_start><return>self.refresh_token<is><not><none><block_end><def_stmt>refresh self api<block_start><if_stmt><not>self.can_refresh()<block_start><raise>ValueError("This user access token cannot be refreshed, it has no refresh token")<block_end><return>api.refresh_user_access_token(self.refresh_token)<block_end>@staticmethod<def_stmt>from_implicit_auth_flow_token access_token<block_start><return>UserAccessToken(access_token=access_token created_at=<none> expires_in=<none> token_type="bearer" refresh_token=<none> scope=[] )<block_end><block_end><class_stmt>AppAccessToken(AccessToken)<block_start><def_stmt>can_refresh self<block_start><return><true><block_end><def_stmt>refresh self api<block_start><return>api.get_app_access_token(self.scope)<block_end><block_end>
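A small standard-library illustration of the `SHOULD_REFRESH_THRESHOLD` rule used by `should_refresh()`: with a hypothetical four-hour token created at a made-up timestamp, the refresh condition flips once 90% of the lifetime has elapsed.

```python
import datetime

SHOULD_REFRESH_THRESHOLD = 0.9  # refresh once 90% of the validity period has elapsed

created_at = datetime.datetime(2021, 1, 1, 12, 0, 0)   # hypothetical creation time
expires_in = datetime.timedelta(hours=4)               # hypothetical token lifetime

now = datetime.datetime(2021, 1, 1, 15, 45, 0)         # 3h45m after creation
token_age = now - created_at
max_token_age = expires_in * SHOULD_REFRESH_THRESHOLD  # 3h36m

print(token_age >= max_token_age)   # True -> should_refresh() would return True
```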
"""Precision for ranking."""<import_stmt>numpy<as>np<import_from_stmt>matchzoo.engine.base_metric BaseMetric sort_and_couple RankingMetric <class_stmt>Precision(RankingMetric)<block_start>"""Precision metric."""<line_sep>ALIAS='precision'<def_stmt>__init__ self k:int=1 threshold:float=0.<block_start>""" :class:`PrecisionMetric` constructor. :param k: Number of results to consider. :param threshold: the label threshold of relevance degree. """<line_sep>self._k=k<line_sep>self._threshold=threshold<block_end><def_stmt>__repr__ self<arrow>str<block_start>""":return: Formated string representation of the metric."""<line_sep><return>f"{self.ALIAS}@{self._k}({self._threshold})"<block_end><def_stmt>__call__ self y_true:np.array y_pred:np.array<arrow>float<block_start>""" Calculate precision@k. Example: >>> y_true = [0, 0, 0, 1] >>> y_pred = [0.2, 0.4, 0.3, 0.1] >>> Precision(k=1)(y_true, y_pred) 0.0 >>> Precision(k=2)(y_true, y_pred) 0.0 >>> Precision(k=4)(y_true, y_pred) 0.25 >>> Precision(k=5)(y_true, y_pred) 0.2 :param y_true: The ground true label of each document. :param y_pred: The predicted scores of each document. :return: Precision @ k :raises: ValueError: len(r) must be >= k. """<if_stmt>self._k<le>0<block_start><raise>ValueError(f"k must be greater than 0."<concat>f"{self._k} received.")<block_end>coupled_pair=sort_and_couple(y_true y_pred)<line_sep>precision=0.0<for_stmt>idx,(label score) enumerate(coupled_pair)<block_start><if_stmt>idx<ge>self._k<block_start><break><block_end><if_stmt>label<g>self._threshold<block_start>precision<augadd>1.<block_end><block_end><return>precision/self._k<block_end><block_end>
<import_stmt>os.path<as>osp<line_sep># Root directory of project ROOT_DIR=osp.abspath(osp.join(osp.dirname(__file__) '..' '..'))<line_sep># Path to data dir _DATA_DIR=osp.abspath(osp.join(ROOT_DIR 'data'))<line_sep># Required dataset entry keys _IM_DIR='image_directory'<line_sep>_ANN_FN='annotation_file'<line_sep># Available datasets COMMON_DATASETS={'coco_2017_train':{_IM_DIR:_DATA_DIR+'/coco/images/train2017' _ANN_FN:_DATA_DIR+'/coco/annotations/instances_train2017.json' } 'coco_2017_val':{_IM_DIR:_DATA_DIR+'/coco/images/val2017' _ANN_FN:_DATA_DIR+'/coco/annotations/instances_val2017.json' } 'coco_2017_test':{_IM_DIR:_DATA_DIR+'/coco/images/test2017' _ANN_FN:_DATA_DIR+'/coco/annotations/image_info_test2017.json' } 'coco_2017_test-dev':{_IM_DIR:_DATA_DIR+'/coco/images/test2017' _ANN_FN:_DATA_DIR+'/coco/annotations/image_info_test-dev2017.json' } 'keypoints_coco_2017_train':{_IM_DIR:_DATA_DIR+'/coco/images/train2017' _ANN_FN:_DATA_DIR+'/coco/annotations/person_keypoints_train2017.json'} 'keypoints_coco_2017_val':{_IM_DIR:_DATA_DIR+'/coco/images/val2017' _ANN_FN:_DATA_DIR+'/coco/annotations/person_keypoints_val2017.json'} 'keypoints_coco_2017_test':{_IM_DIR:_DATA_DIR+'/coco/images/test2017' _ANN_FN:_DATA_DIR+'/coco/annotations/image_info_test2017.json'} 'keypoints_coco_2017_test-dev':{_IM_DIR:_DATA_DIR+'/coco/images/test2017' _ANN_FN:_DATA_DIR+'/coco/annotations/image_info_test-dev2017.json' } 'dense_coco_2017_train':{_IM_DIR:_DATA_DIR+'/coco/images/train2017' _ANN_FN:_DATA_DIR+'/coco/annotations/DensePoseData/densepose_coco_train2017.json' } 'dense_coco_2017_val':{_IM_DIR:_DATA_DIR+'/coco/images/val2017' _ANN_FN:_DATA_DIR+'/coco/annotations/DensePoseData/densepose_coco_val2017.json' } 'dense_coco_2017_test':{_IM_DIR:_DATA_DIR+'/coco/images/test2017' _ANN_FN:_DATA_DIR+'/coco/annotations/DensePoseData/densepose_coco_test.json' } 'CIHP_train':{# new addition by wzh _IM_DIR:_DATA_DIR+'/CIHP/train_img' _ANN_FN:_DATA_DIR+'/CIHP/annotations/CIHP_train.json' } 'CIHP_val':{# new addition by wzh _IM_DIR:_DATA_DIR+'/CIHP/val_img' _ANN_FN:_DATA_DIR+'/CIHP/annotations/CIHP_val.json' } 'CIHP_test':{# new addition by wzh _IM_DIR:_DATA_DIR+'/CIHP/test_img' _ANN_FN:_DATA_DIR+'/CIHP/annotations/CIHP_test.json' } 'MHP-v2_train':{# new addition by wzh _IM_DIR:_DATA_DIR+'/MHP-v2/train_img' _ANN_FN:_DATA_DIR+'/MHP-v2/annotations/MHP-v2_train.json' } 'MHP-v2_val':{# new addition by wzh _IM_DIR:_DATA_DIR+'/MHP-v2/val_img' _ANN_FN:_DATA_DIR+'/MHP-v2/annotations/MHP-v2_val.json' } 'MHP-v2_test':{# new addition by wzh _IM_DIR:_DATA_DIR+'/MHP-v2/test_img' _ANN_FN:_DATA_DIR+'/MHP-v2/annotations/MHP-v2_test_all.json' } 'MHP-v2_test_inter_top10':{# new addition by wzh _IM_DIR:_DATA_DIR+'/MHP-v2/test_img' _ANN_FN:_DATA_DIR+'/MHP-v2/annotations/MHP-v2_test_inter_top10.json' } 'MHP-v2_test_inter_top20':{# new addition by wzh _IM_DIR:_DATA_DIR+'/MHP-v2/test_img' _ANN_FN:_DATA_DIR+'/MHP-v2/annotations/MHP-v2_test_inter_top20.json' } 'PASCAL-Person-Part_train':{# new addition by soeaver _IM_DIR:_DATA_DIR+'/PASCAL-Person-Part/train_img' _ANN_FN:_DATA_DIR+'/PASCAL-Person-Part/annotations/pascal_person_part_train.json' } 'PASCAL-Person-Part_test':{# new addition by soeaver _IM_DIR:_DATA_DIR+'/PASCAL-Person-Part/test_img' _ANN_FN:_DATA_DIR+'/PASCAL-Person-Part/annotations/pascal_person_part_test.json' }}<line_sep>
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** <import_stmt>warnings<import_stmt>pulumi<import_stmt>pulumi.runtime<import_from_stmt>typing Any Mapping Optional Sequence Union overload<import_from_stmt>.. _utilities<import_from_stmt>. outputs<import_from_stmt>._inputs *<line_sep>__all__=['ServicePerimeterArgs' 'ServicePerimeter']<line_sep>@pulumi.input_type<class_stmt>ServicePerimeterArgs<block_start><def_stmt>__init__ __self__ * parent:pulumi.Input[str] title:pulumi.Input[str] description:Optional[pulumi.Input[str]]=<none> name:Optional[pulumi.Input[str]]=<none> perimeter_type:Optional[pulumi.Input[str]]=<none> spec:Optional[pulumi.Input['ServicePerimeterSpecArgs']]=<none> status:Optional[pulumi.Input['ServicePerimeterStatusArgs']]=<none> use_explicit_dry_run_spec:Optional[pulumi.Input[bool]]=<none><block_start>""" The set of arguments for constructing a ServicePerimeter resource. :param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} :param pulumi.Input[str] title: Human readable title. Must be unique within the Policy. :param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect behavior. :param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must begin with a letter and only include alphanumeric and '_'. Format: accessPolicies/{policy_id}/servicePerimeters/{short_name} :param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and bridge. Regular Service Perimeter contains resources, access levels, and restricted services. Every resource can be in at most ONE regular Service Perimeter. In addition to being in a regular service perimeter, a resource can also be in zero or more perimeter bridges. A perimeter bridge only contains resources. Cross project operations are permitted if all effected resources share some perimeter (whether bridge or regular). Perimeter Bridge does not contain access levels or services: those are governed entirely by the regular perimeter that resource is in. Perimeter Bridges are typically useful when building more complex topologies with many independent perimeters that need to share some data with a common perimeter, but should not be able to share data among themselves. Default value is `PERIMETER_TYPE_REGULAR`. Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`. :param pulumi.Input['ServicePerimeterSpecArgs'] spec: Proposed (or dry run) ServicePerimeter configuration. This configuration allows to specify and test ServicePerimeter configuration without enforcing actual access restrictions. Only allowed to be set when the `useExplicitDryRunSpec` flag is set. Structure is documented below. :param pulumi.Input['ServicePerimeterStatusArgs'] status: ServicePerimeter configuration. Specifies sets of resources, restricted services and access levels that determine perimeter content and boundaries. Structure is documented below. :param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists for all Service Perimeters, and that spec is identical to the status for those Service Perimeters. 
When this flag is set, it inhibits the generation of the implicit spec, thereby allowing the user to explicitly provide a configuration ("spec") to use in a dry-run version of the Service Perimeter. This allows the user to test changes to the enforced config ("status") without actually enforcing them. This testing is done through analyzing the differences between currently enforced and suggested restrictions. useExplicitDryRunSpec must bet set to True if any of the fields in the spec are set to non-default values. """<line_sep>pulumi.set(__self__ "parent" parent)<line_sep>pulumi.set(__self__ "title" title)<if_stmt>description<is><not><none><block_start>pulumi.set(__self__ "description" description)<block_end><if_stmt>name<is><not><none><block_start>pulumi.set(__self__ "name" name)<block_end><if_stmt>perimeter_type<is><not><none><block_start>pulumi.set(__self__ "perimeter_type" perimeter_type)<block_end><if_stmt>spec<is><not><none><block_start>pulumi.set(__self__ "spec" spec)<block_end><if_stmt>status<is><not><none><block_start>pulumi.set(__self__ "status" status)<block_end><if_stmt>use_explicit_dry_run_spec<is><not><none><block_start>pulumi.set(__self__ "use_explicit_dry_run_spec" use_explicit_dry_run_spec)<block_end><block_end>@property@pulumi.getter<def_stmt>parent self<arrow>pulumi.Input[str]<block_start>""" The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} """<line_sep><return>pulumi.get(self "parent")<block_end>@parent.setter<def_stmt>parent self value:pulumi.Input[str]<block_start>pulumi.set(self "parent" value)<block_end>@property@pulumi.getter<def_stmt>title self<arrow>pulumi.Input[str]<block_start>""" Human readable title. Must be unique within the Policy. """<line_sep><return>pulumi.get(self "title")<block_end>@title.setter<def_stmt>title self value:pulumi.Input[str]<block_start>pulumi.set(self "title" value)<block_end>@property@pulumi.getter<def_stmt>description self<arrow>Optional[pulumi.Input[str]]<block_start>""" Description of the ServicePerimeter and its use. Does not affect behavior. """<line_sep><return>pulumi.get(self "description")<block_end>@description.setter<def_stmt>description self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "description" value)<block_end>@property@pulumi.getter<def_stmt>name self<arrow>Optional[pulumi.Input[str]]<block_start>""" Resource name for the ServicePerimeter. The short_name component must begin with a letter and only include alphanumeric and '_'. Format: accessPolicies/{policy_id}/servicePerimeters/{short_name} """<line_sep><return>pulumi.get(self "name")<block_end>@name.setter<def_stmt>name self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "name" value)<block_end>@property@pulumi.getter(name="perimeterType")<def_stmt>perimeter_type self<arrow>Optional[pulumi.Input[str]]<block_start>""" Specifies the type of the Perimeter. There are two types: regular and bridge. Regular Service Perimeter contains resources, access levels, and restricted services. Every resource can be in at most ONE regular Service Perimeter. In addition to being in a regular service perimeter, a resource can also be in zero or more perimeter bridges. A perimeter bridge only contains resources. Cross project operations are permitted if all effected resources share some perimeter (whether bridge or regular). Perimeter Bridge does not contain access levels or services: those are governed entirely by the regular perimeter that resource is in. 
Perimeter Bridges are typically useful when building more complex topologies with many independent perimeters that need to share some data with a common perimeter, but should not be able to share data among themselves. Default value is `PERIMETER_TYPE_REGULAR`. Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`. """<line_sep><return>pulumi.get(self "perimeter_type")<block_end>@perimeter_type.setter<def_stmt>perimeter_type self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "perimeter_type" value)<block_end>@property@pulumi.getter<def_stmt>spec self<arrow>Optional[pulumi.Input['ServicePerimeterSpecArgs']]<block_start>""" Proposed (or dry run) ServicePerimeter configuration. This configuration allows to specify and test ServicePerimeter configuration without enforcing actual access restrictions. Only allowed to be set when the `useExplicitDryRunSpec` flag is set. Structure is documented below. """<line_sep><return>pulumi.get(self "spec")<block_end>@spec.setter<def_stmt>spec self value:Optional[pulumi.Input['ServicePerimeterSpecArgs']]<block_start>pulumi.set(self "spec" value)<block_end>@property@pulumi.getter<def_stmt>status self<arrow>Optional[pulumi.Input['ServicePerimeterStatusArgs']]<block_start>""" ServicePerimeter configuration. Specifies sets of resources, restricted services and access levels that determine perimeter content and boundaries. Structure is documented below. """<line_sep><return>pulumi.get(self "status")<block_end>@status.setter<def_stmt>status self value:Optional[pulumi.Input['ServicePerimeterStatusArgs']]<block_start>pulumi.set(self "status" value)<block_end>@property@pulumi.getter(name="useExplicitDryRunSpec")<def_stmt>use_explicit_dry_run_spec self<arrow>Optional[pulumi.Input[bool]]<block_start>""" Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists for all Service Perimeters, and that spec is identical to the status for those Service Perimeters. When this flag is set, it inhibits the generation of the implicit spec, thereby allowing the user to explicitly provide a configuration ("spec") to use in a dry-run version of the Service Perimeter. This allows the user to test changes to the enforced config ("status") without actually enforcing them. This testing is done through analyzing the differences between currently enforced and suggested restrictions. useExplicitDryRunSpec must bet set to True if any of the fields in the spec are set to non-default values. """<line_sep><return>pulumi.get(self "use_explicit_dry_run_spec")<block_end>@use_explicit_dry_run_spec.setter<def_stmt>use_explicit_dry_run_spec self value:Optional[pulumi.Input[bool]]<block_start>pulumi.set(self "use_explicit_dry_run_spec" value)<block_end><block_end>@pulumi.input_type<class_stmt>_ServicePerimeterState<block_start><def_stmt>__init__ __self__ * create_time:Optional[pulumi.Input[str]]=<none> description:Optional[pulumi.Input[str]]=<none> name:Optional[pulumi.Input[str]]=<none> parent:Optional[pulumi.Input[str]]=<none> perimeter_type:Optional[pulumi.Input[str]]=<none> spec:Optional[pulumi.Input['ServicePerimeterSpecArgs']]=<none> status:Optional[pulumi.Input['ServicePerimeterStatusArgs']]=<none> title:Optional[pulumi.Input[str]]=<none> update_time:Optional[pulumi.Input[str]]=<none> use_explicit_dry_run_spec:Optional[pulumi.Input[bool]]=<none><block_start>""" Input properties used for looking up and filtering ServicePerimeter resources. :param pulumi.Input[str] create_time: Time the AccessPolicy was created in UTC. 
:param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect behavior. :param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must begin with a letter and only include alphanumeric and '_'. Format: accessPolicies/{policy_id}/servicePerimeters/{short_name} :param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} :param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and bridge. Regular Service Perimeter contains resources, access levels, and restricted services. Every resource can be in at most ONE regular Service Perimeter. In addition to being in a regular service perimeter, a resource can also be in zero or more perimeter bridges. A perimeter bridge only contains resources. Cross project operations are permitted if all effected resources share some perimeter (whether bridge or regular). Perimeter Bridge does not contain access levels or services: those are governed entirely by the regular perimeter that resource is in. Perimeter Bridges are typically useful when building more complex topologies with many independent perimeters that need to share some data with a common perimeter, but should not be able to share data among themselves. Default value is `PERIMETER_TYPE_REGULAR`. Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`. :param pulumi.Input['ServicePerimeterSpecArgs'] spec: Proposed (or dry run) ServicePerimeter configuration. This configuration allows to specify and test ServicePerimeter configuration without enforcing actual access restrictions. Only allowed to be set when the `useExplicitDryRunSpec` flag is set. Structure is documented below. :param pulumi.Input['ServicePerimeterStatusArgs'] status: ServicePerimeter configuration. Specifies sets of resources, restricted services and access levels that determine perimeter content and boundaries. Structure is documented below. :param pulumi.Input[str] title: Human readable title. Must be unique within the Policy. :param pulumi.Input[str] update_time: Time the AccessPolicy was updated in UTC. :param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists for all Service Perimeters, and that spec is identical to the status for those Service Perimeters. When this flag is set, it inhibits the generation of the implicit spec, thereby allowing the user to explicitly provide a configuration ("spec") to use in a dry-run version of the Service Perimeter. This allows the user to test changes to the enforced config ("status") without actually enforcing them. This testing is done through analyzing the differences between currently enforced and suggested restrictions. useExplicitDryRunSpec must bet set to True if any of the fields in the spec are set to non-default values. 
"""<if_stmt>create_time<is><not><none><block_start>pulumi.set(__self__ "create_time" create_time)<block_end><if_stmt>description<is><not><none><block_start>pulumi.set(__self__ "description" description)<block_end><if_stmt>name<is><not><none><block_start>pulumi.set(__self__ "name" name)<block_end><if_stmt>parent<is><not><none><block_start>pulumi.set(__self__ "parent" parent)<block_end><if_stmt>perimeter_type<is><not><none><block_start>pulumi.set(__self__ "perimeter_type" perimeter_type)<block_end><if_stmt>spec<is><not><none><block_start>pulumi.set(__self__ "spec" spec)<block_end><if_stmt>status<is><not><none><block_start>pulumi.set(__self__ "status" status)<block_end><if_stmt>title<is><not><none><block_start>pulumi.set(__self__ "title" title)<block_end><if_stmt>update_time<is><not><none><block_start>pulumi.set(__self__ "update_time" update_time)<block_end><if_stmt>use_explicit_dry_run_spec<is><not><none><block_start>pulumi.set(__self__ "use_explicit_dry_run_spec" use_explicit_dry_run_spec)<block_end><block_end>@property@pulumi.getter(name="createTime")<def_stmt>create_time self<arrow>Optional[pulumi.Input[str]]<block_start>""" Time the AccessPolicy was created in UTC. """<line_sep><return>pulumi.get(self "create_time")<block_end>@create_time.setter<def_stmt>create_time self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "create_time" value)<block_end>@property@pulumi.getter<def_stmt>description self<arrow>Optional[pulumi.Input[str]]<block_start>""" Description of the ServicePerimeter and its use. Does not affect behavior. """<line_sep><return>pulumi.get(self "description")<block_end>@description.setter<def_stmt>description self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "description" value)<block_end>@property@pulumi.getter<def_stmt>name self<arrow>Optional[pulumi.Input[str]]<block_start>""" Resource name for the ServicePerimeter. The short_name component must begin with a letter and only include alphanumeric and '_'. Format: accessPolicies/{policy_id}/servicePerimeters/{short_name} """<line_sep><return>pulumi.get(self "name")<block_end>@name.setter<def_stmt>name self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "name" value)<block_end>@property@pulumi.getter<def_stmt>parent self<arrow>Optional[pulumi.Input[str]]<block_start>""" The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} """<line_sep><return>pulumi.get(self "parent")<block_end>@parent.setter<def_stmt>parent self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "parent" value)<block_end>@property@pulumi.getter(name="perimeterType")<def_stmt>perimeter_type self<arrow>Optional[pulumi.Input[str]]<block_start>""" Specifies the type of the Perimeter. There are two types: regular and bridge. Regular Service Perimeter contains resources, access levels, and restricted services. Every resource can be in at most ONE regular Service Perimeter. In addition to being in a regular service perimeter, a resource can also be in zero or more perimeter bridges. A perimeter bridge only contains resources. Cross project operations are permitted if all effected resources share some perimeter (whether bridge or regular). Perimeter Bridge does not contain access levels or services: those are governed entirely by the regular perimeter that resource is in. 
Perimeter Bridges are typically useful when building more complex topologies with many independent perimeters that need to share some data with a common perimeter, but should not be able to share data among themselves. Default value is `PERIMETER_TYPE_REGULAR`. Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`. """<line_sep><return>pulumi.get(self "perimeter_type")<block_end>@perimeter_type.setter<def_stmt>perimeter_type self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "perimeter_type" value)<block_end>@property@pulumi.getter<def_stmt>spec self<arrow>Optional[pulumi.Input['ServicePerimeterSpecArgs']]<block_start>""" Proposed (or dry run) ServicePerimeter configuration. This configuration allows to specify and test ServicePerimeter configuration without enforcing actual access restrictions. Only allowed to be set when the `useExplicitDryRunSpec` flag is set. Structure is documented below. """<line_sep><return>pulumi.get(self "spec")<block_end>@spec.setter<def_stmt>spec self value:Optional[pulumi.Input['ServicePerimeterSpecArgs']]<block_start>pulumi.set(self "spec" value)<block_end>@property@pulumi.getter<def_stmt>status self<arrow>Optional[pulumi.Input['ServicePerimeterStatusArgs']]<block_start>""" ServicePerimeter configuration. Specifies sets of resources, restricted services and access levels that determine perimeter content and boundaries. Structure is documented below. """<line_sep><return>pulumi.get(self "status")<block_end>@status.setter<def_stmt>status self value:Optional[pulumi.Input['ServicePerimeterStatusArgs']]<block_start>pulumi.set(self "status" value)<block_end>@property@pulumi.getter<def_stmt>title self<arrow>Optional[pulumi.Input[str]]<block_start>""" Human readable title. Must be unique within the Policy. """<line_sep><return>pulumi.get(self "title")<block_end>@title.setter<def_stmt>title self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "title" value)<block_end>@property@pulumi.getter(name="updateTime")<def_stmt>update_time self<arrow>Optional[pulumi.Input[str]]<block_start>""" Time the AccessPolicy was updated in UTC. """<line_sep><return>pulumi.get(self "update_time")<block_end>@update_time.setter<def_stmt>update_time self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "update_time" value)<block_end>@property@pulumi.getter(name="useExplicitDryRunSpec")<def_stmt>use_explicit_dry_run_spec self<arrow>Optional[pulumi.Input[bool]]<block_start>""" Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists for all Service Perimeters, and that spec is identical to the status for those Service Perimeters. When this flag is set, it inhibits the generation of the implicit spec, thereby allowing the user to explicitly provide a configuration ("spec") to use in a dry-run version of the Service Perimeter. This allows the user to test changes to the enforced config ("status") without actually enforcing them. This testing is done through analyzing the differences between currently enforced and suggested restrictions. useExplicitDryRunSpec must bet set to True if any of the fields in the spec are set to non-default values. 
"""<line_sep><return>pulumi.get(self "use_explicit_dry_run_spec")<block_end>@use_explicit_dry_run_spec.setter<def_stmt>use_explicit_dry_run_spec self value:Optional[pulumi.Input[bool]]<block_start>pulumi.set(self "use_explicit_dry_run_spec" value)<block_end><block_end><class_stmt>ServicePerimeter(pulumi.CustomResource)<block_start>@overload<def_stmt>__init__ __self__ resource_name:str opts:Optional[pulumi.ResourceOptions]=<none> description:Optional[pulumi.Input[str]]=<none> name:Optional[pulumi.Input[str]]=<none> parent:Optional[pulumi.Input[str]]=<none> perimeter_type:Optional[pulumi.Input[str]]=<none> spec:Optional[pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']]]=<none> status:Optional[pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']]]=<none> title:Optional[pulumi.Input[str]]=<none> use_explicit_dry_run_spec:Optional[pulumi.Input[bool]]=<none> __props__=<none><block_start>""" ServicePerimeter describes a set of GCP resources which can freely import and export data amongst themselves, but not export outside of the ServicePerimeter. If a request with a source within this ServicePerimeter has a target outside of the ServicePerimeter, the request will be blocked. Otherwise the request is allowed. There are two types of Service Perimeter - Regular and Bridge. Regular Service Perimeters cannot overlap, a single GCP project can only belong to a single regular Service Perimeter. Service Perimeter Bridges can contain only GCP projects as members, a single GCP project may belong to multiple Service Perimeter Bridges. To get more information about ServicePerimeter, see: * [API documentation](https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters) * How-to Guides * [Service Perimeter Quickstart](https://cloud.google.com/vpc-service-controls/docs/quickstart) > **Warning:** If you are using User ADCs (Application Default Credentials) with this resource, you must specify a `billing_project` and set `user_project_override` to true in the provider configuration. Otherwise the ACM API will return a 403 error. Your account must have the `serviceusage.services.use` permission on the `billing_project` you defined. 
## Example Usage ### Access Context Manager Service Perimeter Basic ```python import pulumi import pulumi_gcp as gcp access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy", parent="organizations/123456789", title="my policy") service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter", parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), status=gcp.accesscontextmanager.ServicePerimeterStatusArgs( restricted_services=["storage.googleapis.com"], ), title="restrict_storage") access_level = gcp.accesscontextmanager.AccessLevel("access-level", basic=gcp.accesscontextmanager.AccessLevelBasicArgs( conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs( device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs( os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs( os_type="DESKTOP_CHROME_OS", )], require_screen_lock=False, ), regions=[ "CH", "IT", "US", ], )], ), parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), title="chromeos_no_lock") ``` ### Access Context Manager Service Perimeter Secure Data Exchange ```python import pulumi import pulumi_gcp as gcp access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy", parent="organizations/123456789", title="my policy") secure_data_exchange = gcp.accesscontextmanager.ServicePerimeters("secure-data-exchange", parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), service_perimeters=[ gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs( name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"), title="", status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs( restricted_services=["storage.googleapis.com"], ), ), gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs( name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"), title="", status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs( restricted_services=["bigtable.googleapis.com"], vpc_accessible_services=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusVpcAccessibleServicesArgs( enable_restriction=True, allowed_services=["bigquery.googleapis.com"], ), ), ), ]) access_level = gcp.accesscontextmanager.AccessLevel("access-level", parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), title="secure_data_exchange", basic=gcp.accesscontextmanager.AccessLevelBasicArgs( conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs( device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs( require_screen_lock=False, os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs( os_type="DESKTOP_CHROME_OS", )], ), regions=[ "CH", "IT", "US", ], )], )) test_access = gcp.accesscontextmanager.ServicePerimeter("test-access", parent=f"accessPolicies/{google_access_context_manager_access_policy['test-access']['name']}", title="%s", perimeter_type="PERIMETER_TYPE_REGULAR", status=gcp.accesscontextmanager.ServicePerimeterStatusArgs( restricted_services=[ "bigquery.googleapis.com", "storage.googleapis.com", ], access_levels=[access_level.name], vpc_accessible_services=gcp.accesscontextmanager.ServicePerimeterStatusVpcAccessibleServicesArgs( enable_restriction=True, allowed_services=[ "bigquery.googleapis.com", "storage.googleapis.com", ], ), ingress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyArgs( 
ingress_from=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromArgs( sources=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromSourceArgs( access_level=google_access_context_manager_access_level["test-access"]["name"], )], identity_type="ANY_IDENTITY", ), ingress_to=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToArgs( resources=["*"], operations=[ gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs( service_name="bigquery.googleapis.com", method_selectors=[ gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs( method="BigQueryStorage.ReadRows", ), gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs( method="TableService.ListTables", ), gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs( permission="bigquery.jobs.get", ), ], ), gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs( service_name="storage.googleapis.com", method_selectors=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs( method="google.storage.objects.create", )], ), ], ), )], egress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyArgs( egress_from=gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyEgressFromArgs( identity_type="ANY_USER_ACCOUNT", ), )], )) ``` ### Access Context Manager Service Perimeter Dry Run ```python import pulumi import pulumi_gcp as gcp access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy", parent="organizations/123456789", title="my policy") service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter", parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), spec=gcp.accesscontextmanager.ServicePerimeterSpecArgs( restricted_services=["storage.googleapis.com"], ), status=gcp.accesscontextmanager.ServicePerimeterStatusArgs( restricted_services=["bigquery.googleapis.com"], ), title="restrict_bigquery_dryrun_storage", use_explicit_dry_run_spec=True) ``` ## Import ServicePerimeter can be imported using any of these accepted formats ```sh $ pulumi import gcp:accesscontextmanager/servicePerimeter:ServicePerimeter default {{name}} ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect behavior. :param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must begin with a letter and only include alphanumeric and '_'. Format: accessPolicies/{policy_id}/servicePerimeters/{short_name} :param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} :param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and bridge. Regular Service Perimeter contains resources, access levels, and restricted services. Every resource can be in at most ONE regular Service Perimeter. In addition to being in a regular service perimeter, a resource can also be in zero or more perimeter bridges. A perimeter bridge only contains resources. Cross project operations are permitted if all effected resources share some perimeter (whether bridge or regular). 
Perimeter Bridge does not contain access levels or services: those are governed entirely by the regular perimeter that resource is in. Perimeter Bridges are typically useful when building more complex topologies with many independent perimeters that need to share some data with a common perimeter, but should not be able to share data among themselves. Default value is `PERIMETER_TYPE_REGULAR`. Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`. :param pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']] spec: Proposed (or dry run) ServicePerimeter configuration. This configuration allows to specify and test ServicePerimeter configuration without enforcing actual access restrictions. Only allowed to be set when the `useExplicitDryRunSpec` flag is set. Structure is documented below. :param pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']] status: ServicePerimeter configuration. Specifies sets of resources, restricted services and access levels that determine perimeter content and boundaries. Structure is documented below. :param pulumi.Input[str] title: Human readable title. Must be unique within the Policy. :param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists for all Service Perimeters, and that spec is identical to the status for those Service Perimeters. When this flag is set, it inhibits the generation of the implicit spec, thereby allowing the user to explicitly provide a configuration ("spec") to use in a dry-run version of the Service Perimeter. This allows the user to test changes to the enforced config ("status") without actually enforcing them. This testing is done through analyzing the differences between currently enforced and suggested restrictions. useExplicitDryRunSpec must bet set to True if any of the fields in the spec are set to non-default values. """<line_sep><ellipsis><block_end>@overload<def_stmt>__init__ __self__ resource_name:str args:ServicePerimeterArgs opts:Optional[pulumi.ResourceOptions]=<none><block_start>""" ServicePerimeter describes a set of GCP resources which can freely import and export data amongst themselves, but not export outside of the ServicePerimeter. If a request with a source within this ServicePerimeter has a target outside of the ServicePerimeter, the request will be blocked. Otherwise the request is allowed. There are two types of Service Perimeter - Regular and Bridge. Regular Service Perimeters cannot overlap, a single GCP project can only belong to a single regular Service Perimeter. Service Perimeter Bridges can contain only GCP projects as members, a single GCP project may belong to multiple Service Perimeter Bridges. To get more information about ServicePerimeter, see: * [API documentation](https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters) * How-to Guides * [Service Perimeter Quickstart](https://cloud.google.com/vpc-service-controls/docs/quickstart) > **Warning:** If you are using User ADCs (Application Default Credentials) with this resource, you must specify a `billing_project` and set `user_project_override` to true in the provider configuration. Otherwise the ACM API will return a 403 error. Your account must have the `serviceusage.services.use` permission on the `billing_project` you defined. 
## Example Usage ### Access Context Manager Service Perimeter Basic ```python import pulumi import pulumi_gcp as gcp access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy", parent="organizations/123456789", title="my policy") service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter", parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), status=gcp.accesscontextmanager.ServicePerimeterStatusArgs( restricted_services=["storage.googleapis.com"], ), title="restrict_storage") access_level = gcp.accesscontextmanager.AccessLevel("access-level", basic=gcp.accesscontextmanager.AccessLevelBasicArgs( conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs( device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs( os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs( os_type="DESKTOP_CHROME_OS", )], require_screen_lock=False, ), regions=[ "CH", "IT", "US", ], )], ), parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), title="chromeos_no_lock") ``` ### Access Context Manager Service Perimeter Secure Data Exchange ```python import pulumi import pulumi_gcp as gcp access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy", parent="organizations/123456789", title="my policy") secure_data_exchange = gcp.accesscontextmanager.ServicePerimeters("secure-data-exchange", parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), service_perimeters=[ gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs( name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"), title="", status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs( restricted_services=["storage.googleapis.com"], ), ), gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs( name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"), title="", status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs( restricted_services=["bigtable.googleapis.com"], vpc_accessible_services=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusVpcAccessibleServicesArgs( enable_restriction=True, allowed_services=["bigquery.googleapis.com"], ), ), ), ]) access_level = gcp.accesscontextmanager.AccessLevel("access-level", parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), title="secure_data_exchange", basic=gcp.accesscontextmanager.AccessLevelBasicArgs( conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs( device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs( require_screen_lock=False, os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs( os_type="DESKTOP_CHROME_OS", )], ), regions=[ "CH", "IT", "US", ], )], )) test_access = gcp.accesscontextmanager.ServicePerimeter("test-access", parent=f"accessPolicies/{google_access_context_manager_access_policy['test-access']['name']}", title="%s", perimeter_type="PERIMETER_TYPE_REGULAR", status=gcp.accesscontextmanager.ServicePerimeterStatusArgs( restricted_services=[ "bigquery.googleapis.com", "storage.googleapis.com", ], access_levels=[access_level.name], vpc_accessible_services=gcp.accesscontextmanager.ServicePerimeterStatusVpcAccessibleServicesArgs( enable_restriction=True, allowed_services=[ "bigquery.googleapis.com", "storage.googleapis.com", ], ), ingress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyArgs( 
ingress_from=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromArgs( sources=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressFromSourceArgs( access_level=google_access_context_manager_access_level["test-access"]["name"], )], identity_type="ANY_IDENTITY", ), ingress_to=gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToArgs( resources=["*"], operations=[ gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs( service_name="bigquery.googleapis.com", method_selectors=[ gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs( method="BigQueryStorage.ReadRows", ), gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs( method="TableService.ListTables", ), gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs( permission="bigquery.jobs.get", ), ], ), gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationArgs( service_name="storage.googleapis.com", method_selectors=[gcp.accesscontextmanager.ServicePerimeterStatusIngressPolicyIngressToOperationMethodSelectorArgs( method="google.storage.objects.create", )], ), ], ), )], egress_policies=[gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyArgs( egress_from=gcp.accesscontextmanager.ServicePerimeterStatusEgressPolicyEgressFromArgs( identity_type="ANY_USER_ACCOUNT", ), )], )) ``` ### Access Context Manager Service Perimeter Dry Run ```python import pulumi import pulumi_gcp as gcp access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy", parent="organizations/123456789", title="my policy") service_perimeter = gcp.accesscontextmanager.ServicePerimeter("service-perimeter", parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), spec=gcp.accesscontextmanager.ServicePerimeterSpecArgs( restricted_services=["storage.googleapis.com"], ), status=gcp.accesscontextmanager.ServicePerimeterStatusArgs( restricted_services=["bigquery.googleapis.com"], ), title="restrict_bigquery_dryrun_storage", use_explicit_dry_run_spec=True) ``` ## Import ServicePerimeter can be imported using any of these accepted formats ```sh $ pulumi import gcp:accesscontextmanager/servicePerimeter:ServicePerimeter default {{name}} ``` :param str resource_name: The name of the resource. :param ServicePerimeterArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. 
"""<line_sep><ellipsis><block_end><def_stmt>__init__ __self__ resource_name:str *args **kwargs<block_start>resource_args,opts=_utilities.get_resource_args_opts(ServicePerimeterArgs pulumi.ResourceOptions *args **kwargs)<if_stmt>resource_args<is><not><none><block_start>__self__._internal_init(resource_name opts **resource_args.__dict__)<block_end><else_stmt><block_start>__self__._internal_init(resource_name *args **kwargs)<block_end><block_end><def_stmt>_internal_init __self__ resource_name:str opts:Optional[pulumi.ResourceOptions]=<none> description:Optional[pulumi.Input[str]]=<none> name:Optional[pulumi.Input[str]]=<none> parent:Optional[pulumi.Input[str]]=<none> perimeter_type:Optional[pulumi.Input[str]]=<none> spec:Optional[pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']]]=<none> status:Optional[pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']]]=<none> title:Optional[pulumi.Input[str]]=<none> use_explicit_dry_run_spec:Optional[pulumi.Input[bool]]=<none> __props__=<none><block_start><if_stmt>opts<is><none><block_start>opts=pulumi.ResourceOptions()<block_end><if_stmt><not>isinstance(opts pulumi.ResourceOptions)<block_start><raise>TypeError('Expected resource options to be a ResourceOptions instance')<block_end><if_stmt>opts.version<is><none><block_start>opts.version=_utilities.get_version()<block_end><if_stmt>opts.id<is><none><block_start><if_stmt>__props__<is><not><none><block_start><raise>TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')<block_end>__props__=ServicePerimeterArgs.__new__(ServicePerimeterArgs)<line_sep>__props__.__dict__["description"]=description<line_sep>__props__.__dict__["name"]=name<if_stmt>parent<is><none><and><not>opts.urn<block_start><raise>TypeError("Missing required property 'parent'")<block_end>__props__.__dict__["parent"]=parent<line_sep>__props__.__dict__["perimeter_type"]=perimeter_type<line_sep>__props__.__dict__["spec"]=spec<line_sep>__props__.__dict__["status"]=status<if_stmt>title<is><none><and><not>opts.urn<block_start><raise>TypeError("Missing required property 'title'")<block_end>__props__.__dict__["title"]=title<line_sep>__props__.__dict__["use_explicit_dry_run_spec"]=use_explicit_dry_run_spec<line_sep>__props__.__dict__["create_time"]=<none><line_sep>__props__.__dict__["update_time"]=<none><block_end>super(ServicePerimeter __self__).__init__('gcp:accesscontextmanager/servicePerimeter:ServicePerimeter' resource_name __props__ opts)<block_end>@staticmethod<def_stmt>get resource_name:str id:pulumi.Input[str] opts:Optional[pulumi.ResourceOptions]=<none> create_time:Optional[pulumi.Input[str]]=<none> description:Optional[pulumi.Input[str]]=<none> name:Optional[pulumi.Input[str]]=<none> parent:Optional[pulumi.Input[str]]=<none> perimeter_type:Optional[pulumi.Input[str]]=<none> spec:Optional[pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']]]=<none> status:Optional[pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']]]=<none> title:Optional[pulumi.Input[str]]=<none> update_time:Optional[pulumi.Input[str]]=<none> use_explicit_dry_run_spec:Optional[pulumi.Input[bool]]=<none><arrow>'ServicePerimeter'<block_start>""" Get an existing ServicePerimeter resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. 
:param pulumi.Input[str] create_time: Time the AccessPolicy was created in UTC. :param pulumi.Input[str] description: Description of the ServicePerimeter and its use. Does not affect behavior. :param pulumi.Input[str] name: Resource name for the ServicePerimeter. The short_name component must begin with a letter and only include alphanumeric and '_'. Format: accessPolicies/{policy_id}/servicePerimeters/{short_name} :param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} :param pulumi.Input[str] perimeter_type: Specifies the type of the Perimeter. There are two types: regular and bridge. Regular Service Perimeter contains resources, access levels, and restricted services. Every resource can be in at most ONE regular Service Perimeter. In addition to being in a regular service perimeter, a resource can also be in zero or more perimeter bridges. A perimeter bridge only contains resources. Cross project operations are permitted if all effected resources share some perimeter (whether bridge or regular). Perimeter Bridge does not contain access levels or services: those are governed entirely by the regular perimeter that resource is in. Perimeter Bridges are typically useful when building more complex topologies with many independent perimeters that need to share some data with a common perimeter, but should not be able to share data among themselves. Default value is `PERIMETER_TYPE_REGULAR`. Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`. :param pulumi.Input[pulumi.InputType['ServicePerimeterSpecArgs']] spec: Proposed (or dry run) ServicePerimeter configuration. This configuration allows to specify and test ServicePerimeter configuration without enforcing actual access restrictions. Only allowed to be set when the `useExplicitDryRunSpec` flag is set. Structure is documented below. :param pulumi.Input[pulumi.InputType['ServicePerimeterStatusArgs']] status: ServicePerimeter configuration. Specifies sets of resources, restricted services and access levels that determine perimeter content and boundaries. Structure is documented below. :param pulumi.Input[str] title: Human readable title. Must be unique within the Policy. :param pulumi.Input[str] update_time: Time the AccessPolicy was updated in UTC. :param pulumi.Input[bool] use_explicit_dry_run_spec: Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists for all Service Perimeters, and that spec is identical to the status for those Service Perimeters. When this flag is set, it inhibits the generation of the implicit spec, thereby allowing the user to explicitly provide a configuration ("spec") to use in a dry-run version of the Service Perimeter. This allows the user to test changes to the enforced config ("status") without actually enforcing them. This testing is done through analyzing the differences between currently enforced and suggested restrictions. useExplicitDryRunSpec must bet set to True if any of the fields in the spec are set to non-default values. 
"""<line_sep>opts=pulumi.ResourceOptions.merge(opts pulumi.ResourceOptions(id=id))<line_sep>__props__=_ServicePerimeterState.__new__(_ServicePerimeterState)<line_sep>__props__.__dict__["create_time"]=create_time<line_sep>__props__.__dict__["description"]=description<line_sep>__props__.__dict__["name"]=name<line_sep>__props__.__dict__["parent"]=parent<line_sep>__props__.__dict__["perimeter_type"]=perimeter_type<line_sep>__props__.__dict__["spec"]=spec<line_sep>__props__.__dict__["status"]=status<line_sep>__props__.__dict__["title"]=title<line_sep>__props__.__dict__["update_time"]=update_time<line_sep>__props__.__dict__["use_explicit_dry_run_spec"]=use_explicit_dry_run_spec<line_sep><return>ServicePerimeter(resource_name opts=opts __props__=__props__)<block_end>@property@pulumi.getter(name="createTime")<def_stmt>create_time self<arrow>pulumi.Output[str]<block_start>""" Time the AccessPolicy was created in UTC. """<line_sep><return>pulumi.get(self "create_time")<block_end>@property@pulumi.getter<def_stmt>description self<arrow>pulumi.Output[Optional[str]]<block_start>""" Description of the ServicePerimeter and its use. Does not affect behavior. """<line_sep><return>pulumi.get(self "description")<block_end>@property@pulumi.getter<def_stmt>name self<arrow>pulumi.Output[str]<block_start>""" Resource name for the ServicePerimeter. The short_name component must begin with a letter and only include alphanumeric and '_'. Format: accessPolicies/{policy_id}/servicePerimeters/{short_name} """<line_sep><return>pulumi.get(self "name")<block_end>@property@pulumi.getter<def_stmt>parent self<arrow>pulumi.Output[str]<block_start>""" The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} """<line_sep><return>pulumi.get(self "parent")<block_end>@property@pulumi.getter(name="perimeterType")<def_stmt>perimeter_type self<arrow>pulumi.Output[Optional[str]]<block_start>""" Specifies the type of the Perimeter. There are two types: regular and bridge. Regular Service Perimeter contains resources, access levels, and restricted services. Every resource can be in at most ONE regular Service Perimeter. In addition to being in a regular service perimeter, a resource can also be in zero or more perimeter bridges. A perimeter bridge only contains resources. Cross project operations are permitted if all effected resources share some perimeter (whether bridge or regular). Perimeter Bridge does not contain access levels or services: those are governed entirely by the regular perimeter that resource is in. Perimeter Bridges are typically useful when building more complex topologies with many independent perimeters that need to share some data with a common perimeter, but should not be able to share data among themselves. Default value is `PERIMETER_TYPE_REGULAR`. Possible values are `PERIMETER_TYPE_REGULAR` and `PERIMETER_TYPE_BRIDGE`. """<line_sep><return>pulumi.get(self "perimeter_type")<block_end>@property@pulumi.getter<def_stmt>spec self<arrow>pulumi.Output[Optional['outputs.ServicePerimeterSpec']]<block_start>""" Proposed (or dry run) ServicePerimeter configuration. This configuration allows to specify and test ServicePerimeter configuration without enforcing actual access restrictions. Only allowed to be set when the `useExplicitDryRunSpec` flag is set. Structure is documented below. """<line_sep><return>pulumi.get(self "spec")<block_end>@property@pulumi.getter<def_stmt>status self<arrow>pulumi.Output[Optional['outputs.ServicePerimeterStatus']]<block_start>""" ServicePerimeter configuration. 
Specifies sets of resources, restricted services and access levels that determine perimeter content and boundaries. Structure is documented below. """<line_sep><return>pulumi.get(self "status")<block_end>@property@pulumi.getter<def_stmt>title self<arrow>pulumi.Output[str]<block_start>""" Human readable title. Must be unique within the Policy. """<line_sep><return>pulumi.get(self "title")<block_end>@property@pulumi.getter(name="updateTime")<def_stmt>update_time self<arrow>pulumi.Output[str]<block_start>""" Time the AccessPolicy was updated in UTC. """<line_sep><return>pulumi.get(self "update_time")<block_end>@property@pulumi.getter(name="useExplicitDryRunSpec")<def_stmt>use_explicit_dry_run_spec self<arrow>pulumi.Output[Optional[bool]]<block_start>""" Use explicit dry run spec flag. Ordinarily, a dry-run spec implicitly exists for all Service Perimeters, and that spec is identical to the status for those Service Perimeters. When this flag is set, it inhibits the generation of the implicit spec, thereby allowing the user to explicitly provide a configuration ("spec") to use in a dry-run version of the Service Perimeter. This allows the user to test changes to the enforced config ("status") without actually enforcing them. This testing is done through analyzing the differences between currently enforced and suggested restrictions. useExplicitDryRunSpec must bet set to True if any of the fields in the spec are set to non-default values. """<line_sep><return>pulumi.get(self "use_explicit_dry_run_spec")<block_end><block_end>
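As a usage sketch for the resource above (not taken from the provider docs; the policy id and perimeter short name below are placeholders): the `get` method can adopt an already-created perimeter into state, and the output properties (`title`, `create_time`, `status`, ...) are then available as Pulumi outputs.

```python
import pulumi
import pulumi_gcp as gcp

# Placeholder identifiers -- real values come from your access policy.
policy_id = "123456789012"
perimeter_id = f"accessPolicies/{policy_id}/servicePerimeters/restrict_storage"

# Adopt an existing ServicePerimeter into state via the `get` classmethod
# documented above; `id` is the full resource name.
existing = gcp.accesscontextmanager.ServicePerimeter.get(
    "existing-perimeter",
    id=perimeter_id)

# Output properties defined on the resource are exported like any Output.
pulumi.export("perimeterTitle", existing.title)
pulumi.export("perimeterCreated", existing.create_time)
```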
<import_stmt>random<import_stmt>numpy<as>np<import_stmt>selfdrive.boardd.tests.boardd_old<as>boardd_old<import_stmt>selfdrive.boardd.boardd<as>boardd<import_from_stmt>common.realtime sec_since_boot<import_from_stmt>cereal log<import_stmt>unittest<def_stmt>generate_random_can_data_list <block_start>can_list=[]<line_sep>cnt=random.randint(1 64)<for_stmt>j range(cnt)<block_start>can_data=np.random.bytes(random.randint(1 8))<line_sep>can_list.append([random.randint(0 128) random.randint(0 128) can_data random.randint(0 128)])<block_end><return>can_list cnt<block_end><class_stmt>TestBoarddApiMethods(unittest.TestCase)<block_start><def_stmt>test_correctness self<block_start><for_stmt>i range(1000)<block_start>can_list,_=generate_random_can_data_list()<line_sep># Sendcan # Old API m_old=boardd_old.can_list_to_can_capnp(can_list 'sendcan').to_bytes()<line_sep># new API m=boardd.can_list_to_can_capnp(can_list 'sendcan')<line_sep>ev_old=log.Event.from_bytes(m_old)<line_sep>ev=log.Event.from_bytes(m)<line_sep>self.assertEqual(ev_old.which() ev.which())<line_sep>self.assertEqual(len(ev.sendcan) len(ev_old.sendcan))<for_stmt>i range(len(ev.sendcan))<block_start>attrs=['address' 'busTime' 'dat' 'src']<for_stmt>attr attrs<block_start>self.assertEqual(getattr(ev.sendcan[i] attr 'new') getattr(ev_old.sendcan[i] attr 'old'))<block_end><block_end># Can m_old=boardd_old.can_list_to_can_capnp(can_list 'can').to_bytes()<line_sep># new API m=boardd.can_list_to_can_capnp(can_list 'can')<line_sep>ev_old=log.Event.from_bytes(m_old)<line_sep>ev=log.Event.from_bytes(m)<line_sep>self.assertEqual(ev_old.which() ev.which())<line_sep>self.assertEqual(len(ev.can) len(ev_old.can))<for_stmt>i range(len(ev.can))<block_start>attrs=['address' 'busTime' 'dat' 'src']<for_stmt>attr attrs<block_start>self.assertEqual(getattr(ev.can[i] attr 'new') getattr(ev_old.can[i] attr 'old'))<block_end><block_end><block_end><block_end><def_stmt>test_performance self<block_start>can_list,cnt=generate_random_can_data_list()<line_sep>recursions=1000<line_sep>n1=sec_since_boot()<for_stmt>i range(recursions)<block_start>boardd_old.can_list_to_can_capnp(can_list 'sendcan').to_bytes()<block_end>n2=sec_since_boot()<line_sep>elapsed_old=n2-n1<line_sep># print('Old API, elapsed time: {} secs'.format(elapsed_old)) n1=sec_since_boot()<for_stmt>i range(recursions)<block_start>boardd.can_list_to_can_capnp(can_list)<block_end>n2=sec_since_boot()<line_sep>elapsed_new=n2-n1<line_sep># print('New API, elapsed time: {} secs'.format(elapsed_new)) self.assertTrue(elapsed_new<l>elapsed_old/2)<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>unittest.main()<block_end>
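For reference, each element produced by `generate_random_can_data_list` is a plain four-item list in the same field order the correctness test compares; a minimal sketch with placeholder values:

```python
# One CAN message as consumed by can_list_to_can_capnp above:
# [address, busTime, dat, src] -- matching the attrs checked in
# test_correctness.
example_can_msg = [
    0x123,        # address (arbitration id), placeholder
    0,            # busTime, placeholder
    b"\x01\x02",  # dat: raw payload bytes (1-8 bytes)
    1,            # src: bus number, placeholder
]
can_list = [example_can_msg]
# boardd.can_list_to_can_capnp(can_list, 'sendcan') serializes this into a
# capnp Event whose sendcan entries expose .address, .busTime, .dat, .src.
```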
# Generated by Django 3.1.5 on 2021-02-17 11:04 <import_from_stmt>django.db migrations<import_stmt>saleor.core.db.fields<import_stmt>saleor.core.utils.editorjs<def_stmt>update_empty_description_field apps schema_editor<block_start>Category=apps.get_model("product" "Category")<line_sep>CategoryTranslation=apps.get_model("product" "CategoryTranslation")<line_sep>Collection=apps.get_model("product" "Collection")<line_sep>CollectionTranslation=apps.get_model("product" "CollectionTranslation")<line_sep>Product=apps.get_model("product" "Product")<line_sep>ProductTranslation=apps.get_model("product" "ProductTranslation")<line_sep>models=[Category CategoryTranslation Collection CollectionTranslation Product ProductTranslation ]<for_stmt>model models<block_start>model.objects.filter(description={}).update(description=<none>)<block_end><block_end><class_stmt>Migration(migrations.Migration)<block_start>dependencies=[("product" "0140_auto_20210125_0905") ]<line_sep>operations=[migrations.AlterField(model_name="category" name="description" field=saleor.core.db.fields.SanitizedJSONField(blank=<true> null=<true> sanitizer=saleor.core.utils.editorjs.clean_editor_js ) ) migrations.AlterField(model_name="categorytranslation" name="description" field=saleor.core.db.fields.SanitizedJSONField(blank=<true> null=<true> sanitizer=saleor.core.utils.editorjs.clean_editor_js ) ) migrations.AlterField(model_name="collection" name="description" field=saleor.core.db.fields.SanitizedJSONField(blank=<true> null=<true> sanitizer=saleor.core.utils.editorjs.clean_editor_js ) ) migrations.AlterField(model_name="collectiontranslation" name="description" field=saleor.core.db.fields.SanitizedJSONField(blank=<true> null=<true> sanitizer=saleor.core.utils.editorjs.clean_editor_js ) ) migrations.AlterField(model_name="product" name="description" field=saleor.core.db.fields.SanitizedJSONField(blank=<true> null=<true> sanitizer=saleor.core.utils.editorjs.clean_editor_js ) ) migrations.AlterField(model_name="producttranslation" name="description" field=saleor.core.db.fields.SanitizedJSONField(blank=<true> null=<true> sanitizer=saleor.core.utils.editorjs.clean_editor_js ) ) migrations.RunPython(update_empty_description_field migrations.RunPython.noop ) ]<block_end>
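Outside of the migration machinery, the forward step of `update_empty_description_field` is an ordinary bulk update; a rough sketch against the live model (import path assumed to follow the usual Saleor layout):

```python
# Rough equivalent of the forward data migration, shown for one model.
from saleor.product.models import Product

# Replace empty editorjs documents ({}) with NULL so the now-nullable
# SanitizedJSONField stores a real "no description" value.
Product.objects.filter(description={}).update(description=None)
```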
""" This makes the functions in torch._C._VariableFunctions available as torch._VF.<funcname> without mypy being able to find them. A subset of those functions are mapped to ATen functions in torch/jit/_builtins.py See https://github.com/pytorch/pytorch/issues/21478 for the reason for introducing torch._VF """<import_stmt>torch<import_stmt>sys<import_stmt>types<class_stmt>VFModule(types.ModuleType)<block_start>vf:types.ModuleType<def_stmt>__init__ self name<block_start>super(VFModule self).__init__(name)<line_sep>self.vf=torch._C._VariableFunctions<block_end><def_stmt>__getattr__ self attr<block_start><return>getattr(self.vf attr)<block_end><block_end>sys.modules[__name__]=VFModule(__name__)<line_sep>
# coding=utf-8 # Copyright 2018 The Google AI Language Team Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_from_future_stmt> absolute_import division print_function unicode_literals<import_stmt>os<import_stmt>unittest<import_from_stmt>transformers.tokenization_xlnet XLNetTokenizer SPIECE_UNDERLINE <import_from_stmt>.tokenization_tests_commons CommonTestCases<line_sep>SAMPLE_VOCAB=os.path.join(os.path.dirname(os.path.abspath(__file__)) 'fixtures/test_sentencepiece.model')<class_stmt>XLNetTokenizationTest(CommonTestCases.CommonTokenizerTester)<block_start>tokenizer_class=XLNetTokenizer<def_stmt>setUp self<block_start>super(XLNetTokenizationTest self).setUp()<line_sep># We have a SentencePiece fixture for testing tokenizer=XLNetTokenizer(SAMPLE_VOCAB keep_accents=<true>)<line_sep>tokenizer.save_pretrained(self.tmpdirname)<block_end><def_stmt>get_tokenizer self **kwargs<block_start><return>XLNetTokenizer.from_pretrained(self.tmpdirname **kwargs)<block_end><def_stmt>get_input_output_texts self<block_start>input_text=u"This is a test"<line_sep>output_text=u"This is a test"<line_sep><return>input_text output_text<block_end><def_stmt>test_full_tokenizer self<block_start>tokenizer=XLNetTokenizer(SAMPLE_VOCAB keep_accents=<true>)<line_sep>tokens=tokenizer.tokenize(u'This is a test')<line_sep>self.assertListEqual(tokens [u'▁This' u'▁is' u'▁a' u'▁t' u'est'])<line_sep>self.assertListEqual(tokenizer.convert_tokens_to_ids(tokens) [285 46 10 170 382])<line_sep>tokens=tokenizer.tokenize(u"I was born in 92000, and this is falsé.")<line_sep>self.assertListEqual(tokens [SPIECE_UNDERLINE+u'I' SPIECE_UNDERLINE+u'was' SPIECE_UNDERLINE+u'b' u'or' u'n' SPIECE_UNDERLINE+u'in' SPIECE_UNDERLINE+u'' u'9' u'2' u'0' u'0' u'0' u',' SPIECE_UNDERLINE+u'and' SPIECE_UNDERLINE+u'this' SPIECE_UNDERLINE+u'is' SPIECE_UNDERLINE+u'f' u'al' u's' u'é' u'.'])<line_sep>ids=tokenizer.convert_tokens_to_ids(tokens)<line_sep>self.assertListEqual(ids [8 21 84 55 24 19 7 0 602 347 347 347 3 12 66 46 72 80 6 0 4])<line_sep>back_tokens=tokenizer.convert_ids_to_tokens(ids)<line_sep>self.assertListEqual(back_tokens [SPIECE_UNDERLINE+u'I' SPIECE_UNDERLINE+u'was' SPIECE_UNDERLINE+u'b' u'or' u'n' SPIECE_UNDERLINE+u'in' SPIECE_UNDERLINE+u'' u'<unk>' u'2' u'0' u'0' u'0' u',' SPIECE_UNDERLINE+u'and' SPIECE_UNDERLINE+u'this' SPIECE_UNDERLINE+u'is' SPIECE_UNDERLINE+u'f' u'al' u's' u'<unk>' u'.'])<block_end><def_stmt>test_tokenizer_lower self<block_start>tokenizer=XLNetTokenizer(SAMPLE_VOCAB do_lower_case=<true>)<line_sep>tokens=tokenizer.tokenize(u"I was born in 92000, and this is falsé.")<line_sep>self.assertListEqual(tokens [SPIECE_UNDERLINE+u'' u'i' SPIECE_UNDERLINE+u'was' SPIECE_UNDERLINE+u'b' u'or' u'n' SPIECE_UNDERLINE+u'in' SPIECE_UNDERLINE+u'' u'9' u'2' u'0' u'0' u'0' u',' SPIECE_UNDERLINE+u'and' SPIECE_UNDERLINE+u'this' SPIECE_UNDERLINE+u'is' SPIECE_UNDERLINE+u'f' u'al' u'se' u'.'])<line_sep>self.assertListEqual(tokenizer.tokenize(u"H\u00E9llo") [u"▁he" u"ll" u"o"])<block_end><def_stmt>test_tokenizer_no_lower 
self<block_start>tokenizer=XLNetTokenizer(SAMPLE_VOCAB do_lower_case=<false>)<line_sep>tokens=tokenizer.tokenize(u"I was born in 92000, and this is falsé.")<line_sep>self.assertListEqual(tokens [SPIECE_UNDERLINE+u'I' SPIECE_UNDERLINE+u'was' SPIECE_UNDERLINE+u'b' u'or' u'n' SPIECE_UNDERLINE+u'in' SPIECE_UNDERLINE+u'' u'9' u'2' u'0' u'0' u'0' u',' SPIECE_UNDERLINE+u'and' SPIECE_UNDERLINE+u'this' SPIECE_UNDERLINE+u'is' SPIECE_UNDERLINE+u'f' u'al' u'se' u'.'])<block_end><def_stmt>test_sequence_builders self<block_start>tokenizer=XLNetTokenizer.from_pretrained("xlnet-base-cased")<line_sep>text=tokenizer.encode("sequence builders")<line_sep>text_2=tokenizer.encode("multi-sequence build")<line_sep>encoded_sentence=tokenizer.add_special_tokens_single_sequence(text)<line_sep>encoded_pair=tokenizer.add_special_tokens_sequence_pair(text text_2)<assert_stmt>encoded_sentence<eq>text+[4 3]<assert_stmt>encoded_pair<eq>text+[4]+text_2+[4 3]<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>unittest.main()<block_end>
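A condensed usage sketch of what the tests above exercise; the helper names match this transformers version (newer releases expose `build_inputs_with_special_tokens` instead), and loading the pretrained vocabulary needs a network connection or a local cache:

```python
from transformers.tokenization_xlnet import XLNetTokenizer

tokenizer = XLNetTokenizer.from_pretrained("xlnet-base-cased")

pieces = tokenizer.tokenize(u"I was born in 92000, and this is falsé.")
ids = tokenizer.convert_tokens_to_ids(pieces)

# As in test_sequence_builders: a single sequence is terminated with the
# separator (id 4) and classifier (id 3) tokens.
with_special = tokenizer.add_special_tokens_single_sequence(ids)
assert with_special[-2:] == [4, 3]
```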
version="1.0.10.dev0"<line_sep>
# Time: sum(O(l * 2^l) for l in range(1, 11)) = O(20 * 2^10) = O(1) # Space: O(1) <class_stmt>Solution(object)<block_start><def_stmt>findInteger self k digit1 digit2<block_start>""" :type k: int :type digit1: int :type digit2: int :rtype: int """<line_sep>MAX_NUM_OF_DIGITS=10<line_sep>INT_MAX=2<power>31-1<if_stmt>digit1<l>digit2<block_start>digit1,digit2=digit2 digit1<block_end>total=2<for_stmt>l xrange(1 MAX_NUM_OF_DIGITS+1)<block_start><for_stmt>mask xrange(total)<block_start>curr,bit=0 total<rshift>1<while_stmt>bit<block_start>curr=curr<times>10+(digit1<if>mask&bit<else>digit2)<line_sep>bit<augrshift>1<block_end><if_stmt>k<l>curr<le>INT_MAX<and>curr%k<eq>0<block_start><return>curr<block_end><block_end>total<auglshift>1<block_end><return>-1<block_end><block_end>
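The solution above enumerates, length by length, every number whose digits are only `digit1`/`digit2`, treating each bit of `mask` as a digit choice and returning the first candidate greater than `k` that `k` divides. An illustrative Python 3 restatement of that enumeration (not part of the original):

```python
def candidates(digit1, digit2, max_len=10):
    """Yield every number built from digit1/digit2, shortest lengths first."""
    big, small = max(digit1, digit2), min(digit1, digit2)
    for length in range(1, max_len + 1):
        for mask in range(1 << length):
            num = 0
            for pos in reversed(range(length)):
                num = num * 10 + (big if mask & (1 << pos) else small)
            yield num


# Example values chosen for illustration: the first multiple of 3 greater
# than 3 whose digits are only 1 and 2 is 12.
first = next(n for n in candidates(1, 2) if n > 3 and n % 3 == 0)
assert first == 12
```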
# Copyright (C) 2020 THL A29 Limited, a Tencent company. # All rights reserved. # Licensed under the BSD 3-Clause License (the "License"); you may # not use this file except in compliance with the License. You may # obtain a copy of the License at # https://opensource.org/licenses/BSD-3-Clause # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" basis, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. See the License for the specific language governing # permissions and limitations under the License. # See the AUTHORS file for names of contributors.
# # Copyright 2013 The py-lmdb authors, all rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted only as authorized by the OpenLDAP # Public License. # # A copy of this license is available in the file LICENSE in the # top-level directory of the distribution or, alternatively, at # <http://www.OpenLDAP.org/license.html>. # # OpenLDAP is a registered trademark of the OpenLDAP Foundation. # # Individual files and/or contributed packages may be copyright by # other parties and/or subject to additional restrictions. # # This work also contains materials derived from public sources. # # Additional information about OpenLDAP can be obtained at # <http://www.openldap.org/>. # """ CPython/CFFI wrapper for OpenLDAP's "Lightning" MDB database. Please see https://lmdb.readthedocs.io/ """<import_from_future_stmt> absolute_import<import_from_future_stmt> with_statement<import_stmt>errno<import_stmt>inspect<import_stmt>os<import_stmt>sys<import_stmt>threading<line_sep>is_win32=sys.platform<eq>'win32'<if_stmt>is_win32<block_start><import_stmt>msvcrt<block_end><try_stmt><block_start><import_stmt>__builtin__<block_end><except_stmt>ImportError<block_start><import_stmt>builtins<as>__builtin__<block_end># type: ignore <import_stmt>lmdb<try_stmt><block_start><import_from_stmt>lmdb _config<block_end><except_stmt>ImportError<block_start>_config=<none><block_end># type: ignore __all__=['Cursor' 'Environment' 'Transaction' '_Database' 'enable_drop_gil' 'version' ]<line_sep>__all__<augadd>['BadDbiError' 'BadRslotError' 'BadTxnError' 'BadValsizeError' 'CorruptedError' 'CursorFullError' 'DbsFullError' 'DiskError' 'Error' 'IncompatibleError' 'InvalidError' 'InvalidParameterError' 'KeyExistsError' 'LockError' 'MapFullError' 'MapResizedError' 'MemoryError' 'NotFoundError' 'PageFullError' 'PageNotFoundError' 'PanicError' 'ReadersFullError' 'ReadonlyError' 'TlsFullError' 'TxnFullError' 'VersionMismatchError' ]<line_sep># Handle moronic Python 3 mess. UnicodeType=getattr(__builtin__ 'unicode' str)<line_sep>BytesType=getattr(__builtin__ 'bytes' str)<line_sep>O_0755=int('0755' 8)<line_sep>O_0111=int('0111' 8)<line_sep>EMPTY_BYTES=UnicodeType().encode()<line_sep># Used to track context across CFFI callbacks. _callbacks=threading.local()<line_sep>_CFFI_CDEF=''' typedef int mode_t; typedef ... MDB_env; typedef struct MDB_txn MDB_txn; typedef struct MDB_cursor MDB_cursor; typedef unsigned int MDB_dbi; enum MDB_cursor_op { MDB_FIRST, MDB_FIRST_DUP, MDB_GET_BOTH, MDB_GET_BOTH_RANGE, MDB_GET_CURRENT, MDB_GET_MULTIPLE, MDB_LAST, MDB_LAST_DUP, MDB_NEXT, MDB_NEXT_DUP, MDB_NEXT_MULTIPLE, MDB_NEXT_NODUP, MDB_PREV, MDB_PREV_DUP, MDB_PREV_NODUP, MDB_SET, MDB_SET_KEY, MDB_SET_RANGE, ... 
}; typedef enum MDB_cursor_op MDB_cursor_op; struct MDB_val { size_t mv_size; void *mv_data; ...; }; typedef struct MDB_val MDB_val; struct MDB_stat { unsigned int ms_psize; unsigned int ms_depth; size_t ms_branch_pages; size_t ms_leaf_pages; size_t ms_overflow_pages; size_t ms_entries; ...; }; typedef struct MDB_stat MDB_stat; struct MDB_envinfo { void *me_mapaddr; size_t me_mapsize; size_t me_last_pgno; size_t me_last_txnid; unsigned int me_maxreaders; unsigned int me_numreaders; ...; }; typedef struct MDB_envinfo MDB_envinfo; typedef int (*MDB_cmp_func)(const MDB_val *a, const MDB_val *b); typedef void (*MDB_rel_func)(MDB_val *item, void *oldptr, void *newptr, void *relctx); char *mdb_strerror(int err); int mdb_env_create(MDB_env **env); int mdb_env_open(MDB_env *env, const char *path, unsigned int flags, mode_t mode); int mdb_env_copy2(MDB_env *env, const char *path, int flags); int mdb_env_copyfd2(MDB_env *env, int fd, int flags); int mdb_env_stat(MDB_env *env, MDB_stat *stat); int mdb_env_info(MDB_env *env, MDB_envinfo *stat); int mdb_env_get_maxkeysize(MDB_env *env); int mdb_env_sync(MDB_env *env, int force); void mdb_env_close(MDB_env *env); int mdb_env_set_flags(MDB_env *env, unsigned int flags, int onoff); int mdb_env_get_flags(MDB_env *env, unsigned int *flags); int mdb_env_get_path(MDB_env *env, const char **path); int mdb_env_set_mapsize(MDB_env *env, size_t size); int mdb_env_set_maxreaders(MDB_env *env, unsigned int readers); int mdb_env_get_maxreaders(MDB_env *env, unsigned int *readers); int mdb_env_set_maxdbs(MDB_env *env, MDB_dbi dbs); int mdb_txn_begin(MDB_env *env, MDB_txn *parent, unsigned int flags, MDB_txn **txn); int mdb_txn_commit(MDB_txn *txn); void mdb_txn_reset(MDB_txn *txn); int mdb_txn_renew(MDB_txn *txn); void mdb_txn_abort(MDB_txn *txn); size_t mdb_txn_id(MDB_txn *txn); int mdb_dbi_open(MDB_txn *txn, const char *name, unsigned int flags, MDB_dbi *dbi); int mdb_stat(MDB_txn *txn, MDB_dbi dbi, MDB_stat *stat); int mdb_drop(MDB_txn *txn, MDB_dbi dbi, int del_); int mdb_get(MDB_txn *txn, MDB_dbi dbi, MDB_val *key, MDB_val *data); int mdb_cursor_open(MDB_txn *txn, MDB_dbi dbi, MDB_cursor **cursor); void mdb_cursor_close(MDB_cursor *cursor); int mdb_cursor_del(MDB_cursor *cursor, unsigned int flags); int mdb_cursor_count(MDB_cursor *cursor, size_t *countp); int mdb_cursor_get(MDB_cursor *cursor, MDB_val *key, MDB_val*data, int op); typedef int (MDB_msg_func)(const char *msg, void *ctx); int mdb_reader_list(MDB_env *env, MDB_msg_func *func, void *ctx); int mdb_reader_check(MDB_env *env, int *dead); int mdb_dbi_flags(MDB_txn *txn, MDB_dbi dbi, unsigned int *flags); #define MDB_VERSION_MAJOR ... #define MDB_VERSION_MINOR ... #define MDB_VERSION_PATCH ... #define EACCES ... #define EAGAIN ... #define EINVAL ... #define ENOMEM ... #define ENOSPC ... #define MDB_BAD_RSLOT ... #define MDB_BAD_DBI ... #define MDB_BAD_TXN ... #define MDB_BAD_VALSIZE ... #define MDB_CORRUPTED ... #define MDB_CURSOR_FULL ... #define MDB_DBS_FULL ... #define MDB_INCOMPATIBLE ... #define MDB_INVALID ... #define MDB_KEYEXIST ... #define MDB_MAP_FULL ... #define MDB_MAP_RESIZED ... #define MDB_NOTFOUND ... #define MDB_PAGE_FULL ... #define MDB_PAGE_NOTFOUND ... #define MDB_PANIC ... #define MDB_READERS_FULL ... #define MDB_TLS_FULL ... #define MDB_TXN_FULL ... #define MDB_VERSION_MISMATCH ... #define MDB_APPEND ... #define MDB_APPENDDUP ... #define MDB_CP_COMPACT ... #define MDB_CREATE ... #define MDB_DUPFIXED ... #define MDB_DUPSORT ... #define MDB_INTEGERDUP ... #define MDB_INTEGERKEY ... 
#define MDB_MAPASYNC ... #define MDB_NODUPDATA ... #define MDB_NOLOCK ... #define MDB_NOMEMINIT ... #define MDB_NOMETASYNC ... #define MDB_NOOVERWRITE ... #define MDB_NORDAHEAD ... #define MDB_NOSUBDIR ... #define MDB_NOSYNC ... #define MDB_NOTLS ... #define MDB_RDONLY ... #define MDB_REVERSEKEY ... #define MDB_WRITEMAP ... // Helpers below inline MDB_vals. Avoids key alloc/dup on CPython, where // CFFI will use PyString_AS_STRING when passed as an argument. static int pymdb_del(MDB_txn *txn, MDB_dbi dbi, char *key_s, size_t keylen, char *val_s, size_t vallen); static int pymdb_put(MDB_txn *txn, MDB_dbi dbi, char *key_s, size_t keylen, char *val_s, size_t vallen, unsigned int flags); static int pymdb_get(MDB_txn *txn, MDB_dbi dbi, char *key_s, size_t keylen, MDB_val *val_out); static int pymdb_cursor_get(MDB_cursor *cursor, char *key_s, size_t key_len, char *data_s, size_t data_len, MDB_val *key, MDB_val *data, int op); static int pymdb_cursor_put(MDB_cursor *cursor, char *key_s, size_t keylen, char *val_s, size_t vallen, int flags); // Prefaults a range static void preload(int rc, void *x, size_t size); '''<line_sep>_CFFI_CDEF_PATCHED=''' int mdb_env_copy3(MDB_env *env, const char *path, unsigned int flags, MDB_txn *txn); int mdb_env_copyfd3(MDB_env *env, int fd, unsigned int flags, MDB_txn *txn); '''<line_sep>_CFFI_VERIFY=''' #include <sys/stat.h> #include "lmdb.h" #include "preload.h" // Helpers below inline MDB_vals. Avoids key alloc/dup on CPython, where // CFFI will use PyString_AS_STRING when passed as an argument. static int pymdb_get(MDB_txn *txn, MDB_dbi dbi, char *key_s, size_t keylen, MDB_val *val_out) { MDB_val key = {keylen, key_s}; int rc = mdb_get(txn, dbi, &key, val_out); return rc; } static int pymdb_put(MDB_txn *txn, MDB_dbi dbi, char *key_s, size_t keylen, char *val_s, size_t vallen, unsigned int flags) { MDB_val key = {keylen, key_s}; MDB_val val = {vallen, val_s}; return mdb_put(txn, dbi, &key, &val, flags); } static int pymdb_del(MDB_txn *txn, MDB_dbi dbi, char *key_s, size_t keylen, char *val_s, size_t vallen) { MDB_val key = {keylen, key_s}; MDB_val val = {vallen, val_s}; MDB_val *valptr; if(vallen == 0) { valptr = NULL; } else { valptr = &val; } return mdb_del(txn, dbi, &key, valptr); } static int pymdb_cursor_get(MDB_cursor *cursor, char *key_s, size_t key_len, char *data_s, size_t data_len, MDB_val *key, MDB_val *data, int op) { MDB_val tmp_key = {key_len, key_s}; MDB_val tmp_data = {data_len, data_s}; int rc = mdb_cursor_get(cursor, &tmp_key, &tmp_data, op); if(! rc) { *key = tmp_key; *data = tmp_data; } return rc; } static int pymdb_cursor_put(MDB_cursor *cursor, char *key_s, size_t keylen, char *val_s, size_t vallen, int flags) { MDB_val tmpkey = {keylen, key_s}; MDB_val tmpval = {vallen, val_s}; return mdb_cursor_put(cursor, &tmpkey, &tmpval, flags); } '''<if_stmt><not>lmdb._reading_docs()<block_start><import_stmt>cffi<line_sep># Try to use distutils-bundled CFFI configuration to avoid a recompile and # potential compile errors during first module import. 
_config_vars=_config.CONFIG<if>_config<else>{'extra_compile_args':['-w'] 'extra_sources':['lib/mdb.c' 'lib/midl.c'] 'extra_include_dirs':['lib'] 'extra_library_dirs':[] 'libraries':[]}<line_sep>_have_patched_lmdb='-DHAVE_PATCHED_LMDB=1'<in>_config.CONFIG['extra_compile_args']# type: ignore <if_stmt>_have_patched_lmdb<block_start>_CFFI_CDEF<augadd>_CFFI_CDEF_PATCHED<block_end>_ffi=cffi.FFI()<line_sep>_ffi.cdef(_CFFI_CDEF)<line_sep>_lib=_ffi.verify(_CFFI_VERIFY modulename='lmdb_cffi' ext_package='lmdb' sources=_config_vars['extra_sources'] extra_compile_args=_config_vars['extra_compile_args'] include_dirs=_config_vars['extra_include_dirs'] libraries=_config_vars['libraries'] library_dirs=_config_vars['extra_library_dirs'])<line_sep>@_ffi.callback("int(char *, void *)")<def_stmt>_msg_func s _<block_start>"""mdb_msg_func() callback. Appends `s` to _callbacks.msg_func list. """<line_sep>_callbacks.msg_func.append(_ffi.string(s).decode())<line_sep><return>0<block_end><block_end><class_stmt>Error(Exception)<block_start>"""Raised when an LMDB-related error occurs, and no more specific :py:class:`lmdb.Error` subclass exists."""<def_stmt>__init__ self what code=0<block_start>self.what=what<line_sep>self.code=code<line_sep>self.reason=_ffi.string(_lib.mdb_strerror(code))<line_sep>msg=what<if_stmt>code<block_start>msg='%s: %s'%(what self.reason)<line_sep>hint=getattr(self 'MDB_HINT' <none>)<if_stmt>hint<block_start>msg<augadd>' (%s)'%(hint )<block_end><block_end>Exception.__init__(self msg)<block_end><block_end><class_stmt>KeyExistsError(Error)<block_start>"""Key/data pair already exists."""<line_sep>MDB_NAME='MDB_KEYEXIST'<block_end><class_stmt>NotFoundError(Error)<block_start>"""No matching key/data pair found. Normally py-lmdb indicates a missing key by returning ``None``, or a user-supplied default value, however LMDB may return this error where py-lmdb does not know to convert it into a non-exceptional return. 
"""<line_sep>MDB_NAME='MDB_NOTFOUND'<block_end><class_stmt>PageNotFoundError(Error)<block_start>"""Request page not found."""<line_sep>MDB_NAME='MDB_PAGE_NOTFOUND'<block_end><class_stmt>CorruptedError(Error)<block_start>"""Located page was of the wrong type."""<line_sep>MDB_NAME='MDB_CORRUPTED'<block_end><class_stmt>PanicError(Error)<block_start>"""Update of meta page failed."""<line_sep>MDB_NAME='MDB_PANIC'<block_end><class_stmt>VersionMismatchError(Error)<block_start>"""Database environment version mismatch."""<line_sep>MDB_NAME='MDB_VERSION_MISMATCH'<block_end><class_stmt>InvalidError(Error)<block_start>"""File is not an MDB file."""<line_sep>MDB_NAME='MDB_INVALID'<block_end><class_stmt>MapFullError(Error)<block_start>"""Environment map_size= limit reached."""<line_sep>MDB_NAME='MDB_MAP_FULL'<line_sep>MDB_HINT='Please use a larger Environment(map_size=) parameter'<block_end><class_stmt>DbsFullError(Error)<block_start>"""Environment max_dbs= limit reached."""<line_sep>MDB_NAME='MDB_DBS_FULL'<line_sep>MDB_HINT='Please use a larger Environment(max_dbs=) parameter'<block_end><class_stmt>ReadersFullError(Error)<block_start>"""Environment max_readers= limit reached."""<line_sep>MDB_NAME='MDB_READERS_FULL'<line_sep>MDB_HINT='Please use a larger Environment(max_readers=) parameter'<block_end><class_stmt>TlsFullError(Error)<block_start>"""Thread-local storage keys full - too many environments open."""<line_sep>MDB_NAME='MDB_TLS_FULL'<block_end><class_stmt>TxnFullError(Error)<block_start>"""Transaciton has too many dirty pages - transaction too big."""<line_sep>MDB_NAME='MDB_TXN_FULL'<line_sep>MDB_HINT='Please do less work within your transaction'<block_end><class_stmt>CursorFullError(Error)<block_start>"""Internal error - cursor stack limit reached."""<line_sep>MDB_NAME='MDB_CURSOR_FULL'<block_end><class_stmt>PageFullError(Error)<block_start>"""Internal error - page has no more space."""<line_sep>MDB_NAME='MDB_PAGE_FULL'<block_end><class_stmt>MapResizedError(Error)<block_start>"""Database contents grew beyond environment map_size=."""<line_sep>MDB_NAME='MDB_MAP_RESIZED'<block_end><class_stmt>IncompatibleError(Error)<block_start>"""Operation and DB incompatible, or DB flags changed."""<line_sep>MDB_NAME='MDB_INCOMPATIBLE'<block_end><class_stmt>BadRslotError(Error)<block_start>"""Invalid reuse of reader locktable slot."""<line_sep>MDB_NAME='MDB_BAD_RSLOT'<block_end><class_stmt>BadDbiError(Error)<block_start>"""The specified DBI was changed unexpectedly."""<line_sep>MDB_NAME='MDB_BAD_DBI'<block_end><class_stmt>BadTxnError(Error)<block_start>"""Transaction cannot recover - it must be aborted."""<line_sep>MDB_NAME='MDB_BAD_TXN'<block_end><class_stmt>BadValsizeError(Error)<block_start>"""Too big key/data, key is empty, or wrong DUPFIXED size."""<line_sep>MDB_NAME='MDB_BAD_VALSIZE'<block_end><class_stmt>ReadonlyError(Error)<block_start>"""An attempt was made to modify a read-only database."""<line_sep>MDB_NAME='EACCES'<block_end><class_stmt>InvalidParameterError(Error)<block_start>"""An invalid parameter was specified."""<line_sep>MDB_NAME='EINVAL'<block_end><class_stmt>LockError(Error)<block_start>"""The environment was locked by another process."""<line_sep>MDB_NAME='EAGAIN'<block_end><class_stmt>MemoryError(Error)<block_start>"""Out of memory."""<line_sep>MDB_NAME='ENOMEM'<block_end><class_stmt>DiskError(Error)<block_start>"""No more disk space."""<line_sep>MDB_NAME='ENOSPC'<block_end># Prepare _error_map, a mapping of integer MDB_ERROR_CODE to exception class. 
<if_stmt><not>lmdb._reading_docs()<block_start>_error_map={}<for_stmt>obj list(globals().values())<block_start><if_stmt>inspect.isclass(obj)<and>issubclass(obj Error)<and>obj<is><not>Error<block_start>_error_map[getattr(_lib obj.MDB_NAME)]=obj<block_end><block_end><del_stmt>obj<block_end><def_stmt>_error what rc<block_start>"""Lookup and instantiate the correct exception class for the error code `rc`, using :py:class:`Error` if no better class exists."""<line_sep><return>_error_map.get(rc Error)(what rc)<block_end><class_stmt>Some_LMDB_Resource_That_Was_Deleted_Or_Closed(object)<block_start>"""We need this because CFFI on PyPy treats None as cffi.NULL, instead of throwing an exception it feeds LMDB null pointers. That means simply replacing native handles with None during _invalidate() will cause NULL pointer dereferences. Instead use this class, and its weird name to cause a TypeError, with a very obvious string in the exception text. The only alternatives to this are inserting a check around every single use of a native handle to ensure the handle is still valid prior to calling LMDB, or doing no crash-safety checking at all. """<def_stmt>__nonzero__ self<block_start><return>0<block_end><def_stmt>__bool__ self<block_start><return><false><block_end><def_stmt>__repr__ self<block_start><return>"<This used to be a LMDB resource but it was deleted or closed>"<block_end><block_end>_invalid=Some_LMDB_Resource_That_Was_Deleted_Or_Closed()<def_stmt>_mvbuf mv<block_start>"""Convert a MDB_val cdata to a CFFI buffer object."""<line_sep><return>_ffi.buffer(mv.mv_data mv.mv_size)<block_end><def_stmt>_mvstr mv<block_start>"""Convert a MDB_val cdata to Python bytes."""<line_sep><return>_ffi.buffer(mv.mv_data mv.mv_size)[:]<block_end><def_stmt>preload mv<block_start>_lib.preload(0 mv.mv_data mv.mv_size)<block_end><def_stmt>enable_drop_gil <block_start>"""Deprecated."""<block_end><def_stmt>version subpatch=<false><block_start>""" Return a tuple of integers `(major, minor, patch)` describing the LMDB library version that the binding is linked against. The version of the binding itself is available from ``lmdb.__version__``. `subpatch`: If true, returns a 4 integer tuple consisting of the same plus an extra integer that represents any patches applied by py-lmdb itself (0 representing no patches). """<if_stmt>subpatch<block_start><return>(_lib.MDB_VERSION_MAJOR _lib.MDB_VERSION_MINOR _lib.MDB_VERSION_PATCH 1<if>_have_patched_lmdb<else>0)<block_end><return>(_lib.MDB_VERSION_MAJOR _lib.MDB_VERSION_MINOR _lib.MDB_VERSION_PATCH)<block_end><class_stmt>Environment(object)<block_start>""" Structure for a database environment. An environment may contain multiple databases, all residing in the same shared-memory map and underlying disk file. To write to the environment a :py:class:`Transaction` must be created. One simultaneous write transaction is allowed, however there is no limit on the number of read transactions even when a write transaction exists. This class is aliased to `lmdb.open`. It is a serious error to have open the same LMDB file in the same process at the same time. Failure to heed this may lead to data corruption and interpreter crash. Equivalent to `mdb_env_open() <http://lmdb.tech/doc/group__mdb.html#ga1fe2740e25b1689dc412e7b9faadba1b>`_ `path`: Location of directory (if `subdir=True`) or file prefix to store the database. `map_size`: Maximum size database may grow to; used to size the memory mapping. 
If database grows larger than ``map_size``, an exception will be raised and the user must close and reopen :py:class:`Environment`. On 64-bit there is no penalty for making this huge (say 1TB). Must be <2GB on 32-bit. .. note:: **The default map size is set low to encourage a crash**, so users can figure out a good value before learning about this option too late. `subdir`: If ``True``, `path` refers to a subdirectory to store the data and lock files in, otherwise it refers to a filename prefix. `readonly`: If ``True``, disallow any write operations. Note the lock file is still modified. If specified, the ``write`` flag to :py:meth:`begin` or :py:class:`Transaction` is ignored. `metasync`: If ``False``, flush system buffers to disk only once per transaction, omit the metadata flush. Defer that until the system flushes files to disk, or next commit or :py:meth:`sync`. This optimization maintains database integrity, but a system crash may undo the last committed transaction. I.e. it preserves the ACI (atomicity, consistency, isolation) but not D (durability) database property. `sync`: If ``False``, don't flush system buffers to disk when committing a transaction. This optimization means a system crash can corrupt the database or lose the last transactions if buffers are not yet flushed to disk. The risk is governed by how often the system flushes dirty buffers to disk and how often :py:meth:`sync` is called. However, if the filesystem preserves write order and `writemap=False`, transactions exhibit ACI (atomicity, consistency, isolation) properties and only lose D (durability). I.e. database integrity is maintained, but a system crash may undo the final transactions. Note that `sync=False, writemap=True` leaves the system with no hint for when to write transactions to disk, unless :py:meth:`sync` is called. `map_async=True, writemap=True` may be preferable. `mode`: File creation mode. `create`: If ``False``, do not create the directory `path` if it is missing. `readahead`: If ``False``, LMDB will disable the OS filesystem readahead mechanism, which may improve random read performance when a database is larger than RAM. `writemap`: If ``True``, use a writeable memory map unless `readonly=True`. This is faster and uses fewer mallocs, but loses protection from application bugs like wild pointer writes and other bad updates into the database. Incompatible with nested transactions. Processes with and without `writemap` on the same environment do not cooperate well. `meminit`: If ``False`` LMDB will not zero-initialize buffers prior to writing them to disk. This improves performance but may cause old heap data to be written saved in the unused portion of the buffer. Do not use this option if your application manipulates confidential data (e.g. plaintext passwords) in memory. This option is only meaningful when `writemap=False`; new pages are always zero-initialized when `writemap=True`. `map_async`: When ``writemap=True``, use asynchronous flushes to disk. As with ``sync=False``, a system crash can then corrupt the database or lose the last transactions. Calling :py:meth:`sync` ensures on-disk database integrity until next commit. `max_readers`: Maximum number of simultaneous read transactions. Can only be set by the first process to open an environment, as it affects the size of the lock file and shared memory area. Attempts to simultaneously start more than this many *read* transactions will fail. `max_dbs`: Maximum number of databases available. 
If 0, assume environment will be used as a single database. `max_spare_txns`: Read-only transactions to cache after becoming unused. Caching transactions avoids two allocations, one lock and linear scan of the shared environment per invocation of :py:meth:`begin`, :py:class:`Transaction`, :py:meth:`get`, :py:meth:`gets`, or :py:meth:`cursor`. Should match the process's maximum expected concurrent transactions (e.g. thread count). `lock`: If ``False``, don't do any locking. If concurrent access is anticipated, the caller must manage all concurrency itself. For proper operation the caller must enforce single-writer semantics, and must ensure that no readers are using old transactions while a writer is active. The simplest approach is to use an exclusive lock so that no readers may be active at all when a writer begins. """<def_stmt>__init__ self path map_size=10485760 subdir=<true> readonly=<false> metasync=<true> sync=<true> map_async=<false> mode=O_0755 create=<true> readahead=<true> writemap=<false> meminit=<true> max_readers=126 max_dbs=0 max_spare_txns=1 lock=<true><block_start>self._max_spare_txns=max_spare_txns<line_sep>self._spare_txns=[]<line_sep>envpp=_ffi.new('MDB_env **')<line_sep>rc=_lib.mdb_env_create(envpp)<if_stmt>rc<block_start><raise>_error("mdb_env_create" rc)<block_end>self._env=envpp[0]<line_sep>self._deps=set()<line_sep>self._creating_db_in_readonly=<false><line_sep>self.set_mapsize(map_size)<line_sep>rc=_lib.mdb_env_set_maxreaders(self._env max_readers)<if_stmt>rc<block_start><raise>_error("mdb_env_set_maxreaders" rc)<block_end>rc=_lib.mdb_env_set_maxdbs(self._env max_dbs)<if_stmt>rc<block_start><raise>_error("mdb_env_set_maxdbs" rc)<block_end><if_stmt>create<and>subdir<and><not>readonly<block_start><try_stmt><block_start>os.mkdir(path mode)<block_end><except_stmt>EnvironmentError<as>e<block_start><if_stmt>e.errno<ne>errno.EEXIST<block_start><raise><block_end><block_end><block_end>flags=_lib.MDB_NOTLS<if_stmt><not>subdir<block_start>flags<augor>_lib.MDB_NOSUBDIR<block_end><if_stmt>readonly<block_start>flags<augor>_lib.MDB_RDONLY<block_end>self.readonly=readonly<if_stmt><not>metasync<block_start>flags<augor>_lib.MDB_NOMETASYNC<block_end><if_stmt><not>sync<block_start>flags<augor>_lib.MDB_NOSYNC<block_end><if_stmt>map_async<block_start>flags<augor>_lib.MDB_MAPASYNC<block_end><if_stmt><not>readahead<block_start>flags<augor>_lib.MDB_NORDAHEAD<block_end><if_stmt>writemap<block_start>flags<augor>_lib.MDB_WRITEMAP<block_end><if_stmt><not>meminit<block_start>flags<augor>_lib.MDB_NOMEMINIT<block_end><if_stmt><not>lock<block_start>flags<augor>_lib.MDB_NOLOCK<block_end><if_stmt>isinstance(path UnicodeType)<block_start>path=path.encode(sys.getfilesystemencoding())<block_end>rc=_lib.mdb_env_open(self._env path flags mode&~O_0111)<if_stmt>rc<block_start><raise>_error(path rc)<block_end><with_stmt>self.begin(db=object())<as>txn<block_start>self._db=_Database(env=self txn=txn name=<none> reverse_key=<false> dupsort=<false> create=<true> integerkey=<false> integerdup=<false> dupfixed=<false>)<block_end>self._dbs={<none>:self._db}<block_end><def_stmt>__enter__ self<block_start><return>self<block_end><def_stmt>__exit__ self _1 _2 _3<block_start>self.close()<block_end><def_stmt>__del__ self<block_start>self.close()<block_end>_env=<none><line_sep>_deps=<none><line_sep>_spare_txns=<none><line_sep>_dbs=<none><def_stmt>set_mapsize self map_size<block_start>"""Change the maximum size of the map file. This function will fail if any transactions are active in the current process. 
`map_size`: The new size in bytes. Equivalent to `mdb_env_set_mapsize() <http://lmdb.tech/doc/group__mdb.html#gaa2506ec8dab3d969b0e609cd82e619e5>`_ Warning: There's a data race in the underlying library that may cause catastrophic loss of data if you use this method. You are safe if one of the following are true: * Only one process accessing a particular LMDB file ever calls this method. * You use locking external to this library to ensure that only one process accessing the current LMDB file can be inside this function. """<line_sep>rc=_lib.mdb_env_set_mapsize(self._env map_size)<if_stmt>rc<block_start><raise>_error("mdb_env_set_mapsize" rc)<block_end><block_end><def_stmt>close self<block_start>"""Close the environment, invalidating any open iterators, cursors, and transactions. Repeat calls to :py:meth:`close` have no effect. Equivalent to `mdb_env_close() <http://lmdb.tech/doc/group__mdb.html#ga4366c43ada8874588b6a62fbda2d1e95>`_ """<if_stmt>self._env<block_start><if_stmt>self._deps<block_start><while_stmt>self._deps<block_start>self._deps.pop()._invalidate()<block_end><block_end>self._deps=<none><if_stmt>self._spare_txns<block_start><while_stmt>self._spare_txns<block_start>_lib.mdb_txn_abort(self._spare_txns.pop())<block_end><block_end>self._spare_txns=<none><if_stmt>self._dbs<block_start>self._dbs.clear()<block_end>self._dbs=<none><line_sep>self._db=<none><line_sep>_lib.mdb_env_close(self._env)<line_sep>self._env=_invalid<block_end><block_end><def_stmt>path self<block_start>"""Directory path or file name prefix where this environment is stored. Equivalent to `mdb_env_get_path() <http://lmdb.tech/doc/group__mdb.html#gac699fdd8c4f8013577cb933fb6a757fe>`_ """<line_sep>path=_ffi.new('char **')<line_sep>rc=_lib.mdb_env_get_path(self._env path)<if_stmt>rc<block_start><raise>_error("mdb_env_get_path" rc)<block_end><return>_ffi.string(path[0]).decode(sys.getfilesystemencoding())<block_end><def_stmt>copy self path compact=<false> txn=<none><block_start>"""Make a consistent copy of the environment in the given destination directory. `compact`: If ``True``, perform compaction while copying: omit free pages and sequentially renumber all pages in output. This option consumes more CPU and runs more slowly than the default, but may produce a smaller output database. `txn`: If provided, the backup will be taken from the database with respect to that transaction, otherwise a temporary read-only transaction will be created. Note: this parameter being non-None is not available if the module was built with LMDB_PURE. Note: this parameter may be set only if compact=True. 
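For illustration only, a hot-backup sketch using the arguments described above; the destination path is a placeholder, and (an assumption not stated here) LMDB expects it to be an existing, empty directory when `subdir=True`:

.. code-block:: python

    import os
    import lmdb

    env = lmdb.open('/tmp/source-db')
    os.makedirs('/tmp/backup-db', exist_ok=True)
    # A compacting copy omits free pages and renumbers pages sequentially.
    env.copy('/tmp/backup-db', compact=True)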
Equivalent to `mdb_env_copy2() or mdb_env_copy3() <http://lmdb.tech/doc/group__mdb.html#ga5d51d6130325f7353db0955dbedbc378>`_ """<line_sep>flags=_lib.MDB_CP_COMPACT<if>compact<else>0<if_stmt>txn<and><not>_have_patched_lmdb<block_start><raise>TypeError("Non-patched LMDB doesn't support transaction with env.copy")<block_end><if_stmt>txn<and><not>flags<block_start><raise>TypeError("txn argument only compatible with compact=True")<block_end>encoded=path.encode(sys.getfilesystemencoding())<if_stmt>_have_patched_lmdb<block_start>rc=_lib.mdb_env_copy3(self._env encoded flags txn._txn<if>txn<else>_ffi.NULL)<if_stmt>rc<block_start><raise>_error("mdb_env_copy3" rc)<block_end><block_end><else_stmt><block_start>rc=_lib.mdb_env_copy2(self._env encoded flags)<if_stmt>rc<block_start><raise>_error("mdb_env_copy2" rc)<block_end><block_end><block_end><def_stmt>copyfd self fd compact=<false> txn=<none><block_start>"""Copy a consistent version of the environment to file descriptor `fd`. `compact`: If ``True``, perform compaction while copying: omit free pages and sequentially renumber all pages in output. This option consumes more CPU and runs more slowly than the default, but may produce a smaller output database. `txn`: If provided, the backup will be taken from the database with respect to that transaction, otherwise a temporary read-only transaction will be created. Note: this parameter being non-None is not available if the module was built with LMDB_PURE. Equivalent to `mdb_env_copyfd2() or mdb_env_copyfd3 <http://lmdb.tech/doc/group__mdb.html#ga5d51d6130325f7353db0955dbedbc378>`_ """<if_stmt>txn<and><not>_have_patched_lmdb<block_start><raise>TypeError("Non-patched LMDB doesn't support transaction with env.copy")<block_end><if_stmt>is_win32# Convert C library handle to kernel handle. <block_start>fd=msvcrt.get_osfhandle(fd)<block_end>flags=_lib.MDB_CP_COMPACT<if>compact<else>0<if_stmt>txn<and><not>flags<block_start><raise>TypeError("txn argument only compatible with compact=True")<block_end><if_stmt>_have_patched_lmdb<block_start>rc=_lib.mdb_env_copyfd3(self._env fd flags txn._txn<if>txn<else>_ffi.NULL)<if_stmt>rc<block_start><raise>_error("mdb_env_copyfd3" rc)<block_end><block_end><else_stmt><block_start>rc=_lib.mdb_env_copyfd2(self._env fd flags)<if_stmt>rc<block_start><raise>_error("mdb_env_copyfd2" rc)<block_end><block_end><block_end><def_stmt>sync self force=<false><block_start>"""Flush the data buffers to disk. Equivalent to `mdb_env_sync() <http://lmdb.tech/doc/group__mdb.html#ga85e61f05aa68b520cc6c3b981dba5037>`_ Data is always written to disk when :py:meth:`Transaction.commit` is called, but the operating system may keep it buffered. MDB always flushes the OS buffers upon commit as well, unless the environment was opened with `sync=False` or `metasync=False`. `force`: If ``True``, force a synchronous flush. Otherwise if the environment was opened with `sync=False` the flushes will be omitted, and with `map_async=True` they will be asynchronous. """<line_sep>rc=_lib.mdb_env_sync(self._env force)<if_stmt>rc<block_start><raise>_error("mdb_env_sync" rc)<block_end><block_end><def_stmt>_convert_stat self st<block_start>"""Convert a MDB_stat to a dict. 
"""<line_sep><return>{"psize":st.ms_psize "depth":st.ms_depth "branch_pages":st.ms_branch_pages "leaf_pages":st.ms_leaf_pages "overflow_pages":st.ms_overflow_pages "entries":st.ms_entries}<block_end><def_stmt>stat self<block_start>"""stat() Return some environment statistics for the default database as a dict: +--------------------+---------------------------------------+ | ``psize`` | Size of a database page in bytes. | +--------------------+---------------------------------------+ | ``depth`` | Height of the B-tree. | +--------------------+---------------------------------------+ | ``branch_pages`` | Number of internal (non-leaf) pages. | +--------------------+---------------------------------------+ | ``leaf_pages`` | Number of leaf pages. | +--------------------+---------------------------------------+ | ``overflow_pages`` | Number of overflow pages. | +--------------------+---------------------------------------+ | ``entries`` | Number of data items. | +--------------------+---------------------------------------+ Equivalent to `mdb_env_stat() <http://lmdb.tech/doc/group__mdb.html#gaf881dca452050efbd434cd16e4bae255>`_ """<line_sep>st=_ffi.new('MDB_stat *')<line_sep>rc=_lib.mdb_env_stat(self._env st)<if_stmt>rc<block_start><raise>_error("mdb_env_stat" rc)<block_end><return>self._convert_stat(st)<block_end><def_stmt>info self<block_start>"""Return some nice environment information as a dict: +--------------------+---------------------------------------------+ | ``map_addr`` | Address of database map in RAM. | +--------------------+---------------------------------------------+ | ``map_size`` | Size of database map in RAM. | +--------------------+---------------------------------------------+ | ``last_pgno`` | ID of last used page. | +--------------------+---------------------------------------------+ | ``last_txnid`` | ID of last committed transaction. | +--------------------+---------------------------------------------+ | ``max_readers`` | Number of reader slots allocated in the | | | lock file. Equivalent to the value of | | | `maxreaders=` specified by the first | | | process opening the Environment. | +--------------------+---------------------------------------------+ | ``num_readers`` | Maximum number of reader slots in | | | simultaneous use since the lock file was | | | initialized. 
| +--------------------+---------------------------------------------+ Equivalent to `mdb_env_info() <http://lmdb.tech/doc/group__mdb.html#ga18769362c7e7d6cf91889a028a5c5947>`_ """<line_sep>info=_ffi.new('MDB_envinfo *')<line_sep>rc=_lib.mdb_env_info(self._env info)<if_stmt>rc<block_start><raise>_error("mdb_env_info" rc)<block_end><return>{"map_addr":int(_ffi.cast('long' info.me_mapaddr)) "map_size":info.me_mapsize "last_pgno":info.me_last_pgno "last_txnid":info.me_last_txnid "max_readers":info.me_maxreaders "num_readers":info.me_numreaders}<block_end><def_stmt>flags self<block_start>"""Return a dict describing Environment constructor flags used to instantiate this environment."""<line_sep>flags_=_ffi.new('unsigned int[]' 1)<line_sep>rc=_lib.mdb_env_get_flags(self._env flags_)<if_stmt>rc<block_start><raise>_error("mdb_env_get_flags" rc)<block_end>flags=flags_[0]<line_sep><return>{'subdir':<not>(flags&_lib.MDB_NOSUBDIR) 'readonly':bool(flags&_lib.MDB_RDONLY) 'metasync':<not>(flags&_lib.MDB_NOMETASYNC) 'sync':<not>(flags&_lib.MDB_NOSYNC) 'map_async':bool(flags&_lib.MDB_MAPASYNC) 'readahead':<not>(flags&_lib.MDB_NORDAHEAD) 'writemap':bool(flags&_lib.MDB_WRITEMAP) 'meminit':<not>(flags&_lib.MDB_NOMEMINIT) 'lock':<not>(flags&_lib.MDB_NOLOCK) }<block_end><def_stmt>max_key_size self<block_start>"""Return the maximum size in bytes of a record's key part. This matches the ``MDB_MAXKEYSIZE`` constant set at compile time."""<line_sep><return>_lib.mdb_env_get_maxkeysize(self._env)<block_end><def_stmt>max_readers self<block_start>"""Return the maximum number of readers specified during open of the environment by the first process. This is the same as `max_readers=` specified to the constructor if this process was the first to open the environment."""<line_sep>readers_=_ffi.new('unsigned int[]' 1)<line_sep>rc=_lib.mdb_env_get_maxreaders(self._env readers_)<if_stmt>rc<block_start><raise>_error("mdb_env_get_maxreaders" rc)<block_end><return>readers_[0]<block_end><def_stmt>readers self<block_start>"""Return a multi line Unicode string describing the current state of the reader lock table."""<line_sep>_callbacks.msg_func=[]<try_stmt><block_start>rc=_lib.mdb_reader_list(self._env _msg_func _ffi.NULL)<if_stmt>rc<block_start><raise>_error("mdb_reader_list" rc)<block_end><return>UnicodeType().join(_callbacks.msg_func)<block_end><finally_stmt><block_start><del_stmt>_callbacks.msg_func<block_end><block_end><def_stmt>reader_check self<block_start>"""Search the reader lock table for stale entries, for example due to a crashed process. Returns the number of stale entries that were cleared. """<line_sep>reaped=_ffi.new('int[]' 1)<line_sep>rc=_lib.mdb_reader_check(self._env reaped)<if_stmt>rc<block_start><raise>_error('mdb_reader_check' rc)<block_end><return>reaped[0]<block_end><def_stmt>open_db self key=<none> txn=<none> reverse_key=<false> dupsort=<false> create=<true> integerkey=<false> integerdup=<false> dupfixed=<false><block_start>""" Open a database, returning an instance of :py:class:`_Database`. Repeat :py:meth:`Environment.open_db` calls for the same name will return the same handle. As a special case, the main database is always open. Equivalent to `mdb_dbi_open() <http://lmdb.tech/doc/group__mdb.html#gac08cad5b096925642ca359a6d6f0562a>`_ Named databases are implemented by *storing a special descriptor in the main database*. All databases in an environment *share the same file*. 
Because the descriptor is present in the main database, attempts to create a named database will fail if a key matching the database's name already exists. Furthermore *the key is visible to lookups and enumerations*. If your main database keyspace conflicts with the names you use for named databases, then move the contents of your main database to another named database. :: >>> env = lmdb.open('/tmp/test', max_dbs=2) >>> with env.begin(write=True) as txn ... txn.put('somename', 'somedata') >>> # Error: database cannot share name of existing key! >>> subdb = env.open_db('somename') A newly created database will not exist if the transaction that created it aborted, nor if another process deleted it. The handle resides in the shared environment, it is not owned by the current transaction or process. Only one thread should call this function; it is not mutex-protected in a read-only transaction. The `dupsort`, `integerkey`, `integerdup`, and `dupfixed` parameters are ignored if the database already exists. The state of those settings are persistent and immutable per database. See :py:meth:`_Database.flags` to view the state of those options for an opened database. A consequence of the immutability of these flags is that the default non-named database will never have these flags set. Preexisting transactions, other than the current transaction and any parents, must not use the new handle, nor must their children. `key`: Bytestring database name. If ``None``, indicates the main database should be returned, otherwise indicates a named database should be created inside the main database. In other words, *a key representing the database will be visible in the main database, and the database name cannot conflict with any existing key.* `txn`: Transaction used to create the database if it does not exist. If unspecified, a temporarily write transaction is used. Do not call :py:meth:`open_db` from inside an existing transaction without supplying it here. Note the passed transaction must have `write=True`. `reverse_key`: If ``True``, keys are compared from right to left (e.g. DNS names). `dupsort`: Duplicate keys may be used in the database. (Or, from another perspective, keys may have multiple data items, stored in sorted order.) By default keys must be unique and may have only a single data item. `create`: If ``True``, create the database if it doesn't exist, otherwise raise an exception. `integerkey`: If ``True``, indicates keys in the database are C unsigned or ``size_t`` integers encoded in native byte order. Keys must all be either unsigned or ``size_t``, they cannot be mixed in a single database. `integerdup`: If ``True``, values in the database are C unsigned or ``size_t`` integers encode din native byte order. Implies `dupsort` and `dupfixed` are ``True``. `dupfixed`: If ``True``, values for each key in database are of fixed size, allowing each additional duplicate value for a key to be stored without a header indicating its size. Implies `dupsort` is ``True``. 
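Putting the flags above together, a short usage sketch (path and database name are placeholders) of a named `dupsort=True` database:

.. code-block:: python

    import lmdb

    env = lmdb.open('/tmp/named-db-demo', max_dbs=4)
    # The name is stored as a key in the main database, so it must be a
    # bytestring and must not collide with an existing key there.
    scores = env.open_db(b'scores', dupsort=True)

    with env.begin(write=True, db=scores) as txn:
        txn.put(b'alice', b'10')
        txn.put(b'alice', b'20')            # second value for the same key
        with txn.cursor() as cur:
            if cur.set_key(b'alice'):
                print(list(cur.iternext_dup()))   # [b'10', b'20']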
"""<if_stmt>isinstance(key UnicodeType)<block_start><raise>TypeError('key must be bytes')<block_end><if_stmt>key<is><none><and>(reverse_key<or>dupsort<or>integerkey<or>integerdup<or>dupfixed)<block_start><raise>ValueError('May not set flags on the main database')<block_end>db=self._dbs.get(key)<if_stmt>db<block_start><return>db<block_end><if_stmt>integerdup<block_start>dupfixed=<true><block_end><if_stmt>dupfixed<block_start>dupsort=<true><block_end><if_stmt>txn<block_start>db=_Database(self txn key reverse_key dupsort create integerkey integerdup dupfixed)<block_end><else_stmt><block_start><try_stmt><block_start>self._creating_db_in_readonly=<true><with_stmt>self.begin(write=<not>self.readonly)<as>txn<block_start>db=_Database(self txn key reverse_key dupsort create integerkey integerdup dupfixed)<block_end><block_end><finally_stmt><block_start>self._creating_db_in_readonly=<false><block_end><block_end>self._dbs[key]=db<line_sep><return>db<block_end><def_stmt>begin self db=<none> parent=<none> write=<false> buffers=<false><block_start>"""Shortcut for :py:class:`lmdb.Transaction`"""<line_sep><return>Transaction(self db parent write buffers)<block_end><block_end><class_stmt>_Database(object)<block_start>""" Internal database handle. This class is opaque, save a single method. Should not be constructed directly. Use :py:meth:`Environment.open_db` instead. """<def_stmt>__init__ self env txn name reverse_key dupsort create integerkey integerdup dupfixed<block_start>env._deps.add(self)<line_sep>self._deps=set()<line_sep>self._name=name<line_sep>flags=0<if_stmt>reverse_key<block_start>flags<augor>_lib.MDB_REVERSEKEY<block_end><if_stmt>dupsort<block_start>flags<augor>_lib.MDB_DUPSORT<block_end><if_stmt>create<block_start>flags<augor>_lib.MDB_CREATE<block_end><if_stmt>integerkey<block_start>flags<augor>_lib.MDB_INTEGERKEY<block_end><if_stmt>integerdup<block_start>flags<augor>_lib.MDB_INTEGERDUP<block_end><if_stmt>dupfixed<block_start>flags<augor>_lib.MDB_DUPFIXED<block_end>dbipp=_ffi.new('MDB_dbi *')<line_sep>self._dbi=<none><line_sep>rc=_lib.mdb_dbi_open(txn._txn name<or>_ffi.NULL flags dbipp)<if_stmt>rc<block_start><raise>_error("mdb_dbi_open" rc)<block_end>self._dbi=dbipp[0]<line_sep>self._load_flags(txn)<block_end><def_stmt>_load_flags self txn<block_start>"""Load MDB's notion of the database flags."""<line_sep>flags_=_ffi.new('unsigned int[]' 1)<line_sep>rc=_lib.mdb_dbi_flags(txn._txn self._dbi flags_)<if_stmt>rc<block_start><raise>_error("mdb_dbi_flags" rc)<block_end>self._flags=flags_[0]<block_end><def_stmt>flags self *args<block_start>"""Return the database's associated flags as a dict of _Database constructor kwargs."""<if_stmt>len(args)<g>1<block_start><raise>TypeError('flags takes 0 or 1 arguments')<block_end><return>{'reverse_key':bool(self._flags&_lib.MDB_REVERSEKEY) 'dupsort':bool(self._flags&_lib.MDB_DUPSORT) 'integerkey':bool(self._flags&_lib.MDB_INTEGERKEY) 'integerdup':bool(self._flags&_lib.MDB_INTEGERDUP) 'dupfixed':bool(self._flags&_lib.MDB_DUPFIXED) }<block_end><def_stmt>_invalidate self<block_start>self._dbi=_invalid<block_end><block_end>open=Environment<class_stmt>Transaction(object)<block_start>""" A transaction object. All operations require a transaction handle, transactions may be read-only or read-write. Write transactions may not span threads. Transaction objects implement the context manager protocol, so that reliable release of the transaction happens even in the face of unhandled exceptions: .. 
code-block:: python # Transaction aborts correctly: with env.begin(write=True) as txn: crash() # Transaction commits automatically: with env.begin(write=True) as txn: txn.put('a', 'b') Equivalent to `mdb_txn_begin() <http://lmdb.tech/doc/group__mdb.html#gad7ea55da06b77513609efebd44b26920>`_ `env`: Environment the transaction should be on. `db`: Default named database to operate on. If unspecified, defaults to the environment's main database. Can be overridden on a per-call basis below. `parent`: ``None``, or a parent transaction (see lmdb.h). `write`: Transactions are read-only by default. To modify the database, you must pass `write=True`. This flag is ignored if :py:class:`Environment` was opened with ``readonly=True``. `buffers`: If ``True``, indicates :py:func:`buffer` objects should be yielded instead of bytestrings. This setting applies to the :py:class:`Transaction` instance itself and any :py:class:`Cursors <Cursor>` created within the transaction. This feature significantly improves performance, since MDB has a zero-copy design, but it requires care when manipulating the returned buffer objects. The benefit of this facility is diminished when using small keys and values. """<line_sep># If constructor fails, then __del__ will attempt to access these # attributes. _env=_invalid<line_sep>_txn=_invalid<line_sep>_parent=<none><line_sep>_write=<false><line_sep># Mutations occurred since transaction start. Required to know when Cursor # key/value must be refreshed. _mutations=0<def_stmt>__init__ self env db=<none> parent=<none> write=<false> buffers=<false><block_start>env._deps.add(self)<line_sep>self.env=env# hold ref self._db=db<or>env._db<line_sep>self._env=env._env<line_sep>self._key=_ffi.new('MDB_val *')<line_sep>self._val=_ffi.new('MDB_val *')<line_sep>self._to_py=_mvbuf<if>buffers<else>_mvstr<line_sep>self._deps=set()<if_stmt>parent<block_start>self._parent=parent<line_sep>parent_txn=parent._txn<line_sep>parent._deps.add(self)<block_end><else_stmt><block_start>parent_txn=_ffi.NULL<block_end><if_stmt>write<block_start><if_stmt>env.readonly<block_start>msg='Cannot start write transaction with read-only env'<line_sep><raise>_error(msg _lib.EACCES)<block_end>txnpp=_ffi.new('MDB_txn **')<line_sep>rc=_lib.mdb_txn_begin(self._env parent_txn 0 txnpp)<if_stmt>rc<block_start><raise>_error("mdb_txn_begin" rc)<block_end>self._txn=txnpp[0]<line_sep>self._write=<true><block_end><else_stmt><block_start><try_stmt># Exception catch in order to avoid racy 'if txns:' test <block_start><if_stmt>env._creating_db_in_readonly# Don't use spare txns for creating a DB when read-only <block_start><raise>IndexError<block_end>self._txn=env._spare_txns.pop()<line_sep>env._max_spare_txns<augadd>1<line_sep>rc=_lib.mdb_txn_renew(self._txn)<if_stmt>rc<block_start><while_stmt>self._deps<block_start>self._deps.pop()._invalidate()<block_end>_lib.mdb_txn_abort(self._txn)<line_sep>self._txn=_invalid<line_sep>self._invalidate()<line_sep><raise>_error("mdb_txn_renew" rc)<block_end><block_end><except_stmt>IndexError<block_start>txnpp=_ffi.new('MDB_txn **')<line_sep>flags=_lib.MDB_RDONLY<line_sep>rc=_lib.mdb_txn_begin(self._env parent_txn flags txnpp)<if_stmt>rc<block_start><raise>_error("mdb_txn_begin" rc)<block_end>self._txn=txnpp[0]<block_end><block_end><block_end><def_stmt>_invalidate self<block_start><if_stmt>self._txn<block_start>self.abort()<block_end>self.env._deps.discard(self)<line_sep>self._parent=<none><line_sep>self._env=_invalid<block_end><def_stmt>__del__ 
self<block_start>self.abort()<block_end><def_stmt>__enter__ self<block_start><return>self<block_end><def_stmt>__exit__ self exc_type exc_value traceback<block_start><if_stmt>exc_type<block_start>self.abort()<block_end><else_stmt><block_start>self.commit()<block_end><block_end><def_stmt>id self<block_start>"""id() Return the transaction's ID. This returns the identifier associated with this transaction. For a read-only transaction, this corresponds to the snapshot being read; concurrent readers will frequently have the same transaction ID. """<line_sep><return>_lib.mdb_txn_id(self._txn)<block_end><def_stmt>stat self db<block_start>"""stat(db) Return statistics like :py:meth:`Environment.stat`, except for a single DBI. `db` must be a database handle returned by :py:meth:`open_db`. """<line_sep>st=_ffi.new('MDB_stat *')<line_sep>rc=_lib.mdb_stat(self._txn db._dbi st)<if_stmt>rc<block_start><raise>_error('mdb_stat' rc)<block_end><return>self.env._convert_stat(st)<block_end><def_stmt>drop self db delete=<true><block_start>"""Delete all keys in a named database and optionally delete the named database itself. Deleting the named database causes it to become unavailable, and invalidates existing cursors. Equivalent to `mdb_drop() <http://lmdb.tech/doc/group__mdb.html#gab966fab3840fc54a6571dfb32b00f2db>`_ """<while_stmt>db._deps<block_start>db._deps.pop()._invalidate()<block_end>rc=_lib.mdb_drop(self._txn db._dbi delete)<line_sep>self._mutations<augadd>1<if_stmt>rc<block_start><raise>_error("mdb_drop" rc)<block_end><if_stmt>db._name<in>self.env._dbs<block_start><del_stmt>self.env._dbs[db._name]<block_end><block_end><def_stmt>_cache_spare self# In order to avoid taking and maintaining a lock, a race is allowed # below which may result in more spare txns than desired. It seems # unlikely the race could ever result in a large amount of spare txns, # and in any case a correctly configured program should not be opening # more read-only transactions than there are configured spares. <block_start><if_stmt>self.env._max_spare_txns<g>0<block_start>_lib.mdb_txn_reset(self._txn)<line_sep>self.env._spare_txns.append(self._txn)<line_sep>self.env._max_spare_txns<augsub>1<line_sep>self._txn=_invalid<line_sep>self._invalidate()<line_sep><return><true><block_end><return><false><block_end><def_stmt>commit self<block_start>"""Commit the pending transaction. Equivalent to `mdb_txn_commit() <http://lmdb.tech/doc/group__mdb.html#ga846fbd6f46105617ac9f4d76476f6597>`_ """<while_stmt>self._deps<block_start>self._deps.pop()._invalidate()<block_end><if_stmt>self._write<or><not>self._cache_spare()<block_start>rc=_lib.mdb_txn_commit(self._txn)<line_sep>self._txn=_invalid<if_stmt>rc<block_start><raise>_error("mdb_txn_commit" rc)<block_end>self._invalidate()<block_end><block_end><def_stmt>abort self<block_start>"""Abort the pending transaction. Repeat calls to :py:meth:`abort` have no effect after a previously successful :py:meth:`commit` or :py:meth:`abort`, or after the associated :py:class:`Environment` has been closed. 
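As a sketch of the explicit pattern these semantics allow (the context manager form shown earlier is usually preferable), assuming an already-open `env`:

.. code-block:: python

    txn = env.begin(write=True)
    try:
        txn.put(b'key', b'value')
    except Exception:
        txn.abort()        # discard the pending changes
        raise
    else:
        txn.commit()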
Equivalent to `mdb_txn_abort() <http://lmdb.tech/doc/group__mdb.html#ga73a5938ae4c3239ee11efa07eb22b882>`_ """<if_stmt>self._txn<block_start><while_stmt>self._deps<block_start>self._deps.pop()._invalidate()<block_end><if_stmt>self._write<or><not>self._cache_spare()<block_start>rc=_lib.mdb_txn_abort(self._txn)<line_sep>self._txn=_invalid<if_stmt>rc<block_start><raise>_error("mdb_txn_abort" rc)<block_end><block_end>self._invalidate()<block_end><block_end><def_stmt>get self key default=<none> db=<none><block_start>"""Fetch the first value matching `key`, returning `default` if `key` does not exist. A cursor must be used to fetch all values for a key in a `dupsort=True` database. Equivalent to `mdb_get() <http://lmdb.tech/doc/group__mdb.html#ga8bf10cd91d3f3a83a34d04ce6b07992d>`_ """<line_sep>rc=_lib.pymdb_get(self._txn (db<or>self._db)._dbi key len(key) self._val)<if_stmt>rc<block_start><if_stmt>rc<eq>_lib.MDB_NOTFOUND<block_start><return>default<block_end><raise>_error("mdb_cursor_get" rc)<block_end>preload(self._val)<line_sep><return>self._to_py(self._val)<block_end><def_stmt>put self key value dupdata=<true> overwrite=<true> append=<false> db=<none><block_start>"""Store a record, returning ``True`` if it was written, or ``False`` to indicate the key was already present and `overwrite=False`. On success, the cursor is positioned on the new record. Equivalent to `mdb_put() <http://lmdb.tech/doc/group__mdb.html#ga4fa8573d9236d54687c61827ebf8cac0>`_ `key`: Bytestring key to store. `value`: Bytestring value to store. `dupdata`: If ``False`` and database was opened with `dupsort=True`, will return ``False`` if the key already has that value. In other words, this only affects the return value. `overwrite`: If ``False``, do not overwrite any existing matching key. If False and writing to a dupsort=True database, this will not add a value to the key and this function will return ``False``. `append`: If ``True``, append the pair to the end of the database without comparing its order first. Appending a key that is not greater than the highest existing key will fail and return ``False``. `db`: Named database to operate on. If unspecified, defaults to the database given to the :py:class:`Transaction` constructor. """<line_sep>flags=0<if_stmt><not>dupdata<block_start>flags<augor>_lib.MDB_NODUPDATA<block_end><if_stmt><not>overwrite<block_start>flags<augor>_lib.MDB_NOOVERWRITE<block_end><if_stmt>append<block_start>flags<augor>_lib.MDB_APPEND<block_end>rc=_lib.pymdb_put(self._txn (db<or>self._db)._dbi key len(key) value len(value) flags)<line_sep>self._mutations<augadd>1<if_stmt>rc<block_start><if_stmt>rc<eq>_lib.MDB_KEYEXIST<block_start><return><false><block_end><raise>_error("mdb_put" rc)<block_end><return><true><block_end><def_stmt>replace self key value db=<none><block_start>"""Use a temporary cursor to invoke :py:meth:`Cursor.replace`. `db`: Named database to operate on. If unspecified, defaults to the database given to the :py:class:`Transaction` constructor. """<with_stmt>Cursor(db<or>self._db self)<as>curs<block_start><return>curs.replace(key value)<block_end><block_end><def_stmt>pop self key db=<none><block_start>"""Use a temporary cursor to invoke :py:meth:`Cursor.pop`. `db`: Named database to operate on. If unspecified, defaults to the database given to the :py:class:`Transaction` constructor. """<with_stmt>Cursor(db<or>self._db self)<as>curs<block_start><return>curs.pop(key)<block_end><block_end><def_stmt>delete self key value=EMPTY_BYTES db=<none><block_start>"""Delete a key from the database. 
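A small sketch of the plain and `dupsort` forms, assuming an already-open `env` and a named database handle `dup_db` opened with `dupsort=True`:

.. code-block:: python

    with env.begin(write=True) as txn:
        txn.put(b'colour', b'red')
        assert txn.delete(b'colour') is True     # key existed, now removed
        assert txn.delete(b'colour') is False    # nothing left to delete

    with env.begin(write=True, db=dup_db) as txn:
        txn.put(b'k', b'1')
        txn.put(b'k', b'2')
        txn.delete(b'k', b'1')                   # remove only the (k, 1) pair
        txn.delete(b'k')                         # remove all remaining values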
Equivalent to `mdb_del() <http://lmdb.tech/doc/group__mdb.html#gab8182f9360ea69ac0afd4a4eaab1ddb0>`_ `key`: The key to delete. value: If the database was opened with dupsort=True and value is not the empty bytestring, then delete elements matching only this `(key, value)` pair, otherwise all values for key are deleted. Returns True if at least one key was deleted. """<if_stmt>value<is><none># for bug-compatibility with cpython impl <block_start>value=EMPTY_BYTES<block_end>rc=_lib.pymdb_del(self._txn (db<or>self._db)._dbi key len(key) value len(value))<line_sep>self._mutations<augadd>1<if_stmt>rc<block_start><if_stmt>rc<eq>_lib.MDB_NOTFOUND<block_start><return><false><block_end><raise>_error("mdb_del" rc)<block_end><return><true><block_end><def_stmt>cursor self db=<none><block_start>"""Shortcut for ``lmdb.Cursor(db, self)``"""<line_sep><return>Cursor(db<or>self._db self)<block_end><block_end><class_stmt>Cursor(object)<block_start>""" Structure for navigating a database. Equivalent to `mdb_cursor_open() <http://lmdb.tech/doc/group__mdb.html#ga9ff5d7bd42557fd5ee235dc1d62613aa>`_ `db`: :py:class:`_Database` to navigate. `txn`: :py:class:`Transaction` to navigate. As a convenience, :py:meth:`Transaction.cursor` can be used to quickly return a cursor: :: >>> env = lmdb.open('/tmp/foo') >>> child_db = env.open_db('child_db') >>> with env.begin() as txn: ... cursor = txn.cursor() # Cursor on main database. ... cursor2 = txn.cursor(child_db) # Cursor on child database. Cursors start in an unpositioned state. If :py:meth:`iternext` or :py:meth:`iterprev` are used in this state, iteration proceeds from the start or end respectively. Iterators directly position using the cursor, meaning strange behavior results when multiple iterators exist on the same cursor. .. note:: From the perspective of the Python binding, cursors return to an 'unpositioned' state once any scanning or seeking method (e.g. :py:meth:`next`, :py:meth:`prev_nodup`, :py:meth:`set_range`) returns ``False`` or raises an exception. This is primarily to ensure safe, consistent semantics in the face of any error condition. When the Cursor returns to an unpositioned state, its :py:meth:`key` and :py:meth:`value` return empty strings to indicate there is no active position, although internally the LMDB cursor may still have a valid position. This may lead to slightly surprising behaviour when iterating the values for a `dupsort=True` database's keys, since methods such as :py:meth:`iternext_dup` will cause Cursor to appear unpositioned, despite it returning ``False`` only to indicate there are no more values for the current key. In that case, simply calling :py:meth:`next` would cause iteration to resume at the next available key. This behaviour may change in future. Iterator methods such as :py:meth:`iternext` and :py:meth:`iterprev` accept `keys` and `values` arguments. If both are ``True``, then the value of :py:meth:`item` is yielded on each iteration. If only `keys` is ``True``, :py:meth:`key` is yielded, otherwise only :py:meth:`value` is yielded. Prior to iteration, a cursor can be positioned anywhere in the database: :: >>> with env.begin() as txn: ... cursor = txn.cursor() ... if not cursor.set_range('5'): # Position at first key >= '5'. ... print('Not found!') ... else: ... for key, value in cursor: # Iterate from first key >= '5'. ... print((key, value)) Iteration is not required to navigate, and sometimes results in ugly or inefficient code. 
In cases where the iteration order is not obvious, or is related to the data being read, use of :py:meth:`set_key`, :py:meth:`set_range`, :py:meth:`key`, :py:meth:`value`, and :py:meth:`item` may be preferable: :: >>> # Record the path from a child to the root of a tree. >>> path = ['child14123'] >>> while path[-1] != 'root': ... assert cursor.set_key(path[-1]), \\ ... 'Tree is broken! Path: %s' % (path,) ... path.append(cursor.value()) """<def_stmt>__init__ self db txn<block_start>db._deps.add(self)<line_sep>txn._deps.add(self)<line_sep>self.db=db# hold ref self.txn=txn# hold ref self._dbi=db._dbi<line_sep>self._txn=txn._txn<line_sep>self._key=_ffi.new('MDB_val *')<line_sep>self._val=_ffi.new('MDB_val *')<line_sep>self._valid=<false><line_sep>self._to_py=txn._to_py<line_sep>curpp=_ffi.new('MDB_cursor **')<line_sep>self._cur=<none><line_sep>rc=_lib.mdb_cursor_open(self._txn self._dbi curpp)<if_stmt>rc<block_start><raise>_error("mdb_cursor_open" rc)<block_end>self._cur=curpp[0]<line_sep># If Transaction.mutations!=last_mutation, must MDB_GET_CURRENT to # refresh `key' and `val'. self._last_mutation=txn._mutations<block_end><def_stmt>_invalidate self<block_start><if_stmt>self._cur<block_start>_lib.mdb_cursor_close(self._cur)<line_sep>self.db._deps.discard(self)<line_sep>self.txn._deps.discard(self)<line_sep>self._cur=_invalid<line_sep>self._dbi=_invalid<line_sep>self._txn=_invalid<block_end><block_end><def_stmt>__del__ self<block_start>self._invalidate()<block_end><def_stmt>close self<block_start>"""Close the cursor, freeing its associated resources."""<line_sep>self._invalidate()<block_end><def_stmt>__enter__ self<block_start><return>self<block_end><def_stmt>__exit__ self _1 _2 _3<block_start>self._invalidate()<block_end><def_stmt>key self<block_start>"""Return the current key."""<line_sep># Must refresh `key` and `val` following mutation. <if_stmt>self._last_mutation<ne>self.txn._mutations<block_start>self._cursor_get(_lib.MDB_GET_CURRENT)<block_end><return>self._to_py(self._key)<block_end><def_stmt>value self<block_start>"""Return the current value."""<line_sep># Must refresh `key` and `val` following mutation. <if_stmt>self._last_mutation<ne>self.txn._mutations<block_start>self._cursor_get(_lib.MDB_GET_CURRENT)<block_end>preload(self._val)<line_sep><return>self._to_py(self._val)<block_end><def_stmt>item self<block_start>"""Return the current `(key, value)` pair."""<line_sep># Must refresh `key` and `val` following mutation. <if_stmt>self._last_mutation<ne>self.txn._mutations<block_start>self._cursor_get(_lib.MDB_GET_CURRENT)<block_end>preload(self._val)<line_sep><return>self._to_py(self._key) self._to_py(self._val)<block_end><def_stmt>_iter self op keys values<block_start><if_stmt><not>values<block_start>get=self.key<block_end><elif_stmt><not>keys<block_start>get=self.value<block_end><else_stmt><block_start>get=self.item<block_end>cur=self._cur<line_sep>key=self._key<line_sep>val=self._val<line_sep>rc=0<while_stmt>self._valid<block_start><yield>get()<line_sep>rc=_lib.mdb_cursor_get(cur key val op)<line_sep>self._valid=<not>rc<block_end><if_stmt>rc<block_start>self._key.mv_size=0<line_sep>self._val.mv_size=0<if_stmt>rc<ne>_lib.MDB_NOTFOUND<block_start><raise>_error("mdb_cursor_get" rc)<block_end><block_end><block_end><def_stmt>iternext self keys=<true> values=<true><block_start>"""Return a forward iterator that yields the current element before calling :py:meth:`next`, repeating until the end of the database is reached. 
As a convenience, :py:class:`Cursor` implements the iterator protocol by automatically returning a forward iterator when invoked: :: >>> # Equivalent: >>> it = iter(cursor) >>> it = cursor.iternext(keys=True, values=True) If the cursor is not yet positioned, it is moved to the first key in the database, otherwise iteration proceeds from the current position. """<if_stmt><not>self._valid<block_start>self.first()<block_end><return>self._iter(_lib.MDB_NEXT keys values)<block_end>__iter__=iternext<def_stmt>iternext_dup self keys=<false> values=<true><block_start>"""Return a forward iterator that yields the current value ("duplicate") of the current key before calling :py:meth:`next_dup`, repeating until the last value of the current key is reached. Only meaningful for databases opened with `dupsort=True`. .. code-block:: python if not cursor.set_key("foo"): print("No values found for 'foo'") else: for idx, data in enumerate(cursor.iternext_dup()): print("%d'th value for 'foo': %s" % (idx, data)) """<line_sep><return>self._iter(_lib.MDB_NEXT_DUP keys values)<block_end><def_stmt>iternext_nodup self keys=<true> values=<false><block_start>"""Return a forward iterator that yields the current value ("duplicate") of the current key before calling :py:meth:`next_nodup`, repeating until the end of the database is reached. Only meaningful for databases opened with `dupsort=True`. If the cursor is not yet positioned, it is moved to the first key in the database, otherwise iteration proceeds from the current position. .. code-block:: python for key in cursor.iternext_nodup(): print("Key '%s' has %d values" % (key, cursor.count())) """<if_stmt><not>self._valid<block_start>self.first()<block_end><return>self._iter(_lib.MDB_NEXT_NODUP keys values)<block_end><def_stmt>iterprev self keys=<true> values=<true><block_start>"""Return a reverse iterator that yields the current element before calling :py:meth:`prev`, until the start of the database is reached. If the cursor is not yet positioned, it is moved to the last key in the database, otherwise iteration proceeds from the current position. :: >>> with env.begin() as txn: ... for i, (key, value) in enumerate(txn.cursor().iterprev()): ... print('%dth last item is (%r, %r)' % (1+i, key, value)) """<if_stmt><not>self._valid<block_start>self.last()<block_end><return>self._iter(_lib.MDB_PREV keys values)<block_end><def_stmt>iterprev_dup self keys=<false> values=<true><block_start>"""Return a reverse iterator that yields the current value ("duplicate") of the current key before calling :py:meth:`prev_dup`, repeating until the first value of the current key is reached. Only meaningful for databases opened with `dupsort=True`. """<line_sep><return>self._iter(_lib.MDB_PREV_DUP keys values)<block_end><def_stmt>iterprev_nodup self keys=<true> values=<false><block_start>"""Return a reverse iterator that yields the current value ("duplicate") of the current key before calling :py:meth:`prev_nodup`, repeating until the start of the database is reached. If the cursor is not yet positioned, it is moved to the last key in the database, otherwise iteration proceeds from the current position. Only meaningful for databases opened with `dupsort=True`. 
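A concrete sketch of the iterator variants above, assuming an already-open `env` and a `dupsort=True` database handle `db`:

.. code-block:: python

    with env.begin(db=db) as txn:
        with txn.cursor() as cur:
            # One line per distinct key, with its number of duplicates.
            for key in cur.iternext_nodup():
                print(key, cur.count())

            # Walk every (key, value) pair backwards; the cursor repositions
            # itself to the last element automatically.
            for key, value in cur.iterprev():
                print(key, value)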
"""<if_stmt><not>self._valid<block_start>self.last()<block_end><return>self._iter(_lib.MDB_PREV_NODUP keys values)<block_end><def_stmt>_cursor_get self op<block_start>rc=_lib.mdb_cursor_get(self._cur self._key self._val op)<line_sep>self._valid=v=<not>rc<line_sep>self._last_mutation=self.txn._mutations<if_stmt>rc<block_start>self._key.mv_size=0<line_sep>self._val.mv_size=0<if_stmt>rc<ne>_lib.MDB_NOTFOUND<block_start><if_stmt><not>(rc<eq>_lib.EINVAL<and>op<eq>_lib.MDB_GET_CURRENT)<block_start><raise>_error("mdb_cursor_get" rc)<block_end><block_end><block_end><return>v<block_end><def_stmt>_cursor_get_kv self op k v<block_start>rc=_lib.pymdb_cursor_get(self._cur k len(k) v len(v) self._key self._val op)<line_sep>self._valid=v=<not>rc<if_stmt>rc<block_start>self._key.mv_size=0<line_sep>self._val.mv_size=0<if_stmt>rc<ne>_lib.MDB_NOTFOUND<block_start><if_stmt><not>(rc<eq>_lib.EINVAL<and>op<eq>_lib.MDB_GET_CURRENT)<block_start><raise>_error("mdb_cursor_get" rc)<block_end><block_end><block_end><return>v<block_end><def_stmt>first self<block_start>"""Move to the first key in the database, returning ``True`` on success or ``False`` if the database is empty. If the database was opened with `dupsort=True` and the key contains duplicates, the cursor is positioned on the first value ("duplicate"). Equivalent to `mdb_cursor_get() <http://lmdb.tech/doc/group__mdb.html#ga48df35fb102536b32dfbb801a47b4cb0>`_ with `MDB_FIRST <http://lmdb.tech/doc/group__mdb.html#ga1206b2af8b95e7f6b0ef6b28708c9127>`_ """<line_sep><return>self._cursor_get(_lib.MDB_FIRST)<block_end><def_stmt>first_dup self<block_start>"""Move to the first value ("duplicate") for the current key, returning ``True`` on success or ``False`` if the database is empty. Only meaningful for databases opened with `dupsort=True`. Equivalent to `mdb_cursor_get() <http://lmdb.tech/doc/group__mdb.html#ga48df35fb102536b32dfbb801a47b4cb0>`_ with `MDB_FIRST_DUP <http://lmdb.tech/doc/group__mdb.html#ga1206b2af8b95e7f6b0ef6b28708c9127>`_ """<line_sep><return>self._cursor_get(_lib.MDB_FIRST_DUP)<block_end><def_stmt>last self<block_start>"""Move to the last key in the database, returning ``True`` on success or ``False`` if the database is empty. If the database was opened with `dupsort=True` and the key contains duplicates, the cursor is positioned on the last value ("duplicate"). Equivalent to `mdb_cursor_get() <http://lmdb.tech/doc/group__mdb.html#ga48df35fb102536b32dfbb801a47b4cb0>`_ with `MDB_LAST <http://lmdb.tech/doc/group__mdb.html#ga1206b2af8b95e7f6b0ef6b28708c9127>`_ """<line_sep><return>self._cursor_get(_lib.MDB_LAST)<block_end><def_stmt>last_dup self<block_start>"""Move to the last value ("duplicate") for the current key, returning ``True`` on success or ``False`` if the database is empty. Only meaningful for databases opened with `dupsort=True`. Equivalent to `mdb_cursor_get() <http://lmdb.tech/doc/group__mdb.html#ga48df35fb102536b32dfbb801a47b4cb0>`_ with `MDB_LAST_DUP <http://lmdb.tech/doc/group__mdb.html#ga1206b2af8b95e7f6b0ef6b28708c9127>`_ """<line_sep><return>self._cursor_get(_lib.MDB_LAST_DUP)<block_end><def_stmt>prev self<block_start>"""Move to the previous element, returning ``True`` on success or ``False`` if there is no previous item. For databases opened with `dupsort=True`, moves to the previous data item ("duplicate") for the current key if one exists, otherwise moves to the previous key. 
Equivalent to `mdb_cursor_get() <http://lmdb.tech/doc/group__mdb.html#ga48df35fb102536b32dfbb801a47b4cb0>`_ with `MDB_PREV <http://lmdb.tech/doc/group__mdb.html#ga1206b2af8b95e7f6b0ef6b28708c9127>`_ """<line_sep><return>self._cursor_get(_lib.MDB_PREV)<block_end><def_stmt>prev_dup self<block_start>"""Move to the previous value ("duplicate") of the current key, returning ``True`` on success or ``False`` if there is no previous value. Only meaningful for databases opened with `dupsort=True`. Equivalent to `mdb_cursor_get() <http://lmdb.tech/doc/group__mdb.html#ga48df35fb102536b32dfbb801a47b4cb0>`_ with `MDB_PREV_DUP <http://lmdb.tech/doc/group__mdb.html#ga1206b2af8b95e7f6b0ef6b28708c9127>`_ """<line_sep><return>self._cursor_get(_lib.MDB_PREV_DUP)<block_end><def_stmt>prev_nodup self<block_start>"""Move to the last value ("duplicate") of the previous key, returning ``True`` on success or ``False`` if there is no previous key. Only meaningful for databases opened with `dupsort=True`. Equivalent to `mdb_cursor_get() <http://lmdb.tech/doc/group__mdb.html#ga48df35fb102536b32dfbb801a47b4cb0>`_ with `MDB_PREV_NODUP <http://lmdb.tech/doc/group__mdb.html#ga1206b2af8b95e7f6b0ef6b28708c9127>`_ """<line_sep><return>self._cursor_get(_lib.MDB_PREV_NODUP)<block_end><def_stmt>next self<block_start>"""Move to the next element, returning ``True`` on success or ``False`` if there is no next element. For databases opened with `dupsort=True`, moves to the next value ("duplicate") for the current key if one exists, otherwise moves to the first value of the next key. Equivalent to `mdb_cursor_get() <http://lmdb.tech/doc/group__mdb.html#ga48df35fb102536b32dfbb801a47b4cb0>`_ with `MDB_NEXT <http://lmdb.tech/doc/group__mdb.html#ga1206b2af8b95e7f6b0ef6b28708c9127>`_ """<line_sep><return>self._cursor_get(_lib.MDB_NEXT)<block_end><def_stmt>next_dup self<block_start>"""Move to the next value ("duplicate") of the current key, returning ``True`` on success or ``False`` if there is no next value. Only meaningful for databases opened with `dupsort=True`. Equivalent to `mdb_cursor_get() <http://lmdb.tech/doc/group__mdb.html#ga48df35fb102536b32dfbb801a47b4cb0>`_ with `MDB_NEXT_DUP <http://lmdb.tech/doc/group__mdb.html#ga1206b2af8b95e7f6b0ef6b28708c9127>`_ """<line_sep><return>self._cursor_get(_lib.MDB_NEXT_DUP)<block_end><def_stmt>next_nodup self<block_start>"""Move to the first value ("duplicate") of the next key, returning ``True`` on success or ``False`` if there is no next key. Only meaningful for databases opened with `dupsort=True`. Equivalent to `mdb_cursor_get() <http://lmdb.tech/doc/group__mdb.html#ga48df35fb102536b32dfbb801a47b4cb0>`_ with `MDB_NEXT_NODUP <http://lmdb.tech/doc/group__mdb.html#ga1206b2af8b95e7f6b0ef6b28708c9127>`_ """<line_sep><return>self._cursor_get(_lib.MDB_NEXT_NODUP)<block_end><def_stmt>set_key self key<block_start>"""Seek exactly to `key`, returning ``True`` on success or ``False`` if the exact key was not found. It is an error to :py:meth:`set_key` the empty bytestring. For databases opened with `dupsort=True`, moves to the first value ("duplicate") for the key. 
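A brief sketch contrasting an exact seek with the range seek documented further below, assuming an open cursor `cur` on a populated database:

.. code-block:: python

    if cur.set_key(b'exact-key'):            # positions only on an exact match
        print(cur.value())

    if cur.set_range(b'prefix-'):            # first key >= b'prefix-'
        for key, value in cur:               # iterate forward from there
            if not key.startswith(b'prefix-'):
                break
            print(key, value)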
Equivalent to `mdb_cursor_get() <http://lmdb.tech/doc/group__mdb.html#ga48df35fb102536b32dfbb801a47b4cb0>`_ with `MDB_SET_KEY <http://lmdb.tech/doc/group__mdb.html#ga1206b2af8b95e7f6b0ef6b28708c9127>`_ """<line_sep><return>self._cursor_get_kv(_lib.MDB_SET_KEY key EMPTY_BYTES)<block_end><def_stmt>set_key_dup self key value<block_start>"""Seek exactly to `(key, value)`, returning ``True`` on success or ``False`` if the exact key and value was not found. It is an error to :py:meth:`set_key` the empty bytestring. Only meaningful for databases opened with `dupsort=True`. Equivalent to `mdb_cursor_get() <http://lmdb.tech/doc/group__mdb.html#ga48df35fb102536b32dfbb801a47b4cb0>`_ with `MDB_GET_BOTH <http://lmdb.tech/doc/group__mdb.html#ga1206b2af8b95e7f6b0ef6b28708c9127>`_ """<line_sep><return>self._cursor_get_kv(_lib.MDB_GET_BOTH key value)<block_end><def_stmt>get self key default=<none><block_start>"""Equivalent to :py:meth:`set_key()`, except :py:meth:`value` is returned when `key` is found, otherwise `default`. """<if_stmt>self._cursor_get_kv(_lib.MDB_SET_KEY key EMPTY_BYTES)<block_start><return>self.value()<block_end><return>default<block_end><def_stmt>getmulti self keys dupdata=<false> dupfixed_bytes=<none> keyfixed=<false><block_start>"""Returns an iterable of `(key, value)` 2-tuples containing results for each key in the iterable `keys`. `keys`: Iterable to read keys from. `dupdata`: If ``True`` and database was opened with `dupsort=True`, read all duplicate values for each matching key. `dupfixed_bytes`: If database was opened with `dupsort=True` and `dupfixed=True`, accepts the size of each value, in bytes, and applies an optimization reducing the number of database lookups. `keyfixed`: If `dupfixed_bytes` is set and database key size is fixed, setting keyfixed=True will result in this function returning a memoryview to the results as a structured array of bytes. The structured array can be instantiated by passing the memoryview buffer to NumPy: .. 
code-block:: python key_bytes, val_bytes = 4, 8 dtype = np.dtype([(f'S{key_bytes}', f'S{val_bytes}}')]) arr = np.frombuffer( cur.getmulti(keys, dupdata=True, dupfixed_bytes=val_bytes, keyfixed=True) ) """<if_stmt>dupfixed_bytes<and>dupfixed_bytes<l>0<block_start><raise>_error("dupfixed_bytes must be a positive integer.")<block_end><elif_stmt>(dupfixed_bytes<or>keyfixed)<and><not>dupdata<block_start><raise>_error("dupdata is required for dupfixed_bytes/key_bytes.")<block_end><elif_stmt>keyfixed<and><not>dupfixed_bytes<block_start><raise>_error("dupfixed_bytes is required for key_bytes.")<block_end><if_stmt>dupfixed_bytes<block_start>get_op=_lib.MDB_GET_MULTIPLE<line_sep>next_op=_lib.MDB_NEXT_MULTIPLE<block_end><else_stmt><block_start>get_op=_lib.MDB_GET_CURRENT<line_sep>next_op=_lib.MDB_NEXT_DUP<block_end>a=bytearray()<line_sep>lst=list()<for_stmt>key keys<block_start><if_stmt>self.set_key(key)<block_start><while_stmt>self._valid<block_start>self._cursor_get(get_op)<line_sep>preload(self._val)<line_sep>key=self._to_py(self._key)<line_sep>val=self._to_py(self._val)<if_stmt>dupfixed_bytes<block_start>gen=((key val[i:i+dupfixed_bytes])<for>i range(0 len(val) dupfixed_bytes))<if_stmt>keyfixed<block_start><for_stmt>k,v gen<block_start>a.extend(k+v)<block_end><block_end><else_stmt><block_start><for_stmt>k,v gen<block_start>lst.append((k v))<block_end><block_end><block_end><else_stmt><block_start>lst.append((key val))<block_end><if_stmt>dupdata<block_start>self._cursor_get(next_op)<block_end><else_stmt><block_start><break><block_end><block_end><block_end><block_end><if_stmt>keyfixed<block_start><return>memoryview(a)<block_end><else_stmt><block_start><return>lst<block_end><block_end><def_stmt>set_range self key<block_start>"""Seek to the first key greater than or equal to `key`, returning ``True`` on success, or ``False`` to indicate key was past end of database. Behaves like :py:meth:`first` if `key` is the empty bytestring. For databases opened with `dupsort=True`, moves to the first value ("duplicate") for the key. Equivalent to `mdb_cursor_get() <http://lmdb.tech/doc/group__mdb.html#ga48df35fb102536b32dfbb801a47b4cb0>`_ with `MDB_SET_RANGE <http://lmdb.tech/doc/group__mdb.html#ga1206b2af8b95e7f6b0ef6b28708c9127>`_ """<if_stmt><not>key<block_start><return>self.first()<block_end><return>self._cursor_get_kv(_lib.MDB_SET_RANGE key EMPTY_BYTES)<block_end><def_stmt>set_range_dup self key value<block_start>"""Seek to the first key/value pair greater than or equal to `key`, returning ``True`` on success, or ``False`` to indicate that `value` was past the last value of `key` or that `(key, value)` was past the end end of database. Only meaningful for databases opened with `dupsort=True`. Equivalent to `mdb_cursor_get() <http://lmdb.tech/doc/group__mdb.html#ga48df35fb102536b32dfbb801a47b4cb0>`_ with `MDB_GET_BOTH_RANGE <http://lmdb.tech/doc/group__mdb.html#ga1206b2af8b95e7f6b0ef6b28708c9127>`_ """<line_sep>rc=self._cursor_get_kv(_lib.MDB_GET_BOTH_RANGE key value)<line_sep># issue #126: MDB_GET_BOTH_RANGE does not satisfy its documentation, # and fails to update `key` and `value` on success. Therefore # explicitly call MDB_GET_CURRENT after MDB_GET_BOTH_RANGE. self._cursor_get(_lib.MDB_GET_CURRENT)<line_sep><return>rc<block_end><def_stmt>delete self dupdata=<false><block_start>"""Delete the current element and move to the next, returning ``True`` on success or ``False`` if the database was empty. 
If `dupdata` is ``True``, delete all values ("duplicates") for the current key, otherwise delete only the currently positioned value. Only meaningful for databases opened with `dupsort=True`. Equivalent to `mdb_cursor_del() <http://lmdb.tech/doc/group__mdb.html#ga26a52d3efcfd72e5bf6bd6960bf75f95>`_ """<line_sep>v=self._valid<if_stmt>v<block_start>flags=_lib.MDB_NODUPDATA<if>dupdata<else>0<line_sep>rc=_lib.mdb_cursor_del(self._cur flags)<line_sep>self.txn._mutations<augadd>1<if_stmt>rc<block_start><raise>_error("mdb_cursor_del" rc)<block_end>self._cursor_get(_lib.MDB_GET_CURRENT)<line_sep>v=rc<eq>0<block_end><return>v<block_end><def_stmt>count self<block_start>"""Return the number of values ("duplicates") for the current key. Only meaningful for databases opened with `dupsort=True`. Equivalent to `mdb_cursor_count() <http://lmdb.tech/doc/group__mdb.html#ga4041fd1e1862c6b7d5f10590b86ffbe2>`_ """<line_sep>countp=_ffi.new('size_t *')<line_sep>rc=_lib.mdb_cursor_count(self._cur countp)<if_stmt>rc<block_start><raise>_error("mdb_cursor_count" rc)<block_end><return>countp[0]<block_end><def_stmt>put self key val dupdata=<true> overwrite=<true> append=<false><block_start>"""Store a record, returning ``True`` if it was written, or ``False`` to indicate the key was already present and `overwrite=False`. On success, the cursor is positioned on the key. Equivalent to `mdb_cursor_put() <http://lmdb.tech/doc/group__mdb.html#ga1f83ccb40011837ff37cc32be01ad91e>`_ `key`: Bytestring key to store. `val`: Bytestring value to store. `dupdata`: If ``False`` and database was opened with `dupsort=True`, will return ``False`` if the key already has that value. In other words, this only affects the return value. `overwrite`: If ``False``, do not overwrite the value for the key if it exists, just return ``False``. For databases opened with `dupsort=True`, ``False`` will always be returned if a duplicate key/value pair is inserted, regardless of the setting for `overwrite`. `append`: If ``True``, append the pair to the end of the database without comparing its order first. Appending a key that is not greater than the highest existing key will fail and return ``False``. """<line_sep>flags=0<if_stmt><not>dupdata<block_start>flags<augor>_lib.MDB_NODUPDATA<block_end><if_stmt><not>overwrite<block_start>flags<augor>_lib.MDB_NOOVERWRITE<block_end><if_stmt>append<block_start><if_stmt>self.txn._db._flags&_lib.MDB_DUPSORT<block_start>flags<augor>_lib.MDB_APPENDDUP<block_end><else_stmt><block_start>flags<augor>_lib.MDB_APPEND<block_end><block_end>rc=_lib.pymdb_cursor_put(self._cur key len(key) val len(val) flags)<line_sep>self.txn._mutations<augadd>1<if_stmt>rc<block_start><if_stmt>rc<eq>_lib.MDB_KEYEXIST<block_start><return><false><block_end><raise>_error("mdb_cursor_put" rc)<block_end>self._cursor_get(_lib.MDB_GET_CURRENT)<line_sep><return><true><block_end><def_stmt>putmulti self items dupdata=<true> overwrite=<true> append=<false><block_start>"""Invoke :py:meth:`put` for each `(key, value)` 2-tuple from the iterable `items`. Elements must be exactly 2-tuples, they may not be of any other type, or tuple subclass. Returns a tuple `(consumed, added)`, where `consumed` is the number of elements read from the iterable, and `added` is the number of new entries added to the database. `added` may be less than `consumed` when `overwrite=False`. `items`: Iterable to read records from. `dupdata`: If ``True`` and database was opened with `dupsort=True`, add pair as a duplicate if the given key already exists. 
Otherwise overwrite any existing matching key. `overwrite`: If ``False``, do not overwrite the value for the key if it exists, just return ``False``. For databases opened with `dupsort=True`, ``False`` will always be returned if a duplicate key/value pair is inserted, regardless of the setting for `overwrite`. `append`: If ``True``, append records to the end of the database without comparing their order first. Appending a key that is not greater than the highest existing key will cause corruption. """<line_sep>flags=0<if_stmt><not>dupdata<block_start>flags<augor>_lib.MDB_NODUPDATA<block_end><if_stmt><not>overwrite<block_start>flags<augor>_lib.MDB_NOOVERWRITE<block_end><if_stmt>append<block_start><if_stmt>self.txn._db._flags&_lib.MDB_DUPSORT<block_start>flags<augor>_lib.MDB_APPENDDUP<block_end><else_stmt><block_start>flags<augor>_lib.MDB_APPEND<block_end><block_end>added=0<line_sep>skipped=0<for_stmt>key,value items<block_start>rc=_lib.pymdb_cursor_put(self._cur key len(key) value len(value) flags)<line_sep>self.txn._mutations<augadd>1<line_sep>added<augadd>1<if_stmt>rc<block_start><if_stmt>rc<eq>_lib.MDB_KEYEXIST<block_start>skipped<augadd>1<block_end><else_stmt><block_start><raise>_error("mdb_cursor_put" rc)<block_end><block_end><block_end>self._cursor_get(_lib.MDB_GET_CURRENT)<line_sep><return>added added-skipped<block_end><def_stmt>replace self key val<block_start>"""Store a record, returning its previous value if one existed. Returns ``None`` if no previous value existed. This uses the best available mechanism to minimize the cost of a `set-and-return-previous` operation. For databases opened with `dupsort=True`, only the first data element ("duplicate") is returned if it existed, all data elements are removed and the new `(key, data)` pair is inserted. `key`: Bytestring key to store. `value`: Bytestring value to store. """<if_stmt>self.db._flags&_lib.MDB_DUPSORT<block_start><if_stmt>self._cursor_get_kv(_lib.MDB_SET_KEY key EMPTY_BYTES)<block_start>preload(self._val)<line_sep>old=_mvstr(self._val)<line_sep>self.delete(<true>)<block_end><else_stmt><block_start>old=<none><block_end>self.put(key val)<line_sep><return>old<block_end>flags=_lib.MDB_NOOVERWRITE<line_sep>keylen=len(key)<line_sep>rc=_lib.pymdb_cursor_put(self._cur key keylen val len(val) flags)<line_sep>self.txn._mutations<augadd>1<if_stmt><not>rc<block_start><return><block_end><if_stmt>rc<ne>_lib.MDB_KEYEXIST<block_start><raise>_error("mdb_cursor_put" rc)<block_end>self._cursor_get(_lib.MDB_GET_CURRENT)<line_sep>preload(self._val)<line_sep>old=_mvstr(self._val)<line_sep>rc=_lib.pymdb_cursor_put(self._cur key keylen val len(val) 0)<line_sep>self.txn._mutations<augadd>1<if_stmt>rc<block_start><raise>_error("mdb_cursor_put" rc)<block_end>self._cursor_get(_lib.MDB_GET_CURRENT)<line_sep><return>old<block_end><def_stmt>pop self key<block_start>"""Fetch a record's value then delete it. Returns ``None`` if no previous value existed. This uses the best available mechanism to minimize the cost of a `delete-and-return-previous` operation. For databases opened with `dupsort=True`, the first data element ("duplicate") for the key will be popped. `key`: Bytestring key to delete. 
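A short sketch of both read-modify helpers, assuming an open cursor `cur` created inside a write transaction:

.. code-block:: python

    old = cur.replace(b'counter', b'1')      # previous value, or None
    print('previous value:', old)

    value = cur.pop(b'counter')              # fetch the value, then delete it
    assert value == b'1'
    assert cur.pop(b'counter') is None       # key is already gone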
"""<if_stmt>self._cursor_get_kv(_lib.MDB_SET_KEY key EMPTY_BYTES)<block_start>preload(self._val)<line_sep>old=_mvstr(self._val)<line_sep>rc=_lib.mdb_cursor_del(self._cur 0)<line_sep>self.txn._mutations<augadd>1<if_stmt>rc<block_start><raise>_error("mdb_cursor_del" rc)<block_end>self._cursor_get(_lib.MDB_GET_CURRENT)<line_sep><return>old<block_end><block_end><def_stmt>_iter_from self k reverse<block_start>"""Helper for centidb. Please do not rely on this interface, it may be removed in future. """<if_stmt><not>k<and><not>reverse<block_start>found=self.first()<block_end><else_stmt><block_start>found=self.set_range(k)<block_end><if_stmt>reverse<block_start><if_stmt><not>found<block_start>self.last()<block_end><return>self.iterprev()<block_end><else_stmt><block_start><if_stmt><not>found<block_start><return>iter(())<block_end><return>self.iternext()<block_end><block_end><block_end>
# Copyright 2015 Ufora Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>unittest<import_stmt>pyfora<import_stmt>ufora.config.Setup<as>Setup<import_stmt>ufora.FORA.python.PurePython.DictTestCases<as>DictTestCases<import_stmt>ufora.FORA.python.PurePython.ListTestCases<as>ListTestCases<import_stmt>ufora.FORA.python.PurePython.TupleTestCases<as>TupleTestCases<import_stmt>ufora.FORA.python.PurePython.ExecutorTestCommon<as>ExecutorTestCommon<import_stmt>ufora.test.ClusterSimulation<as>ClusterSimulation<class_stmt>ExecutorSimulationTest(unittest.TestCase ExecutorTestCommon.ExecutorTestCommon DictTestCases.DictTestCases ListTestCases.ListTestCases TupleTestCases.TupleTestCases)<block_start>@classmethod<def_stmt>setUpClass cls<block_start>cls.config=Setup.config()<line_sep>cls.executor=<none><line_sep>cls.simulation=ClusterSimulation.Simulator.createGlobalSimulator()<line_sep>cls.simulation.startService()<line_sep>cls.simulation.getDesirePublisher().desireNumberOfWorkers(1)<block_end>@classmethod<def_stmt>tearDownClass cls<block_start>cls.simulation.stopService()<block_end>@classmethod<def_stmt>create_executor cls allowCached=<true><block_start><if_stmt><not>allowCached<block_start><return>pyfora.connect('http://localhost:30000')<block_end><if_stmt>cls.executor<is><none><block_start>cls.executor=pyfora.connect('http://localhost:30000')<line_sep>cls.executor.stayOpenOnExit=<true><block_end><return>cls.executor<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start><import_stmt>ufora.config.Mainline<as>Mainline<line_sep>Mainline.UnitTestMainline()<block_end>
<import_from_stmt>tensorhive.models.Group Group<import_from_stmt>fixtures.controllers API_URI<as>BASE_URI HEADERS<import_from_stmt>http HTTPStatus<import_from_stmt>importlib reload<import_stmt>json<import_stmt>auth_patcher<line_sep>ENDPOINT=BASE_URI+'/groups'<def_stmt>setup_module _<block_start>auth_patches=auth_patcher.get_patches(superuser=<true>)<for_stmt>auth_patch auth_patches<block_start>auth_patch.start()<block_end><for_stmt>module auth_patcher.CONTROLLER_MODULES<block_start>reload(module)<block_end><for_stmt>auth_patch auth_patches<block_start>auth_patch.stop()<block_end><block_end># POST /groups <def_stmt>test_create_group tables client<block_start>group_name='TestGroup'<line_sep>data={'name':group_name}<line_sep>resp=client.post(ENDPOINT headers=HEADERS data=json.dumps(data))<line_sep>resp_json=json.loads(resp.data.decode('utf-8'))<assert_stmt>resp.status_code<eq>HTTPStatus.CREATED<assert_stmt>resp_json['group']['id']<is><not><none><assert_stmt>resp_json['group']['name']<eq>group_name<assert_stmt>Group.get(int(resp_json['group']['id']))<is><not><none><block_end># PUT /groups/{id} <def_stmt>test_update_group tables client new_group<block_start>new_group.save()<line_sep>new_group_name=new_group.name+'111'<line_sep>resp=client.put(ENDPOINT+'/'+str(new_group.id) headers=HEADERS data=json.dumps({'name':new_group_name}))<line_sep>resp_json=json.loads(resp.data.decode('utf-8'))<assert_stmt>resp.status_code<eq>HTTPStatus.OK<assert_stmt>resp_json['group']['name']<eq>new_group_name<assert_stmt>Group.get(new_group.id).name<eq>new_group_name<block_end># PUT /groups/{id} - nonexistent id <def_stmt>test_update_group_that_doesnt_exist tables client<block_start>non_existent_id='777'<line_sep>resp=client.put(ENDPOINT+'/'+non_existent_id headers=HEADERS data=json.dumps({'name':'test'}))<assert_stmt>resp.status_code<eq>HTTPStatus.NOT_FOUND<block_end># DELETE /groups/{id} <def_stmt>test_delete_group tables client new_group<block_start>new_group.save()<line_sep>resp=client.delete(ENDPOINT+'/'+str(new_group.id) headers=HEADERS)<assert_stmt>resp.status_code<eq>HTTPStatus.OK<line_sep># Let's get all groups to verify resp=client.get(ENDPOINT headers=HEADERS)<line_sep>resp_json=json.loads(resp.data.decode('utf-8'))<assert_stmt>len(resp_json)<eq>0<block_end># DELETE /groups/{id} - nonexistent id <def_stmt>test_delete_group_that_doesnt_exist tables client<block_start>non_existent_id='777'<line_sep>resp=client.delete(ENDPOINT+'/'+non_existent_id headers=HEADERS)<assert_stmt>resp.status_code<eq>HTTPStatus.NOT_FOUND<block_end># PUT /groups/{id}/users/{id} <def_stmt>test_add_user_to_a_group tables client new_group new_user<block_start>new_group.save()<line_sep>new_user.save()<line_sep>resp=client.put(ENDPOINT+'/{}/users/{}'.format(new_group.id new_user.id) headers=HEADERS)<assert_stmt>resp.status_code<eq>HTTPStatus.OK<assert_stmt>new_group<in>new_user.groups<assert_stmt>new_user<in>new_group.users<block_end># DELETE /groups/{id}/users/{id} <def_stmt>test_remove_user_from_a_group tables client new_group_with_member<block_start>new_group_with_member.save()<line_sep>user=new_group_with_member.users[0]<line_sep>resp=client.delete(ENDPOINT+'/{}/users/{}'.format(new_group_with_member.id user.id) headers=HEADERS)<assert_stmt>resp.status_code<eq>HTTPStatus.OK<assert_stmt>new_group_with_member<not><in>user.groups<assert_stmt>user<not><in>new_group_with_member.users<block_end># PUT /groups/{id}/users/{id} - nonexistent user id <def_stmt>test_add_nonexistent_user_to_a_group tables client 
new_group<block_start>new_group.save()<line_sep>nonexistent_user_id='777'<line_sep>resp=client.put(ENDPOINT+'/{}/users/{}'.format(new_group.id nonexistent_user_id) headers=HEADERS)<assert_stmt>resp.status_code<eq>HTTPStatus.NOT_FOUND<block_end># PUT /groups/{id}/users/{id} - nonexistent group id <def_stmt>test_add_user_to_nonexistent_group tables client new_user<block_start>new_user.save()<line_sep>nonexistent_group_id='777'<line_sep>resp=client.put(ENDPOINT+'/{}/users/{}'.format(nonexistent_group_id new_user.id) headers=HEADERS)<assert_stmt>resp.status_code<eq>HTTPStatus.NOT_FOUND<block_end># DELETE /groups/{id}/users/{id} - nonexistent user id <def_stmt>test_remove_nonexistent_user_from_a_group tables client new_group<block_start>new_group.save()<line_sep>nonexistent_user_id='777'<line_sep>resp=client.delete(ENDPOINT+'/{}/users/{}'.format(new_group.id nonexistent_user_id) headers=HEADERS)<assert_stmt>resp.status_code<eq>HTTPStatus.NOT_FOUND<block_end># DELETE /groups/{id}/users/{id} - nonexistent group id <def_stmt>test_remove_user_from_a_nonexistent_group tables client new_user<block_start>new_user.save()<line_sep>nonexistent_group_id='777'<line_sep>resp=client.delete(ENDPOINT+'/{}/users/{}'.format(nonexistent_group_id new_user.id) headers=HEADERS)<assert_stmt>resp.status_code<eq>HTTPStatus.NOT_FOUND<block_end># PUT /groups/{id} <def_stmt>test_set_group_as_a_default tables client new_group<block_start>new_group.save()<line_sep>resp=client.put(ENDPOINT+'/{}'.format(new_group.id) data=json.dumps({'isDefault':<true>}) headers=HEADERS)<assert_stmt>resp.status_code<eq>HTTPStatus.OK<assert_stmt>Group.get(new_group.id).is_default<block_end># PUT /groups/{id} <def_stmt>test_mark_default_group_as_non_default tables client new_group<block_start>new_group.is_default=<true><line_sep>new_group.save()<line_sep>resp=client.put(ENDPOINT+'/{}'.format(new_group.id) data=json.dumps({'isDefault':<false>}) headers=HEADERS)<assert_stmt>resp.status_code<eq>HTTPStatus.OK<assert_stmt>Group.get(new_group.id).is_default<is><false><block_end>
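# The tests above double as documentation for the groups REST surface:
# POST /groups, PUT/DELETE /groups/{id} and PUT/DELETE /groups/{id}/users/{user_id}.
# Below is a hedged client-side sketch of the same calls using `requests`;
# the base URL, the bearer token and the user id are placeholders (the tests
# obtain the real values from fixtures), so adapt them to an actual deployment.
import json

import requests

BASE_URL = 'http://localhost:1111/api'         # placeholder, deployment-specific
HEADERS = {'Authorization': 'Bearer <token>',  # placeholder superuser token
           'Content-Type': 'application/json'}

# Create a group, then mark it as the default one.
resp = requests.post(BASE_URL + '/groups', headers=HEADERS,
                     data=json.dumps({'name': 'TestGroup'}))
group_id = resp.json()['group']['id']
requests.put('{}/groups/{}'.format(BASE_URL, group_id), headers=HEADERS,
             data=json.dumps({'isDefault': True}))

# Attach an (assumed existing) user, then clean up the group.
user_id = 1
requests.put('{}/groups/{}/users/{}'.format(BASE_URL, group_id, user_id),
             headers=HEADERS)
requests.delete('{}/groups/{}'.format(BASE_URL, group_id), headers=HEADERS)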
<import_stmt>asyncio<import_from_stmt>contextlib asynccontextmanager<import_stmt>pytest<import_from_stmt>mitmproxy exceptions<import_from_stmt>mitmproxy.addons.proxyserver Proxyserver<import_from_stmt>mitmproxy.connection Address<import_from_stmt>mitmproxy.proxy layers server_hooks<import_from_stmt>mitmproxy.proxy.layers.http HTTPMode<import_from_stmt>mitmproxy.test taddons tflow<import_from_stmt>mitmproxy.test.tflow tclient_conn tserver_conn<class_stmt>HelperAddon<block_start><def_stmt>__init__ self<block_start>self.flows=[]<line_sep>self.layers=[<lambda>ctx:layers.modes.HttpProxy(ctx) <lambda>ctx:layers.HttpLayer(ctx HTTPMode.regular) <lambda>ctx:layers.TCPLayer(ctx) ]<block_end><def_stmt>request self f<block_start>self.flows.append(f)<block_end><def_stmt>tcp_start self f<block_start>self.flows.append(f)<block_end><def_stmt>next_layer self nl<block_start>nl.layer=self.layers.pop(0)(nl.context)<block_end><block_end>@asynccontextmanager<async_keyword><def_stmt>tcp_server handle_conn<arrow>Address<block_start>server=<await>asyncio.start_server(handle_conn '127.0.0.1' 0)<line_sep><await>server.start_serving()<try_stmt><block_start><yield>server.sockets[0].getsockname()<block_end><finally_stmt><block_start>server.close()<block_end><block_end>@pytest.mark.asyncio<async_keyword><def_stmt>test_start_stop <block_start><async_keyword><def_stmt>server_handler reader:asyncio.StreamReader writer:asyncio.StreamWriter<block_start><assert_stmt><await>reader.readuntil(b"\r\n\r\n")<eq>b"GET /hello HTTP/1.1\r\n\r\n"<line_sep>writer.write(b"HTTP/1.1 204 No Content\r\n\r\n")<line_sep><await>writer.drain()<line_sep>writer.close()<block_end>ps=Proxyserver()<with_stmt>taddons.context(ps)<as>tctx<block_start>state=HelperAddon()<line_sep>tctx.master.addons.add(state)<async_keyword><with_stmt>tcp_server(server_handler)<as>addr<block_start>tctx.configure(ps listen_host="127.0.0.1" listen_port=0)<assert_stmt><not>ps.server<line_sep>ps.running()<line_sep><await>tctx.master.await_log("Proxy server listening" level="info")<assert_stmt>ps.server<line_sep>proxy_addr=ps.server.sockets[0].getsockname()[:2]<line_sep>reader,writer=<await>asyncio.open_connection(*proxy_addr)<line_sep>req=f"GET http://{addr[0]}:{addr[1]}/hello HTTP/1.1\r\n\r\n"<line_sep>writer.write(req.encode())<assert_stmt><await>reader.readuntil(b"\r\n\r\n")<eq>b"HTTP/1.1 204 No Content\r\n\r\n"<assert_stmt>repr(ps)<eq>"ProxyServer(running, 1 active conns)"<line_sep>tctx.configure(ps server=<false>)<line_sep><await>tctx.master.await_log("Stopping server" level="info")<assert_stmt><not>ps.server<assert_stmt>state.flows<assert_stmt>state.flows[0].request.path<eq>"/hello"<assert_stmt>state.flows[0].response.status_code<eq>204<line_sep># Waiting here until everything is really torn down... takes some effort. conn_handler=list(ps._connections.values())[0]<line_sep>client_handler=conn_handler.transports[conn_handler.client].handler<line_sep>writer.close()<line_sep><await>writer.wait_closed()<try_stmt><block_start><await>client_handler<block_end><except_stmt>asyncio.CancelledError<block_start><pass><block_end><for_stmt>_ range(5)# Get all other scheduled coroutines to run. 
<block_start><await>asyncio.sleep(0)<block_end><assert_stmt>repr(ps)<eq>"ProxyServer(stopped, 0 active conns)"<block_end><block_end><block_end>@pytest.mark.asyncio<async_keyword><def_stmt>test_inject <arrow><none><block_start><async_keyword><def_stmt>server_handler reader:asyncio.StreamReader writer:asyncio.StreamWriter<block_start><while_stmt>s:=<await>reader.read(1)<block_start>writer.write(s.upper())<block_end><block_end>ps=Proxyserver()<with_stmt>taddons.context(ps)<as>tctx<block_start>state=HelperAddon()<line_sep>tctx.master.addons.add(state)<async_keyword><with_stmt>tcp_server(server_handler)<as>addr<block_start>tctx.configure(ps listen_host="127.0.0.1" listen_port=0)<line_sep>ps.running()<line_sep><await>tctx.master.await_log("Proxy server listening" level="info")<line_sep>proxy_addr=ps.server.sockets[0].getsockname()[:2]<line_sep>reader,writer=<await>asyncio.open_connection(*proxy_addr)<line_sep>req=f"CONNECT {addr[0]}:{addr[1]} HTTP/1.1\r\n\r\n"<line_sep>writer.write(req.encode())<assert_stmt><await>reader.readuntil(b"\r\n\r\n")<eq>b"HTTP/1.1 200 Connection established\r\n\r\n"<line_sep>writer.write(b"a")<assert_stmt><await>reader.read(1)<eq>b"A"<line_sep>ps.inject_tcp(state.flows[0] <false> b"b")<assert_stmt><await>reader.read(1)<eq>b"B"<line_sep>ps.inject_tcp(state.flows[0] <true> b"c")<assert_stmt><await>reader.read(1)<eq>b"c"<block_end><block_end><block_end>@pytest.mark.asyncio<async_keyword><def_stmt>test_inject_fail <arrow><none><block_start>ps=Proxyserver()<with_stmt>taddons.context(ps)<as>tctx<block_start>ps.inject_websocket(tflow.tflow() <true> b"test")<line_sep><await>tctx.master.await_log("Cannot inject WebSocket messages into non-WebSocket flows." level="warn")<line_sep>ps.inject_tcp(tflow.tflow() <true> b"test")<line_sep><await>tctx.master.await_log("Cannot inject TCP messages into non-TCP flows." level="warn")<line_sep>ps.inject_websocket(tflow.twebsocketflow() <true> b"test")<line_sep><await>tctx.master.await_log("Flow is not from a live connection." level="warn")<line_sep>ps.inject_websocket(tflow.ttcpflow() <true> b"test")<line_sep><await>tctx.master.await_log("Flow is not from a live connection." level="warn")<block_end><block_end>@pytest.mark.asyncio<async_keyword><def_stmt>test_warn_no_nextlayer <block_start>""" Test that we log an error if the proxy server is started without NextLayer addon. That is a mean trap to fall into when writing end-to-end tests. """<line_sep>ps=Proxyserver()<with_stmt>taddons.context(ps)<as>tctx<block_start>tctx.configure(ps listen_host="127.0.0.1" listen_port=0)<line_sep>ps.running()<line_sep><await>tctx.master.await_log("Proxy server listening at" level="info")<assert_stmt>tctx.master.has_log("Warning: Running proxyserver without nextlayer addon!" 
level="warn")<line_sep><await>ps.shutdown_server()<block_end><block_end><def_stmt>test_self_connect <block_start>server=tserver_conn()<line_sep>client=tclient_conn()<line_sep>server.address=("localhost" 8080)<line_sep>ps=Proxyserver()<with_stmt>taddons.context(ps)<as>tctx# not calling .running() here to avoid unnecessary socket <block_start>ps.options=tctx.options<line_sep>ps.server_connect(server_hooks.ServerConnectionHookData(server client))<assert_stmt>server.error<eq>"Stopped mitmproxy from recursively connecting to itself."<block_end><block_end><def_stmt>test_options <block_start>ps=Proxyserver()<with_stmt>taddons.context(ps)<as>tctx<block_start><with_stmt>pytest.raises(exceptions.OptionsError)<block_start>tctx.configure(ps body_size_limit="invalid")<block_end>tctx.configure(ps body_size_limit="1m")<with_stmt>pytest.raises(exceptions.OptionsError)<block_start>tctx.configure(ps stream_large_bodies="invalid")<block_end>tctx.configure(ps stream_large_bodies="1m")<block_end><block_end>