





<!DOCTYPE html>
<html class="writer-html5" lang="zh-CN">
<head>
  <meta charset="utf-8">
  
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  
  <title>Relay Arm® Compute Library Integration &mdash; tvm 0.8.dev1982 文档</title>
  

  
  <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0/css/bootstrap.min.css" integrity="sha384-Gn5384xqQ1aoWXA+058RXPxPg6fy4IWvTNh0E263XmFcJlSAwiGgFAW/dAiS6JXm" crossorigin="anonymous">
  <link rel="stylesheet" href="../../_static/css/theme.css" type="text/css" />
  <link rel="stylesheet" href="../../_static/pygments.css" type="text/css" />
  <link rel="stylesheet" href="../../_static/gallery.css" type="text/css" />
  <link rel="stylesheet" href="../../_static/css/tlcpack_theme.css" type="text/css" />

  
  
    <link rel="shortcut icon" href="../../_static/tvm-logo-square.png"/>
  

  
  
  
  
    
      <script type="text/javascript" id="documentation_options" data-url_root="../../" src="../../_static/documentation_options.js"></script>
        <script src="../../_static/jquery.js"></script>
        <script src="../../_static/underscore.js"></script>
        <script src="../../_static/doctools.js"></script>
        <script src="../../_static/translations.js"></script>
    
    <script type="text/javascript" src="../../_static/js/theme.js"></script>

    
    <script type="text/javascript" src="../../_static/js/tlcpack_theme.js"></script>
    <link rel="index" title="索引" href="../../genindex.html" />
    <link rel="search" title="搜索" href="../../search.html" />
    <link rel="next" title="Relay TensorRT Integration" href="tensorrt.html" />
    <link rel="prev" title="HLS Backend Example" href="hls.html" /> 
</head>

<body class="wy-body-for-nav">

   
  <div class="wy-grid-for-nav">
    
    
<header class="header">
    <div class="innercontainer">
      <div class="headerInner d-flex justify-content-between align-items-center">
          <div class="headerLogo">
               <a href="https://tvm.apache.org/"><img src="https://tvm.apache.org/assets/images/logo.svg" alt="logo"></a>
          </div>

          <div id="headMenu" class="headerNav">
            <button type="button" id="closeHeadMenu" class="navCloseBtn"><img src="../../_static/img/close-icon.svg" alt="Close"></button>
             <ul class="nav">
                <li class="nav-item">
                   <a class="nav-link" href="https://tvm.apache.org/community">Community</a>
                </li>
                <li class="nav-item">
                   <a class="nav-link" href="https://tvm.apache.org/download">Download</a>
                </li>
                <li class="nav-item">
                   <a class="nav-link" href="https://tvm.apache.org/vta">VTA</a>
                </li>
                <li class="nav-item">
                   <a class="nav-link" href="https://tvm.apache.org/blog">Blog</a>
                </li>
                <li class="nav-item">
                   <a class="nav-link" href="https://tvm.apache.org/docs">Docs</a>
                </li>
                <li class="nav-item">
                   <a class="nav-link" href="https://tvmconf.org">Conference</a>
                </li>
                <li class="nav-item">
                   <a class="nav-link" href="https://github.com/apache/tvm/">Github</a>
                </li>
                <li class="nav-item">
                   <a class="nav-link" href="https://tvmchinese.github.io/declaration_zh_CN.html">About-Translators</a>
                </li>
             </ul>
               <div class="responsivetlcdropdown">
                 <button type="button" class="btn-link">
                   ASF
                 </button>
                 <ul>
                     <li>
                       <a href="https://apache.org/">Apache Homepage</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/licenses/">License</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/foundation/sponsorship.html">Sponsorship</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/security/">Security</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/foundation/thanks.html">Thanks</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/events/current-event">Events</a>
                     </li>
                     <li>
                       <a href="https://www.zhihu.com/column/c_1429578595417563136">Zhihu</a>
                     </li>
                 </ul>
               </div>
          </div>
            <div class="responsiveMenuIcon">
              <button type="button" id="menuBtn" class="btn-menu"><img src="../../_static/img/menu-icon.svg" alt="Menu Icon"></button>
            </div>

            <div class="tlcDropdown">
              <div class="dropdown">
                <button type="button" class="btn-link dropdown-toggle" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
                  ASF
                </button>
                <div class="dropdown-menu dropdown-menu-right">
                  <ul>
                     <li>
                       <a href="https://apache.org/">Apache Homepage</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/licenses/">License</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/foundation/sponsorship.html">Sponsorship</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/security/">Security</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/foundation/thanks.html">Thanks</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/events/current-event">Events</a>
                     </li>
                     <li>
                       <a href="https://www.zhihu.com/column/c_1429578595417563136">Zhihu</a>
                     </li>
                  </ul>
                </div>
              </div>
          </div>
       </div>
    </div>
 </header>
 
    <nav data-toggle="wy-nav-shift" class="wy-nav-side fixed">
      <div class="wy-side-scroll">
        <div class="wy-side-nav-search" >
          

          
            <a href="../../index.html">
          

          
            
            <img src="../../_static/tvm-logo-small.png" class="logo" alt="Logo"/>
          
          </a>

          
            
            
                <div class="version">
                  0.8.dev1982
                </div>
            
          

          
<div role="search">
  <form id="rtd-search-form" class="wy-form" action="../../search.html" method="get">
    <input type="text" name="q" placeholder="Search docs" />
    <input type="hidden" name="check_keywords" value="yes" />
    <input type="hidden" name="area" value="default" />
  </form>
</div>

          
        </div>

        
        <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
          
            
            
              
            
            
              <p class="caption" role="heading"><span class="caption-text">如何开始</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../install/index.html">安装 TVM</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../contribute/index.html">贡献者指南</a></li>
</ul>
<p class="caption" role="heading"><span class="caption-text">用户引导</span></p>
<ul class="current">
<li class="toctree-l1"><a class="reference internal" href="../../tutorial/index.html">User Tutorial</a></li>
<li class="toctree-l1 current"><a class="reference internal" href="../index.html">How To Guides</a><ul class="current">
<li class="toctree-l2"><a class="reference internal" href="../compile_models/index.html">编译深度学习模型</a></li>
<li class="toctree-l2 current"><a class="reference internal" href="index.html">TVM 部署模型和集成</a><ul class="current">
<li class="toctree-l3"><a class="reference internal" href="index.html#build-the-tvm-runtime-library">构建 TVM 运行 runtime 库</a></li>
<li class="toctree-l3"><a class="reference internal" href="index.html#cross-compile-the-tvm-runtime-for-other-architectures">为其它架构交叉编译TVM runtime</a></li>
<li class="toctree-l3"><a class="reference internal" href="index.html#optimize-and-tune-models-for-target-devices">针对目标设备优化和调整模型</a></li>
<li class="toctree-l3 current"><a class="reference internal" href="index.html#deploy-optimized-model-on-target-devices">在目标设备上部署优化的模型</a><ul class="current">
<li class="toctree-l4"><a class="reference internal" href="cpp_deploy.html">Deploy TVM Module using C++ API</a></li>
<li class="toctree-l4"><a class="reference internal" href="android.html">部署到安卓</a></li>
<li class="toctree-l4"><a class="reference internal" href="integrate.html">Integrate TVM into Your Project</a></li>
<li class="toctree-l4"><a class="reference internal" href="hls.html">HLS Backend Example</a></li>
<li class="toctree-l4 current"><a class="current reference internal" href="#">Relay Arm<sup>®</sup> Compute Library Integration</a></li>
<li class="toctree-l4"><a class="reference internal" href="tensorrt.html">Relay TensorRT Integration</a></li>
<li class="toctree-l4"><a class="reference internal" href="vitis_ai.html">Vitis AI Integration</a></li>
<li class="toctree-l4"><a class="reference internal" href="bnns.html">Relay BNNS Integration</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="index.html#additional-deployment-how-tos">其他部署方式</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../work_with_relay/index.html">Work With Relay</a></li>
<li class="toctree-l2"><a class="reference internal" href="../work_with_schedules/index.html">Work With Tensor Expression and Schedules</a></li>
<li class="toctree-l2"><a class="reference internal" href="../optimize_operators/index.html">优化张量算子</a></li>
<li class="toctree-l2"><a class="reference internal" href="../tune_with_autotvm/index.html">Auto-Tune with Templates and AutoTVM</a></li>
<li class="toctree-l2"><a class="reference internal" href="../tune_with_autoscheduler/index.html">Use AutoScheduler for Template-Free Scheduling</a></li>
<li class="toctree-l2"><a class="reference internal" href="../work_with_microtvm/index.html">Work With microTVM</a></li>
<li class="toctree-l2"><a class="reference internal" href="../extend_tvm/index.html">Extend TVM</a></li>
<li class="toctree-l2"><a class="reference internal" href="../profile/index.html">Profile Models</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../errors.html">Handle TVM Errors</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../faq.html">常见提问</a></li>
</ul>
</li>
</ul>
<p class="caption" role="heading"><span class="caption-text">开发者引导</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../dev/tutorial/index.html">Developer Tutorial</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../dev/how_to/how_to.html">开发者指南</a></li>
</ul>
<p class="caption" role="heading"><span class="caption-text">架构指南</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../arch/index.html">Design and Architecture</a></li>
</ul>
<p class="caption" role="heading"><span class="caption-text">主题引导</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../topic/microtvm/index.html">microTVM：裸机使用TVM</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../topic/vta/index.html">VTA: Versatile Tensor Accelerator</a></li>
</ul>
<p class="caption" role="heading"><span class="caption-text">参考指南</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../reference/langref/index.html">语言参考</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../reference/api/python/index.html">Python API</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../reference/api/links.html">Other APIs</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../reference/publications.html">Publications</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../genindex.html">索引</a></li>
</ul>

            
          
        </div>
        
      </div>
    </nav>

    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
      
      <nav class="wy-nav-top" aria-label="top navigation" data-toggle="wy-nav-top">
        
            <div class="togglemenu">

            </div>
            <div class="nav-content">
              <!-- tvm -->
              Table of content
            </div>
        
      </nav>


      <div class="wy-nav-content">
        
        <div class="rst-content">
        

          




















<div role="navigation" aria-label="breadcrumbs navigation">

  <ul class="wy-breadcrumbs">
    
      <li><a href="../../index.html">Docs</a> <span class="br-arrow">></span></li>
        
          <li><a href="../index.html">How To Guides</a> <span class="br-arrow">></span></li>
        
          <li><a href="index.html">TVM 部署模型和集成</a> <span class="br-arrow">></span></li>
        
      <li>Relay Arm<sup>®</sup> Compute Library Integration</li>
    
    
      <li class="wy-breadcrumbs-aside">
        
            
            <a href="../../_sources/how_to/deploy/arm_compute_lib.rst.txt" rel="nofollow"> <img src="../../_static/img/source.svg" alt="viewsource"/></a>
          
        
      </li>
    
  </ul>

  
  <hr/>
</div>
          <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
           <div itemprop="articleBody">
            
  <div class="section" id="relay-arm-compute-library-integration">
<h1>Relay Arm<sup>®</sup> Compute Library Integration<a class="headerlink" href="#relay-arm-compute-library-integration" title="永久链接至标题">¶</a></h1>
<p><strong>Author</strong>: <a class="reference external" href="https://github.com/lhutton1">Luke Hutton</a></p>
<div class="section" id="introduction">
<h2>介绍<a class="headerlink" href="#introduction" title="永久链接至标题">¶</a></h2>
<p>Arm Compute Library (ACL) is an open source project that provides accelerated kernels for Arm CPU’s
and GPU’s. Currently the integration offloads operators to ACL to use hand-crafted assembler
routines in the library. By offloading select operators from a relay graph to ACL we can achieve
a performance boost on such devices.</p>
</div>
<div class="section" id="installing-arm-compute-library">
<h2>Installing Arm Compute Library<a class="headerlink" href="#installing-arm-compute-library" title="永久链接至标题">¶</a></h2>
<p>Before installing Arm Compute Library, it is important to know what architecture to build for. One way
to determine this is to use <cite>lscpu</cite> and look for the “Model name” of the CPU. You can then use this to
determine the architecture by looking online.</p>
<p>We recommend two different ways to build and install ACL:</p>
<ul>
<li><p>Use the script located at <cite>docker/install/ubuntu_install_arm_compute_lib.sh</cite>. You can use this
script for building ACL from source natively or for cross-compiling the library on an x86 machine.
You may need to change the architecture of the device you wish to compile for by altering the
<cite>target_arch</cite> variable. Binaries will be built from source and installed to the location denoted by
<cite>install_path</cite>.</p></li>
<li><p>Alternatively, you can download and use pre-built binaries from:
<a class="reference external" href="https://github.com/ARM-software/ComputeLibrary/releases">https://github.com/ARM-software/ComputeLibrary/releases</a>. When using this package, you will need to
select the binaries for the architecture you require and make sure they are visible to cmake. This
can be done like so:</p>
<div class="highlight-bash notranslate"><div class="highlight"><pre><span></span><span class="nb">cd</span> &lt;acl-prebuilt-package&gt;/lib
mv ./linux-&lt;architecture-to-build-for&gt;-neon/* .
</pre></div>
</div>
</li>
</ul>
<p>In both cases you will need to set USE_ARM_COMPUTE_LIB_GRAPH_EXECUTOR to the path where the ACL package
is located. Cmake will look in /path-to-acl/ along with /path-to-acl/lib and /path-to-acl/build for the
required binaries. See the section below for more information on how to use these configuration options.</p>
</div>
<div class="section" id="building-with-acl-support">
<h2>Building with ACL support<a class="headerlink" href="#building-with-acl-support" title="永久链接至标题">¶</a></h2>
<p>The current implementation has two separate build options in cmake. The reason for this split is
because ACL cannot be used on an x86 machine. However, we still want to be able compile an ACL
runtime module on an x86 machine.</p>
<ul class="simple">
<li><p>USE_ARM_COMPUTE_LIB=ON/OFF - Enabling this flag will add support for compiling an ACL runtime module.</p></li>
<li><p>USE_ARM_COMPUTE_LIB_GRAPH_EXECUTOR=ON/OFF/path-to-acl - Enabling this flag will allow the graph executor to
compute the ACL offloaded functions.</p></li>
</ul>
<p>These flags can be used in different scenarios depending on your setup. For example, if you want
to compile an ACL module on an x86 machine and then run the module on a remote Arm device via RPC, you will
need to use USE_ARM_COMPUTE_LIB=ON on the x86 machine and USE_ARM_COMPUTE_LIB_GRAPH_EXECUTOR=ON on the remote
AArch64 device.</p>
<p>By default both options are set to OFF. Using USE_ARM_COMPUTE_LIB_GRAPH_EXECUTOR=ON will mean that ACL
binaries are searched for by cmake in the default locations
(see <a class="reference external" href="https://cmake.org/cmake/help/v3.4/command/find_library.html">https://cmake.org/cmake/help/v3.4/command/find_library.html</a>). In addition to this,
/path-to-tvm-project/acl/ will also be searched. It is likely that you will need to set your own path to
locate ACL. This can be done by specifying a path in the place of ON.</p>
<p>These flags should be set in your config.cmake file. For example:</p>
<div class="highlight-cmake notranslate"><div class="highlight"><pre><span></span><span class="nb">set</span><span class="p">(</span><span class="s">USE_ARM_COMPUTE_LIB</span> <span class="s">ON</span><span class="p">)</span>
<span class="nb">set</span><span class="p">(</span><span class="s">USE_ARM_COMPUTE_LIB_GRAPH_EXECUTOR</span> <span class="s">/path/to/acl</span><span class="p">)</span>
</pre></div>
</div>
</div>
<div class="section" id="usage">
<h2>Usage<a class="headerlink" href="#usage" title="永久链接至标题">¶</a></h2>
<div class="admonition note">
<p class="admonition-title">注解</p>
<p>This section may not stay up-to-date with changes to the API.</p>
</div>
<p>Create a relay graph. This may be a single operator or a whole graph. The intention is that any
relay graph can be input. The ACL integration will only pick supported operators to be offloaded
whilst the rest will be computed via TVM. (For this example we will use a single
max_pool2d operator).</p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">import</span> <span class="nn">tvm</span>
<span class="kn">from</span> <span class="nn">tvm</span> <span class="kn">import</span> <span class="n">relay</span>

<span class="n">data_type</span> <span class="o">=</span> <span class="s2">&quot;float32&quot;</span>
<span class="n">data_shape</span> <span class="o">=</span> <span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">14</span><span class="p">,</span> <span class="mi">14</span><span class="p">,</span> <span class="mi">512</span><span class="p">)</span>
<span class="n">strides</span> <span class="o">=</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
<span class="n">padding</span> <span class="o">=</span> <span class="p">(</span><span class="mi">0</span><span class="p">,</span> <span class="mi">0</span><span class="p">,</span> <span class="mi">0</span><span class="p">,</span> <span class="mi">0</span><span class="p">)</span>
<span class="n">pool_size</span> <span class="o">=</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
<span class="n">layout</span> <span class="o">=</span> <span class="s2">&quot;NHWC&quot;</span>
<span class="n">output_shape</span> <span class="o">=</span> <span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="mi">7</span><span class="p">,</span> <span class="mi">7</span><span class="p">,</span> <span class="mi">512</span><span class="p">)</span>

<span class="n">data</span> <span class="o">=</span> <span class="n">relay</span><span class="o">.</span><span class="n">var</span><span class="p">(</span><span class="s1">&#39;data&#39;</span><span class="p">,</span> <span class="n">shape</span><span class="o">=</span><span class="n">data_shape</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">data_type</span><span class="p">)</span>
<span class="n">out</span> <span class="o">=</span> <span class="n">relay</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">max_pool2d</span><span class="p">(</span><span class="n">data</span><span class="p">,</span> <span class="n">pool_size</span><span class="o">=</span><span class="n">pool_size</span><span class="p">,</span> <span class="n">strides</span><span class="o">=</span><span class="n">strides</span><span class="p">,</span> <span class="n">layout</span><span class="o">=</span><span class="n">layout</span><span class="p">,</span> <span class="n">padding</span><span class="o">=</span><span class="n">padding</span><span class="p">)</span>
<span class="n">module</span> <span class="o">=</span> <span class="n">tvm</span><span class="o">.</span><span class="n">IRModule</span><span class="o">.</span><span class="n">from_expr</span><span class="p">(</span><span class="n">out</span><span class="p">)</span>
</pre></div>
</div>
<p>Annotate and partition the graph for ACL.</p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="kn">from</span> <span class="nn">tvm.relay.op.contrib.arm_compute_lib</span> <span class="kn">import</span> <span class="n">partition_for_arm_compute_lib</span>
<span class="n">module</span> <span class="o">=</span> <span class="n">partition_for_arm_compute_lib</span><span class="p">(</span><span class="n">module</span><span class="p">)</span>
</pre></div>
</div>
<p>Build the Relay graph.</p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">target</span> <span class="o">=</span> <span class="s2">&quot;llvm -mtriple=aarch64-linux-gnu -mattr=+neon&quot;</span>
<span class="k">with</span> <span class="n">tvm</span><span class="o">.</span><span class="n">transform</span><span class="o">.</span><span class="n">PassContext</span><span class="p">(</span><span class="n">opt_level</span><span class="o">=</span><span class="mi">3</span><span class="p">,</span> <span class="n">disabled_pass</span><span class="o">=</span><span class="p">[</span><span class="s2">&quot;AlterOpLayout&quot;</span><span class="p">]):</span>
    <span class="n">lib</span> <span class="o">=</span> <span class="n">relay</span><span class="o">.</span><span class="n">build</span><span class="p">(</span><span class="n">module</span><span class="p">,</span> <span class="n">target</span><span class="o">=</span><span class="n">target</span><span class="p">)</span>
</pre></div>
</div>
<p>Export the module.</p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">lib_path</span> <span class="o">=</span> <span class="s1">&#39;~/lib_acl.so&#39;</span>
<span class="n">cross_compile</span> <span class="o">=</span> <span class="s1">&#39;aarch64-linux-gnu-c++&#39;</span>
<span class="n">lib</span><span class="o">.</span><span class="n">export_library</span><span class="p">(</span><span class="n">lib_path</span><span class="p">,</span> <span class="n">cc</span><span class="o">=</span><span class="n">cross_compile</span><span class="p">)</span>
</pre></div>
</div>
<p>Run Inference. This must be on an Arm device. If compiling on x86 device and running on AArch64,
consider using the RPC mechanism. Tutorials for using the RPC mechanism:
<a class="reference external" href="https://tvm.apache.org/docs/tutorials/get_started/cross_compilation_and_rpc.html">https://tvm.apache.org/docs/tutorials/get_started/cross_compilation_and_rpc.html</a></p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">dev</span> <span class="o">=</span> <span class="n">tvm</span><span class="o">.</span><span class="n">cpu</span><span class="p">(</span><span class="mi">0</span><span class="p">)</span>
<span class="n">loaded_lib</span> <span class="o">=</span> <span class="n">tvm</span><span class="o">.</span><span class="n">runtime</span><span class="o">.</span><span class="n">load_module</span><span class="p">(</span><span class="s1">&#39;lib_acl.so&#39;</span><span class="p">)</span>
<span class="n">gen_module</span> <span class="o">=</span> <span class="n">tvm</span><span class="o">.</span><span class="n">contrib</span><span class="o">.</span><span class="n">graph_executor</span><span class="o">.</span><span class="n">GraphModule</span><span class="p">(</span><span class="n">loaded_lib</span><span class="p">[</span><span class="s1">&#39;default&#39;</span><span class="p">](</span><span class="n">dev</span><span class="p">))</span>
<span class="n">d_data</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">random</span><span class="o">.</span><span class="n">uniform</span><span class="p">(</span><span class="mi">0</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="n">data_shape</span><span class="p">)</span><span class="o">.</span><span class="n">astype</span><span class="p">(</span><span class="n">data_type</span><span class="p">)</span>
<span class="n">map_inputs</span> <span class="o">=</span> <span class="p">{</span><span class="s1">&#39;data&#39;</span><span class="p">:</span> <span class="n">d_data</span><span class="p">}</span>
<span class="n">gen_module</span><span class="o">.</span><span class="n">set_input</span><span class="p">(</span><span class="o">**</span><span class="n">map_inputs</span><span class="p">)</span>
<span class="n">gen_module</span><span class="o">.</span><span class="n">run</span><span class="p">()</span>
</pre></div>
</div>
</div>
<div class="section" id="more-examples">
<h2>More examples<a class="headerlink" href="#more-examples" title="永久链接至标题">¶</a></h2>
<p>The example above only shows a basic example of how ACL can be used for offloading a single
Maxpool2D. If you would like to see more examples for each implemented operator and for
networks refer to the tests: <cite>tests/python/contrib/test_arm_compute_lib</cite>. Here you can modify
<cite>test_config.json</cite> to configure how a remote device is created in <cite>infrastructure.py</cite> and,
as a result, how runtime tests will be run.</p>
<p>An example configuration for <cite>test_config.json</cite>:</p>
<ul class="simple">
<li><p>connection_type - The type of RPC connection. Options: local, tracker, remote.</p></li>
<li><p>host - The host device to connect to.</p></li>
<li><p>port - The port to use when connecting.</p></li>
<li><p>target - The target to use for compilation.</p></li>
<li><p>device_key - The device key when connecting via a tracker.</p></li>
<li><p>cross_compile - Path to cross compiler when connecting from a non-arm platform e.g. aarch64-linux-gnu-g++.</p></li>
</ul>
<div class="highlight-json notranslate"><div class="highlight"><pre><span></span><span class="p">{</span>
  <span class="nt">&quot;connection_type&quot;</span><span class="p">:</span> <span class="s2">&quot;local&quot;</span><span class="p">,</span>
  <span class="nt">&quot;host&quot;</span><span class="p">:</span> <span class="s2">&quot;127.0.0.1&quot;</span><span class="p">,</span>
  <span class="nt">&quot;port&quot;</span><span class="p">:</span> <span class="mi">9090</span><span class="p">,</span>
  <span class="nt">&quot;target&quot;</span><span class="p">:</span> <span class="s2">&quot;llvm -mtriple=aarch64-linux-gnu -mattr=+neon&quot;</span><span class="p">,</span>
  <span class="nt">&quot;device_key&quot;</span><span class="p">:</span> <span class="s2">&quot;&quot;</span><span class="p">,</span>
  <span class="nt">&quot;cross_compile&quot;</span><span class="p">:</span> <span class="s2">&quot;&quot;</span>
<span class="p">}</span>
</pre></div>
</div>
</div>
<div class="section" id="operator-support">
<h2>Operator support<a class="headerlink" href="#operator-support" title="永久链接至标题">¶</a></h2>
<table class="docutils align-default">
<colgroup>
<col style="width: 23%" />
<col style="width: 77%" />
</colgroup>
<thead>
<tr class="row-odd"><th class="head"><p>Relay Node</p></th>
<th class="head"><p>Remarks</p></th>
</tr>
</thead>
<tbody>
<tr class="row-even"><td><p>nn.conv2d</p></td>
<td><dl class="simple">
<dt>fp32:</dt><dd><p>Simple: nn.conv2d
Composite: nn.pad?, nn.conv2d, nn.bias_add?, nn.relu?</p>
</dd>
</dl>
<p>Normal and depth-wise (when kernel is 3x3 or 5x5 and strides are 1x1
or 2x2) convolution supported. Grouped convolution is not supported.</p>
</td>
</tr>
<tr class="row-odd"><td><p>qnn.conv2d</p></td>
<td><dl class="simple">
<dt>uint8:</dt><dd><p>Composite: nn.pad?, nn.conv2d, nn.bias_add?, nn.relu?, qnn.requantize</p>
</dd>
</dl>
<p>Normal and depth-wise (when kernel is 3x3 or 5x5 and strides are 1x1
or 2x2) convolution supported. Grouped convolution is not supported.</p>
</td>
</tr>
<tr class="row-even"><td><p>nn.dense</p></td>
<td><dl class="simple">
<dt>fp32:</dt><dd><p>Simple: nn.dense
Composite: nn.dense, nn.bias_add?</p>
</dd>
</dl>
</td>
</tr>
<tr class="row-odd"><td><p>qnn.dense</p></td>
<td><dl class="simple">
<dt>uint8:</dt><dd><p>Composite: qnn.dense, nn.bias_add?, qnn.requantize</p>
</dd>
</dl>
</td>
</tr>
<tr class="row-even"><td><p>nn.max_pool2d</p></td>
<td><p>fp32, uint8</p></td>
</tr>
<tr class="row-odd"><td><p>nn.global_max_pool2d</p></td>
<td><p>fp32, uint8</p></td>
</tr>
<tr class="row-even"><td><p>nn.avg_pool2d</p></td>
<td><dl class="simple">
<dt>fp32:</dt><dd><p>Simple: nn.avg_pool2d</p>
</dd>
<dt>uint8:</dt><dd><p>Composite: cast(int32), nn.avg_pool2d, cast(uint8)</p>
</dd>
</dl>
</td>
</tr>
<tr class="row-odd"><td><p>nn.global_avg_pool2d</p></td>
<td><dl class="simple">
<dt>fp32:</dt><dd><p>Simple: nn.global_avg_pool2d</p>
</dd>
<dt>uint8:</dt><dd><p>Composite: cast(int32), nn.avg_pool2d, cast(uint8)</p>
</dd>
</dl>
</td>
</tr>
<tr class="row-even"><td><p>power(of 2) +
nn.avg_pool2d +
sqrt</p></td>
<td><p>A special case for L2 pooling.</p>
<dl class="simple">
<dt>fp32:</dt><dd><p>Composite: power(of 2), nn.avg_pool2d, sqrt</p>
</dd>
</dl>
</td>
</tr>
<tr class="row-odd"><td><p>reshape</p></td>
<td><p>fp32, uint8</p></td>
</tr>
<tr class="row-even"><td><p>maximum</p></td>
<td><p>fp32</p></td>
</tr>
<tr class="row-odd"><td><p>add</p></td>
<td><p>fp32</p></td>
</tr>
<tr class="row-even"><td><p>qnn.add</p></td>
<td><p>uint8</p></td>
</tr>
</tbody>
</table>
<div class="admonition note">
<p class="admonition-title">注解</p>
<p>A composite operator is a series of operators that map to a single Arm Compute Library operator. You can view this
as being a single fused operator from the view point of Arm Compute Library. ‘?’ denotes an optional operator in
the series of operators that make up a composite operator.</p>
</div>
</div>
<div class="section" id="adding-a-new-operator">
<h2>Adding a new operator<a class="headerlink" href="#adding-a-new-operator" title="永久链接至标题">¶</a></h2>
<p>Adding a new operator requires changes to a series of places. This section will give a hint on
what needs to be changed and where, it will not however dive into the complexities for an
individual operator. This is left to the developer.</p>
<p>There are a series of files we need to make changes to:</p>
<ul class="simple">
<li><p><cite>python/relay/op/contrib/arm_compute_lib.py</cite> In this file we define the operators we wish to offload using the
<cite>op.register</cite> decorator. This will mean the annotation pass recognizes this operator as ACL offloadable.</p></li>
<li><p><cite>src/relay/backend/contrib/arm_compute_lib/codegen.cc</cite> Implement <cite>Create[OpName]JSONNode</cite> method. This is where we
declare how the operator should be represented by JSON. This will be used to create the ACL module.</p></li>
<li><p><cite>src/runtime/contrib/arm_compute_lib/acl_runtime.cc</cite> Implement <cite>Create[OpName]Layer</cite> method. This is where we
define how the JSON representation can be used to create an ACL function. We simply define how to
translate from the JSON representation to ACL API.</p></li>
<li><p><cite>tests/python/contrib/test_arm_compute_lib</cite> Add unit tests for the given operator.</p></li>
</ul>
</div>
</div>


           </div>
           
          </div>
          

<footer>

    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
      
        <a href="tensorrt.html" class="btn btn-neutral float-right" title="Relay TensorRT Integration" accesskey="n" rel="next">下一个 <span class="fa fa-arrow-circle-right"></span></a>
      
      
        <a href="hls.html" class="btn btn-neutral float-left" title="HLS Backend Example" accesskey="p" rel="prev"><span class="fa fa-arrow-circle-left"></span> 上一个</a>
      
    </div>

<div id="button" class="backtop"><img src="../../_static/img/right.svg" alt="backtop"/> </div>
<section class="footerSec">
    <div class="footerHeader">
      <ul class="d-flex align-md-items-center justify-content-between flex-column flex-md-row">
        <li class="copywrite d-flex align-items-center">
          <h5 id="copy-right-info">© 2020 Apache Software Foundation | All rights reserved</h5>
        </li>
      </ul>

    </div>

    <ul>
      <li class="footernote">Copyright © 2020 The Apache Software Foundation. Apache TVM, Apache, the Apache feather, and the Apache TVM project logo are either trademarks or registered trademarks of the Apache Software Foundation.</li>
    </ul>

</section>
</footer>
        </div>
      </div>

    </section>

  </div>
  

    <script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.12.9/umd/popper.min.js" integrity="sha384-ApNbgh9B+Y1QKtv3Rn7W3mgPxhU9K/ScQsAP7hUibX39j7fakFPskvXusvfa0b4Q" crossorigin="anonymous"></script>
    <script src="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0/js/bootstrap.min.js" integrity="sha384-JZR6Spejh4U02d8jOt6vLEHfe/JQGiRRSQQxSfFWpi1MquVdAyjUar5+76PVCmYl" crossorigin="anonymous"></script>
    <!-- NOTE(review): removed a stray duplicate </body> tag that appeared here;
         the document's actual closing </body> follows the scripts below. -->
  <script type="text/javascript">
      // Once the DOM is ready, initialize the RTD theme's sidebar navigation.
      // The `true` argument is passed straight to Navigation.enable();
      // presumably it enables the "sticky" navigation mode — confirm against
      // the theme.js bundled with this build.
      jQuery(function () {
          SphinxRtdTheme.Navigation.enable(true);
      });
  </script>

  
  
    
    <!-- Theme Analytics -->
    <script>
    // Standard Google Analytics (analytics.js) loader snippet, kept verbatim:
    // it defines the `ga` command queue and asynchronously injects the
    // analytics.js script tag before the first existing <script> element.
    (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
      (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
      m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
    })(window,document,'script','https://www.google-analytics.com/analytics.js','ga');

    // Create the tracker for property UA-75982049-2 with automatic cookie
    // domain configuration, then record a pageview hit for this page.
    ga('create', 'UA-75982049-2', 'auto');
    ga('send', 'pageview');
    </script>

    
   

</body>
</html>