





<!DOCTYPE html>
<html class="writer-html5" lang="zh-CN" >
<head>
  <meta charset="utf-8">
  
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  
  <title>Relay Operator Strategy &mdash; tvm 0.8.dev1982 文档</title>
  

  
  <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0/css/bootstrap.min.css" integrity="sha384-Gn5384xqQ1aoWXA+058RXPxPg6fy4IWvTNh0E263XmFcJlSAwiGgFAW/dAiS6JXm" crossorigin="anonymous">
  <link rel="stylesheet" href="../_static/css/theme.css" type="text/css" />
  <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
  <link rel="stylesheet" href="../_static/gallery.css" type="text/css" />
  <link rel="stylesheet" href="../_static/css/tlcpack_theme.css" type="text/css" />

  
  
    <link rel="shortcut icon" href="../_static/tvm-logo-square.png"/>
  

  
  
  
  
    
      <script id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
        <script src="../_static/jquery.js"></script>
        <script src="../_static/underscore.js"></script>
        <script src="../_static/doctools.js"></script>
        <script src="../_static/translations.js"></script>
    
    <script type="text/javascript" src="../_static/js/theme.js"></script>

    
    <script type="text/javascript" src="../_static/js/tlcpack_theme.js"></script>
    <link rel="index" title="索引" href="../genindex.html" />
    <link rel="search" title="搜索" href="../search.html" />
    <link rel="next" title="Convert Layout Pass" href="convert_layout.html" />
    <link rel="prev" title="Relay 介绍" href="relay_intro.html" /> 
</head>

<body class="wy-body-for-nav">

   
  <div class="wy-grid-for-nav">
    
    
<header class="header">
    <div class="innercontainer">
      <div class="headerInner d-flex justify-content-between align-items-center">
          <div class="headerLogo">
               <a href="https://tvm.apache.org/"><img src="https://tvm.apache.org/assets/images/logo.svg" alt="logo"></a>
          </div>

          <div id="headMenu" class="headerNav">
            <button type="button" id="closeHeadMenu" class="navCloseBtn"><img src="../_static/img/close-icon.svg" alt="Close"></button>
             <ul class="nav">
                <li class="nav-item">
                   <a class="nav-link" href="https://tvm.apache.org/community">Community</a>
                </li>
                <li class="nav-item">
                   <a class="nav-link" href="https://tvm.apache.org/download">Download</a>
                </li>
                <li class="nav-item">
                   <a class="nav-link" href="https://tvm.apache.org/vta">VTA</a>
                </li>
                <li class="nav-item">
                   <a class="nav-link" href="https://tvm.apache.org/blog">Blog</a>
                </li>
                <li class="nav-item">
                   <a class="nav-link" href="https://tvm.apache.org/docs">Docs</a>
                </li>
                <li class="nav-item">
                   <a class="nav-link" href="https://tvmconf.org">Conference</a>
                </li>
                <li class="nav-item">
                   <a class="nav-link" href="https://github.com/apache/tvm/">Github</a>
                </li>
                <li class="nav-item">
                   <a class="nav-link" href="https://tvmchinese.github.io/declaration_zh_CN.html">About-Translators</a>
                </li>
             </ul>
               <div class="responsivetlcdropdown">
                 <button type="button" class="btn-link">
                   ASF
                 </button>
                 <ul>
                     <li>
                       <a href="https://apache.org/">Apache Homepage</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/licenses/">License</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/foundation/sponsorship.html">Sponsorship</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/security/">Security</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/foundation/thanks.html">Thanks</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/events/current-event">Events</a>
                     </li>
                     <li>
                       <a href="https://www.zhihu.com/column/c_1429578595417563136">Zhihu</a>
                     </li>
                 </ul>
               </div>
          </div>
            <div class="responsiveMenuIcon">
              <button type="button" id="menuBtn" class="btn-menu"><img src="../_static/img/menu-icon.svg" alt="Menu Icon"></button>
            </div>

            <div class="tlcDropdown">
              <div class="dropdown">
                <button type="button" class="btn-link dropdown-toggle" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
                  ASF
                </button>
                <div class="dropdown-menu dropdown-menu-right">
                  <ul>
                     <li>
                       <a href="https://apache.org/">Apache Homepage</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/licenses/">License</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/foundation/sponsorship.html">Sponsorship</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/security/">Security</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/foundation/thanks.html">Thanks</a>
                     </li>
                     <li>
                       <a href="https://www.apache.org/events/current-event">Events</a>
                     </li>
                     <li>
                       <a href="https://www.zhihu.com/column/c_1429578595417563136">Zhihu</a>
                     </li>
                  </ul>
                </div>
              </div>
          </div>
       </div>
    </div>
 </header>
 
    <nav data-toggle="wy-nav-shift" class="wy-nav-side fixed">
      <div class="wy-side-scroll">
        <div class="wy-side-nav-search" >
          

          
            <a href="../index.html">
          

          
            
            <img src="../_static/tvm-logo-small.png" class="logo" alt="Logo"/>
          
          </a>

          
            
            
                <div class="version">
                  0.8.dev1982
                </div>
            
          

          
<div role="search">
  <form id="rtd-search-form" class="wy-form" action="../search.html" method="get">
    <input type="text" name="q" placeholder="Search docs" aria-label="Search docs" />
    <input type="hidden" name="check_keywords" value="yes" />
    <input type="hidden" name="area" value="default" />
  </form>
</div>

          
        </div>

        
        <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
          
            
            
              
            
            
              <p class="caption" role="heading"><span class="caption-text">如何开始</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="../install/index.html">安装 TVM</a></li>
<li class="toctree-l1"><a class="reference internal" href="../contribute/index.html">贡献者指南</a></li>
</ul>
<p class="caption" role="heading"><span class="caption-text">用户引导</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="../tutorial/index.html">User Tutorial</a></li>
<li class="toctree-l1"><a class="reference internal" href="../how_to/index.html">How To Guides</a></li>
</ul>
<p class="caption" role="heading"><span class="caption-text">开发者引导</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="../dev/tutorial/index.html">Developer Tutorial</a></li>
<li class="toctree-l1"><a class="reference internal" href="../dev/how_to/how_to.html">开发者指南</a></li>
</ul>
<p class="caption" role="heading"><span class="caption-text">架构指南</span></p>
<ul class="current">
<li class="toctree-l1 current"><a class="reference internal" href="index.html">Design and Architecture</a><ul class="current">
<li class="toctree-l2"><a class="reference internal" href="index.html#example-compilation-flow">编译流程示例</a></li>
<li class="toctree-l2"><a class="reference internal" href="index.html#logical-architecture-components">逻辑架构组件</a></li>
<li class="toctree-l2"><a class="reference internal" href="index.html#tvm-support">tvm/support</a></li>
<li class="toctree-l2"><a class="reference internal" href="index.html#tvm-runtime">tvm/runtime</a></li>
<li class="toctree-l2"><a class="reference internal" href="index.html#tvm-node">tvm/node</a></li>
<li class="toctree-l2"><a class="reference internal" href="index.html#tvm-ir">tvm/ir</a></li>
<li class="toctree-l2"><a class="reference internal" href="index.html#tvm-target">tvm/target</a></li>
<li class="toctree-l2"><a class="reference internal" href="index.html#tvm-tir">tvm/tir</a></li>
<li class="toctree-l2"><a class="reference internal" href="index.html#tvm-arith">tvm/arith</a></li>
<li class="toctree-l2"><a class="reference internal" href="index.html#tvm-te">tvm/te</a></li>
<li class="toctree-l2"><a class="reference internal" href="index.html#tvm-topi">tvm/topi</a></li>
<li class="toctree-l2 current"><a class="reference internal" href="index.html#tvm-relay">tvm/relay</a><ul class="current">
<li class="toctree-l3"><a class="reference internal" href="relay_intro.html">Relay 介绍</a></li>
<li class="toctree-l3 current"><a class="current reference internal" href="#">Relay Operator Strategy</a><ul>
<li class="toctree-l4"><a class="reference internal" href="#operator-strategy-design">Operator Strategy Design</a></li>
<li class="toctree-l4"><a class="reference internal" href="#write-a-strategy-function">Write A Strategy Function</a></li>
<li class="toctree-l4"><a class="reference internal" href="#register-strategy-function-to-an-operator">Register Strategy Function to An Operator</a></li>
<li class="toctree-l4"><a class="reference internal" href="#register-strategies-for-a-new-target">Register Strategies for A New Target</a></li>
<li class="toctree-l4"><a class="reference internal" href="#select-implementation-from-op-strategy">Select Implementation from Op Strategy</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="convert_layout.html">Convert Layout Pass</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="index.html#tvm-autotvm">tvm/autotvm</a></li>
<li class="toctree-l2"><a class="reference internal" href="index.html#frontends">Frontends</a></li>
<li class="toctree-l2"><a class="reference internal" href="index.html#security">安全</a></li>
<li class="toctree-l2"><a class="reference internal" href="index.html#microtvm">microTVM</a></li>
</ul>
</li>
</ul>
<p class="caption" role="heading"><span class="caption-text">主题引导</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="../topic/microtvm/index.html">microTVM：裸机使用TVM</a></li>
<li class="toctree-l1"><a class="reference internal" href="../topic/vta/index.html">VTA: Versatile Tensor Accelerator</a></li>
</ul>
<p class="caption" role="heading"><span class="caption-text">参考指南</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="../reference/langref/index.html">语言参考</a></li>
<li class="toctree-l1"><a class="reference internal" href="../reference/api/python/index.html">Python API</a></li>
<li class="toctree-l1"><a class="reference internal" href="../reference/api/links.html">Other APIs</a></li>
<li class="toctree-l1"><a class="reference internal" href="../reference/publications.html">Publications</a></li>
<li class="toctree-l1"><a class="reference internal" href="../genindex.html">索引</a></li>
</ul>

            
          
        </div>
        
      </div>
    </nav>

    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
      
      <nav class="wy-nav-top" aria-label="top navigation" data-toggle="wy-nav-top">
        
            <div class="togglemenu">

            </div>
            <div class="nav-content">
              <!-- tvm -->
              Table of content
            </div>
        
      </nav>


      <div class="wy-nav-content">
        
        <div class="rst-content">
        

          




















<div role="navigation" aria-label="breadcrumbs navigation">

  <ul class="wy-breadcrumbs">
    
      <li><a href="../index.html">Docs</a> <span class="br-arrow">></span></li>
        
          <li><a href="index.html">Design and Architecture</a> <span class="br-arrow">></span></li>
        
      <li>Relay Operator Strategy</li>
    
    
      <li class="wy-breadcrumbs-aside">
        
            
            <a href="../_sources/arch/relay_op_strategy.rst.txt" rel="nofollow"> <img src="../_static/img/source.svg" alt="viewsource"/></a>
          
        
      </li>
    
  </ul>

  
  <hr/>
</div>
          <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
           <div itemprop="articleBody">
            
  <div class="section" id="relay-operator-strategy">
<span id="relay-op-strategy"></span><h1>Relay Operator Strategy<a class="headerlink" href="#relay-operator-strategy" title="永久链接至标题">¶</a></h1>
<p>In order to lower Relay operators to the implementations defined in TOPI
library, compute and schedule functions need to be registered to each Relay
operator.  However, compute and schedule functions are usually specialized for
each target, and further, even for the same target, we may have multiple
algorithms and implementations available. To deal with the complexity, we
introduce operator strategy to allow developers to define a flexible lowering
strategy for each operator and target.</p>
<div class="section" id="operator-strategy-design">
<h2>Operator Strategy Design<a class="headerlink" href="#operator-strategy-design" title="永久链接至标题">¶</a></h2>
<p>The basic element in operator strategy is an <code class="docutils literal notranslate"><span class="pre">OpImplementation</span></code>. It includes
a pair of compute and schedule functions, the name of the implementation,
and a priority level (the use of priority level is explained in
<a class="reference internal" href="#select-implementation-from-op-strategy">Select Implementation from Op Strategy</a>).</p>
<p>The <code class="docutils literal notranslate"><span class="pre">OpStrategy</span></code> includes a list of <code class="docutils literal notranslate"><span class="pre">OpSpecialization</span></code>. Each <code class="docutils literal notranslate"><span class="pre">OpSpecialization</span></code>
contains a list of <code class="docutils literal notranslate"><span class="pre">OpImplementation</span></code> associated with a <code class="docutils literal notranslate"><span class="pre">SpecializedCondition</span></code>
(see definition in <code class="docutils literal notranslate"><span class="pre">include/tvm/te/schedule.h</span></code>).  The <code class="docutils literal notranslate"><span class="pre">SpecializedCondition</span></code>
can be null, indicating the implementations are generally applicable;
otherwise, the implementations are only considered when the specialized
condition is satisfied. <code class="docutils literal notranslate"><span class="pre">SpecializedCondition</span></code> consists of a list
of clauses defined in Tensor Expression in conjunctive normal form (CNF) and
only supports conditions on tensor shapes.</p>
<p>Last, a strategy function, or <code class="docutils literal notranslate"><span class="pre">FTVMStrategy</span></code>, determines which pair(s) of
compute and schedule functions should be used given a workload, and needs to be
registered to each Relay operator.  <code class="docutils literal notranslate"><span class="pre">FTVMStrategy</span></code> is a generic function (see
<code class="docutils literal notranslate"><span class="pre">include/tvm/target/generic_func.h</span></code>), that can be overwritten for each
target. The function signature is</p>
<div class="highlight-c notranslate"><div class="highlight"><pre><span></span><span class="n">OpStrategy</span><span class="p">(</span><span class="k">const</span> <span class="n">Attrs</span><span class="o">&amp;</span> <span class="n">attrs</span><span class="p">,</span> <span class="k">const</span> <span class="n">Array</span><span class="o">&lt;</span><span class="n">Tensor</span><span class="o">&gt;&amp;</span> <span class="n">inputs</span><span class="p">,</span> <span class="k">const</span> <span class="n">Type</span><span class="o">&amp;</span> <span class="n">out_type</span><span class="p">,</span> <span class="k">const</span> <span class="n">Target</span><span class="o">&amp;</span> <span class="n">target</span><span class="p">)</span>
</pre></div>
</div>
<p>The function returns an <code class="docutils literal notranslate"><span class="pre">OpStrategy</span></code> given the op attributes, input
tensors, output types, and target to compile to.</p>
</div>
<div class="section" id="write-a-strategy-function">
<h2>Write A Strategy Function<a class="headerlink" href="#write-a-strategy-function" title="永久链接至标题">¶</a></h2>
<p>We recommend that developers write strategy functions in Python, as
most TOPI compute and schedule functions are written in Python.
In Python, we provide the <code class="docutils literal notranslate"><span class="pre">OpStrategy</span></code> class in <code class="docutils literal notranslate"><span class="pre">python/tvm/relay/op/op.py</span></code>.
It only has one API, which is to add an implementation to the strategy:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="k">def</span> <span class="nf">add_implementation</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">compute</span><span class="p">,</span> <span class="n">schedule</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s2">&quot;default&quot;</span><span class="p">,</span> <span class="n">plevel</span><span class="o">=</span><span class="mi">10</span><span class="p">)</span>
</pre></div>
</div>
<p>We now take <code class="docutils literal notranslate"><span class="pre">topk</span></code> as an example to explain how to write the
<code class="docutils literal notranslate"><span class="pre">FTVMStrategy</span></code> function:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="c1"># add to python/tvm/relay/op/strategy/generic.py</span>
<span class="nd">@override_native_generic_func</span><span class="p">(</span><span class="s2">&quot;topk_strategy&quot;</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">topk_strategy</span><span class="p">(</span><span class="n">attrs</span><span class="p">,</span> <span class="n">inputs</span><span class="p">,</span> <span class="n">out_type</span><span class="p">,</span> <span class="n">target</span><span class="p">):</span>
    <span class="n">strategy</span> <span class="o">=</span> <span class="n">_op</span><span class="o">.</span><span class="n">OpStrategy</span><span class="p">()</span>
    <span class="n">strategy</span><span class="o">.</span><span class="n">add_implementation</span><span class="p">(</span>
        <span class="n">wrap_compute_topk</span><span class="p">(</span><span class="n">topi</span><span class="o">.</span><span class="n">topk</span><span class="p">),</span>
        <span class="n">wrap_topi_schedule</span><span class="p">(</span><span class="n">topi</span><span class="o">.</span><span class="n">generic</span><span class="o">.</span><span class="n">schedule_topk</span><span class="p">),</span>
        <span class="n">name</span><span class="o">=</span><span class="s2">&quot;topk.generic&quot;</span><span class="p">)</span>
    <span class="k">return</span> <span class="n">strategy</span>

<span class="c1"># add to each target file in python/tvm/relay/op/strategy, e.g., x86.py, cuda.py, etc.</span>
<span class="nd">@topk_strategy.register</span><span class="p">([</span><span class="s2">&quot;cuda&quot;</span><span class="p">,</span> <span class="s2">&quot;gpu&quot;</span><span class="p">])</span>
<span class="k">def</span> <span class="nf">topk_strategy_cuda</span><span class="p">(</span><span class="n">attrs</span><span class="p">,</span> <span class="n">inputs</span><span class="p">,</span> <span class="n">out_type</span><span class="p">,</span> <span class="n">target</span><span class="p">):</span>
    <span class="n">strategy</span> <span class="o">=</span> <span class="n">_op</span><span class="o">.</span><span class="n">OpStrategy</span><span class="p">()</span>
    <span class="n">strategy</span><span class="o">.</span><span class="n">add_implementation</span><span class="p">(</span>
        <span class="n">wrap_compute_my_new_op</span><span class="p">(</span><span class="n">topi</span><span class="o">.</span><span class="n">cuda</span><span class="o">.</span><span class="n">topk</span><span class="p">),</span>
        <span class="n">wrap_topi_schedule</span><span class="p">(</span><span class="n">topi</span><span class="o">.</span><span class="n">cuda</span><span class="o">.</span><span class="n">schedule_topk</span><span class="p">),</span>
        <span class="n">name</span><span class="o">=</span><span class="s2">&quot;topk.cuda&quot;</span><span class="p">)</span>
    <span class="k">return</span> <span class="n">strategy</span>
</pre></div>
</div>
<p>In this example, we use <code class="docutils literal notranslate"><span class="pre">topi.cuda.topk</span></code> and <code class="docutils literal notranslate"><span class="pre">topi.cuda.schedule_topk</span></code>
as the compute and schedule functions for the CUDA or GPU target, while using the TOPI
generic compute and schedule for the rest of the targets.
Note that we use two wrapper functions that wrap the topi
compute and schedule to conform with the required function signature (
see <code class="docutils literal notranslate"><span class="pre">FTVMCompute</span></code> and <code class="docutils literal notranslate"><span class="pre">FTVMSchedule</span></code> in <code class="docutils literal notranslate"><span class="pre">include/tvm/relay/op_attr_types.h</span></code>).
Usually we need to write a customized compute wrapper function for each operator
to get different fields from op attributes.</p>
<p>The example above shows a very basic strategy function that only
adds one implementation in the strategy. But for many complicated operators,
we may need to add multiple implementations that use different algorithms.
For example, we can use both direct and winograd algorithm to
compute a conv2d op. In order to achieve this, we can write the strategy function
as follows:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">strategy</span><span class="o">.</span><span class="n">add_implementation</span><span class="p">(</span>
    <span class="n">wrap_compute_conv2d</span><span class="p">(</span><span class="n">topi</span><span class="o">.</span><span class="n">cuda</span><span class="o">.</span><span class="n">conv2d_nchw</span><span class="p">),</span>
    <span class="n">wrap_topi_schedule</span><span class="p">(</span><span class="n">topi</span><span class="o">.</span><span class="n">cuda</span><span class="o">.</span><span class="n">schedule_conv2d_nchw</span><span class="p">),</span>
    <span class="n">name</span><span class="o">=</span><span class="s2">&quot;conv2d_nchw.cuda&quot;</span><span class="p">,</span>
    <span class="n">plevel</span><span class="o">=</span><span class="mi">10</span><span class="p">)</span>

<span class="k">if</span> <span class="n">winograd_condition</span><span class="p">:</span>
    <span class="n">strategy</span><span class="o">.</span><span class="n">add_implementation</span><span class="p">(</span>
        <span class="n">wrap_compute_conv2d</span><span class="p">(</span><span class="n">topi</span><span class="o">.</span><span class="n">cuda</span><span class="o">.</span><span class="n">conv2d_nchw_winograd</span><span class="p">),</span>
        <span class="n">wrap_topi_schedule</span><span class="p">(</span><span class="n">topi</span><span class="o">.</span><span class="n">cuda</span><span class="o">.</span><span class="n">schedule_conv2d_nchw_winograd</span><span class="p">),</span>
        <span class="n">name</span><span class="o">=</span><span class="s2">&quot;conv2d_nchw_winograd.cuda&quot;</span><span class="p">,</span>
        <span class="n">plevel</span><span class="o">=</span><span class="mi">15</span><span class="p">)</span>
</pre></div>
</div>
<p>In this example, we add two implementations to the conv2d strategy where
winograd algorithm is only added when <code class="docutils literal notranslate"><span class="pre">winograd_condition</span></code> is true.
The implementation <code class="docutils literal notranslate"><span class="pre">&quot;conv2d_nchw_winograd.cuda&quot;</span></code> will be used to compile
conv2d when <code class="docutils literal notranslate"><span class="pre">winograd_condition</span></code> is true as it has higher
priority level (this could be changed if certain implementation is an AutoTVM
template. See <a class="reference internal" href="#select-implementation-from-op-strategy">Select Implementation from Op Strategy</a> for more
details). Otherwise, <code class="docutils literal notranslate"><span class="pre">&quot;conv2d_nchw.cuda&quot;</span></code> is used.</p>
<p>We can extend the example above to third party library implementation. For
example, we can add the implementation that invokes kernel in the cblas
library when cblas is included in the target.</p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="k">if</span> <span class="s2">&quot;cblas&quot;</span> <span class="ow">in</span> <span class="n">target</span><span class="o">.</span><span class="n">libs</span><span class="p">:</span>
    <span class="n">strategy</span><span class="o">.</span><span class="n">add_implementation</span><span class="p">(</span>
        <span class="n">wrap_compute_dense</span><span class="p">(</span><span class="n">topi</span><span class="o">.</span><span class="n">x86</span><span class="o">.</span><span class="n">dense_cblas</span><span class="p">),</span>
        <span class="n">wrap_topi_schedule</span><span class="p">(</span><span class="n">topi</span><span class="o">.</span><span class="n">x86</span><span class="o">.</span><span class="n">schedule_dense_cblas</span><span class="p">),</span>
        <span class="n">name</span><span class="o">=</span><span class="s2">&quot;dense_cblas.x86&quot;</span><span class="p">,</span>
        <span class="n">plevel</span><span class="o">=</span><span class="mi">15</span><span class="p">)</span>
</pre></div>
</div>
<p>Further, we can add implementation specialized for a certain range of shapes.
The code below shows an example of dense strategy that adds an implementation
that is specialized for <code class="docutils literal notranslate"><span class="pre">m</span></code> greater than 16. The main difference between
hard-coded Python conditions like the examples above and a specialized condition is that
it allows TVM to generate multiple kernels when the input tensors have symbolic
shapes. The compile engine will generate a dispatch function that invokes the
specialized kernel when the corresponding condition is met; otherwise, it
invokes the kernel that has no associated specialized condition (<code class="docutils literal notranslate"><span class="pre">dense_common</span></code>
in this example). This part is still work in progress. More details will be
provided after it is done.</p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="k">def</span> <span class="nf">dense_strategy</span><span class="p">(</span><span class="n">attrs</span><span class="p">,</span> <span class="n">inputs</span><span class="p">,</span> <span class="n">out_type</span><span class="p">,</span> <span class="n">target</span><span class="p">):</span>
    <span class="n">m</span> <span class="o">=</span> <span class="n">inputs</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
    <span class="n">strategy</span> <span class="o">=</span> <span class="n">_op</span><span class="o">.</span><span class="n">OpStrategy</span><span class="p">()</span>
    <span class="n">strategy</span><span class="o">.</span><span class="n">add_implementation</span><span class="p">(</span>
        <span class="n">wrap_compute_dense</span><span class="p">(</span><span class="n">dense_compute1</span><span class="p">),</span>
        <span class="n">wrap_topi_schedule</span><span class="p">(</span><span class="n">dense_schedule1</span><span class="p">),</span>
        <span class="n">name</span><span class="o">=</span><span class="s2">&quot;dense_common&quot;</span><span class="p">)</span>

    <span class="k">with</span> <span class="n">tvm</span><span class="o">.</span><span class="n">te</span><span class="o">.</span><span class="n">SpecializedCondition</span><span class="p">(</span><span class="n">m</span> <span class="o">&gt;</span> <span class="mi">16</span><span class="p">):</span>
        <span class="n">strategy</span><span class="o">.</span><span class="n">add_implementation</span><span class="p">(</span>
            <span class="n">wrap_compute_dense</span><span class="p">(</span><span class="n">dense_compute2</span><span class="p">),</span>
            <span class="n">wrap_topi_schedule</span><span class="p">(</span><span class="n">dense_schedule2</span><span class="p">),</span>
            <span class="n">name</span><span class="o">=</span><span class="s2">&quot;dense_for_large_m&quot;</span><span class="p">,</span>
            <span class="n">plevel</span><span class="o">=</span><span class="mi">15</span><span class="p">)</span>

    <span class="k">return</span> <span class="n">strategy</span>
</pre></div>
</div>
</div>
<div class="section" id="register-strategy-function-to-an-operator">
<h2>Register Strategy Function to An Operator<a class="headerlink" href="#register-strategy-function-to-an-operator" title="永久链接至标题">¶</a></h2>
<p>After we define the strategy function for an operator, we can now
register the strategy function to this operator with</p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">register_strategy</span><span class="p">(</span><span class="s2">&quot;topk&quot;</span><span class="p">,</span> <span class="n">strategy</span><span class="o">.</span><span class="n">topk_strategy</span><span class="p">)</span>
</pre></div>
</div>
<p>However, it takes much effort to write a strategy function for an operator.
Therefore, we provide two other methods for simpler operators.</p>
<p>First, for operators that have an injective, broadcast, or reduction pattern, we
can call <code class="docutils literal notranslate"><span class="pre">register_injective_schedule</span></code>, <code class="docutils literal notranslate"><span class="pre">register_broadcast_schedule</span></code>, and
<code class="docutils literal notranslate"><span class="pre">register_reduce_schedule</span></code> respectively. The schedule functions for these
patterns are already registered by each target and can be applied to these
operators. We assume the compute function should be the same across all targets,
and <code class="docutils literal notranslate"><span class="pre">FTVMCompute</span></code> needs to be registered to the op before invoking register
schedule.</p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">register_broadcast_schedule</span><span class="p">(</span><span class="s2">&quot;add&quot;</span><span class="p">)</span>
</pre></div>
</div>
<p>Second, for operators that don’t have these common patterns mentioned before,
but still have the same compute function for all targets, we can use
<code class="docutils literal notranslate"><span class="pre">register_schedule</span></code> API. It is easier to write <code class="docutils literal notranslate"><span class="pre">FTVMSchedule</span></code> function
as we only need to provide which schedule function to use. The following
code snippet shows <code class="docutils literal notranslate"><span class="pre">FTVMSchedule</span></code> function for pooling.</p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="c1"># add to python/tvm/relay/op/strategy/generic.py</span>
<span class="nd">@generic_func</span>
<span class="k">def</span> <span class="nf">schedule_pool</span><span class="p">(</span><span class="n">attrs</span><span class="p">,</span> <span class="n">outs</span><span class="p">,</span> <span class="n">target</span><span class="p">):</span>
    <span class="k">with</span> <span class="n">target</span><span class="p">:</span>
        <span class="k">return</span> <span class="n">topi</span><span class="o">.</span><span class="n">generic</span><span class="o">.</span><span class="n">schedule_pool</span><span class="p">(</span><span class="n">outs</span><span class="p">,</span> <span class="n">attrs</span><span class="o">.</span><span class="n">layout</span><span class="p">)</span>

<span class="c1"># add to each target file in python/tvm/relay/op/strategy, e.g., x86.py, cuda.py, etc.</span>
<span class="nd">@schedule_pool.register</span><span class="p">(</span><span class="s2">&quot;cpu&quot;</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">schedule_pool_cpu</span><span class="p">(</span><span class="n">attrs</span><span class="p">,</span> <span class="n">outs</span><span class="p">,</span> <span class="n">target</span><span class="p">):</span>
    <span class="o">...</span>
</pre></div>
</div>
<p>After we have created the <code class="docutils literal notranslate"><span class="pre">FTVMSchedule</span></code> for an operator, we can
register the strategy using <code class="docutils literal notranslate"><span class="pre">register_schedule</span></code>:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">register_schedule</span><span class="p">(</span><span class="s2">&quot;nn.max_pool2d&quot;</span><span class="p">,</span> <span class="n">strategy</span><span class="o">.</span><span class="n">schedule_pool</span><span class="p">)</span>
</pre></div>
</div>
</div>
<div class="section" id="register-strategies-for-a-new-target">
<h2>Register Strategies for A New Target<a class="headerlink" href="#register-strategies-for-a-new-target" title="永久链接至标题">¶</a></h2>
<p>There are two ways to register strategies for a new target. The more
straightforward one is adding a new target file in the directory
<code class="docutils literal notranslate"><span class="pre">python/tvm/relay/op/strategy</span></code>. You only need to customize the strategy for
ops that have been implemented for this new target and reuse the generic
strategies for the rest.</p>
<p>Alternatively, you can also register the strategy for the new target outside the
TVM Python library. The following code snippet shows an example of how to do
so. You can find more examples in <code class="docutils literal notranslate"><span class="pre">vta/python/vta/top/op.py</span></code>.</p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="nd">@relay.op.strategy.conv2d_strategy.register</span><span class="p">(</span><span class="s2">&quot;mytarget&quot;</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">conv2d_strategy_mytarget</span><span class="p">(</span><span class="n">attrs</span><span class="p">,</span> <span class="n">inputs</span><span class="p">,</span> <span class="n">out_type</span><span class="p">,</span> <span class="n">target</span><span class="p">):</span>
    <span class="o">...</span>
</pre></div>
</div>
</div>
<div class="section" id="select-implementation-from-op-strategy">
<h2>Select Implementation from Op Strategy<a class="headerlink" href="#select-implementation-from-op-strategy" title="永久链接至标题">¶</a></h2>
<p>During the compilation, Relay compile engine needs to determine which
implementation to use for an operator when there are multiple. The selection
policy works as follows.</p>
<p>When the input tensors to an operator or a fused op all have constant shapes,
the compile engine first finds the best implementation based on AutoTVM tuning
logs. If there is no implementation that is an AutoTVM template or all AutoTVM
templates have fallback configs, the implementation with highest priority level
will then be chosen. Implementations with the same priority level in this case lead
to undefined behavior, and any of them might be selected.</p>
<p>The selection policy for ops with symbolic input shapes is still work in
progress. Currently, if any input tensor has a symbolic shape, only the
implementation with highest priority level will be used for this operator. This
will be updated after the implementation finishes.</p>
<p>For debug purpose, you can add the following lines before you compile the Relay
model to learn which implementation is used for each operator.</p>
<div class="highlight-python notranslate"><div class="highlight"><pre><span></span><span class="n">logging</span><span class="o">.</span><span class="n">getLogger</span><span class="p">(</span><span class="s2">&quot;compile_engine&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">setLevel</span><span class="p">(</span><span class="n">logging</span><span class="o">.</span><span class="n">INFO</span><span class="p">)</span>
<span class="n">logging</span><span class="o">.</span><span class="n">getLogger</span><span class="p">(</span><span class="s2">&quot;compile_engine&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">addHandler</span><span class="p">(</span><span class="n">logging</span><span class="o">.</span><span class="n">StreamHandler</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">stdout</span><span class="p">))</span>
</pre></div>
</div>
</div>
</div>


           </div>
           
          </div>
          

<footer>

    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
      
        <a href="convert_layout.html" class="btn btn-neutral float-right" title="Convert Layout Pass" accesskey="n" rel="next">下一个 <span class="fa fa-arrow-circle-right"></span></a>
      
      
        <a href="relay_intro.html" class="btn btn-neutral float-left" title="Relay 介绍" accesskey="p" rel="prev"><span class="fa fa-arrow-circle-left"></span> 上一个</a>
      
    </div>

<div id="button" class="backtop"><img src="../_static/img/right.svg" alt="backtop"> </div>
<section class="footerSec">
    <div class="footerHeader">
      <ul class="d-flex align-md-items-center justify-content-between flex-column flex-md-row">
        <li class="copywrite d-flex align-items-center">
          <h5 id="copy-right-info">© 2020 Apache Software Foundation | All rights reserved</h5>
        </li>
      </ul>

    </div>

    <ul>
      <li class="footernote">Copyright © 2020 The Apache Software Foundation. Apache TVM, Apache, the Apache feather, and the Apache TVM project logo are either trademarks or registered trademarks of the Apache Software Foundation.</li>
    </ul>

</section>
</footer>
        </div>
      </div>

    </section>

  </div>
  

    <script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.12.9/umd/popper.min.js" integrity="sha384-ApNbgh9B+Y1QKtv3Rn7W3mgPxhU9K/ScQsAP7hUibX39j7fakFPskvXusvfa0b4Q" crossorigin="anonymous"></script>
    <script src="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0/js/bootstrap.min.js" integrity="sha384-JZR6Spejh4U02d8jOt6vLEHfe/JQGiRRSQQxSfFWpi1MquVdAyjUar5+76PVCmYl" crossorigin="anonymous"></script>

  <script type="text/javascript">
      jQuery(function () {
          SphinxRtdTheme.Navigation.enable(true);
      });
  </script>

  
  
    
    <!-- Theme Analytics -->
    <script>
    (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
      (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
      m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
    })(window,document,'script','https://www.google-analytics.com/analytics.js','ga');

    ga('create', 'UA-75982049-2', 'auto');
    ga('send', 'pageview');
    </script>

    
   

</body>
</html>