

<!DOCTYPE html>
<html class="writer-html5" lang="en" >
<head>
  <meta charset="utf-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>mindspore.compression &mdash; MindSpore master documentation</title>

  <!-- type attributes omitted: text/css and text/javascript are the HTML defaults -->
  <link rel="stylesheet" href="../_static/css/theme.css" />
  <link rel="stylesheet" href="../_static/pygments.css" />

  <!-- html5shiv only for legacy IE (< 9) -->
  <!--[if lt IE 9]>
    <script src="../_static/js/html5shiv.min.js"></script>
  <![endif]-->

  <!-- documentation_options.js reads its own id and data-url_root; keep both attributes -->
  <script id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
  <script src="../_static/jquery.js"></script>
  <script src="../_static/underscore.js"></script>
  <script src="../_static/doctools.js"></script>
  <script src="../_static/language_data.js"></script>
  <!-- async is a boolean attribute: presence alone enables it -->
  <script async src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/latest.js?config=TeX-AMS-MML_HTMLorMML"></script>
  <script src="../_static/js/theme.js"></script>

  <link rel="index" title="Index" href="../genindex.html" />
  <link rel="search" title="Search" href="../search.html" />
  <link rel="next" title="mindspore.context" href="mindspore.context.html" />
  <link rel="prev" title="mindspore.communication" href="mindspore.communication.html" />
</head>

<body class="wy-body-for-nav">

   
  <div class="wy-grid-for-nav">
    
    <!-- Sidebar: project home link, docs search form, and the toctree menu.
         data-toggle / data-spy attributes are hooks for theme.js — left untouched. -->
    <nav data-toggle="wy-nav-shift" class="wy-nav-side">
      <div class="wy-side-scroll">
        <div class="wy-side-nav-search">
          <a href="../index.html" class="icon icon-home"> MindSpore</a>

          <!-- aria-label added: the placeholder was the only label for this input -->
          <div role="search">
            <form id="rtd-search-form" class="wy-form" action="../search.html" method="get">
              <input type="text" name="q" placeholder="Search docs" aria-label="Search docs" />
              <input type="hidden" name="check_keywords" value="yes" />
              <input type="hidden" name="area" value="default" />
            </form>
          </div>
        </div>

        <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
          <p class="caption"><span class="caption-text">MindSpore Python API</span></p>
<ul class="current">
<li class="toctree-l1"><a class="reference internal" href="mindspore.html">mindspore</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.common.initializer.html">mindspore.common.initializer</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.communication.html">mindspore.communication</a></li>
<li class="toctree-l1 current"><a class="current reference internal" href="#">mindspore.compression</a><ul>
<li class="toctree-l2"><a class="reference internal" href="#mindspore-compression-quant">mindspore.compression.quant</a></li>
<li class="toctree-l2"><a class="reference internal" href="#mindspore-compression-common">mindspore.compression.common</a></li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.context.html">mindspore.context</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.dataset.html">mindspore.dataset</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.dataset.audio.html">mindspore.dataset.audio</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.dataset.config.html">mindspore.dataset.config</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.dataset.text.html">mindspore.dataset.text</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.dataset.transforms.html">mindspore.dataset.transforms</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.dataset.vision.html">mindspore.dataset.vision</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.mindrecord.html">mindspore.mindrecord</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.nn.html">mindspore.nn</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.nn.probability.html">mindspore.nn.probability</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.nn.transformer.html">mindspore.nn.transformer</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.numpy.html">mindspore.numpy</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.ops.html">mindspore.ops</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.parallel.html">mindspore.parallel</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.parallel.nn.html">mindspore.parallel.nn</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.profiler.html">mindspore.profiler</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.scipy.html">mindspore.scipy</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.train.html">mindspore.train</a></li>
<li class="toctree-l1"><a class="reference internal" href="mindspore.boost.html">mindspore.boost</a></li>
</ul>
<p class="caption"><span class="caption-text">MindSpore C++ API</span></p>
<ul>
<li class="toctree-l1"><a class="reference external" href="https://www.mindspore.cn/lite/api/zh-CN/master/api_cpp/mindspore.html">MindSpore Lite↗</a></li>
</ul>
        </div>
      </div>
    </nav>

    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">

      
      <!-- Mobile top bar. theme.js attaches behavior via the data-toggle attribute.
           NOTE(review): the menu toggle is an <i> element, so it is not keyboard
           focusable and has no accessible name; a <button> would be preferable, but
           confirm against theme.js selectors before restructuring. -->
      <nav class="wy-nav-top" aria-label="top navigation">
        
          <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
          <a href="../index.html">MindSpore</a>
        
      </nav>


      <div class="wy-nav-content">
        
        <div class="rst-content">
        
          

















<!-- Breadcrumb trail: home icon » current page, plus "View page source" aside. -->
<div role="navigation" aria-label="breadcrumbs navigation">
  <ul class="wy-breadcrumbs">
    <!-- aria-label added: this link renders only a CSS icon and had no accessible name -->
    <li><a href="../index.html" class="icon icon-home" aria-label="Home"></a> &raquo;</li>
    <li>mindspore.compression</li>
    <li class="wy-breadcrumbs-aside">
      <a href="../_sources/api_python/mindspore.compression.rst.txt" rel="nofollow"> View page source</a>
    </li>
  </ul>
  <hr />
</div>
          <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
           <div itemprop="articleBody">
            
  <div class="section" id="mindspore-compression">
<h1>mindspore.compression<a class="headerlink" href="#mindspore-compression" title="Permalink to this headline">¶</a></h1>
<p>Quantization module, including base class of the quantizer, the quantization aware training algorithm, and quantization utils.</p>
<div class="section" id="mindspore-compression-quant">
<h2>mindspore.compression.quant<a class="headerlink" href="#mindspore-compression-quant" title="Permalink to this headline">¶</a></h2>
<!-- API entry (Sphinx-generated) for
     mindspore.compression.quant.load_nonquant_param_into_quant_net:
     signature, parameter list, raised exceptions, and a doctest example.
     Everything inside the <pre> below is Pygments-highlighted output — any text
     added there would render on the page, so comments stay outside of it. -->
<dl class="function">
<dt id="mindspore.compression.quant.load_nonquant_param_into_quant_net">
<code class="sig-prename descclassname">mindspore.compression.quant.</code><code class="sig-name descname">load_nonquant_param_into_quant_net</code><span class="sig-paren">(</span><em class="sig-param">quant_model</em>, <em class="sig-param">params_dict</em>, <em class="sig-param">quant_new_params=None</em><span class="sig-paren">)</span><a class="reference internal" href="../_modules/mindspore/compression/quant/quant_utils.html#load_nonquant_param_into_quant_net"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#mindspore.compression.quant.load_nonquant_param_into_quant_net" title="Permalink to this definition">¶</a></dt>
<dd><p>Load fp32 model parameters into quantization model.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>quant_model</strong> (<a class="reference internal" href="nn/mindspore.nn.Cell.html#mindspore.nn.Cell" title="mindspore.nn.Cell"><em>Cell</em></a>) – Quantization model.</p></li>
<li><p><strong>params_dict</strong> (<a class="reference external" href="https://docs.python.org/library/stdtypes.html#dict" title="(in Python v3.8)"><em>dict</em></a>) – Parameter dict that stores fp32 parameters.</p></li>
<li><p><strong>quant_new_params</strong> (<a class="reference external" href="https://docs.python.org/library/stdtypes.html#list" title="(in Python v3.8)"><em>list</em></a>) – Parameters that exist in quantization network but not in non-quantization
network. Default: None.</p></li>
</ul>
</dd>
<dt class="field-even">Raises</dt>
<dd class="field-even"><ul class="simple">
<li><p><a class="reference external" href="https://docs.python.org/library/exceptions.html#TypeError" title="(in Python v3.8)"><strong>TypeError</strong></a> – If <cite>quant_new_params</cite> is not None and is not list.</p></li>
<li><p><a class="reference external" href="https://docs.python.org/library/exceptions.html#ValueError" title="(in Python v3.8)"><strong>ValueError</strong></a> – If there are parameters in the <cite>quant_model</cite> that are neither in <cite>params_dict</cite>
    nor in <cite>quant_new_params</cite>.</p></li>
</ul>
</dd>
</dl>
<p class="rubric">Examples</p>
<!-- Doctest: defines a fused LeNet5 (Conv2dBnAct/DenseBnAct), loads an fp32
     checkpoint, and feeds the param dict into the quantized net. -->
<div class="doctest highlight-default notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mindspore</span> <span class="kn">import</span> <span class="n">load_checkpoint</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mindspore.compression.quant.quant_utils</span> <span class="kn">import</span> <span class="n">load_nonquant_param_into_quant_net</span>
<span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">LeNet5</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">Cell</span><span class="p">):</span>
<span class="gp">... </span>    <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">num_class</span><span class="o">=</span><span class="mi">10</span><span class="p">,</span> <span class="n">channel</span><span class="o">=</span><span class="mi">1</span><span class="p">):</span>
<span class="gp">... </span>        <span class="nb">super</span><span class="p">(</span><span class="n">LeNet5</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">()</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">type</span> <span class="o">=</span> <span class="s2">&quot;fusion&quot;</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">num_class</span> <span class="o">=</span> <span class="n">num_class</span>
<span class="gp">...</span>
<span class="gp">... </span>        <span class="c1"># change `nn.Conv2d` to `nn.Conv2dBnAct`</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">conv1</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">Conv2dBnAct</span><span class="p">(</span><span class="n">channel</span><span class="p">,</span> <span class="mi">6</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="n">pad_mode</span><span class="o">=</span><span class="s1">&#39;valid&#39;</span><span class="p">,</span> <span class="n">activation</span><span class="o">=</span><span class="s1">&#39;relu&#39;</span><span class="p">)</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">conv2</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">Conv2dBnAct</span><span class="p">(</span><span class="mi">6</span><span class="p">,</span> <span class="mi">16</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="n">pad_mode</span><span class="o">=</span><span class="s1">&#39;valid&#39;</span><span class="p">,</span> <span class="n">activation</span><span class="o">=</span><span class="s1">&#39;relu&#39;</span><span class="p">)</span>
<span class="gp">... </span>        <span class="c1"># change `nn.Dense` to `nn.DenseBnAct`</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">fc1</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">DenseBnAct</span><span class="p">(</span><span class="mi">16</span> <span class="o">*</span> <span class="mi">5</span> <span class="o">*</span> <span class="mi">5</span><span class="p">,</span> <span class="mi">120</span><span class="p">,</span> <span class="n">activation</span><span class="o">=</span><span class="s1">&#39;relu&#39;</span><span class="p">)</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">fc2</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">DenseBnAct</span><span class="p">(</span><span class="mi">120</span><span class="p">,</span> <span class="mi">84</span><span class="p">,</span> <span class="n">activation</span><span class="o">=</span><span class="s1">&#39;relu&#39;</span><span class="p">)</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">fc3</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">DenseBnAct</span><span class="p">(</span><span class="mi">84</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">num_class</span><span class="p">)</span>
<span class="gp">...</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">max_pool2d</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">MaxPool2d</span><span class="p">(</span><span class="n">kernel_size</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">stride</span><span class="o">=</span><span class="mi">2</span><span class="p">)</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">flatten</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">Flatten</span><span class="p">()</span>
<span class="gp">...</span>
<span class="gp">... </span>    <span class="k">def</span> <span class="nf">construct</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">x</span><span class="p">):</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">conv1</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">max_pool2d</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">conv2</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">max_pool2d</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">flatten</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">fc1</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">fc2</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">fc3</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="k">return</span> <span class="n">x</span>
<span class="gp">...</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">net</span> <span class="o">=</span> <span class="n">LeNet5</span><span class="p">()</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">ckpt_file_name</span> <span class="o">=</span> <span class="s2">&quot;./checkpoint/LeNet5_noquant-1_32.ckpt&quot;</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">param_dict</span> <span class="o">=</span> <span class="n">load_checkpoint</span><span class="p">(</span><span class="n">ckpt_file_name</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">load_nonquant_param_into_quant_net</span><span class="p">(</span><span class="n">net</span><span class="p">,</span> <span class="n">param_dict</span><span class="p">)</span>
</pre></div>
</div>
</dd></dl>

<!-- API entry (Sphinx-generated) for
     mindspore.compression.quant.query_quant_layers. The description prose was
     garbled ("which not exist", comma splice) and is repaired below; all other
     markup, including the Pygments <pre> content, is unchanged. -->
<dl class="function">
<dt id="mindspore.compression.quant.query_quant_layers">
<code class="sig-prename descclassname">mindspore.compression.quant.</code><code class="sig-name descname">query_quant_layers</code><span class="sig-paren">(</span><em class="sig-param">network</em><span class="sig-paren">)</span><a class="reference internal" href="../_modules/mindspore/compression/quant/quant_utils.html#query_quant_layers"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#mindspore.compression.quant.query_quant_layers" title="Permalink to this definition">¶</a></dt>
<dd><p>Query the network’s quantization strategy of each quantized layer and print it to the screen. Note that all the
quantization layers are queried before graph compile optimization in the graph mode; thus, some redundant quantized
layers, which do not exist in the practical execution, may appear.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><p><strong>network</strong> (<a class="reference internal" href="nn/mindspore.nn.Cell.html#mindspore.nn.Cell" title="mindspore.nn.Cell"><em>Cell</em></a>) – input network</p>
</dd>
</dl>
<p class="rubric">Examples</p>
<div class="doctest highlight-default notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mindspore.compression.quant</span> <span class="kn">import</span> <span class="n">QuantizationAwareTraining</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mindspore.compression.quant.quant_utils</span> <span class="kn">import</span> <span class="n">query_quant_layers</span>
<span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">LeNet5</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">Cell</span><span class="p">):</span>
<span class="gp">... </span>    <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">num_class</span><span class="o">=</span><span class="mi">10</span><span class="p">,</span> <span class="n">channel</span><span class="o">=</span><span class="mi">1</span><span class="p">):</span>
<span class="gp">... </span>        <span class="nb">super</span><span class="p">(</span><span class="n">LeNet5</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">()</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">type</span> <span class="o">=</span> <span class="s2">&quot;fusion&quot;</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">num_class</span> <span class="o">=</span> <span class="n">num_class</span>
<span class="gp">...</span>
<span class="gp">... </span>        <span class="c1"># change `nn.Conv2d` to `nn.Conv2dBnAct`</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">conv1</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">Conv2dBnAct</span><span class="p">(</span><span class="n">channel</span><span class="p">,</span> <span class="mi">6</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="n">pad_mode</span><span class="o">=</span><span class="s1">&#39;valid&#39;</span><span class="p">,</span> <span class="n">activation</span><span class="o">=</span><span class="s1">&#39;relu&#39;</span><span class="p">)</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">conv2</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">Conv2dBnAct</span><span class="p">(</span><span class="mi">6</span><span class="p">,</span> <span class="mi">16</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="n">pad_mode</span><span class="o">=</span><span class="s1">&#39;valid&#39;</span><span class="p">,</span> <span class="n">activation</span><span class="o">=</span><span class="s1">&#39;relu&#39;</span><span class="p">)</span>
<span class="gp">... </span>        <span class="c1"># change `nn.Dense` to `nn.DenseBnAct`</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">fc1</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">DenseBnAct</span><span class="p">(</span><span class="mi">16</span> <span class="o">*</span> <span class="mi">5</span> <span class="o">*</span> <span class="mi">5</span><span class="p">,</span> <span class="mi">120</span><span class="p">,</span> <span class="n">activation</span><span class="o">=</span><span class="s1">&#39;relu&#39;</span><span class="p">)</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">fc2</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">DenseBnAct</span><span class="p">(</span><span class="mi">120</span><span class="p">,</span> <span class="mi">84</span><span class="p">,</span> <span class="n">activation</span><span class="o">=</span><span class="s1">&#39;relu&#39;</span><span class="p">)</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">fc3</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">DenseBnAct</span><span class="p">(</span><span class="mi">84</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">num_class</span><span class="p">)</span>
<span class="gp">...</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">max_pool2d</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">MaxPool2d</span><span class="p">(</span><span class="n">kernel_size</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">stride</span><span class="o">=</span><span class="mi">2</span><span class="p">)</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">flatten</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">Flatten</span><span class="p">()</span>
<span class="gp">...</span>
<span class="gp">... </span>    <span class="k">def</span> <span class="nf">construct</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">x</span><span class="p">):</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">conv1</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">max_pool2d</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">conv2</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">max_pool2d</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">flatten</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">fc1</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">fc2</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">fc3</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="k">return</span> <span class="n">x</span>
<span class="gp">...</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">net</span> <span class="o">=</span> <span class="n">LeNet5</span><span class="p">()</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">quantizer</span> <span class="o">=</span> <span class="n">QuantizationAwareTraining</span><span class="p">(</span><span class="n">bn_fold</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">per_channel</span><span class="o">=</span><span class="p">[</span><span class="kc">True</span><span class="p">,</span> <span class="kc">False</span><span class="p">],</span> <span class="n">symmetric</span><span class="o">=</span><span class="p">[</span><span class="kc">True</span><span class="p">,</span> <span class="kc">False</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">net_qat</span> <span class="o">=</span> <span class="n">quantizer</span><span class="o">.</span><span class="n">quantize</span><span class="p">(</span><span class="n">net</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">query_quant_layers</span><span class="p">(</span><span class="n">net_qat</span><span class="p">)</span>
<span class="go">conv1.conv.fake_quant_weight                                       INT8</span>
<span class="go">conv1.activation.fake_quant_act                                    INT8</span>
<span class="go">conv2.conv.fake_quant_weight                                       INT8</span>
<span class="go">conv2.activation.fake_quant_act                                    INT8</span>
<span class="go">fc1.dense.fake_quant_weight                                        INT8</span>
<span class="go">fc1.activation.fake_quant_act                                      INT8</span>
<span class="go">fc2.dense.fake_quant_weight                                        INT8</span>
<span class="go">fc2.activation.fake_quant_act                                      INT8</span>
<span class="go">fc3.dense.fake_quant_weight                                        INT8</span>
<span class="go">fc3.activation.fake_quant_act                                      INT8</span>
</pre></div>
</div>
</dd></dl>

<dl class="class">
<dt id="mindspore.compression.quant.QuantizationAwareTraining">
<em class="property">class </em><code class="sig-prename descclassname">mindspore.compression.quant.</code><code class="sig-name descname">QuantizationAwareTraining</code><span class="sig-paren">(</span><em class="sig-param">bn_fold=True</em>, <em class="sig-param">freeze_bn=10000000</em>, <em class="sig-param">quant_delay=(0</em>, <em class="sig-param">0)</em>, <em class="sig-param">quant_dtype=(QuantDtype.INT8</em>, <em class="sig-param">QuantDtype.INT8)</em>, <em class="sig-param">per_channel=(False</em>, <em class="sig-param">False)</em>, <em class="sig-param">symmetric=(False</em>, <em class="sig-param">False)</em>, <em class="sig-param">narrow_range=(False</em>, <em class="sig-param">False)</em>, <em class="sig-param">optimize_option=OptimizeOption.QAT</em>, <em class="sig-param">one_conv_fold=True</em><span class="sig-paren">)</span><a class="reference internal" href="../_modules/mindspore/compression/quant/qat.html#QuantizationAwareTraining"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#mindspore.compression.quant.QuantizationAwareTraining" title="Permalink to this definition">¶</a></dt>
<dd><p>Quantizer for quantization aware training.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>bn_fold</strong> (<a class="reference external" href="https://docs.python.org/library/functions.html#bool" title="(in Python v3.8)"><em>bool</em></a>) – Whether to use bn fold ops for simulation inference operation. Default: True.</p></li>
<li><p><strong>freeze_bn</strong> (<a class="reference external" href="https://docs.python.org/library/functions.html#int" title="(in Python v3.8)"><em>int</em></a>) – Number of steps after which BatchNorm OP parameters fixed to global mean and variance.
Default: 1e7.</p></li>
<li><p><strong>quant_delay</strong> (<em>Union</em><em>[</em><a class="reference external" href="https://docs.python.org/library/functions.html#int" title="(in Python v3.8)"><em>int</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#list" title="(in Python v3.8)"><em>list</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#tuple" title="(in Python v3.8)"><em>tuple</em></a><em>]</em>) – Number of steps after which weights and activations are quantized
during train and eval. The first element represents weights and the second element represents data flow.
Default: (0, 0).</p></li>
<li><p><strong>quant_dtype</strong> (<em>Union</em><em>[</em><a class="reference internal" href="#mindspore.compression.common.QuantDtype" title="mindspore.compression.common.QuantDtype"><em>QuantDtype</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#list" title="(in Python v3.8)"><em>list</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#tuple" title="(in Python v3.8)"><em>tuple</em></a><em>]</em>) – Datatype used to quantize weights and activations. The first
element represents weights and the second element represents data flow. It is necessary to consider the
precision support of hardware devices in the practical quantization infer scenario.
Default: (QuantDtype.INT8, QuantDtype.INT8).</p></li>
<li><p><strong>per_channel</strong> (<em>Union</em><em>[</em><a class="reference external" href="https://docs.python.org/library/functions.html#bool" title="(in Python v3.8)"><em>bool</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#list" title="(in Python v3.8)"><em>list</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#tuple" title="(in Python v3.8)"><em>tuple</em></a><em>]</em>) – Quantization granularity based on layer or on channel. If <cite>True</cite>
then base on per channel, otherwise base on per layer. The first element represents weights and the
second element represents data flow, and the second element must be <cite>False</cite> now. Default: (False, False).</p></li>
<li><p><strong>symmetric</strong> (<em>Union</em><em>[</em><a class="reference external" href="https://docs.python.org/library/functions.html#bool" title="(in Python v3.8)"><em>bool</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#list" title="(in Python v3.8)"><em>list</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#tuple" title="(in Python v3.8)"><em>tuple</em></a><em>]</em>) – Whether the quantization algorithm is symmetric or not. If <cite>True</cite> then
base on symmetric, otherwise base on asymmetric. The first element represents weights and the second
element represents data flow. Default: (False, False).</p></li>
<li><p><strong>narrow_range</strong> (<em>Union</em><em>[</em><a class="reference external" href="https://docs.python.org/library/functions.html#bool" title="(in Python v3.8)"><em>bool</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#list" title="(in Python v3.8)"><em>list</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#tuple" title="(in Python v3.8)"><em>tuple</em></a><em>]</em>) – Whether the quantization algorithm uses narrow range or not.
The first element represents weights and the second element represents data flow.
Default: (False, False).</p></li>
<li><p><strong>optimize_option</strong> (<em>Union</em><em>[</em><a class="reference internal" href="#mindspore.compression.quant.OptimizeOption" title="mindspore.compression.quant.OptimizeOption"><em>OptimizeOption</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#list" title="(in Python v3.8)"><em>list</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#tuple" title="(in Python v3.8)"><em>tuple</em></a><em>]</em>) – Specifies the quant algorithm and options, currently
only support <cite>QAT</cite> and <cite>LEARNED_SCALE</cite> (Note that, if both <cite>QAT</cite> and <cite>LEARNED_SCALE</cite> are configured,
<cite>LEARNED_SCALE</cite> has a higher priority. <cite>LEARNED_SCALE</cite> currently only works under some constraints, which
include: freeze_bn=0, quant_delay=0, symmetric=True, narrow_range=True. More specifically, for operators
such as Relu and Relu6, which only have positive values, we add a negative truncation to optimize this
scenario, and narrow_range will automatically match to False). Default: OptimizeOption.QAT.</p></li>
<li><p><strong>one_conv_fold</strong> (<a class="reference external" href="https://docs.python.org/library/functions.html#bool" title="(in Python v3.8)"><em>bool</em></a>) – Whether to use one conv bn fold ops for simulation inference operation. Default: True.</p></li>
</ul>
</dd>
<dt class="field-even">Raises</dt>
<dd class="field-even"><ul class="simple">
<li><p><a class="reference external" href="https://docs.python.org/library/exceptions.html#TypeError" title="(in Python v3.8)"><strong>TypeError</strong></a> – If the element of <cite>quant_delay</cite> or <cite>freeze_bn</cite> is not int.</p></li>
<li><p><a class="reference external" href="https://docs.python.org/library/exceptions.html#TypeError" title="(in Python v3.8)"><strong>TypeError</strong></a> – If <cite>bn_fold</cite>, <cite>one_conv_fold</cite> or the element of <cite>per_channel</cite>, <cite>symmetric</cite>, <cite>narrow_range</cite>
    is not bool.</p></li>
<li><p><a class="reference external" href="https://docs.python.org/library/exceptions.html#TypeError" title="(in Python v3.8)"><strong>TypeError</strong></a> – If the element of <cite>quant_dtype</cite> is not <cite>QuantDtype</cite>.</p></li>
<li><p><a class="reference external" href="https://docs.python.org/library/exceptions.html#ValueError" title="(in Python v3.8)"><strong>ValueError</strong></a> – If the length of <cite>quant_delay</cite>, <cite>quant_dtype</cite>, <cite>per_channel</cite>, <cite>symmetric</cite> or <cite>narrow_range</cite> is
    not less than 2.</p></li>
<li><p><a class="reference external" href="https://docs.python.org/library/exceptions.html#ValueError" title="(in Python v3.8)"><strong>ValueError</strong></a> – If the <cite>optimize_option</cite> is <cite>LEARNED_SCALE</cite> and <cite>freeze_bn</cite> is not equal to 0.</p></li>
<li><p><a class="reference external" href="https://docs.python.org/library/exceptions.html#ValueError" title="(in Python v3.8)"><strong>ValueError</strong></a> – If the <cite>optimize_option</cite> is <cite>LEARNED_SCALE</cite> and <cite>symmetric</cite> is not (True, True).</p></li>
<li><p><a class="reference external" href="https://docs.python.org/library/exceptions.html#ValueError" title="(in Python v3.8)"><strong>ValueError</strong></a> – If the <cite>optimize_option</cite> is <cite>LEARNED_SCALE</cite> and <cite>narrow_range</cite> is not (True, True).</p></li>
<li><p><a class="reference external" href="https://docs.python.org/library/exceptions.html#ValueError" title="(in Python v3.8)"><strong>ValueError</strong></a> – If the <cite>optimize_option</cite> is <cite>LEARNED_SCALE</cite> and <cite>quant_delay</cite> is not (0, 0).</p></li>
</ul>
</dd>
</dl>
<p class="rubric">Examples</p>
<div class="doctest highlight-default notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mindspore.compression.quant</span> <span class="kn">import</span> <span class="n">QuantizationAwareTraining</span>
<span class="gp">&gt;&gt;&gt; </span><span class="k">class</span> <span class="nc">LeNet5</span><span class="p">(</span><span class="n">nn</span><span class="o">.</span><span class="n">Cell</span><span class="p">):</span>
<span class="gp">... </span>    <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">num_class</span><span class="o">=</span><span class="mi">10</span><span class="p">,</span> <span class="n">channel</span><span class="o">=</span><span class="mi">1</span><span class="p">):</span>
<span class="gp">... </span>        <span class="nb">super</span><span class="p">(</span><span class="n">LeNet5</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">()</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">type</span> <span class="o">=</span> <span class="s2">&quot;fusion&quot;</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">num_class</span> <span class="o">=</span> <span class="n">num_class</span>
<span class="gp">...</span>
<span class="gp">... </span>        <span class="c1"># change `nn.Conv2d` to `nn.Conv2dBnAct`</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">conv1</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">Conv2dBnAct</span><span class="p">(</span><span class="n">channel</span><span class="p">,</span> <span class="mi">6</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="n">pad_mode</span><span class="o">=</span><span class="s1">&#39;valid&#39;</span><span class="p">,</span> <span class="n">activation</span><span class="o">=</span><span class="s1">&#39;relu&#39;</span><span class="p">)</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">conv2</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">Conv2dBnAct</span><span class="p">(</span><span class="mi">6</span><span class="p">,</span> <span class="mi">16</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="n">pad_mode</span><span class="o">=</span><span class="s1">&#39;valid&#39;</span><span class="p">,</span> <span class="n">activation</span><span class="o">=</span><span class="s1">&#39;relu&#39;</span><span class="p">)</span>
<span class="gp">... </span>        <span class="c1"># change `nn.Dense` to `nn.DenseBnAct`</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">fc1</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">DenseBnAct</span><span class="p">(</span><span class="mi">16</span> <span class="o">*</span> <span class="mi">5</span> <span class="o">*</span> <span class="mi">5</span><span class="p">,</span> <span class="mi">120</span><span class="p">,</span> <span class="n">activation</span><span class="o">=</span><span class="s1">&#39;relu&#39;</span><span class="p">)</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">fc2</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">DenseBnAct</span><span class="p">(</span><span class="mi">120</span><span class="p">,</span> <span class="mi">84</span><span class="p">,</span> <span class="n">activation</span><span class="o">=</span><span class="s1">&#39;relu&#39;</span><span class="p">)</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">fc3</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">DenseBnAct</span><span class="p">(</span><span class="mi">84</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">num_class</span><span class="p">)</span>
<span class="gp">...</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">max_pool2d</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">MaxPool2d</span><span class="p">(</span><span class="n">kernel_size</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">stride</span><span class="o">=</span><span class="mi">2</span><span class="p">)</span>
<span class="gp">... </span>        <span class="bp">self</span><span class="o">.</span><span class="n">flatten</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">Flatten</span><span class="p">()</span>
<span class="gp">...</span>
<span class="gp">... </span>    <span class="k">def</span> <span class="nf">construct</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">x</span><span class="p">):</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">conv1</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">max_pool2d</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">conv2</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">max_pool2d</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">flatten</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">fc1</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">fc2</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="n">x</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">fc3</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="gp">... </span>        <span class="k">return</span> <span class="n">x</span>
<span class="gp">...</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">net</span> <span class="o">=</span> <span class="n">LeNet5</span><span class="p">()</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">quantizer</span> <span class="o">=</span> <span class="n">QuantizationAwareTraining</span><span class="p">(</span><span class="n">bn_fold</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">per_channel</span><span class="o">=</span><span class="p">[</span><span class="kc">True</span><span class="p">,</span> <span class="kc">False</span><span class="p">],</span> <span class="n">symmetric</span><span class="o">=</span><span class="p">[</span><span class="kc">True</span><span class="p">,</span> <span class="kc">False</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">net_qat</span> <span class="o">=</span> <span class="n">quantizer</span><span class="o">.</span><span class="n">quantize</span><span class="p">(</span><span class="n">net</span><span class="p">)</span>
</pre></div>
</div>
<dl class="method">
<dt id="mindspore.compression.quant.QuantizationAwareTraining.quantize">
<code class="sig-name descname">quantize</code><span class="sig-paren">(</span><em class="sig-param">network</em><span class="sig-paren">)</span><a class="reference internal" href="../_modules/mindspore/compression/quant/qat.html#QuantizationAwareTraining.quantize"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#mindspore.compression.quant.QuantizationAwareTraining.quantize" title="Permalink to this definition">¶</a></dt>
<dd><p>Quant API to convert input network to a quantization aware training network.</p>
<div class="admonition note">
<p class="admonition-title">Note</p>
<p>Please refer to the Examples of class: <cite>mindspore.compression.quant.QuantizationAwareTraining</cite>.</p>
</div>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><p><strong>network</strong> (<a class="reference internal" href="nn/mindspore.nn.Cell.html#mindspore.nn.Cell" title="mindspore.nn.Cell"><em>Cell</em></a>) – network to be quantized.</p>
</dd>
<dt class="field-even">Returns</dt>
<dd class="field-even"><p>Cell, a quantization aware training network.</p>
</dd>
<dt class="field-odd">Raises</dt>
<dd class="field-odd"><p><a class="reference external" href="https://docs.python.org/library/exceptions.html#KeyError" title="(in Python v3.8)"><strong>KeyError</strong></a> – If the <cite>device_target</cite> set in context is not in <cite>support_device</cite>.</p>
</dd>
</dl>
</dd></dl>

</dd></dl>

<dl class="function">
<dt id="mindspore.compression.quant.create_quant_config">
<code class="sig-prename descclassname">mindspore.compression.quant.</code><code class="sig-name descname">create_quant_config</code><span class="sig-paren">(</span><em class="sig-param">quant_observer=(nn.FakeQuantWithMinMaxObserver</em>, <em class="sig-param">nn.FakeQuantWithMinMaxObserver)</em>, <em class="sig-param">quant_delay=(0</em>, <em class="sig-param">0)</em>, <em class="sig-param">quant_dtype=(QuantDtype.INT8</em>, <em class="sig-param">QuantDtype.INT8)</em>, <em class="sig-param">per_channel=(False</em>, <em class="sig-param">False)</em>, <em class="sig-param">symmetric=(False</em>, <em class="sig-param">False)</em>, <em class="sig-param">narrow_range=(False</em>, <em class="sig-param">False)</em>, <em class="sig-param">mode=&quot;DEFAULT&quot;</em><span class="sig-paren">)</span><a class="reference internal" href="../_modules/mindspore/compression/quant/qat.html#create_quant_config"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#mindspore.compression.quant.create_quant_config" title="Permalink to this definition">¶</a></dt>
<dd><p>Config the observer type of weights and data flow with quant parameters.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><ul class="simple">
<li><p><strong>quant_observer</strong> (<em>Union</em><em>[</em><em>Observer</em><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#list" title="(in Python v3.8)"><em>list</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#tuple" title="(in Python v3.8)"><em>tuple</em></a><em>]</em>) – The types of observer for quantization. The first element
applies to weights and the second applies to data flow. Currently, only
<code class="xref py py-class docutils literal notranslate"><span class="pre">FakeQuantWithMinMaxObserver</span></code> is supported.
Default: (nn.FakeQuantWithMinMaxObserver, nn.FakeQuantWithMinMaxObserver).</p></li>
<li><p><strong>quant_delay</strong> (<em>Union</em><em>[</em><a class="reference external" href="https://docs.python.org/library/functions.html#int" title="(in Python v3.8)"><em>int</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#list" title="(in Python v3.8)"><em>list</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#tuple" title="(in Python v3.8)"><em>tuple</em></a><em>]</em>) – Number of steps after which weights and activations are quantized
during train and eval. The first element represents weights and the second element represents data flow.
Default: (0, 0).</p></li>
<li><p><strong>quant_dtype</strong> (<em>Union</em><em>[</em><a class="reference internal" href="#mindspore.compression.common.QuantDtype" title="mindspore.compression.common.QuantDtype"><em>QuantDtype</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#list" title="(in Python v3.8)"><em>list</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#tuple" title="(in Python v3.8)"><em>tuple</em></a><em>]</em>) – Datatype used to quantize weights and activations. The first
element represents weights and the second element represents data flow.
Default: (QuantDtype.INT8, QuantDtype.INT8).</p></li>
<li><p><strong>per_channel</strong> (<em>Union</em><em>[</em><a class="reference external" href="https://docs.python.org/library/functions.html#bool" title="(in Python v3.8)"><em>bool</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#list" title="(in Python v3.8)"><em>list</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#tuple" title="(in Python v3.8)"><em>tuple</em></a><em>]</em>) – Quantization granularity based on layer or on channel. If <cite>True</cite>
then base on per channel, otherwise base on per layer. The first element represents weights
and the second element represents data flow, and the second element must be <cite>False</cite> now.
Default: (False, False).</p></li>
<li><p><strong>symmetric</strong> (<em>Union</em><em>[</em><a class="reference external" href="https://docs.python.org/library/functions.html#bool" title="(in Python v3.8)"><em>bool</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#list" title="(in Python v3.8)"><em>list</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#tuple" title="(in Python v3.8)"><em>tuple</em></a><em>]</em>) – Whether the quantization algorithm is symmetric or not. If <cite>True</cite> then
base on symmetric, otherwise base on asymmetric. The first element represents weights and the second
element represents data flow. Default: (False, False).</p></li>
<li><p><strong>narrow_range</strong> (<em>Union</em><em>[</em><a class="reference external" href="https://docs.python.org/library/functions.html#bool" title="(in Python v3.8)"><em>bool</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#list" title="(in Python v3.8)"><em>list</em></a><em>, </em><a class="reference external" href="https://docs.python.org/library/stdtypes.html#tuple" title="(in Python v3.8)"><em>tuple</em></a><em>]</em>) – Whether the quantization algorithm uses narrow range or not.
The first element represents weights and the second element represents data flow.
Default: (False, False).</p></li>
<li><p><strong>mode</strong> (<a class="reference external" href="https://docs.python.org/library/stdtypes.html#str" title="(in Python v3.8)"><em>str</em></a>) – Optional quantization mode, currently only <cite>DEFAULT</cite> (QAT) and <cite>LEARNED_SCALE</cite> are supported.
Default: “DEFAULT”.</p></li>
</ul>
</dd>
<dt class="field-even">Returns</dt>
<dd class="field-even"><p>QuantConfig, contains the observer type of weight and activation.</p>
</dd>
<dt class="field-odd">Raises</dt>
<dd class="field-odd"><p><a class="reference external" href="https://docs.python.org/library/exceptions.html#ValueError" title="(in Python v3.8)"><strong>ValueError</strong></a> – If the second element of <cite>per_channel</cite> is not <cite>False</cite>.</p>
</dd>
</dl>
</dd></dl>

<dl class="class">
<dt id="mindspore.compression.quant.OptimizeOption">
<em class="property">class </em><code class="sig-prename descclassname">mindspore.compression.quant.</code><code class="sig-name descname">OptimizeOption</code><a class="reference internal" href="../_modules/mindspore/compression/quant/quantizer.html#OptimizeOption"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#mindspore.compression.quant.OptimizeOption" title="Permalink to this definition">¶</a></dt>
<dd><p>An enum for the model quantization optimize option, currently only support <cite>QAT</cite> and <cite>LEARNED_SCALE</cite>.</p>
</dd></dl>

</div>
<div class="section" id="mindspore-compression-common">
<h2>mindspore.compression.common<a class="headerlink" href="#mindspore-compression-common" title="Permalink to this headline">¶</a></h2>
<dl class="class">
<dt id="mindspore.compression.common.QuantDtype">
<em class="property">class </em><code class="sig-prename descclassname">mindspore.compression.common.</code><code class="sig-name descname">QuantDtype</code><a class="reference internal" href="../_modules/mindspore/compression/common/constant.html#QuantDtype"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#mindspore.compression.common.QuantDtype" title="Permalink to this definition">¶</a></dt>
<dd><p>An enum for quant datatype, contains <cite>INT2</cite> ~ <cite>INT8</cite>, <cite>UINT2</cite> ~ <cite>UINT8</cite>.</p>
<dl class="attribute">
<dt id="mindspore.compression.common.QuantDtype.num_bits">
<code class="sig-name descname">num_bits</code><a class="headerlink" href="#mindspore.compression.common.QuantDtype.num_bits" title="Permalink to this definition">¶</a></dt>
<dd><p>Get the num bits of the QuantDtype member.</p>
<dl class="field-list simple">
<dt class="field-odd">Returns</dt>
<dd class="field-odd"><p>int, the num bits of the QuantDtype member.</p>
</dd>
</dl>
<p class="rubric">Examples</p>
<div class="doctest highlight-default notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">mindspore.compression.common</span> <span class="kn">import</span> <span class="n">QuantDtype</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">quant_dtype</span> <span class="o">=</span> <span class="n">QuantDtype</span><span class="o">.</span><span class="n">INT8</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">num_bits</span> <span class="o">=</span> <span class="n">quant_dtype</span><span class="o">.</span><span class="n">num_bits</span>
<span class="gp">&gt;&gt;&gt; </span><span class="nb">print</span><span class="p">(</span><span class="n">num_bits</span><span class="p">)</span>
<span class="go">8</span>
</pre></div>
</div>
</dd></dl>

</dd></dl>

</div>
</div>


           </div>
           
          </div>
          <footer>
    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
        <a href="mindspore.context.html" class="btn btn-neutral float-right" title="mindspore.context" accesskey="n" rel="next">Next <span class="fa fa-arrow-circle-right" aria-hidden="true"></span></a>
        <a href="mindspore.communication.html" class="btn btn-neutral float-left" title="mindspore.communication" accesskey="p" rel="prev"><span class="fa fa-arrow-circle-left" aria-hidden="true"></span> Previous</a>
    </div>

  <hr/>

  <div role="contentinfo">
    <p>
        &#169; Copyright 2021, MindSpore.

    </p>
  </div>
    
    
    
    Built with <a href="https://www.sphinx-doc.org/">Sphinx</a> using a
    
    <a href="https://github.com/readthedocs/sphinx_rtd_theme">theme</a>
    
    provided by <a href="https://readthedocs.org">Read the Docs</a>. 

</footer>
        </div>
      </div>

    </section>

  </div>
  

  <script type="text/javascript">
      jQuery(function () {
          SphinxRtdTheme.Navigation.enable(true);
      });
  </script>

  
  
    
   

</body>
</html>