<!DOCTYPE html>
<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
<head>
  <meta charset="utf-8">
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  
  <meta name="author" content="Rogerspy">
  <link rel="canonical" href="https://pytorch-zh.gitee.io/torchnn/conv3d/">
  <link rel="shortcut icon" href="/pytorch-zh/img/favicon.ico">
  <title>Conv3d - Pytorch 中文文档（1.4.0）</title>
  <link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700" />

  <link rel="stylesheet" href="../../css/theme.css" />
  <link rel="stylesheet" href="../../css/theme_extra.css" />
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/9.12.0/styles/github.min.css" />
  
  <script>
    // Current page metadata, exposed as globals (presumably read by the
    // theme/search scripts loaded below — theme.js / search/main.js; confirm).
    var mkdocs_page_name = "Conv3d";
    var mkdocs_page_input_path = "torchnn\\conv3d.md";
    var mkdocs_page_url = "/torchnn/conv3d/";
  </script>
  
  <script src="../../js/jquery-2.1.1.min.js" defer></script>
  <script src="../../js/modernizr-2.8.3.min.js" defer></script>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/9.12.0/highlight.min.js"></script>
  <script>hljs.initHighlightingOnLoad();</script> 
  
</head>

<body class="wy-body-for-nav" role="document">

  <div class="wy-grid-for-nav">

    
    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
    <div class="wy-side-scroll">
      <div class="wy-side-nav-search">
	    <img src="/pytorch-zh/img/logo.svg" alt="Pytorch 中文文档 logo">
        <a href="../.." class="icon icon-home"> Pytorch 中文文档（1.4.0）</a>
        <div role="search">
  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
    <input type="text" name="q" placeholder="Search docs" title="Type search term here" />
  </form>
</div>
      </div>

      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../..">主页</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../../get-started/">60分钟快速入门</a>
                    </li>
                </ul>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../../faq/">PyTorch FAQ</a>
                    </li>
                </ul>
                <p class="caption"><span class="caption-text">两种基本结构</span></p>
                <ul>
                    <li class="toctree-l1"><a class="reference internal" href="../parameters/">参数 Parameters</a>
                    </li>
                    <li class="toctree-l1"><a class="reference internal" href="#">容器 Containers</a>
    <ul>
                <li class="toctree-l2"><a class="reference internal" href="../module/">Module</a>
                </li>
                <li class="toctree-l2"><a class="reference internal" href="../sequential/">Sequential</a>
                </li>
                <li class="toctree-l2"><a class="reference internal" href="../modulelist/">Modulelist</a>
                </li>
                <li class="toctree-l2"><a class="reference internal" href="../moduledict/">Moduledict</a>
                </li>
                <li class="toctree-l2"><a class="reference internal" href="../parameterlist/">Parameterlist</a>
                </li>
                <li class="toctree-l2"><a class="reference internal" href="../parameterdict/">Parameterdict</a>
                </li>
    </ul>
                    </li>
                </ul>
                <p class="caption"><span class="caption-text">网络层</span></p>
                <ul class="current">
                    <li class="toctree-l1 current"><a class="reference internal current" href="#">卷积层</a>
    <ul class="current">
                <li class="toctree-l2"><a class="reference internal" href="../conv1d/">Conv1d</a>
                </li>
                <li class="toctree-l2"><a class="reference internal" href="../conv2d/">Conv2d</a>
                </li>
                <li class="toctree-l2 current"><a class="reference internal current" href="./">Conv3d</a>
    <ul class="current">
    </ul>
                </li>
                <li class="toctree-l2"><a class="reference internal" href="../convtranspose1d/">Convtranspose1d</a>
                </li>
                <li class="toctree-l2"><a class="reference internal" href="../convtranspose2d/">Convtranspose2d</a>
                </li>
                <li class="toctree-l2"><a class="reference internal" href="../convtranspose3d/">Convtranspose3d</a>
                </li>
    </ul>
                    </li>
                </ul>
      </div>
    </div>
    </nav>

    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">

      
      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
        <a href="../..">Pytorch 中文文档（1.4.0）</a>
      </nav>

      
      <div class="wy-nav-content">
        <div class="rst-content">
          <div role="navigation" aria-label="breadcrumbs navigation">
  <ul class="wy-breadcrumbs">
    <li><a href="../..">Docs</a> &raquo;</li>
    
      
        
          <li>网络层 &raquo;</li>
        
      
        
          <li>卷积层 &raquo;</li>
        
      
    
    <li>Conv3d</li>
    <li class="wy-breadcrumbs-aside">
      
    </li>
  </ul>
  
  <hr/>
</div>
          <div role="main">
            <div class="section">
              
                <h2 id="conv3d">Conv3d<a class="headerlink" href="#conv3d" title="Permanent link">&para;</a></h2>
<div class='important'>
    <pre><font color='red'>CLASS</font> torch.nn.Conv3d(
            <i>in_channels: int,
            out_channels: int,
            kernel_size: Union[T, Tuple[T, T, T]],
            stride: Union[T, Tuple[T, T, T]] = 1,
            padding: Union[T, Tuple[T, T, T]] = 0,
            dilation: Union[T, Tuple[T, T, T]] = 1,
            groups: int = 1,
            bias: bool = True,
            padding_mode: str = 'zeros'</i>
        )</pre>    
</div>

<p>对多个输入平面组合成的输入信号使用 3D 卷积。</p>
<p>在最简单的情况下，输入尺寸为 <script type="math/tex">(N, C_{in}, D, H, W)</script> 的层的输出值以及输出 <script type="math/tex">(N, C_{out}, D_{out}, H_{out}, W_{out})</script> 可以精确描述为：
<script type="math/tex; mode=display">
\mathrm{out}(N_i, C_{out_j}) = \mathrm{bias}(C_{out_j}) + \sum_{k=0}^{C_{in}-1} \mathrm{weight}(C_{out_j}, k) \star \mathrm{input}(N_i, k)
</script>
其中 <script type="math/tex">\star</script> 是 3D <a href="https://en.wikipedia.org/wiki/Cross-correlation">互相关</a>运算符。</p>
<p>这个模块支持 <a href="https://pytorch.org/docs/stable/notes/cuda.html#tf32-on-ampere">TensorFloat32</a>。</p>
<ul>
<li>
<p><code>stride</code>：控制互相关的步长。</p>
</li>
<li>
<p><code>padding</code>：对于输入平面，控制每个维度两侧的隐式零填充量。</p>
</li>
<li>
<p><code>dilation</code>：控制卷积核元素之间的间隔，即所谓的 <code>à trous</code>（空洞卷积）算法。这个概念很难用简单的语言描述，但是<a href="https://github.com/vdumoulin/conv_arithmetic/blob/master/README.md">这里</a> 提供了一个非常棒的可视化用来解释 <code>dilation</code> 到底是什么。</p>
</li>
<li>
<p><code>groups</code>：控制输入与输出之间的连接。<code>in_channels</code> 和 <code>out_channels</code> 必须可以被 <code>groups</code> 整除，比如：</p>
</li>
</ul>
<blockquote>
<ul>
<li><code>groups=1</code>，所有的输入卷积成为所有的输出；</li>
<li><code>groups=2</code>，等效为两个并排的卷积层，每个卷积层只能看到一半的输入通道，然后分别进行卷积，最后将两个卷积的结果拼接成输出；</li>
<li><code>groups=in_channels</code>，每个通道只与它自己的卷积核进行卷积，尺寸为 <script type="math/tex">\left[\frac{\mathrm{out\_channels}}{\mathrm{in\_channels}}\right]</script>。</li>
</ul>
</blockquote>
<p><code>kernel_size</code>，<code>stride</code>，<code>padding</code>，<code>dilation</code> 参数分别是：</p>
<ul>
<li>整数 <code>int</code> —— 这种情况下深度，宽度，高度都一样；</li>
<li>元组<code>tuple</code> —— 这种情况下，第一个位置的整数表示深度，第二个位置的整数表示高度，第三个位置的整数表示宽度。</li>
</ul>
<div class='container' style='margin-top:40px;margin-bottom:20px;'>
    <div style='background-color:#54c7ec;height:36px;line-height:36px;vertical-align:middle;'>
        <div style='margin-left:10px'>
            <font color='white' size=4>
                • 注意
            </font>
        </div>
    </div>
    <div style='background-color:#F3F4F7'>
        <div style='padding:15px 10px 15px 20px;line-height:1.5;'>
            根据卷积核的大小，输入的（最后）几列可能会丢失，因为它是有效互相关而不是完整互相关。这取决于用户是否添加了合适的 <code>padding</code>。
        </div>    
    </div>    
</div>

<div class='container' style='margin-top:40px;margin-bottom:20px;'>
    <div style='background-color:#54c7ec;height:36px;line-height:36px;vertical-align:middle;'>
        <div style='margin-left:10px'>
            <font color='white' size=4>
                • 注意
            </font>
        </div>
    </div>
    <div style='background-color:#F3F4F7'>
        <div style='padding:15px 10px 15px 20px;line-height:1.5;'>
            当 <i>groups=in_channels</i>，并且 <i>out_channels==K*in_channels</i>，其中 <i>K</i> 是正整数，这个操作在文献当中被称为逐深度卷积（<i>depthwise convolution</i>）。<br/>
            <br/>
            换句话说，对于尺寸为 \((N, C_{in}, D_{in}, H_{in}, W_{in})\) 的输入，一个带有 <i>K</i> 乘子的逐深度卷积可以通过参数 \((\mathrm{in\_channels}=C_{in}, \mathrm{out\_channels}=C_{in} \times K, ..., \mathrm{groups}=C_{in})\) 来进行构建。
        </div>    
    </div>    
</div>

<div class='container' style='margin-top:40px;margin-bottom:20px;'>
    <div style='background-color:#54c7ec;height:36px;line-height:36px;vertical-align:middle;'>
        <div style='margin-left:10px'>
            <font color='white' size=4>
                • 注意
            </font>
        </div>
    </div>
    <div style='background-color:#F3F4F7'>
        <div style='padding:15px 10px 15px 20px;line-height:1.5;'>
            某些条件下，使用 CuDNN 的 CUDA 后端的时候，这个操作可能会选择一个非确定性的操作来提升效果。如果这个操作不理想的话，你可以通过以下设置把这个操作变成确定性操作：
            <code>torch.backends.cudnn.deterministic = True</code>。可以查看<a href='https://pytorch.org/docs/stable/notes/randomness.html'>可复现性</a>的背景介绍。
        </div>    
    </div>    
</div>

<blockquote>
<p><strong><em>参数</em></strong></p>
<ul>
<li><em>in_channels (int)</em>：输入图像的通道数</li>
<li><em>out_channels (int)</em>：卷积生成的通道数</li>
<li><em>kernel_size (int or tuple)</em>：卷积核大小</li>
<li><em>stride (int or tuple, optional)</em>：卷积步长，默认为1</li>
<li><em>padding (int or tuple, optional)</em>：输入的三个维度（深、高、宽）两侧添加的零填充。默认为 0</li>
<li><em>padding_mode (string, optional)</em>：<code>zeros</code>，<code>reflect</code>，<code>replicate</code>，<code>circular</code>。默认为 <code>zeros</code></li>
<li><em>dilation (int or tuple, optional)</em>：和元素之间的间隔，默认为1</li>
<li><em>groups (int, optional)</em>：输入到输出之间的连接块数，默认为1</li>
<li><em>bias (bool, optional)</em>：如果为 <code>True</code>，添加可学习的偏置到输出，默认为 <code>True</code></li>
</ul>
<p><strong><em>形状</em></strong></p>
<ul>
<li>
<p>输入：<script type="math/tex">(N, C_{in}, D_{in}, H_{in}, W_{in})</script>
</p>
</li>
<li>
<p>输出： <script type="math/tex">(N, C_{out}, D_{out}, H_{out}, W_{out})</script>，其中
  <script type="math/tex; mode=display">
  D_{out} = \left[\frac{D_{in} + 2 \times padding[0] - dilation[0] \times(kernel\_size[0]-1)-1}{stride[0]}+1\right]
  </script>
</p>
</li>
</ul>
<p>
<script type="math/tex; mode=display">
  H_{out} = \left[\frac{H_{in} + 2 \times padding[1] - dilation[1] \times(kernel\_size[1]-1)-1}{stride[1]}+1\right]
  </script>
</p>
<p>
<script type="math/tex; mode=display">
  W_{out} = \left[\frac{W_{in}+2 \times padding[2]-dilation[2] \times (kernel\_size[2]-1)-1}{stride[2]}+1\right]
  </script>
</p>
<p><strong><em>变量</em></strong></p>
<ul>
<li>~<strong><em>Conv3d.weight</em></strong> (<em>Tensor</em>) —— 可学习的模块权重，形状为 <script type="math/tex">(\mathrm{out\_channels, \frac{\mathrm{in\_channels}}{groups}}, \mathrm{kernel\_size[0]}, \mathrm{kernel\_size[1]}, \mathrm{kernel\_size[2]})</script>。这些权重是通过 <script type="math/tex">\mathcal{U}(-\sqrt{k}, \sqrt{k})</script> 进行采样的，其中 <script type="math/tex">k=\frac{\mathrm{groups}}{C_{in} * \prod_{i=0}^{2} \mathrm{kernel\_size[i]}}</script>。</li>
<li>~<strong><em>Conv3d.bias</em></strong> (<em>Tensor</em>) —— 可学习的模块偏置，其形状为（out_channels）。如果 <code>bias</code> 设置为 <code>True</code>，这些值是通过 <script type="math/tex">\mathcal{U}(-\sqrt{k}, \sqrt{k})</script> 进行采样的，其中 <script type="math/tex">k=\frac{\mathrm{groups}}{C_{in} * \prod_{i=0}^{2} \mathrm{kernel\_size[i]}}</script>。</li>
</ul>
</blockquote>
<p>例子：</p>
<div class="codehilite"><pre><span></span><code><span class="o">&gt;&gt;&gt;</span> <span class="c1"># With square kernels and equal stride</span>
<span class="o">&gt;&gt;&gt;</span> <span class="n">m</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">Conv3d</span><span class="p">(</span><span class="mi">16</span><span class="p">,</span> <span class="mi">33</span><span class="p">,</span> <span class="mi">3</span><span class="p">,</span> <span class="n">stride</span><span class="o">=</span><span class="mi">2</span><span class="p">)</span>
<span class="o">&gt;&gt;&gt;</span> <span class="c1"># non-square kernels and unequal stride and with padding</span>
<span class="o">&gt;&gt;&gt;</span> <span class="n">m</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">Conv3d</span><span class="p">(</span><span class="mi">16</span><span class="p">,</span> <span class="mi">33</span><span class="p">,</span> <span class="p">(</span><span class="mi">3</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="mi">2</span><span class="p">),</span> <span class="n">stride</span><span class="o">=</span><span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">),</span> <span class="n">padding</span><span class="o">=</span><span class="p">(</span><span class="mi">4</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">0</span><span class="p">))</span>
<span class="o">&gt;&gt;&gt;</span> <span class="nb">input</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">randn</span><span class="p">(</span><span class="mi">20</span><span class="p">,</span> <span class="mi">16</span><span class="p">,</span> <span class="mi">10</span><span class="p">,</span> <span class="mi">50</span><span class="p">,</span> <span class="mi">100</span><span class="p">)</span>
<span class="o">&gt;&gt;&gt;</span> <span class="n">output</span> <span class="o">=</span> <span class="n">m</span><span class="p">(</span><span class="nb">input</span><span class="p">)</span>
</code></pre></div>
              
            </div>
          </div>
          <footer>
  
    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
      
        <a href="../convtranspose1d/" class="btn btn-neutral float-right" title="Convtranspose1d">Next <span class="icon icon-circle-arrow-right"></span></a>
      
      
        <a href="../conv2d/" class="btn btn-neutral" title="Conv2d"><span class="icon icon-circle-arrow-left"></span> Previous</a>
      
    </div>
  

  <hr/>

  <div role="contentinfo">
    <!-- Copyright etc -->
    
      <p>©2020 Rogerspy. All rights reserved.</p>
    
  </div>

  Built with <a href="https://www.mkdocs.org/">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
</footer>
      
        </div>
      </div>

    </section>

  </div>

  <div class="rst-versions" role="note" aria-label="versions">
    <span class="rst-current-version" data-toggle="rst-current-version">
      
      
        <span><a href="../conv2d/" style="color: #fcfcfc;">&laquo; Previous</a></span>
      
      
        <span style="margin-left: 15px"><a href="../convtranspose1d/" style="color: #fcfcfc">Next &raquo;</a></span>
      
    </span>
</div>
    <!-- Relative path from this page back to the site root (two levels up);
         presumably consumed by theme.js / search/main.js for link resolution. -->
    <script>var base_url = '../..';</script>
    <script src="../../js/theme.js" defer></script>
      <script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.0/MathJax.js?config=TeX-AMS-MML_HTMLorMML" defer></script>
      <script src="../../search/main.js" defer></script>
    <script defer>
        // Run after all deferred scripts: enable the theme's sidebar
        // navigation behavior (SphinxRtdTheme is defined by theme.js above).
        window.onload = function () {
            SphinxRtdTheme.Navigation.enable(true);
        };
    </script>

</body>
</html>
