<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">

<html lang="en">

<head>
  <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
  <title>LCOV - code analysis - include/caffe/layers/recurrent_layer.hpp</title>
  <link rel="stylesheet" type="text/css" href="../../../gcov.css">
</head>

<body>

  <table width="100%" border=0 cellspacing=0 cellpadding=0>
    <tr><td class="title">LCOV - code coverage report</td></tr>
    <tr><td class="ruler"><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>

    <tr>
      <td width="100%">
        <table cellpadding=1 border=0 width="100%">
          <tr>
            <td width="10%" class="headerItem">Current view:</td>
            <td width="35%" class="headerValue"><a href="../../../index.html">top level</a> - <a href="index.html">include/caffe/layers</a> - recurrent_layer.hpp<span style="font-size: 80%;"> (source / <a href="recurrent_layer.hpp.func-sort-c.html">functions</a>)</span></td>
            <td width="5%"></td>
            <td width="15%"></td>
            <td width="10%" class="headerCovTableHead">Hit</td>
            <td width="10%" class="headerCovTableHead">Total</td>
            <td width="15%" class="headerCovTableHead">Coverage</td>
          </tr>
          <tr>
            <td class="headerItem">Test:</td>
            <td class="headerValue">code analysis</td>
            <td></td>
            <td class="headerItem">Lines:</td>
            <td class="headerCovTableEntry">0</td>
            <td class="headerCovTableEntry">19</td>
            <td class="headerCovTableEntryLo">0.0 %</td>
          </tr>
          <tr>
            <td class="headerItem">Date:</td>
            <td class="headerValue">2020-09-11 22:50:33</td>
            <td></td>
            <td class="headerItem">Functions:</td>
            <td class="headerCovTableEntry">0</td>
            <td class="headerCovTableEntry">16</td>
            <td class="headerCovTableEntryLo">0.0 %</td>
          </tr>
          <tr>
            <td class="headerItem">Legend:</td>
            <td class="headerValueLeg">            Lines:
            <span class="coverLegendCov">hit</span>
            <span class="coverLegendNoCov">not hit</span>
</td>
            <td></td>
          </tr>
          <tr><td><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>
        </table>
      </td>
    </tr>

    <tr><td class="ruler"><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>
  </table>

  <table cellpadding=0 cellspacing=0 border=0>
    <tr>
      <td><br></td>
    </tr>
    <tr>
      <td>
<pre class="sourceHeading">          Line data    Source code</pre>
<pre class="source">
<a name="1"><span class="lineNum">       1 </span>            : #ifndef CAFFE_RECURRENT_LAYER_HPP_</a>
<span class="lineNum">       2 </span>            : #define CAFFE_RECURRENT_LAYER_HPP_
<span class="lineNum">       3 </span>            : 
<span class="lineNum">       4 </span>            : #include &lt;string&gt;
<span class="lineNum">       5 </span>            : #include &lt;utility&gt;
<span class="lineNum">       6 </span>            : #include &lt;vector&gt;
<span class="lineNum">       7 </span>            : 
<span class="lineNum">       8 </span>            : #include &quot;caffe/blob.hpp&quot;
<span class="lineNum">       9 </span>            : #include &quot;caffe/common.hpp&quot;
<span class="lineNum">      10 </span>            : #include &quot;caffe/layer.hpp&quot;
<span class="lineNum">      11 </span>            : #include &quot;caffe/net.hpp&quot;
<span class="lineNum">      12 </span>            : #include &quot;caffe/proto/caffe.pb.h&quot;
<span class="lineNum">      13 </span>            : #include &quot;caffe/util/format.hpp&quot;
<span class="lineNum">      14 </span>            : 
<span class="lineNum">      15 </span>            : namespace caffe {
<span class="lineNum">      16 </span>            : 
<span class="lineNum">      17 </span>            : template &lt;typename Dtype&gt; class RecurrentLayer;
<span class="lineNum">      18 </span>            : 
<span class="lineNum">      19 </span>            : /**
<span class="lineNum">      20 </span>            :  * @brief An abstract class for implementing recurrent behavior inside of an
<span class="lineNum">      21 </span>            :  *        unrolled network.  This Layer type cannot be instantiated -- instead,
<span class="lineNum">      22 </span>            :  *        you should use one of its implementations which defines the recurrent
<span class="lineNum">      23 </span>            :  *        architecture, such as RNNLayer or LSTMLayer.
<a name="24"><span class="lineNum">      24 </span>            :  */</a>
<span class="lineNum">      25 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      26 </span><span class="lineNoCov">          0 : class RecurrentLayer : public Layer&lt;Dtype&gt; {</span>
<span class="lineNum">      27 </span>            :  public:
<span class="lineNum">      28 </span><span class="lineNoCov">          0 :   explicit RecurrentLayer(const LayerParameter&amp; param)</span>
<span class="lineNum">      29 </span><span class="lineNoCov">          0 :       : Layer&lt;Dtype&gt;(param) {}</span>
<span class="lineNum">      30 </span>            :   virtual void LayerSetUp(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,
<span class="lineNum">      31 </span>            :       const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top);
<span class="lineNum">      32 </span>            :   virtual void Reshape(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,
<span class="lineNum">      33 </span>            :       const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top);
<a name="34"><span class="lineNum">      34 </span>            :   virtual void Reset();</a>
<span class="lineNum">      35 </span>            : 
<span class="lineNum">      36 </span><span class="lineNoCov">          0 :   virtual inline const char* type() const { return &quot;Recurrent&quot;; }</span>
<span class="lineNum">      37 </span><span class="lineNoCov">          0 :   virtual inline int MinBottomBlobs() const {</span>
<span class="lineNum">      38 </span>            :     int min_bottoms = 2;
<span class="lineNum">      39 </span><span class="lineNoCov">          0 :     if (this-&gt;layer_param_.recurrent_param().expose_hidden()) {</span>
<span class="lineNum">      40 </span><span class="lineNoCov">          0 :       vector&lt;string&gt; inputs;</span>
<span class="lineNum">      41 </span><span class="lineNoCov">          0 :       this-&gt;RecurrentInputBlobNames(&amp;inputs);</span>
<span class="lineNum">      42 </span><span class="lineNoCov">          0 :       min_bottoms += inputs.size();</span>
<span class="lineNum">      43 </span>            :     }
<a name="44"><span class="lineNum">      44 </span><span class="lineNoCov">          0 :     return min_bottoms;</span></a>
<span class="lineNum">      45 </span>            :   }
<span class="lineNum">      46 </span><span class="lineNoCov">          0 :   virtual inline int MaxBottomBlobs() const { return MinBottomBlobs() + 1; }</span>
<span class="lineNum">      47 </span><span class="lineNoCov">          0 :   virtual inline int ExactNumTopBlobs() const {</span>
<span class="lineNum">      48 </span>            :     int num_tops = 1;
<span class="lineNum">      49 </span><span class="lineNoCov">          0 :     if (this-&gt;layer_param_.recurrent_param().expose_hidden()) {</span>
<span class="lineNum">      50 </span><span class="lineNoCov">          0 :       vector&lt;string&gt; outputs;</span>
<span class="lineNum">      51 </span><span class="lineNoCov">          0 :       this-&gt;RecurrentOutputBlobNames(&amp;outputs);</span>
<span class="lineNum">      52 </span><span class="lineNoCov">          0 :       num_tops += outputs.size();</span>
<span class="lineNum">      53 </span>            :     }
<span class="lineNum">      54 </span><span class="lineNoCov">          0 :     return num_tops;</span>
<a name="55"><span class="lineNum">      55 </span>            :   }</a>
<span class="lineNum">      56 </span>            : 
<span class="lineNum">      57 </span><span class="lineNoCov">          0 :   virtual inline bool AllowForceBackward(const int bottom_index) const {</span>
<span class="lineNum">      58 </span>            :     // Can't propagate to sequence continuation indicators.
<span class="lineNum">      59 </span><span class="lineNoCov">          0 :     return bottom_index != 1;</span>
<span class="lineNum">      60 </span>            :   }
<span class="lineNum">      61 </span>            : 
<span class="lineNum">      62 </span>            :  protected:
<span class="lineNum">      63 </span>            :   /**
<span class="lineNum">      64 </span>            :    * @brief Fills net_param with the recurrent network architecture.  Subclasses
<span class="lineNum">      65 </span>            :    *        should define this -- see RNNLayer and LSTMLayer for examples.
<span class="lineNum">      66 </span>            :    */
<span class="lineNum">      67 </span>            :   virtual void FillUnrolledNet(NetParameter* net_param) const = 0;
<span class="lineNum">      68 </span>            : 
<span class="lineNum">      69 </span>            :   /**
<span class="lineNum">      70 </span>            :    * @brief Fills names with the names of the 0th timestep recurrent input
<span class="lineNum">      71 </span>            :    *        Blob&amp;s.  Subclasses should define this -- see RNNLayer and LSTMLayer
<span class="lineNum">      72 </span>            :    *        for examples.
<span class="lineNum">      73 </span>            :    */
<span class="lineNum">      74 </span>            :   virtual void RecurrentInputBlobNames(vector&lt;string&gt;* names) const = 0;
<span class="lineNum">      75 </span>            : 
<span class="lineNum">      76 </span>            :   /**
<span class="lineNum">      77 </span>            :    * @brief Fills shapes with the shapes of the recurrent input Blob&amp;s.
<span class="lineNum">      78 </span>            :    *        Subclasses should define this -- see RNNLayer and LSTMLayer
<span class="lineNum">      79 </span>            :    *        for examples.
<span class="lineNum">      80 </span>            :    */
<span class="lineNum">      81 </span>            :   virtual void RecurrentInputShapes(vector&lt;BlobShape&gt;* shapes) const = 0;
<span class="lineNum">      82 </span>            : 
<span class="lineNum">      83 </span>            :   /**
<span class="lineNum">      84 </span>            :    * @brief Fills names with the names of the Tth timestep recurrent output
<span class="lineNum">      85 </span>            :    *        Blob&amp;s.  Subclasses should define this -- see RNNLayer and LSTMLayer
<span class="lineNum">      86 </span>            :    *        for examples.
<span class="lineNum">      87 </span>            :    */
<span class="lineNum">      88 </span>            :   virtual void RecurrentOutputBlobNames(vector&lt;string&gt;* names) const = 0;
<span class="lineNum">      89 </span>            : 
<span class="lineNum">      90 </span>            :   /**
<span class="lineNum">      91 </span>            :    * @brief Fills names with the names of the output blobs, concatenated across
<span class="lineNum">      92 </span>            :    *        all timesteps.  Should return a name for each top Blob.
<span class="lineNum">      93 </span>            :    *        Subclasses should define this -- see RNNLayer and LSTMLayer for
<span class="lineNum">      94 </span>            :    *        examples.
<span class="lineNum">      95 </span>            :    */
<span class="lineNum">      96 </span>            :   virtual void OutputBlobNames(vector&lt;string&gt;* names) const = 0;
<span class="lineNum">      97 </span>            : 
<span class="lineNum">      98 </span>            :   /**
<span class="lineNum">      99 </span>            :    * @param bottom input Blob vector (length 2-3)
<span class="lineNum">     100 </span>            :    *
<span class="lineNum">     101 </span>            :    *   -# @f$ (T \times N \times ...) @f$
<span class="lineNum">     102 </span>            :    *      the time-varying input @f$ x @f$.  After the first two axes, whose
<span class="lineNum">     103 </span>            :    *      dimensions must correspond to the number of timesteps @f$ T @f$ and
<span class="lineNum">     104 </span>            :    *      the number of independent streams @f$ N @f$, respectively, its
<span class="lineNum">     105 </span>            :    *      dimensions may be arbitrary.  Note that the ordering of dimensions --
<span class="lineNum">     106 </span>            :    *      @f$ (T \times N \times ...) @f$, rather than
<span class="lineNum">     107 </span>            :    *      @f$ (N \times T \times ...) @f$ -- means that the @f$ N @f$
<span class="lineNum">     108 </span>            :    *      independent input streams must be &quot;interleaved&quot;.
<span class="lineNum">     109 </span>            :    *
<span class="lineNum">     110 </span>            :    *   -# @f$ (T \times N) @f$
<span class="lineNum">     111 </span>            :    *      the sequence continuation indicators @f$ \delta @f$.
<span class="lineNum">     112 </span>            :    *      These inputs should be binary (0 or 1) indicators, where
<span class="lineNum">     113 </span>            :    *      @f$ \delta_{t,n} = 0 @f$ means that timestep @f$ t @f$ of stream
<span class="lineNum">     114 </span>            :    *      @f$ n @f$ is the beginning of a new sequence, and hence the previous
<span class="lineNum">     115 </span>            :    *      hidden state @f$ h_{t-1} @f$ is multiplied by @f$ \delta_t = 0 @f$
<span class="lineNum">     116 </span>            :    *      and has no effect on the cell's output at timestep @f$ t @f$, and
<span class="lineNum">     117 </span>            :    *      a value of @f$ \delta_{t,n} = 1 @f$ means that timestep @f$ t @f$ of
<span class="lineNum">     118 </span>            :    *      stream @f$ n @f$ is a continuation from the previous timestep
<span class="lineNum">     119 </span>            :    *      @f$ t-1 @f$, and the previous hidden state @f$ h_{t-1} @f$ affects the
<span class="lineNum">     120 </span>            :    *      updated hidden state and output.
<span class="lineNum">     121 </span>            :    *
<span class="lineNum">     122 </span>            :    *   -# @f$ (N \times ...) @f$ (optional)
<span class="lineNum">     123 </span>            :    *      the static (non-time-varying) input @f$ x_{static} @f$.
<span class="lineNum">     124 </span>            :    *      After the first axis, whose dimension must be the number of
<span class="lineNum">     125 </span>            :    *      independent streams, its dimensions may be arbitrary.
<span class="lineNum">     126 </span>            :    *      This is mathematically equivalent to using a time-varying input of
<span class="lineNum">     127 </span>            :    *      @f$ x'_t = [x_t; x_{static}] @f$ -- i.e., tiling the static input
<span class="lineNum">     128 </span>            :    *      across the @f$ T @f$ timesteps and concatenating with the time-varying
<span class="lineNum">     129 </span>            :    *      input.  Note that if this input is used, all timesteps in a single
<span class="lineNum">     130 </span>            :    *      batch within a particular one of the @f$ N @f$ streams must share the
<span class="lineNum">     131 </span>            :    *      same static input, even if the sequence continuation indicators
<span class="lineNum">     132 </span>            :    *      suggest that different sequences are ending and beginning within a
<span class="lineNum">     133 </span>            :    *      single batch.  This may require padding and/or truncation for uniform
<span class="lineNum">     134 </span>            :    *      length.
<span class="lineNum">     135 </span>            :    *
<span class="lineNum">     136 </span>            :    * @param top output Blob vector (length 1)
<span class="lineNum">     137 </span>            :    *   -# @f$ (T \times N \times D) @f$
<span class="lineNum">     138 </span>            :    *      the time-varying output @f$ y @f$, where @f$ D @f$ is
<span class="lineNum">     139 </span>            :    *      &lt;code&gt;recurrent_param.num_output()&lt;/code&gt;.
<span class="lineNum">     140 </span>            :    *      Refer to documentation for particular RecurrentLayer implementations
<span class="lineNum">     141 </span>            :    *      (such as RNNLayer and LSTMLayer) for the definition of @f$ y @f$.
<span class="lineNum">     142 </span>            :    */
<span class="lineNum">     143 </span>            :   virtual void Forward_cpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,
<span class="lineNum">     144 </span>            :       const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top);
<span class="lineNum">     145 </span>            :   virtual void Forward_gpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,
<span class="lineNum">     146 </span>            :       const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top);
<span class="lineNum">     147 </span>            :   virtual void Backward_cpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top,
<span class="lineNum">     148 </span>            :       const vector&lt;bool&gt;&amp; propagate_down, const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom);
<span class="lineNum">     149 </span>            : 
<span class="lineNum">     150 </span>            :   /// @brief A Net to implement the Recurrent functionality.
<span class="lineNum">     151 </span>            :   shared_ptr&lt;Net&lt;Dtype&gt; &gt; unrolled_net_;
<span class="lineNum">     152 </span>            : 
<span class="lineNum">     153 </span>            :   /// @brief The number of independent streams to process simultaneously.
<span class="lineNum">     154 </span>            :   int N_;
<span class="lineNum">     155 </span>            : 
<span class="lineNum">     156 </span>            :   /**
<span class="lineNum">     157 </span>            :    * @brief The number of timesteps in the layer's input, and the number of
<span class="lineNum">     158 </span>            :    *        timesteps over which to backpropagate through time.
<span class="lineNum">     159 </span>            :    */
<span class="lineNum">     160 </span>            :   int T_;
<span class="lineNum">     161 </span>            : 
<span class="lineNum">     162 </span>            :   /// @brief Whether the layer has a &quot;static&quot; input copied across all timesteps.
<span class="lineNum">     163 </span>            :   bool static_input_;
<span class="lineNum">     164 </span>            : 
<span class="lineNum">     165 </span>            :   /**
<span class="lineNum">     166 </span>            :    * @brief The last layer to run in the network. (Any later layers are losses
<span class="lineNum">     167 </span>            :    *        added to force the recurrent net to do backprop.)
<span class="lineNum">     168 </span>            :    */
<span class="lineNum">     169 </span>            :   int last_layer_index_;
<span class="lineNum">     170 </span>            : 
<span class="lineNum">     171 </span>            :   /**
<span class="lineNum">     172 </span>            :    * @brief Whether the layer's hidden state at the first and last timesteps
<span class="lineNum">     173 </span>            :    *        are layer inputs and outputs, respectively.
<span class="lineNum">     174 </span>            :    */
<span class="lineNum">     175 </span>            :   bool expose_hidden_;
<span class="lineNum">     176 </span>            : 
<span class="lineNum">     177 </span>            :   vector&lt;Blob&lt;Dtype&gt;* &gt; recur_input_blobs_;
<span class="lineNum">     178 </span>            :   vector&lt;Blob&lt;Dtype&gt;* &gt; recur_output_blobs_;
<span class="lineNum">     179 </span>            :   vector&lt;Blob&lt;Dtype&gt;* &gt; output_blobs_;
<span class="lineNum">     180 </span>            :   Blob&lt;Dtype&gt;* x_input_blob_;
<span class="lineNum">     181 </span>            :   Blob&lt;Dtype&gt;* x_static_input_blob_;
<span class="lineNum">     182 </span>            :   Blob&lt;Dtype&gt;* cont_input_blob_;
<span class="lineNum">     183 </span>            : };
<span class="lineNum">     184 </span>            : 
<span class="lineNum">     185 </span>            : }  // namespace caffe
<span class="lineNum">     186 </span>            : 
<span class="lineNum">     187 </span>            : #endif  // CAFFE_RECURRENT_LAYER_HPP_
</pre>
      </td>
    </tr>
  </table>
  <br>

  <table width="100%" border=0 cellspacing=0 cellpadding=0>
    <tr><td class="ruler"><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>
    <tr><td class="versionInfo">Generated by: <a href="http://ltp.sourceforge.net/coverage/lcov.php" target="_parent">LCOV version 1.12</a></td></tr>
  </table>
  <br>

</body>
</html>
