<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">

<html lang="en">

<head>
  <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
  <title>LCOV - code analysis - include/caffe/layers/lstm_layer.hpp</title>
  <link rel="stylesheet" type="text/css" href="../../../gcov.css">
</head>

<body>

  <table width="100%" border=0 cellspacing=0 cellpadding=0>
    <tr><td class="title">LCOV - code coverage report</td></tr>
    <tr><td class="ruler"><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>

    <tr>
      <td width="100%">
        <table cellpadding=1 border=0 width="100%">
          <tr>
            <td width="10%" class="headerItem">Current view:</td>
            <td width="35%" class="headerValue"><a href="../../../index.html">top level</a> - <a href="index.html">include/caffe/layers</a> - lstm_layer.hpp<span style="font-size: 80%;"> (source / <a href="lstm_layer.hpp.func-sort-c.html">functions</a>)</span></td>
            <td width="5%"></td>
            <td width="15%"></td>
            <td width="10%" class="headerCovTableHead">Hit</td>
            <td width="10%" class="headerCovTableHead">Total</td>
            <td width="15%" class="headerCovTableHead">Coverage</td>
          </tr>
          <tr>
            <td class="headerItem">Test:</td>
            <td class="headerValue">code analysis</td>
            <td></td>
            <td class="headerItem">Lines:</td>
            <td class="headerCovTableEntry">0</td>
            <td class="headerCovTableEntry">12</td>
            <td class="headerCovTableEntryLo">0.0 %</td>
          </tr>
          <tr>
            <td class="headerItem">Date:</td>
            <td class="headerValue">2020-09-11 22:25:26</td>
            <td></td>
            <td class="headerItem">Functions:</td>
            <td class="headerCovTableEntry">0</td>
            <td class="headerCovTableEntry">22</td>
            <td class="headerCovTableEntryLo">0.0 %</td>
          </tr>
          <tr>
            <td class="headerItem">Legend:</td>
            <td class="headerValueLeg">            Lines:
            <span class="coverLegendCov">hit</span>
            <span class="coverLegendNoCov">not hit</span>
</td>
            <td></td>
          </tr>
          <tr><td><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>
        </table>
      </td>
    </tr>

    <tr><td class="ruler"><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>
  </table>

  <table cellpadding=0 cellspacing=0 border=0>
    <tr>
      <td><br></td>
    </tr>
    <tr>
      <td>
<pre class="sourceHeading">          Line data    Source code</pre>
<pre class="source">
<a name="1"><span class="lineNum">       1 </span>            : #ifndef CAFFE_LSTM_LAYER_HPP_</a>
<span class="lineNum">       2 </span>            : #define CAFFE_LSTM_LAYER_HPP_
<span class="lineNum">       3 </span>            : 
<span class="lineNum">       4 </span>            : #include &lt;string&gt;
<span class="lineNum">       5 </span>            : #include &lt;utility&gt;
<span class="lineNum">       6 </span>            : #include &lt;vector&gt;
<span class="lineNum">       7 </span>            : 
<span class="lineNum">       8 </span>            : #include &quot;caffe/blob.hpp&quot;
<span class="lineNum">       9 </span>            : #include &quot;caffe/common.hpp&quot;
<span class="lineNum">      10 </span>            : #include &quot;caffe/layer.hpp&quot;
<span class="lineNum">      11 </span>            : #include &quot;caffe/layers/recurrent_layer.hpp&quot;
<span class="lineNum">      12 </span>            : #include &quot;caffe/net.hpp&quot;
<span class="lineNum">      13 </span>            : #include &quot;caffe/proto/caffe.pb.h&quot;
<span class="lineNum">      14 </span>            : 
<span class="lineNum">      15 </span>            : namespace caffe {
<span class="lineNum">      16 </span>            : 
<span class="lineNum">      17 </span>            : template &lt;typename Dtype&gt; class RecurrentLayer;
<span class="lineNum">      18 </span>            : 
<span class="lineNum">      19 </span>            : /**
<span class="lineNum">      20 </span>            :  * @brief Processes sequential inputs using a &quot;Long Short-Term Memory&quot; (LSTM)
<span class="lineNum">      21 </span>            :  *        [1] style recurrent neural network (RNN). Implemented by unrolling
<span class="lineNum">      22 </span>            :  *        the LSTM computation through time.
<span class="lineNum">      23 </span>            :  *
<span class="lineNum">      24 </span>            :  * The specific architecture used in this implementation is as described in
<span class="lineNum">      25 </span>            :  * &quot;Learning to Execute&quot; [2], reproduced below:
<span class="lineNum">      26 </span>            :  *     i_t := \sigmoid[ W_{hi} * h_{t-1} + W_{xi} * x_t + b_i ]
<span class="lineNum">      27 </span>            :  *     f_t := \sigmoid[ W_{hf} * h_{t-1} + W_{xf} * x_t + b_f ]
<span class="lineNum">      28 </span>            :  *     o_t := \sigmoid[ W_{ho} * h_{t-1} + W_{xo} * x_t + b_o ]
<span class="lineNum">      29 </span>            :  *     g_t :=    \tanh[ W_{hg} * h_{t-1} + W_{xg} * x_t + b_g ]
<span class="lineNum">      30 </span>            :  *     c_t := (f_t .* c_{t-1}) + (i_t .* g_t)
<span class="lineNum">      31 </span>            :  *     h_t := o_t .* \tanh[c_t]
<span class="lineNum">      32 </span>            :  * In the implementation, the i, f, o, and g computations are performed as a
<span class="lineNum">      33 </span>            :  * single inner product.
<span class="lineNum">      34 </span>            :  *
<span class="lineNum">      35 </span>            :  * Notably, this implementation lacks the &quot;diagonal&quot; gates, as used in the
<span class="lineNum">      36 </span>            :  * LSTM architectures described by Alex Graves [3] and others.
<span class="lineNum">      37 </span>            :  *
<span class="lineNum">      38 </span>            :  * [1] Hochreiter, Sepp, and Schmidhuber, Jürgen. &quot;Long short-term memory.&quot;
<span class="lineNum">      39 </span>            :  *     Neural Computation 9, no. 8 (1997): 1735-1780.
<span class="lineNum">      40 </span>            :  *
<span class="lineNum">      41 </span>            :  * [2] Zaremba, Wojciech, and Sutskever, Ilya. &quot;Learning to execute.&quot;
<span class="lineNum">      42 </span>            :  *     arXiv preprint arXiv:1410.4615 (2014).
<span class="lineNum">      43 </span>            :  *
<span class="lineNum">      44 </span>            :  * [3] Graves, Alex. &quot;Generating sequences with recurrent neural networks.&quot;
<span class="lineNum">      45 </span>            :  *     arXiv preprint arXiv:1308.0850 (2013).
<a name="46"><span class="lineNum">      46 </span>            :  */</a>
<span class="lineNum">      47 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      48 </span><span class="lineNoCov">          0 : class LSTMLayer : public RecurrentLayer&lt;Dtype&gt; {</span>
<span class="lineNum">      49 </span>            :  public:
<span class="lineNum">      50 </span><span class="lineNoCov">          0 :   explicit LSTMLayer(const LayerParameter&amp; param)</span>
<a name="51"><span class="lineNum">      51 </span><span class="lineNoCov">          0 :       : RecurrentLayer&lt;Dtype&gt;(param) {}</span></a>
<span class="lineNum">      52 </span>            : 
<span class="lineNum">      53 </span><span class="lineNoCov">          0 :   virtual inline const char* type() const { return &quot;LSTM&quot;; }</span>
<span class="lineNum">      54 </span>            : 
<span class="lineNum">      55 </span>            :  protected:
<span class="lineNum">      56 </span>            :   virtual void FillUnrolledNet(NetParameter* net_param) const;
<span class="lineNum">      57 </span>            :   virtual void RecurrentInputBlobNames(vector&lt;string&gt;* names) const;
<span class="lineNum">      58 </span>            :   virtual void RecurrentOutputBlobNames(vector&lt;string&gt;* names) const;
<span class="lineNum">      59 </span>            :   virtual void RecurrentInputShapes(vector&lt;BlobShape&gt;* shapes) const;
<span class="lineNum">      60 </span>            :   virtual void OutputBlobNames(vector&lt;string&gt;* names) const;
<span class="lineNum">      61 </span>            : };
<span class="lineNum">      62 </span>            : 
<span class="lineNum">      63 </span>            : /**
<span class="lineNum">      64 </span>            :  * @brief A helper for LSTMLayer: computes a single timestep of the
<span class="lineNum">      65 </span>            :  *        non-linearity of the LSTM, producing the updated cell and hidden
<span class="lineNum">      66 </span>            :  *        states.
<a name="67"><span class="lineNum">      67 </span>            :  */</a>
<span class="lineNum">      68 </span>            : template &lt;typename Dtype&gt;
<a name="69"><span class="lineNum">      69 </span><span class="lineNoCov">          0 : class LSTMUnitLayer : public Layer&lt;Dtype&gt; {</span></a>
<span class="lineNum">      70 </span>            :  public:
<span class="lineNum">      71 </span><span class="lineNoCov">          0 :   explicit LSTMUnitLayer(const LayerParameter&amp; param)</span>
<span class="lineNum">      72 </span><span class="lineNoCov">          0 :       : Layer&lt;Dtype&gt;(param) {}</span>
<span class="lineNum">      73 </span>            :   virtual void Reshape(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,
<a name="74"><span class="lineNum">      74 </span>            :       const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top);</a>
<a name="75"><span class="lineNum">      75 </span>            : </a>
<a name="76"><span class="lineNum">      76 </span><span class="lineNoCov">          0 :   virtual inline const char* type() const { return &quot;LSTMUnit&quot;; }</span></a>
<span class="lineNum">      77 </span><span class="lineNoCov">          0 :   virtual inline int ExactNumBottomBlobs() const { return 3; }</span>
<a name="78"><span class="lineNum">      78 </span><span class="lineNoCov">          0 :   virtual inline int ExactNumTopBlobs() const { return 2; }</span></a>
<span class="lineNum">      79 </span>            : 
<span class="lineNum">      80 </span><span class="lineNoCov">          0 :   virtual inline bool AllowForceBackward(const int bottom_index) const {</span>
<span class="lineNum">      81 </span>            :     // Can't propagate to sequence continuation indicators.
<span class="lineNum">      82 </span><span class="lineNoCov">          0 :     return bottom_index != 2;</span>
<span class="lineNum">      83 </span>            :   }
<span class="lineNum">      84 </span>            : 
<span class="lineNum">      85 </span>            :  protected:
<span class="lineNum">      86 </span>            :   /**
<span class="lineNum">      87 </span>            :    * @param bottom input Blob vector (length 3)
<span class="lineNum">      88 </span>            :    *   -# @f$ (1 \times N \times D) @f$
<span class="lineNum">      89 </span>            :    *      the previous timestep cell state @f$ c_{t-1} @f$
<span class="lineNum">      90 </span>            :    *   -# @f$ (1 \times N \times 4D) @f$
<span class="lineNum">      91 </span>            :    *      the &quot;gate inputs&quot; @f$ [i_t', f_t', o_t', g_t'] @f$
<span class="lineNum">      92 </span>            :    *   -# @f$ (1 \times N) @f$
<span class="lineNum">      93 </span>            :    *      the sequence continuation indicators  @f$ \delta_t @f$
<span class="lineNum">      94 </span>            :    * @param top output Blob vector (length 2)
<span class="lineNum">      95 </span>            :    *   -# @f$ (1 \times N \times D) @f$
<span class="lineNum">      96 </span>            :    *      the updated cell state @f$ c_t @f$, computed as:
<span class="lineNum">      97 </span>            :    *          i_t := \sigmoid[i_t']
<span class="lineNum">      98 </span>            :    *          f_t := \sigmoid[f_t']
<span class="lineNum">      99 </span>            :    *          o_t := \sigmoid[o_t']
<span class="lineNum">     100 </span>            :    *          g_t := \tanh[g_t']
<span class="lineNum">     101 </span>            :    *          c_t := cont_t * (f_t .* c_{t-1}) + (i_t .* g_t)
<span class="lineNum">     102 </span>            :    *   -# @f$ (1 \times N \times D) @f$
<span class="lineNum">     103 </span>            :    *      the updated hidden state @f$ h_t @f$, computed as:
<span class="lineNum">     104 </span>            :    *          h_t := o_t .* \tanh[c_t]
<span class="lineNum">     105 </span>            :    */
<span class="lineNum">     106 </span>            :   virtual void Forward_cpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,
<span class="lineNum">     107 </span>            :       const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top);
<span class="lineNum">     108 </span>            :   virtual void Forward_gpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,
<span class="lineNum">     109 </span>            :       const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top);
<span class="lineNum">     110 </span>            : 
<span class="lineNum">     111 </span>            :   /**
<span class="lineNum">     112 </span>            :    * @brief Computes the error gradient w.r.t. the LSTMUnit inputs.
<span class="lineNum">     113 </span>            :    *
<span class="lineNum">     114 </span>            :    * @param top output Blob vector (length 2), providing the error gradient with
<span class="lineNum">     115 </span>            :    *        respect to the outputs
<span class="lineNum">     116 </span>            :    *   -# @f$ (1 \times N \times D) @f$:
<span class="lineNum">     117 </span>            :    *      containing error gradients @f$ \frac{\partial E}{\partial c_t} @f$
<span class="lineNum">     118 </span>            :    *      with respect to the updated cell state @f$ c_t @f$
<span class="lineNum">     119 </span>            :    *   -# @f$ (1 \times N \times D) @f$:
<span class="lineNum">     120 </span>            :    *      containing error gradients @f$ \frac{\partial E}{\partial h_t} @f$
<span class="lineNum">     121 </span>            :    *      with respect to the updated hidden state @f$ h_t @f$
<span class="lineNum">     122 </span>            :    * @param propagate_down see Layer::Backward.
<span class="lineNum">     123 </span>            :    * @param bottom input Blob vector (length 3), into which the error gradients
<span class="lineNum">     124 </span>            :    *        with respect to the LSTMUnit inputs @f$ c_{t-1} @f$ and the gate
<span class="lineNum">     125 </span>            :    *        inputs are computed.  Computation of the error gradients w.r.t.
<span class="lineNum">     126 </span>            :    *        the sequence indicators is not implemented.
<span class="lineNum">     127 </span>            :    *   -# @f$ (1 \times N \times D) @f$
<span class="lineNum">     128 </span>            :    *      the error gradient w.r.t. the previous timestep cell state
<span class="lineNum">     129 </span>            :    *      @f$ c_{t-1} @f$
<span class="lineNum">     130 </span>            :    *   -# @f$ (1 \times N \times 4D) @f$
<span class="lineNum">     131 </span>            :    *      the error gradient w.r.t. the &quot;gate inputs&quot;
<span class="lineNum">     132 </span>            :    *      @f$ [
<span class="lineNum">     133 </span>            :    *          \frac{\partial E}{\partial i_t}
<span class="lineNum">     134 </span>            :    *          \frac{\partial E}{\partial f_t}
<span class="lineNum">     135 </span>            :    *          \frac{\partial E}{\partial o_t}
<span class="lineNum">     136 </span>            :    *          \frac{\partial E}{\partial g_t}
<span class="lineNum">     137 </span>            :    *          ] @f$
<span class="lineNum">     138 </span>            :    *   -# @f$ (1 \times N) @f$
<span class="lineNum">     139 </span>            :    *      the gradient w.r.t. the sequence continuation indicators
<span class="lineNum">     140 </span>            :    *      @f$ \delta_t @f$ is currently not computed.
<span class="lineNum">     141 </span>            :    */
<span class="lineNum">     142 </span>            :   virtual void Backward_cpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top,
<span class="lineNum">     143 </span>            :       const vector&lt;bool&gt;&amp; propagate_down, const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom);
<span class="lineNum">     144 </span>            :   virtual void Backward_gpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top,
<span class="lineNum">     145 </span>            :       const vector&lt;bool&gt;&amp; propagate_down, const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom);
<span class="lineNum">     146 </span>            : 
<span class="lineNum">     147 </span>            :   /// @brief The hidden and output dimension.
<span class="lineNum">     148 </span>            :   int hidden_dim_;
<span class="lineNum">     149 </span>            :   Blob&lt;Dtype&gt; X_acts_;
<span class="lineNum">     150 </span>            : };
<span class="lineNum">     151 </span>            : 
<span class="lineNum">     152 </span>            : }  // namespace caffe
<span class="lineNum">     153 </span>            : 
<span class="lineNum">     154 </span>            : #endif  // CAFFE_LSTM_LAYER_HPP_
</pre>
      </td>
    </tr>
  </table>
  <br>

  <table width="100%" border=0 cellspacing=0 cellpadding=0>
    <tr><td class="ruler"><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>
    <tr><td class="versionInfo">Generated by: <a href="http://ltp.sourceforge.net/coverage/lcov.php" target="_parent">LCOV version 1.12</a></td></tr>
  </table>
  <br>

</body>
</html>
