<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">

<html lang="en">

<head>
  <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
  <title>LCOV - code analysis - src/caffe/layers/recurrent_layer.cpp</title>
  <link rel="stylesheet" type="text/css" href="../../../gcov.css">
</head>

<body>

  <table width="100%" border="0" cellspacing="0" cellpadding="0">
    <tr><td class="title">LCOV - code coverage report</td></tr>
    <tr><td class="ruler"><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>

    <tr>
      <td width="100%">
        <table cellpadding="1" border="0" width="100%">
          <tr>
            <td width="10%" class="headerItem">Current view:</td>
            <td width="35%" class="headerValue"><a href="../../../index.html">top level</a> - <a href="index.html">src/caffe/layers</a> - recurrent_layer.cpp<span style="font-size: 80%;"> (source / <a href="recurrent_layer.cpp.func-sort-c.html">functions</a>)</span></td>
            <td width="5%"></td>
            <td width="15%"></td>
            <td width="10%" class="headerCovTableHead">Hit</td>
            <td width="10%" class="headerCovTableHead">Total</td>
            <td width="15%" class="headerCovTableHead">Coverage</td>
          </tr>
          <tr>
            <td class="headerItem">Test:</td>
            <td class="headerValue">code analysis</td>
            <td></td>
            <td class="headerItem">Lines:</td>
            <td class="headerCovTableEntry">1</td>
            <td class="headerCovTableEntry">149</td>
            <td class="headerCovTableEntryLo">0.7 %</td>
          </tr>
          <tr>
            <td class="headerItem">Date:</td>
            <td class="headerValue">2020-09-11 22:50:33</td>
            <td></td>
            <td class="headerItem">Functions:</td>
            <td class="headerCovTableEntry">1</td>
            <td class="headerCovTableEntry">13</td>
            <td class="headerCovTableEntryLo">7.7 %</td>
          </tr>
          <tr>
            <td class="headerItem">Legend:</td>
            <td class="headerValueLeg">Lines:
              <span class="coverLegendCov">hit</span>
              <span class="coverLegendNoCov">not hit</span>
            </td>
            <td></td>
          </tr>
          <tr><td><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>
        </table>
      </td>
    </tr>

    <tr><td class="ruler"><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>
  </table>

  <table cellpadding="0" cellspacing="0" border="0">
    <tr>
      <td><br></td>
    </tr>
    <tr>
      <td>
<pre class="sourceHeading">          Line data    Source code</pre>
<pre class="source">
<a name="1"><span class="lineNum">       1 </span>            : #include &lt;string&gt;</a>
<span class="lineNum">       2 </span>            : #include &lt;vector&gt;
<span class="lineNum">       3 </span>            : 
<span class="lineNum">       4 </span>            : #include &quot;caffe/blob.hpp&quot;
<span class="lineNum">       5 </span>            : #include &quot;caffe/common.hpp&quot;
<span class="lineNum">       6 </span>            : #include &quot;caffe/filler.hpp&quot;
<span class="lineNum">       7 </span>            : #include &quot;caffe/layer.hpp&quot;
<span class="lineNum">       8 </span>            : #include &quot;caffe/layers/recurrent_layer.hpp&quot;
<span class="lineNum">       9 </span>            : #include &quot;caffe/util/math_functions.hpp&quot;
<span class="lineNum">      10 </span>            : 
<span class="lineNum">      11 </span>            : namespace caffe {
<span class="lineNum">      12 </span>            : 
<span class="lineNum">      13 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      14 </span><span class="lineNoCov">          0 : void RecurrentLayer&lt;Dtype&gt;::LayerSetUp(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,</span>
<span class="lineNum">      15 </span>            :       const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">      16 </span><span class="lineNoCov">          0 :   CHECK_GE(bottom[0]-&gt;num_axes(), 2)</span>
<span class="lineNum">      17 </span>            :       &lt;&lt; &quot;bottom[0] must have at least 2 axes -- (#timesteps, #streams, ...)&quot;;
<span class="lineNum">      18 </span><span class="lineNoCov">          0 :   T_ = bottom[0]-&gt;shape(0);</span>
<span class="lineNum">      19 </span><span class="lineNoCov">          0 :   N_ = bottom[0]-&gt;shape(1);</span>
<span class="lineNum">      20 </span><span class="lineNoCov">          0 :   LOG(INFO) &lt;&lt; &quot;Initializing recurrent layer: assuming input batch contains &quot;</span>
<span class="lineNum">      21 </span><span class="lineNoCov">          0 :             &lt;&lt; T_ &lt;&lt; &quot; timesteps of &quot; &lt;&lt; N_ &lt;&lt; &quot; independent streams.&quot;;</span>
<span class="lineNum">      22 </span>            : 
<span class="lineNum">      23 </span><span class="lineNoCov">          0 :   CHECK_EQ(bottom[1]-&gt;num_axes(), 2)</span>
<span class="lineNum">      24 </span>            :       &lt;&lt; &quot;bottom[1] must have exactly 2 axes -- (#timesteps, #streams)&quot;;
<span class="lineNum">      25 </span><span class="lineNoCov">          0 :   CHECK_EQ(T_, bottom[1]-&gt;shape(0));</span>
<span class="lineNum">      26 </span><span class="lineNoCov">          0 :   CHECK_EQ(N_, bottom[1]-&gt;shape(1));</span>
<span class="lineNum">      27 </span>            : 
<span class="lineNum">      28 </span>            :   // If expose_hidden is set, we take as input and produce as output
<span class="lineNum">      29 </span>            :   // the hidden state blobs at the first and last timesteps.
<span class="lineNum">      30 </span><span class="lineNoCov">          0 :   expose_hidden_ = this-&gt;layer_param_.recurrent_param().expose_hidden();</span>
<span class="lineNum">      31 </span>            : 
<span class="lineNum">      32 </span>            :   // Get (recurrent) input/output names.
<span class="lineNum">      33 </span><span class="lineNoCov">          0 :   vector&lt;string&gt; output_names;</span>
<span class="lineNum">      34 </span><span class="lineNoCov">          0 :   OutputBlobNames(&amp;output_names);</span>
<span class="lineNum">      35 </span><span class="lineNoCov">          0 :   vector&lt;string&gt; recur_input_names;</span>
<span class="lineNum">      36 </span><span class="lineNoCov">          0 :   RecurrentInputBlobNames(&amp;recur_input_names);</span>
<span class="lineNum">      37 </span><span class="lineNoCov">          0 :   vector&lt;string&gt; recur_output_names;</span>
<span class="lineNum">      38 </span><span class="lineNoCov">          0 :   RecurrentOutputBlobNames(&amp;recur_output_names);</span>
<span class="lineNum">      39 </span><span class="lineNoCov">          0 :   const int num_recur_blobs = recur_input_names.size();</span>
<span class="lineNum">      40 </span><span class="lineNoCov">          0 :   CHECK_EQ(num_recur_blobs, recur_output_names.size());</span>
<span class="lineNum">      41 </span>            : 
<span class="lineNum">      42 </span>            :   // If provided, bottom[2] is a static input to the recurrent net.
<span class="lineNum">      43 </span><span class="lineNoCov">          0 :   const int num_hidden_exposed = expose_hidden_ * num_recur_blobs;</span>
<span class="lineNum">      44 </span><span class="lineNoCov">          0 :   static_input_ = (bottom.size() &gt; 2 + num_hidden_exposed);</span>
<span class="lineNum">      45 </span><span class="lineNoCov">          0 :   if (static_input_) {</span>
<span class="lineNum">      46 </span><span class="lineNoCov">          0 :     CHECK_GE(bottom[2]-&gt;num_axes(), 1);</span>
<span class="lineNum">      47 </span><span class="lineNoCov">          0 :     CHECK_EQ(N_, bottom[2]-&gt;shape(0));</span>
<span class="lineNum">      48 </span>            :   }
<span class="lineNum">      49 </span>            : 
<span class="lineNum">      50 </span>            :   // Create a NetParameter; setup the inputs that aren't unique to particular
<span class="lineNum">      51 </span>            :   // recurrent architectures.
<span class="lineNum">      52 </span><span class="lineNoCov">          0 :   NetParameter net_param;</span>
<span class="lineNum">      53 </span>            : 
<span class="lineNum">      54 </span>            :   LayerParameter* input_layer_param = net_param.add_layer();
<span class="lineNum">      55 </span><span class="lineNoCov">          0 :   input_layer_param-&gt;set_type(&quot;Input&quot;);</span>
<span class="lineNum">      56 </span><span class="lineNoCov">          0 :   InputParameter* input_param = input_layer_param-&gt;mutable_input_param();</span>
<span class="lineNum">      57 </span>            :   input_layer_param-&gt;add_top(&quot;x&quot;);
<span class="lineNum">      58 </span><span class="lineNoCov">          0 :   BlobShape input_shape;</span>
<span class="lineNum">      59 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; bottom[0]-&gt;num_axes(); ++i) {</span>
<span class="lineNum">      60 </span><span class="lineNoCov">          0 :     input_shape.add_dim(bottom[0]-&gt;shape(i));</span>
<span class="lineNum">      61 </span>            :   }
<span class="lineNum">      62 </span><span class="lineNoCov">          0 :   input_param-&gt;add_shape()-&gt;CopyFrom(input_shape);</span>
<span class="lineNum">      63 </span>            : 
<span class="lineNum">      64 </span><span class="lineNoCov">          0 :   input_shape.Clear();</span>
<span class="lineNum">      65 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; bottom[1]-&gt;num_axes(); ++i) {</span>
<span class="lineNum">      66 </span><span class="lineNoCov">          0 :     input_shape.add_dim(bottom[1]-&gt;shape(i));</span>
<span class="lineNum">      67 </span>            :   }
<span class="lineNum">      68 </span>            :   input_layer_param-&gt;add_top(&quot;cont&quot;);
<span class="lineNum">      69 </span><span class="lineNoCov">          0 :   input_param-&gt;add_shape()-&gt;CopyFrom(input_shape);</span>
<span class="lineNum">      70 </span>            : 
<span class="lineNum">      71 </span><span class="lineNoCov">          0 :   if (static_input_) {</span>
<span class="lineNum">      72 </span><span class="lineNoCov">          0 :     input_shape.Clear();</span>
<span class="lineNum">      73 </span><span class="lineNoCov">          0 :     for (int i = 0; i &lt; bottom[2]-&gt;num_axes(); ++i) {</span>
<span class="lineNum">      74 </span><span class="lineNoCov">          0 :       input_shape.add_dim(bottom[2]-&gt;shape(i));</span>
<span class="lineNum">      75 </span>            :     }
<span class="lineNum">      76 </span>            :     input_layer_param-&gt;add_top(&quot;x_static&quot;);
<span class="lineNum">      77 </span><span class="lineNoCov">          0 :     input_param-&gt;add_shape()-&gt;CopyFrom(input_shape);</span>
<span class="lineNum">      78 </span>            :   }
<span class="lineNum">      79 </span>            : 
<span class="lineNum">      80 </span>            :   // Call the child's FillUnrolledNet implementation to specify the unrolled
<span class="lineNum">      81 </span>            :   // recurrent architecture.
<span class="lineNum">      82 </span><span class="lineNoCov">          0 :   this-&gt;FillUnrolledNet(&amp;net_param);</span>
<span class="lineNum">      83 </span>            : 
<span class="lineNum">      84 </span>            :   // Prepend this layer's name to the names of each layer in the unrolled net.
<span class="lineNum">      85 </span>            :   const string&amp; layer_name = this-&gt;layer_param_.name();
<span class="lineNum">      86 </span><span class="lineNoCov">          0 :   if (layer_name.size()) {</span>
<span class="lineNum">      87 </span><span class="lineNoCov">          0 :     for (int i = 0; i &lt; net_param.layer_size(); ++i) {</span>
<span class="lineNum">      88 </span>            :       LayerParameter* layer = net_param.mutable_layer(i);
<span class="lineNum">      89 </span><span class="lineNoCov">          0 :       layer-&gt;set_name(layer_name + &quot;_&quot; + layer-&gt;name());</span>
<span class="lineNum">      90 </span>            :     }
<span class="lineNum">      91 </span>            :   }
<span class="lineNum">      92 </span>            : 
<span class="lineNum">      93 </span>            :   // Add &quot;pseudo-losses&quot; to all outputs to force backpropagation.
<span class="lineNum">      94 </span>            :   // (Setting force_backward is too aggressive as we may not need to backprop to
<span class="lineNum">      95 </span>            :   // all inputs, e.g., the sequence continuation indicators.)
<span class="lineNum">      96 </span><span class="lineNoCov">          0 :   vector&lt;string&gt; pseudo_losses(output_names.size());</span>
<span class="lineNum">      97 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; output_names.size(); ++i) {</span>
<span class="lineNum">      98 </span>            :     LayerParameter* layer = net_param.add_layer();
<span class="lineNum">      99 </span><span class="lineNoCov">          0 :     pseudo_losses[i] = output_names[i] + &quot;_pseudoloss&quot;;</span>
<span class="lineNum">     100 </span><span class="lineNoCov">          0 :     layer-&gt;set_name(pseudo_losses[i]);</span>
<span class="lineNum">     101 </span><span class="lineNoCov">          0 :     layer-&gt;set_type(&quot;Reduction&quot;);</span>
<span class="lineNum">     102 </span>            :     layer-&gt;add_bottom(output_names[i]);
<span class="lineNum">     103 </span>            :     layer-&gt;add_top(pseudo_losses[i]);
<span class="lineNum">     104 </span>            :     layer-&gt;add_loss_weight(1);
<span class="lineNum">     105 </span>            :   }
<span class="lineNum">     106 </span>            : 
<span class="lineNum">     107 </span>            :   // Create the unrolled net.
<span class="lineNum">     108 </span><span class="lineNoCov">          0 :   unrolled_net_.reset(new Net&lt;Dtype&gt;(net_param));</span>
<span class="lineNum">     109 </span>            :   unrolled_net_-&gt;set_debug_info(
<span class="lineNum">     110 </span>            :       this-&gt;layer_param_.recurrent_param().debug_info());
<span class="lineNum">     111 </span>            : 
<span class="lineNum">     112 </span>            :   // Setup pointers to the inputs.
<span class="lineNum">     113 </span><span class="lineNoCov">          0 :   x_input_blob_ = CHECK_NOTNULL(unrolled_net_-&gt;blob_by_name(&quot;x&quot;).get());</span>
<span class="lineNum">     114 </span><span class="lineNoCov">          0 :   cont_input_blob_ = CHECK_NOTNULL(unrolled_net_-&gt;blob_by_name(&quot;cont&quot;).get());</span>
<span class="lineNum">     115 </span><span class="lineNoCov">          0 :   if (static_input_) {</span>
<span class="lineNum">     116 </span><span class="lineNoCov">          0 :     x_static_input_blob_ =</span>
<span class="lineNum">     117 </span><span class="lineNoCov">          0 :         CHECK_NOTNULL(unrolled_net_-&gt;blob_by_name(&quot;x_static&quot;).get());</span>
<span class="lineNum">     118 </span>            :   }
<span class="lineNum">     119 </span>            : 
<span class="lineNum">     120 </span>            :   // Setup pointers to paired recurrent inputs/outputs.
<span class="lineNum">     121 </span><span class="lineNoCov">          0 :   recur_input_blobs_.resize(num_recur_blobs);</span>
<span class="lineNum">     122 </span><span class="lineNoCov">          0 :   recur_output_blobs_.resize(num_recur_blobs);</span>
<span class="lineNum">     123 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; recur_input_names.size(); ++i) {</span>
<span class="lineNum">     124 </span><span class="lineNoCov">          0 :     recur_input_blobs_[i] =</span>
<span class="lineNum">     125 </span><span class="lineNoCov">          0 :         CHECK_NOTNULL(unrolled_net_-&gt;blob_by_name(recur_input_names[i]).get());</span>
<span class="lineNum">     126 </span><span class="lineNoCov">          0 :     recur_output_blobs_[i] =</span>
<span class="lineNum">     127 </span><span class="lineNoCov">          0 :         CHECK_NOTNULL(unrolled_net_-&gt;blob_by_name(recur_output_names[i]).get());</span>
<span class="lineNum">     128 </span>            :   }
<span class="lineNum">     129 </span>            : 
<span class="lineNum">     130 </span>            :   // Setup pointers to outputs.
<span class="lineNum">     131 </span><span class="lineNoCov">          0 :   CHECK_EQ(top.size() - num_hidden_exposed, output_names.size())</span>
<span class="lineNum">     132 </span>            :       &lt;&lt; &quot;OutputBlobNames must provide an output blob name for each top.&quot;;
<span class="lineNum">     133 </span><span class="lineNoCov">          0 :   output_blobs_.resize(output_names.size());</span>
<span class="lineNum">     134 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; output_names.size(); ++i) {</span>
<span class="lineNum">     135 </span><span class="lineNoCov">          0 :     output_blobs_[i] =</span>
<span class="lineNum">     136 </span><span class="lineNoCov">          0 :         CHECK_NOTNULL(unrolled_net_-&gt;blob_by_name(output_names[i]).get());</span>
<span class="lineNum">     137 </span>            :   }
<span class="lineNum">     138 </span>            : 
<span class="lineNum">     139 </span>            :   // We should have 2 inputs (x and cont), plus a number of recurrent inputs,
<span class="lineNum">     140 </span>            :   // plus maybe a static input.
<span class="lineNum">     141 </span><span class="lineNoCov">          0 :   CHECK_EQ(2 + num_recur_blobs + static_input_,</span>
<span class="lineNum">     142 </span>            :            unrolled_net_-&gt;input_blobs().size());
<span class="lineNum">     143 </span>            : 
<span class="lineNum">     144 </span>            :   // This layer's parameters are any parameters in the layers of the unrolled
<span class="lineNum">     145 </span>            :   // net. We only want one copy of each parameter, so check that the parameter
<span class="lineNum">     146 </span>            :   // is &quot;owned&quot; by the layer, rather than shared with another.
<span class="lineNum">     147 </span><span class="lineNoCov">          0 :   this-&gt;blobs_.clear();</span>
<span class="lineNum">     148 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; unrolled_net_-&gt;params().size(); ++i) {</span>
<span class="lineNum">     149 </span><span class="lineNoCov">          0 :     if (unrolled_net_-&gt;param_owners()[i] == -1) {</span>
<span class="lineNum">     150 </span><span class="lineNoCov">          0 :       LOG(INFO) &lt;&lt; &quot;Adding parameter &quot; &lt;&lt; i &lt;&lt; &quot;: &quot;</span>
<span class="lineNum">     151 </span>            :                 &lt;&lt; unrolled_net_-&gt;param_display_names()[i];
<span class="lineNum">     152 </span><span class="lineNoCov">          0 :       this-&gt;blobs_.push_back(unrolled_net_-&gt;params()[i]);</span>
<span class="lineNum">     153 </span>            :     }
<span class="lineNum">     154 </span>            :   }
<span class="lineNum">     155 </span>            :   // Check that param_propagate_down is set for all of the parameters in the
<span class="lineNum">     156 </span>            :   // unrolled net; set param_propagate_down to true in this layer.
<span class="lineNum">     157 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; unrolled_net_-&gt;layers().size(); ++i) {</span>
<span class="lineNum">     158 </span><span class="lineNoCov">          0 :     for (int j = 0; j &lt; unrolled_net_-&gt;layers()[i]-&gt;blobs().size(); ++j) {</span>
<span class="lineNum">     159 </span><span class="lineNoCov">          0 :       CHECK(unrolled_net_-&gt;layers()[i]-&gt;param_propagate_down(j))</span>
<span class="lineNum">     160 </span><span class="lineNoCov">          0 :           &lt;&lt; &quot;param_propagate_down not set for layer &quot; &lt;&lt; i &lt;&lt; &quot;, param &quot; &lt;&lt; j;</span>
<span class="lineNum">     161 </span>            :     }
<span class="lineNum">     162 </span>            :   }
<span class="lineNum">     163 </span>            :   this-&gt;param_propagate_down_.clear();
<span class="lineNum">     164 </span><span class="lineNoCov">          0 :   this-&gt;param_propagate_down_.resize(this-&gt;blobs_.size(), true);</span>
<span class="lineNum">     165 </span>            : 
<span class="lineNum">     166 </span>            :   // Set the diffs of recurrent outputs to 0 -- we can't backpropagate across
<span class="lineNum">     167 </span>            :   // batches.
<span class="lineNum">     168 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; recur_output_blobs_.size(); ++i) {</span>
<span class="lineNum">     169 </span><span class="lineNoCov">          0 :     caffe_set(recur_output_blobs_[i]-&gt;count(), Dtype(0),</span>
<span class="lineNum">     170 </span>            :               recur_output_blobs_[i]-&gt;mutable_cpu_diff());
<span class="lineNum">     171 </span>            :   }
<span class="lineNum">     172 </span>            : 
<span class="lineNum">     173 </span>            :   // Check that the last output_names.size() layers are the pseudo-losses;
<span class="lineNum">     174 </span>            :   // set last_layer_index so that we don't actually run these layers.
<span class="lineNum">     175 </span>            :   const vector&lt;string&gt;&amp; layer_names = unrolled_net_-&gt;layer_names();
<span class="lineNum">     176 </span><span class="lineNoCov">          0 :   last_layer_index_ = layer_names.size() - 1 - pseudo_losses.size();</span>
<span class="lineNum">     177 </span><span class="lineNoCov">          0 :   for (int i = last_layer_index_ + 1, j = 0; i &lt; layer_names.size(); ++i, ++j) {</span>
<span class="lineNum">     178 </span><span class="lineNoCov">          0 :     CHECK_EQ(layer_names[i], pseudo_losses[j]);</span>
<span class="lineNum">     179 </span>            :   }
<span class="lineNum">     180 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     181 </span>            : 
<span class="lineNum">     182 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     183 </span><span class="lineNoCov">          0 : void RecurrentLayer&lt;Dtype&gt;::Reshape(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,</span>
<span class="lineNum">     184 </span>            :       const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">     185 </span><span class="lineNoCov">          0 :   CHECK_GE(bottom[0]-&gt;num_axes(), 2)</span>
<span class="lineNum">     186 </span>            :       &lt;&lt; &quot;bottom[0] must have at least 2 axes -- (#timesteps, #streams, ...)&quot;;
<span class="lineNum">     187 </span><span class="lineNoCov">          0 :   CHECK_EQ(T_, bottom[0]-&gt;shape(0)) &lt;&lt; &quot;input number of timesteps changed&quot;;</span>
<span class="lineNum">     188 </span><span class="lineNoCov">          0 :   N_ = bottom[0]-&gt;shape(1);</span>
<span class="lineNum">     189 </span><span class="lineNoCov">          0 :   CHECK_EQ(bottom[1]-&gt;num_axes(), 2)</span>
<span class="lineNum">     190 </span>            :       &lt;&lt; &quot;bottom[1] must have exactly 2 axes -- (#timesteps, #streams)&quot;;
<span class="lineNum">     191 </span><span class="lineNoCov">          0 :   CHECK_EQ(T_, bottom[1]-&gt;shape(0));</span>
<span class="lineNum">     192 </span><span class="lineNoCov">          0 :   CHECK_EQ(N_, bottom[1]-&gt;shape(1));</span>
<span class="lineNum">     193 </span><span class="lineNoCov">          0 :   x_input_blob_-&gt;ReshapeLike(*bottom[0]);</span>
<span class="lineNum">     194 </span><span class="lineNoCov">          0 :   vector&lt;int&gt; cont_shape = bottom[1]-&gt;shape();</span>
<span class="lineNum">     195 </span><span class="lineNoCov">          0 :   cont_input_blob_-&gt;Reshape(cont_shape);</span>
<span class="lineNum">     196 </span><span class="lineNoCov">          0 :   if (static_input_) {</span>
<span class="lineNum">     197 </span><span class="lineNoCov">          0 :     x_static_input_blob_-&gt;ReshapeLike(*bottom[2]);</span>
<span class="lineNum">     198 </span>            :   }
<span class="lineNum">     199 </span><span class="lineNoCov">          0 :   vector&lt;BlobShape&gt; recur_input_shapes;</span>
<span class="lineNum">     200 </span><span class="lineNoCov">          0 :   RecurrentInputShapes(&amp;recur_input_shapes);</span>
<span class="lineNum">     201 </span><span class="lineNoCov">          0 :   CHECK_EQ(recur_input_shapes.size(), recur_input_blobs_.size());</span>
<span class="lineNum">     202 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; recur_input_shapes.size(); ++i) {</span>
<span class="lineNum">     203 </span><span class="lineNoCov">          0 :     recur_input_blobs_[i]-&gt;Reshape(recur_input_shapes[i]);</span>
<span class="lineNum">     204 </span>            :   }
<span class="lineNum">     205 </span><span class="lineNoCov">          0 :   unrolled_net_-&gt;Reshape();</span>
<span class="lineNum">     206 </span><span class="lineNoCov">          0 :   x_input_blob_-&gt;ShareData(*bottom[0]);</span>
<span class="lineNum">     207 </span><span class="lineNoCov">          0 :   x_input_blob_-&gt;ShareDiff(*bottom[0]);</span>
<span class="lineNum">     208 </span><span class="lineNoCov">          0 :   cont_input_blob_-&gt;ShareData(*bottom[1]);</span>
<span class="lineNum">     209 </span><span class="lineNoCov">          0 :   if (static_input_) {</span>
<span class="lineNum">     210 </span><span class="lineNoCov">          0 :     x_static_input_blob_-&gt;ShareData(*bottom[2]);</span>
<span class="lineNum">     211 </span><span class="lineNoCov">          0 :     x_static_input_blob_-&gt;ShareDiff(*bottom[2]);</span>
<span class="lineNum">     212 </span>            :   }
<span class="lineNum">     213 </span><span class="lineNoCov">          0 :   if (expose_hidden_) {</span>
<span class="lineNum">     214 </span><span class="lineNoCov">          0 :     const int bottom_offset = 2 + static_input_;</span>
<span class="lineNum">     215 </span><span class="lineNoCov">          0 :     for (int i = bottom_offset, j = 0; i &lt; bottom.size(); ++i, ++j) {</span>
<span class="lineNum">     216 </span><span class="lineNoCov">          0 :       CHECK(recur_input_blobs_[j]-&gt;shape() == bottom[i]-&gt;shape())</span>
<span class="lineNum">     217 </span><span class="lineNoCov">          0 :           &lt;&lt; &quot;shape mismatch - recur_input_blobs_[&quot; &lt;&lt; j &lt;&lt; &quot;]: &quot;</span>
<span class="lineNum">     218 </span><span class="lineNoCov">          0 :           &lt;&lt; recur_input_blobs_[j]-&gt;shape_string()</span>
<span class="lineNum">     219 </span><span class="lineNoCov">          0 :           &lt;&lt; &quot; vs. bottom[&quot; &lt;&lt; i &lt;&lt; &quot;]: &quot; &lt;&lt; bottom[i]-&gt;shape_string();</span>
<span class="lineNum">     220 </span><span class="lineNoCov">          0 :       recur_input_blobs_[j]-&gt;ShareData(*bottom[i]);</span>
<span class="lineNum">     221 </span>            :     }
<span class="lineNum">     222 </span>            :   }
<span class="lineNum">     223 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; output_blobs_.size(); ++i) {</span>
<span class="lineNum">     224 </span><span class="lineNoCov">          0 :     top[i]-&gt;ReshapeLike(*output_blobs_[i]);</span>
<span class="lineNum">     225 </span><span class="lineNoCov">          0 :     top[i]-&gt;ShareData(*output_blobs_[i]);</span>
<span class="lineNum">     226 </span><span class="lineNoCov">          0 :     top[i]-&gt;ShareDiff(*output_blobs_[i]);</span>
<span class="lineNum">     227 </span>            :   }
<span class="lineNum">     228 </span><span class="lineNoCov">          0 :   if (expose_hidden_) {</span>
<span class="lineNum">     229 </span><span class="lineNoCov">          0 :     const int top_offset = output_blobs_.size();</span>
<span class="lineNum">     230 </span><span class="lineNoCov">          0 :     for (int i = top_offset, j = 0; i &lt; top.size(); ++i, ++j) {</span>
<span class="lineNum">     231 </span><span class="lineNoCov">          0 :       top[i]-&gt;ReshapeLike(*recur_output_blobs_[j]);</span>
<span class="lineNum">     232 </span>            :     }
<span class="lineNum">     233 </span>            :   }
<span class="lineNum">     234 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     235 </span>            : 
<span class="lineNum">     236 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     237 </span><span class="lineNoCov">          0 : void RecurrentLayer&lt;Dtype&gt;::Reset() {</span>
<span class="lineNum">     238 </span>            :   // &quot;Reset&quot; the hidden state of the net by zeroing out all recurrent outputs.
<span class="lineNum">     239 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; recur_output_blobs_.size(); ++i) {</span>
<span class="lineNum">     240 </span><span class="lineNoCov">          0 :     caffe_set(recur_output_blobs_[i]-&gt;count(), Dtype(0),</span>
<span class="lineNum">     241 </span>            :               recur_output_blobs_[i]-&gt;mutable_cpu_data());
<span class="lineNum">     242 </span>            :   }
<span class="lineNum">     243 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     244 </span>            : 
<span class="lineNum">     245 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     246 </span><span class="lineNoCov">          0 : void RecurrentLayer&lt;Dtype&gt;::Forward_cpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,</span>
<span class="lineNum">     247 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">     248 </span>            :   // Hacky fix for test time: reshare all the internal shared blobs, which may
<span class="lineNum">     249 </span>            :   // currently point to a stale owner blob that was dropped when Solver::Test
<span class="lineNum">     250 </span>            :   // called test_net-&gt;ShareTrainedLayersWith(net_.get()).
<span class="lineNum">     251 </span>            :   // TODO: somehow make this work non-hackily.
<span class="lineNum">     252 </span><span class="lineNoCov">          0 :   if (this-&gt;phase_ == TEST) {</span>
<span class="lineNum">     253 </span><span class="lineNoCov">          0 :     unrolled_net_-&gt;ShareWeights();</span>
<span class="lineNum">     254 </span>            :   }
<span class="lineNum">     255 </span>            : 
<span class="lineNum">     256 </span>            :   DCHECK_EQ(recur_input_blobs_.size(), recur_output_blobs_.size());
<span class="lineNum">     257 </span><span class="lineNoCov">          0 :   if (!expose_hidden_) {</span>
<span class="lineNum">     258 </span><span class="lineNoCov">          0 :     for (int i = 0; i &lt; recur_input_blobs_.size(); ++i) {</span>
<span class="lineNum">     259 </span><span class="lineNoCov">          0 :       const int count = recur_input_blobs_[i]-&gt;count();</span>
<span class="lineNum">     260 </span>            :       DCHECK_EQ(count, recur_output_blobs_[i]-&gt;count());
<span class="lineNum">     261 </span><span class="lineNoCov">          0 :       const Dtype* timestep_T_data = recur_output_blobs_[i]-&gt;cpu_data();</span>
<span class="lineNum">     262 </span><span class="lineNoCov">          0 :       Dtype* timestep_0_data = recur_input_blobs_[i]-&gt;mutable_cpu_data();</span>
<span class="lineNum">     263 </span><span class="lineNoCov">          0 :       caffe_copy(count, timestep_T_data, timestep_0_data);</span>
<span class="lineNum">     264 </span>            :     }
<span class="lineNum">     265 </span>            :   }
<span class="lineNum">     266 </span>            : 
<span class="lineNum">     267 </span><span class="lineNoCov">          0 :   unrolled_net_-&gt;ForwardTo(last_layer_index_);</span>
<span class="lineNum">     268 </span>            : 
<span class="lineNum">     269 </span><span class="lineNoCov">          0 :   if (expose_hidden_) {</span>
<span class="lineNum">     270 </span><span class="lineNoCov">          0 :     const int top_offset = output_blobs_.size();</span>
<span class="lineNum">     271 </span><span class="lineNoCov">          0 :     for (int i = top_offset, j = 0; i &lt; top.size(); ++i, ++j) {</span>
<span class="lineNum">     272 </span><span class="lineNoCov">          0 :       top[i]-&gt;ShareData(*recur_output_blobs_[j]);</span>
<span class="lineNum">     273 </span>            :     }
<span class="lineNum">     274 </span>            :   }
<span class="lineNum">     275 </span><span class="lineNoCov">          0 : }</span>
<a name="276"><span class="lineNum">     276 </span>            : </a>
<span class="lineNum">     277 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     278 </span><span class="lineNoCov">          0 : void RecurrentLayer&lt;Dtype&gt;::Backward_cpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top,</span>
<span class="lineNum">     279 </span>            :     const vector&lt;bool&gt;&amp; propagate_down, const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom) {
<span class="lineNum">     280 </span><span class="lineNoCov">          0 :   CHECK(!propagate_down[1]) &lt;&lt; &quot;Cannot backpropagate to sequence indicators.&quot;;</span>
<span class="lineNum">     281 </span>            : 
<span class="lineNum">     282 </span>            :   // TODO: skip backpropagation to inputs and parameters inside the unrolled
<span class="lineNum">     283 </span>            :   // net according to propagate_down[0] and propagate_down[2]. For now just
<span class="lineNum">     284 </span>            :   // backprop to inputs and parameters unconditionally, as either the inputs or
<span class="lineNum">     285 </span>            :   // the parameters do need backward (or Net would have set
<span class="lineNum">     286 </span>            :   // layer_needs_backward_[i] == false for this layer).
<span class="lineNum">     287 </span><span class="lineNoCov">          0 :   unrolled_net_-&gt;BackwardFrom(last_layer_index_);</span>
<span class="lineNum">     288 </span><span class="lineNoCov">          0 : }</span>
<a name="289"><span class="lineNum">     289 </span>            : </a>
<span class="lineNum">     290 </span>            : #ifdef CPU_ONLY
<span class="lineNum">     291 </span><span class="lineNoCov">          0 : STUB_GPU_FORWARD(RecurrentLayer, Forward);</span>
<span class="lineNum">     292 </span>            : #endif
<span class="lineNum">     293 </span>            : 
<a name="294"><span class="lineNum">     294 </span>            : INSTANTIATE_CLASS(RecurrentLayer);</a>
<span class="lineNum">     295 </span>            : 
<span class="lineNum">     296 </span><span class="lineCov">          2 : }  // namespace caffe</span>
</pre>
      </td>
    </tr>
  </table>
  <br>

  <table width="100%" border="0" cellspacing="0" cellpadding="0">
    <tr><td class="ruler"><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>
    <tr><td class="versionInfo">Generated by: <a href="https://ltp.sourceforge.net/coverage/lcov.php" target="_parent">LCOV version 1.12</a></td></tr>
  </table>
  <br>

</body>
</html>
