<!DOCTYPE html>

<html lang="en">

<head>
  <!-- LCOV-generated coverage page for src/caffe/layers/rnn_layer.cpp -->
  <meta charset="UTF-8">
  <title>LCOV - code analysis - src/caffe/layers/rnn_layer.cpp</title>
  <link rel="stylesheet" href="../../../gcov.css">
</head>

<body>

  <!-- Report header: breadcrumb navigation plus line/function coverage summary. -->
  <table width="100%" border="0" cellspacing="0" cellpadding="0">
    <tr><td class="title">LCOV - code coverage report</td></tr>
    <tr><td class="ruler"><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>

    <tr>
      <td width="100%">
        <table cellpadding="1" border="0" width="100%">
          <tr>
            <td width="10%" class="headerItem">Current view:</td>
            <td width="35%" class="headerValue"><a href="../../../index.html">top level</a> - <a href="index.html">src/caffe/layers</a> - rnn_layer.cpp<span style="font-size: 80%;"> (source / <a href="rnn_layer.cpp.func-sort-c.html">functions</a>)</span></td>
            <td width="5%"></td>
            <td width="15%"></td>
            <td width="10%" class="headerCovTableHead">Hit</td>
            <td width="10%" class="headerCovTableHead">Total</td>
            <td width="15%" class="headerCovTableHead">Coverage</td>
          </tr>
          <tr>
            <td class="headerItem">Test:</td>
            <td class="headerValue">code analysis</td>
            <td></td>
            <td class="headerItem">Lines:</td>
            <td class="headerCovTableEntry">2</td>
            <td class="headerCovTableEntry">112</td>
            <td class="headerCovTableEntryLo">1.8 %</td>
          </tr>
          <tr>
            <td class="headerItem">Date:</td>
            <td class="headerValue">2020-09-11 22:25:26</td>
            <td></td>
            <td class="headerItem">Functions:</td>
            <td class="headerCovTableEntry">2</td>
            <td class="headerCovTableEntry">14</td>
            <td class="headerCovTableEntryLo">14.3 %</td>
          </tr>
          <tr>
            <td class="headerItem">Legend:</td>
            <td class="headerValueLeg">Lines:
              <span class="coverLegendCov">hit</span>
              <span class="coverLegendNoCov">not hit</span>
            </td>
            <td></td>
          </tr>
          <tr><td><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>
        </table>
      </td>
    </tr>

    <tr><td class="ruler"><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>
  </table>

  <!-- Annotated source listing generated by LCOV/genhtml.
       Each <pre> line shows: source line number, execution count, source text.
       span.lineCov   = line was executed (hit count shown, e.g. 3);
       span.lineNoCov = line was instrumented but never executed (count 0);
       lines with no span carry no per-line counter.
       The escaped C++ text is report data and must not be edited here. -->
  <table cellpadding=0 cellspacing=0 border=0>
    <tr>
      <td><br></td>
    </tr>
    <tr>
      <td>
<pre class="sourceHeading">          Line data    Source code</pre>
<pre class="source">
<a name="1"><span class="lineNum">       1 </span>            : #include &lt;string&gt;</a>
<span class="lineNum">       2 </span>            : #include &lt;vector&gt;
<span class="lineNum">       3 </span>            : 
<span class="lineNum">       4 </span>            : #include &quot;caffe/blob.hpp&quot;
<span class="lineNum">       5 </span>            : #include &quot;caffe/common.hpp&quot;
<span class="lineNum">       6 </span>            : #include &quot;caffe/filler.hpp&quot;
<span class="lineNum">       7 </span>            : #include &quot;caffe/layer.hpp&quot;
<span class="lineNum">       8 </span>            : #include &quot;caffe/layers/rnn_layer.hpp&quot;
<span class="lineNum">       9 </span>            : #include &quot;caffe/util/math_functions.hpp&quot;
<span class="lineNum">      10 </span>            : 
<span class="lineNum">      11 </span>            : namespace caffe {
<a name="12"><span class="lineNum">      12 </span>            : </a>
<span class="lineNum">      13 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      14 </span><span class="lineNoCov">          0 : void RNNLayer&lt;Dtype&gt;::RecurrentInputBlobNames(vector&lt;string&gt;* names) const {</span>
<span class="lineNum">      15 </span><span class="lineNoCov">          0 :   names-&gt;resize(1);</span>
<span class="lineNum">      16 </span>            :   (*names)[0] = &quot;h_0&quot;;
<span class="lineNum">      17 </span><span class="lineNoCov">          0 : }</span>
<a name="18"><span class="lineNum">      18 </span>            : </a>
<span class="lineNum">      19 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      20 </span><span class="lineNoCov">          0 : void RNNLayer&lt;Dtype&gt;::RecurrentOutputBlobNames(vector&lt;string&gt;* names) const {</span>
<span class="lineNum">      21 </span><span class="lineNoCov">          0 :   names-&gt;resize(1);</span>
<span class="lineNum">      22 </span><span class="lineNoCov">          0 :   (*names)[0] = &quot;h_&quot; + format_int(this-&gt;T_);</span>
<span class="lineNum">      23 </span><span class="lineNoCov">          0 : }</span>
<a name="24"><span class="lineNum">      24 </span>            : </a>
<span class="lineNum">      25 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      26 </span><span class="lineNoCov">          0 : void RNNLayer&lt;Dtype&gt;::RecurrentInputShapes(vector&lt;BlobShape&gt;* shapes) const {</span>
<span class="lineNum">      27 </span><span class="lineNoCov">          0 :   const int num_output = this-&gt;layer_param_.recurrent_param().num_output();</span>
<span class="lineNum">      28 </span><span class="lineNoCov">          0 :   shapes-&gt;resize(1);</span>
<span class="lineNum">      29 </span><span class="lineNoCov">          0 :   (*shapes)[0].Clear();</span>
<span class="lineNum">      30 </span>            :   (*shapes)[0].add_dim(1);  // a single timestep
<span class="lineNum">      31 </span><span class="lineNoCov">          0 :   (*shapes)[0].add_dim(this-&gt;N_);</span>
<span class="lineNum">      32 </span><span class="lineNoCov">          0 :   (*shapes)[0].add_dim(num_output);</span>
<span class="lineNum">      33 </span><span class="lineNoCov">          0 : }</span>
<a name="34"><span class="lineNum">      34 </span>            : </a>
<span class="lineNum">      35 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      36 </span><span class="lineNoCov">          0 : void RNNLayer&lt;Dtype&gt;::OutputBlobNames(vector&lt;string&gt;* names) const {</span>
<span class="lineNum">      37 </span><span class="lineNoCov">          0 :   names-&gt;resize(1);</span>
<span class="lineNum">      38 </span>            :   (*names)[0] = &quot;o&quot;;
<span class="lineNum">      39 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">      40 </span>            : 
<span class="lineNum">      41 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      42 </span><span class="lineNoCov">          0 : void RNNLayer&lt;Dtype&gt;::FillUnrolledNet(NetParameter* net_param) const {</span>
<span class="lineNum">      43 </span><span class="lineNoCov">          0 :   const int num_output = this-&gt;layer_param_.recurrent_param().num_output();</span>
<span class="lineNum">      44 </span><span class="lineNoCov">          0 :   CHECK_GT(num_output, 0) &lt;&lt; &quot;num_output must be positive&quot;;</span>
<span class="lineNum">      45 </span>            :   const FillerParameter&amp; weight_filler =
<span class="lineNum">      46 </span>            :       this-&gt;layer_param_.recurrent_param().weight_filler();
<span class="lineNum">      47 </span>            :   const FillerParameter&amp; bias_filler =
<span class="lineNum">      48 </span>            :       this-&gt;layer_param_.recurrent_param().bias_filler();
<span class="lineNum">      49 </span>            : 
<span class="lineNum">      50 </span>            :   // Add generic LayerParameter's (without bottoms/tops) of layer types we'll
<span class="lineNum">      51 </span>            :   // use to save redundant code.
<span class="lineNum">      52 </span><span class="lineNoCov">          0 :   LayerParameter hidden_param;</span>
<span class="lineNum">      53 </span><span class="lineNoCov">          0 :   hidden_param.set_type(&quot;InnerProduct&quot;);</span>
<span class="lineNum">      54 </span><span class="lineNoCov">          0 :   hidden_param.mutable_inner_product_param()-&gt;set_num_output(num_output);</span>
<span class="lineNum">      55 </span><span class="lineNoCov">          0 :   hidden_param.mutable_inner_product_param()-&gt;set_bias_term(false);</span>
<span class="lineNum">      56 </span><span class="lineNoCov">          0 :   hidden_param.mutable_inner_product_param()-&gt;set_axis(2);</span>
<span class="lineNum">      57 </span><span class="lineNoCov">          0 :   hidden_param.mutable_inner_product_param()-&gt;</span>
<span class="lineNum">      58 </span><span class="lineNoCov">          0 :       mutable_weight_filler()-&gt;CopyFrom(weight_filler);</span>
<span class="lineNum">      59 </span>            : 
<span class="lineNum">      60 </span><span class="lineNoCov">          0 :   LayerParameter biased_hidden_param(hidden_param);</span>
<span class="lineNum">      61 </span><span class="lineNoCov">          0 :   biased_hidden_param.mutable_inner_product_param()-&gt;set_bias_term(true);</span>
<span class="lineNum">      62 </span><span class="lineNoCov">          0 :   biased_hidden_param.mutable_inner_product_param()-&gt;</span>
<span class="lineNum">      63 </span><span class="lineNoCov">          0 :       mutable_bias_filler()-&gt;CopyFrom(bias_filler);</span>
<span class="lineNum">      64 </span>            : 
<span class="lineNum">      65 </span><span class="lineNoCov">          0 :   LayerParameter sum_param;</span>
<span class="lineNum">      66 </span><span class="lineNoCov">          0 :   sum_param.set_type(&quot;Eltwise&quot;);</span>
<span class="lineNum">      67 </span><span class="lineNoCov">          0 :   sum_param.mutable_eltwise_param()-&gt;set_operation(</span>
<span class="lineNum">      68 </span>            :       EltwiseParameter_EltwiseOp_SUM);
<span class="lineNum">      69 </span>            : 
<span class="lineNum">      70 </span><span class="lineNoCov">          0 :   LayerParameter tanh_param;</span>
<span class="lineNum">      71 </span><span class="lineNoCov">          0 :   tanh_param.set_type(&quot;TanH&quot;);</span>
<span class="lineNum">      72 </span>            : 
<span class="lineNum">      73 </span><span class="lineNoCov">          0 :   LayerParameter scale_param;</span>
<span class="lineNum">      74 </span><span class="lineNoCov">          0 :   scale_param.set_type(&quot;Scale&quot;);</span>
<span class="lineNum">      75 </span><span class="lineNoCov">          0 :   scale_param.mutable_scale_param()-&gt;set_axis(0);</span>
<span class="lineNum">      76 </span>            : 
<span class="lineNum">      77 </span><span class="lineNoCov">          0 :   LayerParameter slice_param;</span>
<span class="lineNum">      78 </span><span class="lineNoCov">          0 :   slice_param.set_type(&quot;Slice&quot;);</span>
<span class="lineNum">      79 </span><span class="lineNoCov">          0 :   slice_param.mutable_slice_param()-&gt;set_axis(0);</span>
<span class="lineNum">      80 </span>            : 
<span class="lineNum">      81 </span><span class="lineNoCov">          0 :   vector&lt;BlobShape&gt; input_shapes;</span>
<span class="lineNum">      82 </span><span class="lineNoCov">          0 :   RecurrentInputShapes(&amp;input_shapes);</span>
<span class="lineNum">      83 </span><span class="lineNoCov">          0 :   CHECK_EQ(1, input_shapes.size());</span>
<span class="lineNum">      84 </span>            : 
<span class="lineNum">      85 </span>            :   LayerParameter* input_layer_param = net_param-&gt;add_layer();
<span class="lineNum">      86 </span><span class="lineNoCov">          0 :   input_layer_param-&gt;set_type(&quot;Input&quot;);</span>
<span class="lineNum">      87 </span><span class="lineNoCov">          0 :   InputParameter* input_param = input_layer_param-&gt;mutable_input_param();</span>
<span class="lineNum">      88 </span>            :   input_layer_param-&gt;add_top(&quot;h_0&quot;);
<span class="lineNum">      89 </span><span class="lineNoCov">          0 :   input_param-&gt;add_shape()-&gt;CopyFrom(input_shapes[0]);</span>
<span class="lineNum">      90 </span>            : 
<span class="lineNum">      91 </span>            :   LayerParameter* cont_slice_param = net_param-&gt;add_layer();
<span class="lineNum">      92 </span><span class="lineNoCov">          0 :   cont_slice_param-&gt;CopyFrom(slice_param);</span>
<span class="lineNum">      93 </span><span class="lineNoCov">          0 :   cont_slice_param-&gt;set_name(&quot;cont_slice&quot;);</span>
<span class="lineNum">      94 </span>            :   cont_slice_param-&gt;add_bottom(&quot;cont&quot;);
<span class="lineNum">      95 </span><span class="lineNoCov">          0 :   cont_slice_param-&gt;mutable_slice_param()-&gt;set_axis(0);</span>
<span class="lineNum">      96 </span>            : 
<span class="lineNum">      97 </span>            :   // Add layer to transform all timesteps of x to the hidden state dimension.
<span class="lineNum">      98 </span>            :   //     W_xh_x = W_xh * x + b_h
<span class="lineNum">      99 </span>            :   {
<span class="lineNum">     100 </span>            :     LayerParameter* x_transform_param = net_param-&gt;add_layer();
<span class="lineNum">     101 </span><span class="lineNoCov">          0 :     x_transform_param-&gt;CopyFrom(biased_hidden_param);</span>
<span class="lineNum">     102 </span><span class="lineNoCov">          0 :     x_transform_param-&gt;set_name(&quot;x_transform&quot;);</span>
<span class="lineNum">     103 </span><span class="lineNoCov">          0 :     x_transform_param-&gt;add_param()-&gt;set_name(&quot;W_xh&quot;);</span>
<span class="lineNum">     104 </span><span class="lineNoCov">          0 :     x_transform_param-&gt;add_param()-&gt;set_name(&quot;b_h&quot;);</span>
<span class="lineNum">     105 </span>            :     x_transform_param-&gt;add_bottom(&quot;x&quot;);
<span class="lineNum">     106 </span>            :     x_transform_param-&gt;add_top(&quot;W_xh_x&quot;);
<span class="lineNum">     107 </span>            :     x_transform_param-&gt;add_propagate_down(true);
<span class="lineNum">     108 </span>            :   }
<span class="lineNum">     109 </span>            : 
<span class="lineNum">     110 </span><span class="lineNoCov">          0 :   if (this-&gt;static_input_) {</span>
<span class="lineNum">     111 </span>            :     // Add layer to transform x_static to the hidden state dimension.
<span class="lineNum">     112 </span>            :     //     W_xh_x_static = W_xh_static * x_static
<span class="lineNum">     113 </span>            :     LayerParameter* x_static_transform_param = net_param-&gt;add_layer();
<span class="lineNum">     114 </span><span class="lineNoCov">          0 :     x_static_transform_param-&gt;CopyFrom(hidden_param);</span>
<span class="lineNum">     115 </span><span class="lineNoCov">          0 :     x_static_transform_param-&gt;mutable_inner_product_param()-&gt;set_axis(1);</span>
<span class="lineNum">     116 </span><span class="lineNoCov">          0 :     x_static_transform_param-&gt;set_name(&quot;W_xh_x_static&quot;);</span>
<span class="lineNum">     117 </span><span class="lineNoCov">          0 :     x_static_transform_param-&gt;add_param()-&gt;set_name(&quot;W_xh_static&quot;);</span>
<span class="lineNum">     118 </span>            :     x_static_transform_param-&gt;add_bottom(&quot;x_static&quot;);
<span class="lineNum">     119 </span>            :     x_static_transform_param-&gt;add_top(&quot;W_xh_x_static_preshape&quot;);
<span class="lineNum">     120 </span>            :     x_static_transform_param-&gt;add_propagate_down(true);
<span class="lineNum">     121 </span>            : 
<span class="lineNum">     122 </span>            :     LayerParameter* reshape_param = net_param-&gt;add_layer();
<span class="lineNum">     123 </span><span class="lineNoCov">          0 :     reshape_param-&gt;set_type(&quot;Reshape&quot;);</span>
<span class="lineNum">     124 </span>            :     BlobShape* new_shape =
<span class="lineNum">     125 </span><span class="lineNoCov">          0 :          reshape_param-&gt;mutable_reshape_param()-&gt;mutable_shape();</span>
<span class="lineNum">     126 </span>            :     new_shape-&gt;add_dim(1);  // One timestep.
<span class="lineNum">     127 </span>            :     // Should infer this-&gt;N as the dimension so we can reshape on batch size.
<span class="lineNum">     128 </span>            :     new_shape-&gt;add_dim(-1);
<span class="lineNum">     129 </span><span class="lineNoCov">          0 :     new_shape-&gt;add_dim(</span>
<span class="lineNum">     130 </span>            :         x_static_transform_param-&gt;inner_product_param().num_output());
<span class="lineNum">     131 </span><span class="lineNoCov">          0 :     reshape_param-&gt;set_name(&quot;W_xh_x_static_reshape&quot;);</span>
<span class="lineNum">     132 </span>            :     reshape_param-&gt;add_bottom(&quot;W_xh_x_static_preshape&quot;);
<span class="lineNum">     133 </span>            :     reshape_param-&gt;add_top(&quot;W_xh_x_static&quot;);
<span class="lineNum">     134 </span>            :   }
<span class="lineNum">     135 </span>            : 
<span class="lineNum">     136 </span>            :   LayerParameter* x_slice_param = net_param-&gt;add_layer();
<span class="lineNum">     137 </span><span class="lineNoCov">          0 :   x_slice_param-&gt;CopyFrom(slice_param);</span>
<span class="lineNum">     138 </span><span class="lineNoCov">          0 :   x_slice_param-&gt;set_name(&quot;W_xh_x_slice&quot;);</span>
<span class="lineNum">     139 </span>            :   x_slice_param-&gt;add_bottom(&quot;W_xh_x&quot;);
<span class="lineNum">     140 </span>            : 
<span class="lineNum">     141 </span><span class="lineNoCov">          0 :   LayerParameter output_concat_layer;</span>
<span class="lineNum">     142 </span><span class="lineNoCov">          0 :   output_concat_layer.set_name(&quot;o_concat&quot;);</span>
<span class="lineNum">     143 </span><span class="lineNoCov">          0 :   output_concat_layer.set_type(&quot;Concat&quot;);</span>
<span class="lineNum">     144 </span>            :   output_concat_layer.add_top(&quot;o&quot;);
<span class="lineNum">     145 </span><span class="lineNoCov">          0 :   output_concat_layer.mutable_concat_param()-&gt;set_axis(0);</span>
<span class="lineNum">     146 </span>            : 
<span class="lineNum">     147 </span><span class="lineNoCov">          0 :   for (int t = 1; t &lt;= this-&gt;T_; ++t) {</span>
<span class="lineNum">     148 </span><span class="lineNoCov">          0 :     string tm1s = format_int(t - 1);</span>
<span class="lineNum">     149 </span><span class="lineNoCov">          0 :     string ts = format_int(t);</span>
<span class="lineNum">     150 </span>            : 
<span class="lineNum">     151 </span><span class="lineNoCov">          0 :     cont_slice_param-&gt;add_top(&quot;cont_&quot; + ts);</span>
<span class="lineNum">     152 </span><span class="lineNoCov">          0 :     x_slice_param-&gt;add_top(&quot;W_xh_x_&quot; + ts);</span>
<span class="lineNum">     153 </span>            : 
<span class="lineNum">     154 </span>            :     // Add layer to flush the hidden state when beginning a new sequence,
<span class="lineNum">     155 </span>            :     // as indicated by cont_t.
<span class="lineNum">     156 </span>            :     //     h_conted_{t-1} := cont_t * h_{t-1}
<span class="lineNum">     157 </span>            :     //
<span class="lineNum">     158 </span>            :     // Normally, cont_t is binary (i.e., 0 or 1), so:
<span class="lineNum">     159 </span>            :     //     h_conted_{t-1} := h_{t-1} if cont_t == 1
<span class="lineNum">     160 </span>            :     //                       0   otherwise
<span class="lineNum">     161 </span>            :     {
<span class="lineNum">     162 </span>            :       LayerParameter* cont_h_param = net_param-&gt;add_layer();
<span class="lineNum">     163 </span><span class="lineNoCov">          0 :       cont_h_param-&gt;CopyFrom(scale_param);</span>
<span class="lineNum">     164 </span><span class="lineNoCov">          0 :       cont_h_param-&gt;set_name(&quot;h_conted_&quot; + tm1s);</span>
<span class="lineNum">     165 </span><span class="lineNoCov">          0 :       cont_h_param-&gt;add_bottom(&quot;h_&quot; + tm1s);</span>
<span class="lineNum">     166 </span><span class="lineNoCov">          0 :       cont_h_param-&gt;add_bottom(&quot;cont_&quot; + ts);</span>
<span class="lineNum">     167 </span><span class="lineNoCov">          0 :       cont_h_param-&gt;add_top(&quot;h_conted_&quot; + tm1s);</span>
<span class="lineNum">     168 </span>            :     }
<span class="lineNum">     169 </span>            : 
<span class="lineNum">     170 </span>            :     // Add layer to compute
<span class="lineNum">     171 </span>            :     //     W_hh_h_{t-1} := W_hh * h_conted_{t-1}
<span class="lineNum">     172 </span>            :     {
<span class="lineNum">     173 </span>            :       LayerParameter* w_param = net_param-&gt;add_layer();
<span class="lineNum">     174 </span><span class="lineNoCov">          0 :       w_param-&gt;CopyFrom(hidden_param);</span>
<span class="lineNum">     175 </span><span class="lineNoCov">          0 :       w_param-&gt;set_name(&quot;W_hh_h_&quot; + tm1s);</span>
<span class="lineNum">     176 </span><span class="lineNoCov">          0 :       w_param-&gt;add_param()-&gt;set_name(&quot;W_hh&quot;);</span>
<span class="lineNum">     177 </span><span class="lineNoCov">          0 :       w_param-&gt;add_bottom(&quot;h_conted_&quot; + tm1s);</span>
<span class="lineNum">     178 </span><span class="lineNoCov">          0 :       w_param-&gt;add_top(&quot;W_hh_h_&quot; + tm1s);</span>
<span class="lineNum">     179 </span><span class="lineNoCov">          0 :       w_param-&gt;mutable_inner_product_param()-&gt;set_axis(2);</span>
<span class="lineNum">     180 </span>            :     }
<span class="lineNum">     181 </span>            : 
<span class="lineNum">     182 </span>            :     // Add layers to compute
<span class="lineNum">     183 </span>            :     //     h_t := \tanh( W_hh * h_conted_{t-1} + W_xh * x_t + b_h )
<span class="lineNum">     184 </span>            :     //          = \tanh( W_hh_h_{t-1} + W_xh_t )
<span class="lineNum">     185 </span>            :     {
<span class="lineNum">     186 </span>            :       LayerParameter* h_input_sum_param = net_param-&gt;add_layer();
<span class="lineNum">     187 </span><span class="lineNoCov">          0 :       h_input_sum_param-&gt;CopyFrom(sum_param);</span>
<span class="lineNum">     188 </span><span class="lineNoCov">          0 :       h_input_sum_param-&gt;set_name(&quot;h_input_sum_&quot; + ts);</span>
<span class="lineNum">     189 </span><span class="lineNoCov">          0 :       h_input_sum_param-&gt;add_bottom(&quot;W_hh_h_&quot; + tm1s);</span>
<span class="lineNum">     190 </span><span class="lineNoCov">          0 :       h_input_sum_param-&gt;add_bottom(&quot;W_xh_x_&quot; + ts);</span>
<span class="lineNum">     191 </span><span class="lineNoCov">          0 :       if (this-&gt;static_input_) {</span>
<span class="lineNum">     192 </span>            :         h_input_sum_param-&gt;add_bottom(&quot;W_xh_x_static&quot;);
<span class="lineNum">     193 </span>            :       }
<span class="lineNum">     194 </span><span class="lineNoCov">          0 :       h_input_sum_param-&gt;add_top(&quot;h_neuron_input_&quot; + ts);</span>
<span class="lineNum">     195 </span>            :     }
<span class="lineNum">     196 </span>            :     {
<span class="lineNum">     197 </span>            :       LayerParameter* h_neuron_param = net_param-&gt;add_layer();
<span class="lineNum">     198 </span><span class="lineNoCov">          0 :       h_neuron_param-&gt;CopyFrom(tanh_param);</span>
<span class="lineNum">     199 </span><span class="lineNoCov">          0 :       h_neuron_param-&gt;set_name(&quot;h_neuron_&quot; + ts);</span>
<span class="lineNum">     200 </span><span class="lineNoCov">          0 :       h_neuron_param-&gt;add_bottom(&quot;h_neuron_input_&quot; + ts);</span>
<span class="lineNum">     201 </span><span class="lineNoCov">          0 :       h_neuron_param-&gt;add_top(&quot;h_&quot; + ts);</span>
<span class="lineNum">     202 </span>            :     }
<span class="lineNum">     203 </span>            : 
<span class="lineNum">     204 </span>            :     // Add layer to compute
<span class="lineNum">     205 </span>            :     //     W_ho_h_t := W_ho * h_t + b_o
<span class="lineNum">     206 </span>            :     {
<span class="lineNum">     207 </span>            :       LayerParameter* w_param = net_param-&gt;add_layer();
<span class="lineNum">     208 </span><span class="lineNoCov">          0 :       w_param-&gt;CopyFrom(biased_hidden_param);</span>
<span class="lineNum">     209 </span><span class="lineNoCov">          0 :       w_param-&gt;set_name(&quot;W_ho_h_&quot; + ts);</span>
<span class="lineNum">     210 </span><span class="lineNoCov">          0 :       w_param-&gt;add_param()-&gt;set_name(&quot;W_ho&quot;);</span>
<span class="lineNum">     211 </span><span class="lineNoCov">          0 :       w_param-&gt;add_param()-&gt;set_name(&quot;b_o&quot;);</span>
<span class="lineNum">     212 </span><span class="lineNoCov">          0 :       w_param-&gt;add_bottom(&quot;h_&quot; + ts);</span>
<span class="lineNum">     213 </span><span class="lineNoCov">          0 :       w_param-&gt;add_top(&quot;W_ho_h_&quot; + ts);</span>
<span class="lineNum">     214 </span><span class="lineNoCov">          0 :       w_param-&gt;mutable_inner_product_param()-&gt;set_axis(2);</span>
<span class="lineNum">     215 </span>            :     }
<span class="lineNum">     216 </span>            : 
<span class="lineNum">     217 </span>            :     // Add layers to compute
<span class="lineNum">     218 </span>            :     //     o_t := \tanh( W_ho * h_t + b_o)
<span class="lineNum">     219 </span>            :     //          = \tanh( W_ho_h_t )
<span class="lineNum">     220 </span>            :     {
<span class="lineNum">     221 </span>            :       LayerParameter* o_neuron_param = net_param-&gt;add_layer();
<span class="lineNum">     222 </span><span class="lineNoCov">          0 :       o_neuron_param-&gt;CopyFrom(tanh_param);</span>
<span class="lineNum">     223 </span><span class="lineNoCov">          0 :       o_neuron_param-&gt;set_name(&quot;o_neuron_&quot; + ts);</span>
<span class="lineNum">     224 </span><span class="lineNoCov">          0 :       o_neuron_param-&gt;add_bottom(&quot;W_ho_h_&quot; + ts);</span>
<span class="lineNum">     225 </span><span class="lineNoCov">          0 :       o_neuron_param-&gt;add_top(&quot;o_&quot; + ts);</span>
<span class="lineNum">     226 </span>            :     }
<span class="lineNum">     227 </span><span class="lineNoCov">          0 :     output_concat_layer.add_bottom(&quot;o_&quot; + ts);</span>
<span class="lineNum">     228 </span>            :   }  // for (int t = 1; t &lt;= this-&gt;T_; ++t)
<span class="lineNum">     229 </span>            : 
<span class="lineNum">     230 </span><span class="lineNoCov">          0 :   net_param-&gt;add_layer()-&gt;CopyFrom(output_concat_layer);</span>
<span class="lineNum">     231 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     232 </span>            : 
<span class="lineNum">     233 </span>            : INSTANTIATE_CLASS(RNNLayer);
<a name="234"><span class="lineNum">     234 </span><span class="lineCov">          3 : REGISTER_LAYER_CLASS(RNN);</span></a>
<span class="lineNum">     235 </span>            : 
<span class="lineNum">     236 </span><span class="lineCov">          3 : }  // namespace caffe</span>
</pre>
      </td>
    </tr>
  </table>
  <br>

  <!-- Report footer: generator attribution (LCOV version). -->
  <table width="100%" border="0" cellspacing="0" cellpadding="0">
    <tr><td class="ruler"><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>
    <tr><td class="versionInfo">Generated by: <a href="http://ltp.sourceforge.net/coverage/lcov.php" target="_parent">LCOV version 1.12</a></td></tr>
  </table>
  <br>

</body>
</html>