<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">

<html lang="en">

<head>
  <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
  <title>LCOV - code analysis - src/caffe/net.cpp</title>
  <link rel="stylesheet" type="text/css" href="../../gcov.css">
</head>

<body>

  <table width="100%" border="0" cellspacing="0" cellpadding="0">
    <tr><td class="title">LCOV - code coverage report</td></tr>
    <tr><td class="ruler"><img src="../../glass.png" width="3" height="3" alt=""></td></tr>

    <tr>
      <td width="100%">
        <table cellpadding="1" border="0" width="100%">
          <tr>
            <td width="10%" class="headerItem">Current view:</td>
            <td width="35%" class="headerValue"><a href="../../index.html">top level</a> - <a href="index.html">src/caffe</a> - net.cpp<span style="font-size: 80%;"> (source / <a href="net.cpp.func-sort-c.html">functions</a>)</span></td>
            <td width="5%"></td>
            <td width="15%"></td>
            <td width="10%" class="headerCovTableHead">Hit</td>
            <td width="10%" class="headerCovTableHead">Total</td>
            <td width="15%" class="headerCovTableHead">Coverage</td>
          </tr>
          <tr>
            <td class="headerItem">Test:</td>
            <td class="headerValue">code analysis</td>
            <td></td>
            <td class="headerItem">Lines:</td>
            <td class="headerCovTableEntry">233</td>
            <td class="headerCovTableEntry">486</td>
            <td class="headerCovTableEntryLo">47.9 %</td>
          </tr>
          <tr>
            <td class="headerItem">Date:</td>
            <td class="headerValue">2020-09-11 22:50:33</td>
            <td></td>
            <td class="headerItem">Functions:</td>
            <td class="headerCovTableEntry">17</td>
            <td class="headerCovTableEntry">71</td>
            <td class="headerCovTableEntryLo">23.9 %</td>
          </tr>
          <tr>
            <td class="headerItem">Legend:</td>
            <td class="headerValueLeg">Lines:
              <span class="coverLegendCov">hit</span>
              <span class="coverLegendNoCov">not hit</span>
            </td>
            <td></td>
          </tr>
          <tr><td><img src="../../glass.png" width="3" height="3" alt=""></td></tr>
        </table>
      </td>
    </tr>

    <tr><td class="ruler"><img src="../../glass.png" width="3" height="3" alt=""></td></tr>
  </table>

  <table cellpadding="0" cellspacing="0" border="0">
    <tr>
      <td><br></td>
    </tr>
    <tr>
      <td>
<pre class="sourceHeading">          Line data    Source code</pre>
<pre class="source">
<a name="1"><span class="lineNum">       1 </span>            : #include &lt;algorithm&gt;</a>
<span class="lineNum">       2 </span>            : #include &lt;map&gt;
<span class="lineNum">       3 </span>            : #include &lt;set&gt;
<span class="lineNum">       4 </span>            : #include &lt;string&gt;
<span class="lineNum">       5 </span>            : #include &lt;utility&gt;
<span class="lineNum">       6 </span>            : #include &lt;vector&gt;
<span class="lineNum">       7 </span>            : 
<span class="lineNum">       8 </span>            : #ifdef USE_HDF5
<span class="lineNum">       9 </span>            : #include &quot;hdf5.h&quot;
<span class="lineNum">      10 </span>            : #endif  // USE_HDF5
<span class="lineNum">      11 </span>            : 
<span class="lineNum">      12 </span>            : #include &quot;caffe/common.hpp&quot;
<span class="lineNum">      13 </span>            : #include &quot;caffe/layer.hpp&quot;
<span class="lineNum">      14 </span>            : #include &quot;caffe/net.hpp&quot;
<span class="lineNum">      15 </span>            : #include &quot;caffe/parallel.hpp&quot;
<span class="lineNum">      16 </span>            : #include &quot;caffe/proto/caffe.pb.h&quot;
<span class="lineNum">      17 </span>            : #include &quot;caffe/util/hdf5.hpp&quot;
<span class="lineNum">      18 </span>            : #include &quot;caffe/util/insert_splits.hpp&quot;
<span class="lineNum">      19 </span>            : #include &quot;caffe/util/math_functions.hpp&quot;
<span class="lineNum">      20 </span>            : #include &quot;caffe/util/upgrade_proto.hpp&quot;
<span class="lineNum">      21 </span>            : 
<span class="lineNum">      22 </span>            : namespace caffe {
<span class="lineNum">      23 </span>            : 
<span class="lineNum">      24 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      25 </span><span class="lineCov">          2 : Net&lt;Dtype&gt;::Net(const NetParameter&amp; param) {</span>
<span class="lineNum">      26 </span><span class="lineCov">          2 :   Init(param);</span>
<span class="lineNum">      27 </span><span class="lineCov">          2 : }</span>
<span class="lineNum">      28 </span>            : 
<span class="lineNum">      29 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      30 </span><span class="lineNoCov">          0 : Net&lt;Dtype&gt;::Net(const string&amp; param_file, Phase phase,</span>
<span class="lineNum">      31 </span><span class="lineNoCov">          0 :     const int level, const vector&lt;string&gt;* stages) {</span>
<span class="lineNum">      32 </span><span class="lineNoCov">          0 :   NetParameter param;</span>
<span class="lineNum">      33 </span><span class="lineNoCov">          0 :   ReadNetParamsFromTextFileOrDie(param_file, &amp;param);</span>
<span class="lineNum">      34 </span>            :   // Set phase, stages and level
<span class="lineNum">      35 </span><span class="lineNoCov">          0 :   param.mutable_state()-&gt;set_phase(phase);</span>
<span class="lineNum">      36 </span><span class="lineNoCov">          0 :   if (stages != NULL) {</span>
<span class="lineNum">      37 </span><span class="lineNoCov">          0 :     for (int i = 0; i &lt; stages-&gt;size(); i++) {</span>
<span class="lineNum">      38 </span><span class="lineNoCov">          0 :       param.mutable_state()-&gt;add_stage((*stages)[i]);</span>
<span class="lineNum">      39 </span>            :     }
<span class="lineNum">      40 </span>            :   }
<span class="lineNum">      41 </span><span class="lineNoCov">          0 :   param.mutable_state()-&gt;set_level(level);</span>
<span class="lineNum">      42 </span><span class="lineNoCov">          0 :   Init(param);</span>
<span class="lineNum">      43 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">      44 </span>            : 
<span class="lineNum">      45 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      46 </span><span class="lineCov">          2 : void Net&lt;Dtype&gt;::Init(const NetParameter&amp; in_param) {</span>
<span class="lineNum">      47 </span>            :   // Set phase from the state.
<span class="lineNum">      48 </span><span class="lineCov">          2 :   phase_ = in_param.state().phase();</span>
<span class="lineNum">      49 </span>            :   // Filter layers based on their include/exclude rules and
<span class="lineNum">      50 </span>            :   // the current NetState.
<span class="lineNum">      51 </span><span class="lineCov">          4 :   NetParameter filtered_param;</span>
<span class="lineNum">      52 </span><span class="lineCov">          2 :   FilterNet(in_param, &amp;filtered_param);</span>
<span class="lineNum">      53 </span><span class="lineCov">          6 :   LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">      54 </span>            :       &lt;&lt; &quot;Initializing net from parameters: &quot; &lt;&lt; std::endl
<span class="lineNum">      55 </span><span class="lineCov">          6 :       &lt;&lt; filtered_param.DebugString();</span>
<span class="lineNum">      56 </span>            :   // Create a copy of filtered_param with splits added where necessary.
<span class="lineNum">      57 </span><span class="lineCov">          4 :   NetParameter param;</span>
<span class="lineNum">      58 </span><span class="lineCov">          2 :   InsertSplits(filtered_param, &amp;param);</span>
<span class="lineNum">      59 </span>            :   // Basically, build all the layers and set up their connections.
<span class="lineNum">      60 </span><span class="lineCov">          2 :   name_ = param.name();</span>
<span class="lineNum">      61 </span>            :   map&lt;string, int&gt; blob_name_to_idx;
<span class="lineNum">      62 </span>            :   set&lt;string&gt; available_blobs;
<span class="lineNum">      63 </span><span class="lineCov">          2 :   memory_used_ = 0;</span>
<span class="lineNum">      64 </span>            :   // For each layer, set up its input and output
<span class="lineNum">      65 </span><span class="lineCov">          4 :   bottom_vecs_.resize(param.layer_size());</span>
<span class="lineNum">      66 </span><span class="lineCov">          4 :   top_vecs_.resize(param.layer_size());</span>
<span class="lineNum">      67 </span><span class="lineCov">          4 :   bottom_id_vecs_.resize(param.layer_size());</span>
<span class="lineNum">      68 </span><span class="lineCov">          4 :   param_id_vecs_.resize(param.layer_size());</span>
<span class="lineNum">      69 </span><span class="lineCov">          4 :   top_id_vecs_.resize(param.layer_size());</span>
<span class="lineNum">      70 </span><span class="lineCov">          4 :   bottom_need_backward_.resize(param.layer_size());</span>
<span class="lineNum">      71 </span><span class="lineCov">         44 :   for (int layer_id = 0; layer_id &lt; param.layer_size(); ++layer_id) {</span>
<span class="lineNum">      72 </span>            :     // Inherit phase from net if unset.
<span class="lineNum">      73 </span><span class="lineCov">         21 :     if (!param.layer(layer_id).has_phase()) {</span>
<span class="lineNum">      74 </span><span class="lineCov">         21 :       param.mutable_layer(layer_id)-&gt;set_phase(phase_);</span>
<span class="lineNum">      75 </span>            :     }
<span class="lineNum">      76 </span>            :     // Setup layer.
<span class="lineNum">      77 </span>            :     const LayerParameter&amp; layer_param = param.layer(layer_id);
<span class="lineNum">      78 </span><span class="lineCov">         21 :     if (layer_param.propagate_down_size() &gt; 0) {</span>
<span class="lineNum">      79 </span><span class="lineNoCov">          0 :       CHECK_EQ(layer_param.propagate_down_size(),</span>
<span class="lineNum">      80 </span>            :           layer_param.bottom_size())
<span class="lineNum">      81 </span>            :           &lt;&lt; &quot;propagate_down param must be specified &quot;
<span class="lineNum">      82 </span>            :           &lt;&lt; &quot;either 0 or bottom_size times &quot;;
<span class="lineNum">      83 </span>            :     }
<span class="lineNum">      84 </span><span class="lineCov">         42 :     layers_.push_back(LayerRegistry&lt;Dtype&gt;::CreateLayer(layer_param));</span>
<span class="lineNum">      85 </span><span class="lineCov">         21 :     layer_names_.push_back(layer_param.name());</span>
<span class="lineNum">      86 </span><span class="lineCov">         42 :     LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">      87 </span>            :         &lt;&lt; &quot;Creating Layer &quot; &lt;&lt; layer_param.name();
<span class="lineNum">      88 </span>            :     bool need_backward = false;
<span class="lineNum">      89 </span>            : 
<span class="lineNum">      90 </span>            :     // Figure out this layer's input and output
<span class="lineNum">      91 </span><span class="lineCov">         65 :     for (int bottom_id = 0; bottom_id &lt; layer_param.bottom_size();</span>
<span class="lineNum">      92 </span>            :          ++bottom_id) {
<span class="lineNum">      93 </span>            :       const int blob_id = AppendBottom(param, layer_id, bottom_id,
<span class="lineNum">      94 </span><span class="lineCov">         22 :                                        &amp;available_blobs, &amp;blob_name_to_idx);</span>
<span class="lineNum">      95 </span>            :       // If a blob needs backward, this layer should provide it.
<span class="lineNum">      96 </span><span class="lineCov">         22 :       need_backward |= blob_need_backward_[blob_id];</span>
<span class="lineNum">      97 </span>            :     }
<span class="lineNum">      98 </span>            :     int num_top = layer_param.top_size();
<span class="lineNum">      99 </span><span class="lineCov">         71 :     for (int top_id = 0; top_id &lt; num_top; ++top_id) {</span>
<span class="lineNum">     100 </span><span class="lineCov">         25 :       AppendTop(param, layer_id, top_id, &amp;available_blobs, &amp;blob_name_to_idx);</span>
<span class="lineNum">     101 </span>            :       // Collect Input layer tops as Net inputs.
<span class="lineNum">     102 </span><span class="lineCov">         25 :       if (layer_param.type() == &quot;Input&quot;) {</span>
<span class="lineNum">     103 </span><span class="lineNoCov">          0 :         const int blob_id = blobs_.size() - 1;</span>
<span class="lineNum">     104 </span><span class="lineNoCov">          0 :         net_input_blob_indices_.push_back(blob_id);</span>
<span class="lineNum">     105 </span><span class="lineNoCov">          0 :         net_input_blobs_.push_back(blobs_[blob_id].get());</span>
<span class="lineNum">     106 </span>            :       }
<span class="lineNum">     107 </span>            :     }
<span class="lineNum">     108 </span>            :     // If the layer specifies that AutoTopBlobs() -&gt; true and the LayerParameter
<span class="lineNum">     109 </span>            :     // specified fewer than the required number (as specified by
<span class="lineNum">     110 </span>            :     // ExactNumTopBlobs() or MinTopBlobs()), allocate them here.
<span class="lineNum">     111 </span>            :     Layer&lt;Dtype&gt;* layer = layers_[layer_id].get();
<span class="lineNum">     112 </span><span class="lineCov">         21 :     if (layer-&gt;AutoTopBlobs()) {</span>
<span class="lineNum">     113 </span>            :       const int needed_num_top =
<span class="lineNum">     114 </span><span class="lineCov">          4 :           std::max(layer-&gt;MinTopBlobs(), layer-&gt;ExactNumTopBlobs());</span>
<span class="lineNum">     115 </span><span class="lineCov">          2 :       for (; num_top &lt; needed_num_top; ++num_top) {</span>
<span class="lineNum">     116 </span>            :         // Add &quot;anonymous&quot; top blobs -- do not modify available_blobs or
<span class="lineNum">     117 </span>            :         // blob_name_to_idx as we don't want these blobs to be usable as input
<span class="lineNum">     118 </span>            :         // to other layers.
<span class="lineNum">     119 </span><span class="lineNoCov">          0 :         AppendTop(param, layer_id, num_top, NULL, NULL);</span>
<span class="lineNum">     120 </span>            :       }
<span class="lineNum">     121 </span>            :     }
<span class="lineNum">     122 </span>            :     // After this layer is connected, set it up.
<span class="lineNum">     123 </span><span class="lineCov">         21 :     layers_[layer_id]-&gt;SetUp(bottom_vecs_[layer_id], top_vecs_[layer_id]);</span>
<span class="lineNum">     124 </span><span class="lineCov">         63 :     LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     125 </span>            :         &lt;&lt; &quot;Setting up &quot; &lt;&lt; layer_names_[layer_id];
<span class="lineNum">     126 </span><span class="lineCov">        117 :     for (int top_id = 0; top_id &lt; top_vecs_[layer_id].size(); ++top_id) {</span>
<span class="lineNum">     127 </span><span class="lineCov">         25 :       if (blob_loss_weights_.size() &lt;= top_id_vecs_[layer_id][top_id]) {</span>
<span class="lineNum">     128 </span><span class="lineCov">         23 :         blob_loss_weights_.resize(top_id_vecs_[layer_id][top_id] + 1, Dtype(0));</span>
<span class="lineNum">     129 </span>            :       }
<span class="lineNum">     130 </span><span class="lineCov">         50 :       blob_loss_weights_[top_id_vecs_[layer_id][top_id]] = layer-&gt;loss(top_id);</span>
<span class="lineNum">     131 </span><span class="lineCov">         75 :       LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     132 </span><span class="lineCov">         75 :           &lt;&lt; &quot;Top shape: &quot; &lt;&lt; top_vecs_[layer_id][top_id]-&gt;shape_string();</span>
<span class="lineNum">     133 </span><span class="lineCov">         25 :       if (layer-&gt;loss(top_id)) {</span>
<span class="lineNum">     134 </span><span class="lineCov">          6 :         LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     135 </span>            :             &lt;&lt; &quot;    with loss weight &quot; &lt;&lt; layer-&gt;loss(top_id);
<span class="lineNum">     136 </span>            :       }
<span class="lineNum">     137 </span><span class="lineCov">         25 :       memory_used_ += top_vecs_[layer_id][top_id]-&gt;count();</span>
<span class="lineNum">     138 </span>            :     }
<span class="lineNum">     139 </span><span class="lineCov">         63 :     LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     140 </span><span class="lineCov">         21 :         &lt;&lt; &quot;Memory required for data: &quot; &lt;&lt; memory_used_ * sizeof(Dtype);</span>
<span class="lineNum">     141 </span>            :     const int param_size = layer_param.param_size();
<span class="lineNum">     142 </span><span class="lineCov">         21 :     const int num_param_blobs = layers_[layer_id]-&gt;blobs().size();</span>
<span class="lineNum">     143 </span><span class="lineCov">         21 :     CHECK_LE(param_size, num_param_blobs)</span>
<span class="lineNum">     144 </span>            :         &lt;&lt; &quot;Too many params specified for layer &quot; &lt;&lt; layer_param.name();
<span class="lineNum">     145 </span><span class="lineCov">         42 :     ParamSpec default_param_spec;</span>
<span class="lineNum">     146 </span><span class="lineCov">         53 :     for (int param_id = 0; param_id &lt; num_param_blobs; ++param_id) {</span>
<span class="lineNum">     147 </span>            :       const ParamSpec* param_spec = (param_id &lt; param_size) ?
<span class="lineNum">     148 </span><span class="lineCov">         16 :           &amp;layer_param.param(param_id) : &amp;default_param_spec;</span>
<span class="lineNum">     149 </span><span class="lineCov">         16 :       const bool param_need_backward = param_spec-&gt;lr_mult() != 0;</span>
<span class="lineNum">     150 </span><span class="lineCov">         16 :       need_backward |= param_need_backward;</span>
<span class="lineNum">     151 </span><span class="lineCov">         32 :       layers_[layer_id]-&gt;set_param_propagate_down(param_id,</span>
<span class="lineNum">     152 </span>            :                                                   param_need_backward);
<span class="lineNum">     153 </span>            :     }
<span class="lineNum">     154 </span><span class="lineCov">         53 :     for (int param_id = 0; param_id &lt; num_param_blobs; ++param_id) {</span>
<span class="lineNum">     155 </span><span class="lineCov">         16 :       AppendParam(param, layer_id, param_id);</span>
<span class="lineNum">     156 </span>            :     }
<span class="lineNum">     157 </span>            :     // Finally, set the backward flag
<span class="lineNum">     158 </span><span class="lineCov">         21 :     layer_need_backward_.push_back(need_backward);</span>
<span class="lineNum">     159 </span><span class="lineCov">         21 :     if (need_backward) {</span>
<span class="lineNum">     160 </span><span class="lineCov">         93 :       for (int top_id = 0; top_id &lt; top_id_vecs_[layer_id].size(); ++top_id) {</span>
<span class="lineNum">     161 </span><span class="lineCov">         19 :         blob_need_backward_[top_id_vecs_[layer_id][top_id]] = true;</span>
<span class="lineNum">     162 </span>            :       }
<span class="lineNum">     163 </span>            :     }
<span class="lineNum">     164 </span>            :   }
<span class="lineNum">     165 </span>            :   // Go through the net backwards to determine which blobs contribute to the
<span class="lineNum">     166 </span>            :   // loss.  We can skip backward computation for blobs that don't contribute
<span class="lineNum">     167 </span>            :   // to the loss.
<span class="lineNum">     168 </span>            :   // Also checks if all bottom blobs don't need backward computation (possible
<span class="lineNum">     169 </span>            :   // because the skip_propagate_down param) and so we can skip backward
<span class="lineNum">     170 </span>            :   // computation for the entire layer
<span class="lineNum">     171 </span>            :   set&lt;string&gt; blobs_under_loss;
<span class="lineNum">     172 </span>            :   set&lt;string&gt; blobs_skip_backp;
<span class="lineNum">     173 </span><span class="lineCov">         23 :   for (int layer_id = layers_.size() - 1; layer_id &gt;= 0; --layer_id) {</span>
<span class="lineNum">     174 </span>            :     bool layer_contributes_loss = false;
<span class="lineNum">     175 </span>            :     bool layer_skip_propagate_down = true;
<span class="lineNum">     176 </span><span class="lineCov">         66 :     for (int top_id = 0; top_id &lt; top_vecs_[layer_id].size(); ++top_id) {</span>
<span class="lineNum">     177 </span><span class="lineCov">         25 :       const string&amp; blob_name = blob_names_[top_id_vecs_[layer_id][top_id]];</span>
<span class="lineNum">     178 </span><span class="lineCov">         48 :       if (layers_[layer_id]-&gt;loss(top_id) ||</span>
<span class="lineNum">     179 </span>            :           (blobs_under_loss.find(blob_name) != blobs_under_loss.end())) {
<span class="lineNum">     180 </span>            :         layer_contributes_loss = true;
<span class="lineNum">     181 </span>            :       }
<span class="lineNum">     182 </span><span class="lineCov">         25 :       if (blobs_skip_backp.find(blob_name) == blobs_skip_backp.end()) {</span>
<span class="lineNum">     183 </span>            :         layer_skip_propagate_down = false;
<span class="lineNum">     184 </span>            :       }
<span class="lineNum">     185 </span><span class="lineCov">         25 :       if (layer_contributes_loss &amp;&amp; !layer_skip_propagate_down)</span>
<span class="lineNum">     186 </span>            :         break;
<span class="lineNum">     187 </span>            :     }
<span class="lineNum">     188 </span>            :     // If this layer can skip backward computation, also all his bottom blobs
<span class="lineNum">     189 </span>            :     // don't need backpropagation
<span class="lineNum">     190 </span><span class="lineCov">         21 :     if (layer_need_backward_[layer_id] &amp;&amp; layer_skip_propagate_down) {</span>
<span class="lineNum">     191 </span>            :       layer_need_backward_[layer_id] = false;
<span class="lineNum">     192 </span><span class="lineNoCov">          0 :       for (int bottom_id = 0; bottom_id &lt; bottom_vecs_[layer_id].size();</span>
<span class="lineNum">     193 </span>            :                ++bottom_id) {
<span class="lineNum">     194 </span>            :         bottom_need_backward_[layer_id][bottom_id] = false;
<span class="lineNum">     195 </span>            :       }
<span class="lineNum">     196 </span>            :     }
<span class="lineNum">     197 </span><span class="lineCov">         21 :     if (!layer_contributes_loss) { layer_need_backward_[layer_id] = false; }</span>
<span class="lineNum">     198 </span><span class="lineCov">         21 :     if (Caffe::root_solver()) {</span>
<span class="lineNum">     199 </span><span class="lineCov">         21 :       if (layer_need_backward_[layer_id]) {</span>
<span class="lineNum">     200 </span><span class="lineCov">         34 :         LOG(INFO) &lt;&lt; layer_names_[layer_id] &lt;&lt; &quot; needs backward computation.&quot;;</span>
<span class="lineNum">     201 </span>            :       } else {
<span class="lineNum">     202 </span><span class="lineCov">          8 :         LOG(INFO) &lt;&lt; layer_names_[layer_id]</span>
<span class="lineNum">     203 </span>            :             &lt;&lt; &quot; does not need backward computation.&quot;;
<span class="lineNum">     204 </span>            :       }
<span class="lineNum">     205 </span>            :     }
<span class="lineNum">     206 </span><span class="lineCov">        108 :     for (int bottom_id = 0; bottom_id &lt; bottom_vecs_[layer_id].size();</span>
<span class="lineNum">     207 </span>            :          ++bottom_id) {
<span class="lineNum">     208 </span><span class="lineCov">         22 :       if (layer_contributes_loss) {</span>
<span class="lineNum">     209 </span>            :         const string&amp; blob_name =
<span class="lineNum">     210 </span><span class="lineCov">         20 :             blob_names_[bottom_id_vecs_[layer_id][bottom_id]];</span>
<span class="lineNum">     211 </span>            :         blobs_under_loss.insert(blob_name);
<span class="lineNum">     212 </span>            :       } else {
<span class="lineNum">     213 </span>            :         bottom_need_backward_[layer_id][bottom_id] = false;
<span class="lineNum">     214 </span>            :       }
<span class="lineNum">     215 </span><span class="lineCov">         22 :       if (!bottom_need_backward_[layer_id][bottom_id]) {</span>
<span class="lineNum">     216 </span>            :         const string&amp; blob_name =
<span class="lineNum">     217 </span><span class="lineCov">          7 :                    blob_names_[bottom_id_vecs_[layer_id][bottom_id]];</span>
<span class="lineNum">     218 </span>            :         blobs_skip_backp.insert(blob_name);
<span class="lineNum">     219 </span>            :       }
<span class="lineNum">     220 </span>            :     }
<span class="lineNum">     221 </span>            :   }
<span class="lineNum">     222 </span>            :   // Handle force_backward if needed.
<span class="lineNum">     223 </span><span class="lineCov">          2 :   if (param.force_backward()) {</span>
<span class="lineNum">     224 </span><span class="lineNoCov">          0 :     for (int layer_id = 0; layer_id &lt; layers_.size(); ++layer_id) {</span>
<span class="lineNum">     225 </span>            :       layer_need_backward_[layer_id] = true;
<span class="lineNum">     226 </span><span class="lineNoCov">          0 :       for (int bottom_id = 0;</span>
<span class="lineNum">     227 </span>            :            bottom_id &lt; bottom_need_backward_[layer_id].size(); ++bottom_id) {
<span class="lineNum">     228 </span><span class="lineNoCov">          0 :         bottom_need_backward_[layer_id][bottom_id] =</span>
<span class="lineNum">     229 </span><span class="lineNoCov">          0 :             bottom_need_backward_[layer_id][bottom_id] ||</span>
<span class="lineNum">     230 </span>            :             layers_[layer_id]-&gt;AllowForceBackward(bottom_id);
<span class="lineNum">     231 </span><span class="lineNoCov">          0 :         blob_need_backward_[bottom_id_vecs_[layer_id][bottom_id]] =</span>
<span class="lineNum">     232 </span><span class="lineNoCov">          0 :             blob_need_backward_[bottom_id_vecs_[layer_id][bottom_id]] ||</span>
<span class="lineNum">     233 </span>            :             bottom_need_backward_[layer_id][bottom_id];
<span class="lineNum">     234 </span>            :       }
<span class="lineNum">     235 </span><span class="lineNoCov">          0 :       for (int param_id = 0; param_id &lt; layers_[layer_id]-&gt;blobs().size();</span>
<span class="lineNum">     236 </span>            :            ++param_id) {
<span class="lineNum">     237 </span><span class="lineNoCov">          0 :         layers_[layer_id]-&gt;set_param_propagate_down(param_id, true);</span>
<span class="lineNum">     238 </span>            :       }
<span class="lineNum">     239 </span>            :     }
<span class="lineNum">     240 </span>            :   }
<span class="lineNum">     241 </span>            :   // In the end, all remaining blobs are considered output blobs.
<span class="lineNum">     242 </span><span class="lineCov">          5 :   for (set&lt;string&gt;::iterator it = available_blobs.begin();</span>
<span class="lineNum">     243 </span>            :       it != available_blobs.end(); ++it) {
<span class="lineNum">     244 </span><span class="lineCov">          6 :     LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     245 </span>            :         &lt;&lt; &quot;This network produces output &quot; &lt;&lt; *it;
<span class="lineNum">     246 </span><span class="lineCov">          6 :     net_output_blobs_.push_back(blobs_[blob_name_to_idx[*it]].get());</span>
<span class="lineNum">     247 </span><span class="lineCov">          3 :     net_output_blob_indices_.push_back(blob_name_to_idx[*it]);</span>
<span class="lineNum">     248 </span>            :   }
<span class="lineNum">     249 </span><span class="lineCov">         48 :   for (size_t blob_id = 0; blob_id &lt; blob_names_.size(); ++blob_id) {</span>
<span class="lineNum">     250 </span><span class="lineCov">         23 :     blob_names_index_[blob_names_[blob_id]] = blob_id;</span>
<span class="lineNum">     251 </span>            :   }
<span class="lineNum">     252 </span><span class="lineCov">         44 :   for (size_t layer_id = 0; layer_id &lt; layer_names_.size(); ++layer_id) {</span>
<span class="lineNum">     253 </span><span class="lineCov">         21 :     layer_names_index_[layer_names_[layer_id]] = layer_id;</span>
<span class="lineNum">     254 </span>            :   }
<span class="lineNum">     255 </span><span class="lineCov">          2 :   ShareWeights();</span>
<span class="lineNum">     256 </span><span class="lineCov">          2 :   debug_info_ = param.debug_info();</span>
<span class="lineNum">     257 </span><span class="lineCov">          4 :   LOG_IF(INFO, Caffe::root_solver()) &lt;&lt; &quot;Network initialization done.&quot;;</span>
<span class="lineNum">     258 </span><span class="lineCov">          2 : }</span>
<a name="259"><span class="lineNum">     259 </span>            : </a>
<span class="lineNum">     260 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     261 </span><span class="lineCov">          2 : void Net&lt;Dtype&gt;::FilterNet(const NetParameter&amp; param,</span>
<span class="lineNum">     262 </span>            :     NetParameter* param_filtered) {
<span class="lineNum">     263 </span><span class="lineCov">          4 :   NetState net_state(param.state());</span>
<span class="lineNum">     264 </span><span class="lineCov">          2 :   param_filtered-&gt;CopyFrom(param);</span>
<span class="lineNum">     265 </span>            :   param_filtered-&gt;clear_layer();
<span class="lineNum">     266 </span><span class="lineCov">         46 :   for (int i = 0; i &lt; param.layer_size(); ++i) {</span>
<span class="lineNum">     267 </span>            :     const LayerParameter&amp; layer_param = param.layer(i);
<span class="lineNum">     268 </span>            :     const string&amp; layer_name = layer_param.name();
<span class="lineNum">     269 </span><span class="lineCov">         44 :     CHECK(layer_param.include_size() == 0 || layer_param.exclude_size() == 0)</span>
<span class="lineNum">     270 </span>            :           &lt;&lt; &quot;Specify either include rules or exclude rules; not both.&quot;;
<span class="lineNum">     271 </span>            :     // If no include rules are specified, the layer is included by default and
<span class="lineNum">     272 </span>            :     // only excluded if it meets one of the exclude rules.
<span class="lineNum">     273 </span><span class="lineCov">         22 :     bool layer_included = (layer_param.include_size() == 0);</span>
<span class="lineNum">     274 </span><span class="lineCov">         22 :     for (int j = 0; layer_included &amp;&amp; j &lt; layer_param.exclude_size(); ++j) {</span>
<span class="lineNum">     275 </span><span class="lineNoCov">          0 :       if (StateMeetsRule(net_state, layer_param.exclude(j), layer_name)) {</span>
<span class="lineNum">     276 </span>            :         layer_included = false;
<span class="lineNum">     277 </span>            :       }
<span class="lineNum">     278 </span>            :     }
<span class="lineNum">     279 </span><span class="lineCov">         34 :     for (int j = 0; !layer_included &amp;&amp; j &lt; layer_param.include_size(); ++j) {</span>
<span class="lineNum">     280 </span><span class="lineCov">          6 :       if (StateMeetsRule(net_state, layer_param.include(j), layer_name)) {</span>
<span class="lineNum">     281 </span>            :         layer_included = true;
<span class="lineNum">     282 </span>            :       }
<span class="lineNum">     283 </span>            :     }
<span class="lineNum">     284 </span><span class="lineCov">         22 :     if (layer_included) {</span>
<span class="lineNum">     285 </span><span class="lineCov">         19 :       param_filtered-&gt;add_layer()-&gt;CopyFrom(layer_param);</span>
<span class="lineNum">     286 </span>            :     }
<span class="lineNum">     287 </span>            :   }
<span class="lineNum">     288 </span><span class="lineCov">          2 : }</span>
<a name="289"><span class="lineNum">     289 </span>            : </a>
<span class="lineNum">     290 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     291 </span><span class="lineCov">          6 : bool Net&lt;Dtype&gt;::StateMeetsRule(const NetState&amp; state,</span>
<span class="lineNum">     292 </span>            :     const NetStateRule&amp; rule, const string&amp; layer_name) {
<span class="lineNum">     293 </span>            :   // Check whether the rule is broken due to phase.
<span class="lineNum">     294 </span><span class="lineCov">          6 :   if (rule.has_phase()) {</span>
<span class="lineNum">     295 </span><span class="lineCov">          6 :       if (rule.phase() != state.phase()) {</span>
<span class="lineNum">     296 </span><span class="lineCov">          6 :         LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     297 </span><span class="lineCov">          3 :             &lt;&lt; &quot;The NetState phase (&quot; &lt;&lt; state.phase()</span>
<span class="lineNum">     298 </span><span class="lineCov">          3 :             &lt;&lt; &quot;) differed from the phase (&quot; &lt;&lt; rule.phase()</span>
<span class="lineNum">     299 </span>            :             &lt;&lt; &quot;) specified by a rule in layer &quot; &lt;&lt; layer_name;
<span class="lineNum">     300 </span>            :         return false;
<span class="lineNum">     301 </span>            :       }
<span class="lineNum">     302 </span>            :   }
<span class="lineNum">     303 </span>            :   // Check whether the rule is broken due to min level.
<span class="lineNum">     304 </span><span class="lineCov">          3 :   if (rule.has_min_level()) {</span>
<span class="lineNum">     305 </span><span class="lineNoCov">          0 :     if (state.level() &lt; rule.min_level()) {</span>
<span class="lineNum">     306 </span><span class="lineNoCov">          0 :       LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     307 </span><span class="lineNoCov">          0 :           &lt;&lt; &quot;The NetState level (&quot; &lt;&lt; state.level()</span>
<span class="lineNum">     308 </span><span class="lineNoCov">          0 :           &lt;&lt; &quot;) is below the min_level (&quot; &lt;&lt; rule.min_level()</span>
<span class="lineNum">     309 </span>            :           &lt;&lt; &quot;) specified by a rule in layer &quot; &lt;&lt; layer_name;
<span class="lineNum">     310 </span>            :       return false;
<span class="lineNum">     311 </span>            :     }
<span class="lineNum">     312 </span>            :   }
<span class="lineNum">     313 </span>            :   // Check whether the rule is broken due to max level.
<span class="lineNum">     314 </span><span class="lineCov">          3 :   if (rule.has_max_level()) {</span>
<span class="lineNum">     315 </span><span class="lineNoCov">          0 :     if (state.level() &gt; rule.max_level()) {</span>
<span class="lineNum">     316 </span><span class="lineNoCov">          0 :       LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     317 </span><span class="lineNoCov">          0 :           &lt;&lt; &quot;The NetState level (&quot; &lt;&lt; state.level()</span>
<span class="lineNum">     318 </span><span class="lineNoCov">          0 :           &lt;&lt; &quot;) is above the max_level (&quot; &lt;&lt; rule.max_level()</span>
<span class="lineNum">     319 </span>            :           &lt;&lt; &quot;) specified by a rule in layer &quot; &lt;&lt; layer_name;
<span class="lineNum">     320 </span>            :       return false;
<span class="lineNum">     321 </span>            :     }
<span class="lineNum">     322 </span>            :   }
<span class="lineNum">     323 </span>            :   // Check whether the rule is broken due to stage. The NetState must
<span class="lineNum">     324 </span>            :   // contain ALL of the rule's stages to meet it.
<span class="lineNum">     325 </span><span class="lineCov">          3 :   for (int i = 0; i &lt; rule.stage_size(); ++i) {</span>
<span class="lineNum">     326 </span>            :     // Check that the NetState contains the rule's ith stage.
<span class="lineNum">     327 </span>            :     bool has_stage = false;
<span class="lineNum">     328 </span><span class="lineNoCov">          0 :     for (int j = 0; !has_stage &amp;&amp; j &lt; state.stage_size(); ++j) {</span>
<span class="lineNum">     329 </span><span class="lineNoCov">          0 :       if (rule.stage(i) == state.stage(j)) { has_stage = true; }</span>
<span class="lineNum">     330 </span>            :     }
<span class="lineNum">     331 </span><span class="lineNoCov">          0 :     if (!has_stage) {</span>
<span class="lineNum">     332 </span><span class="lineNoCov">          0 :       LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     333 </span>            :           &lt;&lt; &quot;The NetState did not contain stage '&quot; &lt;&lt; rule.stage(i)
<span class="lineNum">     334 </span>            :           &lt;&lt; &quot;' specified by a rule in layer &quot; &lt;&lt; layer_name;
<span class="lineNum">     335 </span>            :       return false;
<span class="lineNum">     336 </span>            :     }
<span class="lineNum">     337 </span>            :   }
<span class="lineNum">     338 </span>            :   // Check whether the rule is broken due to not_stage. The NetState must
<span class="lineNum">     339 </span>            :   // contain NONE of the rule's not_stages to meet it.
<span class="lineNum">     340 </span><span class="lineCov">          3 :   for (int i = 0; i &lt; rule.not_stage_size(); ++i) {</span>
<span class="lineNum">     341 </span>            :     // Check that the NetState contains the rule's ith not_stage.
<span class="lineNum">     342 </span>            :     bool has_stage = false;
<span class="lineNum">     343 </span><span class="lineNoCov">          0 :     for (int j = 0; !has_stage &amp;&amp; j &lt; state.stage_size(); ++j) {</span>
<span class="lineNum">     344 </span><span class="lineNoCov">          0 :       if (rule.not_stage(i) == state.stage(j)) { has_stage = true; }</span>
<span class="lineNum">     345 </span>            :     }
<span class="lineNum">     346 </span><span class="lineNoCov">          0 :     if (has_stage) {</span>
<span class="lineNum">     347 </span><span class="lineNoCov">          0 :       LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     348 </span>            :           &lt;&lt; &quot;The NetState contained a not_stage '&quot; &lt;&lt; rule.not_stage(i)
<span class="lineNum">     349 </span>            :           &lt;&lt; &quot;' specified by a rule in layer &quot; &lt;&lt; layer_name;
<span class="lineNum">     350 </span>            :       return false;
<span class="lineNum">     351 </span>            :     }
<span class="lineNum">     352 </span>            :   }
<span class="lineNum">     353 </span>            :   return true;
<span class="lineNum">     354 </span>            : }
<span class="lineNum">     355 </span>            : 
<span class="lineNum">     356 </span>            : // Helper for Net::Init: add a new top blob to the net.
<span class="lineNum">     357 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     358 </span><span class="lineCov">         25 : void Net&lt;Dtype&gt;::AppendTop(const NetParameter&amp; param, const int layer_id,</span>
<span class="lineNum">     359 </span>            :                            const int top_id, set&lt;string&gt;* available_blobs,
<span class="lineNum">     360 </span>            :                            map&lt;string, int&gt;* blob_name_to_idx) {
<span class="lineNum">     361 </span>            :   shared_ptr&lt;LayerParameter&gt; layer_param(
<span class="lineNum">     362 </span><span class="lineCov">         25 :       new LayerParameter(param.layer(layer_id)));</span>
<span class="lineNum">     363 </span>            :   const string&amp; blob_name = (layer_param-&gt;top_size() &gt; top_id) ?
<span class="lineNum">     364 </span><span class="lineCov">         25 :       layer_param-&gt;top(top_id) : &quot;(automatic)&quot;;</span>
<span class="lineNum">     365 </span>            :   // Check if we are doing in-place computation
<span class="lineNum">     366 </span><span class="lineCov">         44 :   if (blob_name_to_idx &amp;&amp; layer_param-&gt;bottom_size() &gt; top_id &amp;&amp;</span>
<span class="lineNum">     367 </span><span class="lineCov">         19 :       blob_name == layer_param-&gt;bottom(top_id)) {</span>
<span class="lineNum">     368 </span>            :     // In-place computation
<span class="lineNum">     369 </span><span class="lineCov">          4 :     LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     370 </span>            :         &lt;&lt; layer_param-&gt;name() &lt;&lt; &quot; -&gt; &quot; &lt;&lt; blob_name &lt;&lt; &quot; (in-place)&quot;;
<span class="lineNum">     371 </span><span class="lineCov">          6 :     top_vecs_[layer_id].push_back(blobs_[(*blob_name_to_idx)[blob_name]].get());</span>
<span class="lineNum">     372 </span><span class="lineCov">          4 :     top_id_vecs_[layer_id].push_back((*blob_name_to_idx)[blob_name]);</span>
<span class="lineNum">     373 </span><span class="lineCov">         46 :   } else if (blob_name_to_idx &amp;&amp;</span>
<span class="lineNum">     374 </span>            :              blob_name_to_idx-&gt;find(blob_name) != blob_name_to_idx-&gt;end()) {
<span class="lineNum">     375 </span>            :     // If we are not doing in-place computation but have duplicated blobs,
<span class="lineNum">     376 </span>            :     // raise an error.
<span class="lineNum">     377 </span><span class="lineNoCov">          0 :     LOG(FATAL) &lt;&lt; &quot;Top blob '&quot; &lt;&lt; blob_name</span>
<span class="lineNum">     378 </span>            :                &lt;&lt; &quot;' produced by multiple sources.&quot;;
<span class="lineNum">     379 </span>            :   } else {
<span class="lineNum">     380 </span>            :     // Normal output.
<span class="lineNum">     381 </span><span class="lineCov">         23 :     if (Caffe::root_solver()) {</span>
<span class="lineNum">     382 </span><span class="lineCov">         46 :       LOG(INFO) &lt;&lt; layer_param-&gt;name() &lt;&lt; &quot; -&gt; &quot; &lt;&lt; blob_name;</span>
<span class="lineNum">     383 </span>            :     }
<span class="lineNum">     384 </span><span class="lineCov">         46 :     shared_ptr&lt;Blob&lt;Dtype&gt; &gt; blob_pointer(new Blob&lt;Dtype&gt;());</span>
<span class="lineNum">     385 </span><span class="lineCov">         23 :     const int blob_id = blobs_.size();</span>
<span class="lineNum">     386 </span><span class="lineCov">         23 :     blobs_.push_back(blob_pointer);</span>
<span class="lineNum">     387 </span><span class="lineCov">         23 :     blob_names_.push_back(blob_name);</span>
<span class="lineNum">     388 </span><span class="lineCov">         23 :     blob_need_backward_.push_back(false);</span>
<span class="lineNum">     389 </span><span class="lineCov">         23 :     if (blob_name_to_idx) { (*blob_name_to_idx)[blob_name] = blob_id; }</span>
<span class="lineNum">     390 </span><span class="lineCov">         23 :     top_id_vecs_[layer_id].push_back(blob_id);</span>
<span class="lineNum">     391 </span><span class="lineCov">         46 :     top_vecs_[layer_id].push_back(blob_pointer.get());</span>
<span class="lineNum">     392 </span>            :   }
<span class="lineNum">     393 </span><span class="lineCov">         25 :   if (available_blobs) { available_blobs-&gt;insert(blob_name); }</span>
<span class="lineNum">     394 </span><span class="lineCov">         25 : }</span>
<span class="lineNum">     395 </span>            : 
<span class="lineNum">     396 </span>            : // Helper for Net::Init: add a new bottom blob to the net.
<span class="lineNum">     397 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     398 </span><span class="lineCov">         22 : int Net&lt;Dtype&gt;::AppendBottom(const NetParameter&amp; param, const int layer_id,</span>
<span class="lineNum">     399 </span>            :     const int bottom_id, set&lt;string&gt;* available_blobs,
<span class="lineNum">     400 </span>            :     map&lt;string, int&gt;* blob_name_to_idx) {
<span class="lineNum">     401 </span>            :   const LayerParameter&amp; layer_param = param.layer(layer_id);
<span class="lineNum">     402 </span>            :   const string&amp; blob_name = layer_param.bottom(bottom_id);
<span class="lineNum">     403 </span><span class="lineCov">         22 :   if (available_blobs-&gt;find(blob_name) == available_blobs-&gt;end()) {</span>
<span class="lineNum">     404 </span><span class="lineNoCov">          0 :     LOG(FATAL) &lt;&lt; &quot;Unknown bottom blob '&quot; &lt;&lt; blob_name &lt;&lt; &quot;' (layer '&quot;</span>
<span class="lineNum">     405 </span><span class="lineNoCov">          0 :                &lt;&lt; layer_param.name() &lt;&lt; &quot;', bottom index &quot; &lt;&lt; bottom_id &lt;&lt; &quot;)&quot;;</span>
<span class="lineNum">     406 </span>            :   }
<span class="lineNum">     407 </span><span class="lineCov">         22 :   const int blob_id = (*blob_name_to_idx)[blob_name];</span>
<span class="lineNum">     408 </span><span class="lineCov">         66 :   LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     409 </span>            :       &lt;&lt; layer_names_[layer_id] &lt;&lt; &quot; &lt;- &quot; &lt;&lt; blob_name;
<span class="lineNum">     410 </span><span class="lineCov">         66 :   bottom_vecs_[layer_id].push_back(blobs_[blob_id].get());</span>
<span class="lineNum">     411 </span><span class="lineCov">         22 :   bottom_id_vecs_[layer_id].push_back(blob_id);</span>
<span class="lineNum">     412 </span>            :   available_blobs-&gt;erase(blob_name);
<span class="lineNum">     413 </span><span class="lineCov">         22 :   bool need_backward = blob_need_backward_[blob_id];</span>
<span class="lineNum">     414 </span>            :   // Check if the backpropagation on bottom_id should be skipped
<span class="lineNum">     415 </span><span class="lineCov">         22 :   if (layer_param.propagate_down_size() &gt; 0) {</span>
<span class="lineNum">     416 </span>            :     need_backward = layer_param.propagate_down(bottom_id);
<span class="lineNum">     417 </span>            :   }
<span class="lineNum">     418 </span><span class="lineCov">         44 :   bottom_need_backward_[layer_id].push_back(need_backward);</span>
<span class="lineNum">     419 </span><span class="lineCov">         22 :   return blob_id;</span>
<span class="lineNum">     420 </span>            : }
<span class="lineNum">     421 </span>            : 
<span class="lineNum">     422 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     423 </span><span class="lineCov">         16 : void Net&lt;Dtype&gt;::AppendParam(const NetParameter&amp; param, const int layer_id,</span>
<span class="lineNum">     424 </span>            :                              const int param_id) {
<span class="lineNum">     425 </span><span class="lineCov">         16 :   const LayerParameter&amp; layer_param = layers_[layer_id]-&gt;layer_param();</span>
<span class="lineNum">     426 </span>            :   const int param_size = layer_param.param_size();
<span class="lineNum">     427 </span>            :   string param_name =
<span class="lineNum">     428 </span><span class="lineCov">         16 :       (param_size &gt; param_id) ? layer_param.param(param_id).name() : &quot;&quot;;</span>
<span class="lineNum">     429 </span><span class="lineCov">         16 :   if (param_name.size()) {</span>
<span class="lineNum">     430 </span><span class="lineNoCov">          0 :     param_display_names_.push_back(param_name);</span>
<span class="lineNum">     431 </span>            :   } else {
<span class="lineNum">     432 </span><span class="lineCov">         32 :     ostringstream param_display_name;</span>
<span class="lineNum">     433 </span><span class="lineCov">         16 :     param_display_name &lt;&lt; param_id;</span>
<span class="lineNum">     434 </span><span class="lineCov">         32 :     param_display_names_.push_back(param_display_name.str());</span>
<span class="lineNum">     435 </span>            :   }
<span class="lineNum">     436 </span><span class="lineCov">         16 :   const int net_param_id = params_.size();</span>
<span class="lineNum">     437 </span><span class="lineCov">         32 :   params_.push_back(layers_[layer_id]-&gt;blobs()[param_id]);</span>
<span class="lineNum">     438 </span><span class="lineCov">         16 :   param_id_vecs_[layer_id].push_back(net_param_id);</span>
<span class="lineNum">     439 </span><span class="lineCov">         16 :   param_layer_indices_.push_back(make_pair(layer_id, param_id));</span>
<span class="lineNum">     440 </span><span class="lineCov">         32 :   ParamSpec default_param_spec;</span>
<span class="lineNum">     441 </span>            :   const ParamSpec* param_spec = (layer_param.param_size() &gt; param_id) ?
<span class="lineNum">     442 </span><span class="lineCov">         16 :       &amp;layer_param.param(param_id) : &amp;default_param_spec;</span>
<span class="lineNum">     443 </span><span class="lineCov">         16 :   if (!param_size || !param_name.size() || (param_name.size() &amp;&amp;</span>
<span class="lineNum">     444 </span>            :       param_names_index_.find(param_name) == param_names_index_.end())) {
<span class="lineNum">     445 </span>            :     // This layer &quot;owns&quot; this parameter blob -- it is either anonymous
<span class="lineNum">     446 </span>            :     // (i.e., not given a param_name) or explicitly given a name that we
<span class="lineNum">     447 </span>            :     // haven't already seen.
<span class="lineNum">     448 </span><span class="lineCov">         16 :     param_owners_.push_back(-1);</span>
<span class="lineNum">     449 </span><span class="lineCov">         16 :     if (param_name.size()) {</span>
<span class="lineNum">     450 </span><span class="lineNoCov">          0 :       param_names_index_[param_name] = net_param_id;</span>
<span class="lineNum">     451 </span>            :     }
<span class="lineNum">     452 </span><span class="lineCov">         16 :     const int learnable_param_id = learnable_params_.size();</span>
<span class="lineNum">     453 </span><span class="lineCov">         32 :     learnable_params_.push_back(params_[net_param_id].get());</span>
<span class="lineNum">     454 </span><span class="lineCov">         16 :     learnable_param_ids_.push_back(learnable_param_id);</span>
<span class="lineNum">     455 </span><span class="lineCov">         16 :     has_params_lr_.push_back(param_spec-&gt;has_lr_mult());</span>
<span class="lineNum">     456 </span><span class="lineCov">         16 :     has_params_decay_.push_back(param_spec-&gt;has_decay_mult());</span>
<span class="lineNum">     457 </span><span class="lineCov">         16 :     params_lr_.push_back(param_spec-&gt;lr_mult());</span>
<span class="lineNum">     458 </span><span class="lineCov">         16 :     params_weight_decay_.push_back(param_spec-&gt;decay_mult());</span>
<span class="lineNum">     459 </span>            :   } else {
<span class="lineNum">     460 </span>            :     // Named param blob with name we've seen before: share params
<span class="lineNum">     461 </span><span class="lineNoCov">          0 :     const int owner_net_param_id = param_names_index_[param_name];</span>
<span class="lineNum">     462 </span><span class="lineNoCov">          0 :     param_owners_.push_back(owner_net_param_id);</span>
<span class="lineNum">     463 </span>            :     const pair&lt;int, int&gt;&amp; owner_index =
<span class="lineNum">     464 </span><span class="lineNoCov">          0 :         param_layer_indices_[owner_net_param_id];</span>
<span class="lineNum">     465 </span><span class="lineNoCov">          0 :     const int owner_layer_id = owner_index.first;</span>
<span class="lineNum">     466 </span><span class="lineNoCov">          0 :     const int owner_param_id = owner_index.second;</span>
<span class="lineNum">     467 </span><span class="lineNoCov">          0 :     LOG_IF(INFO, Caffe::root_solver()) &lt;&lt; &quot;Sharing parameters '&quot; &lt;&lt; param_name</span>
<span class="lineNum">     468 </span>            :         &lt;&lt; &quot;' owned by &quot;
<span class="lineNum">     469 </span><span class="lineNoCov">          0 :         &lt;&lt; &quot;layer '&quot; &lt;&lt; layer_names_[owner_layer_id] &lt;&lt; &quot;', param &quot;</span>
<span class="lineNum">     470 </span><span class="lineNoCov">          0 :         &lt;&lt; &quot;index &quot; &lt;&lt; owner_param_id;</span>
<span class="lineNum">     471 </span>            :     Blob&lt;Dtype&gt;* this_blob = layers_[layer_id]-&gt;blobs()[param_id].get();
<span class="lineNum">     472 </span>            :     Blob&lt;Dtype&gt;* owner_blob =
<span class="lineNum">     473 </span><span class="lineNoCov">          0 :         layers_[owner_layer_id]-&gt;blobs()[owner_param_id].get();</span>
<span class="lineNum">     474 </span>            :     const int param_size = layer_param.param_size();
<span class="lineNum">     475 </span><span class="lineNoCov">          0 :     if (param_size &gt; param_id &amp;&amp; (layer_param.param(param_id).share_mode() ==</span>
<span class="lineNum">     476 </span>            :                                   ParamSpec_DimCheckMode_PERMISSIVE)) {
<span class="lineNum">     477 </span>            :       // Permissive dimension checking -- only check counts are the same.
<span class="lineNum">     478 </span><span class="lineNoCov">          0 :       CHECK_EQ(this_blob-&gt;count(), owner_blob-&gt;count())</span>
<span class="lineNum">     479 </span>            :           &lt;&lt; &quot;Cannot share param '&quot; &lt;&lt; param_name &lt;&lt; &quot;' owned by layer '&quot;
<span class="lineNum">     480 </span>            :           &lt;&lt; layer_names_[owner_layer_id] &lt;&lt; &quot;' with layer '&quot;
<span class="lineNum">     481 </span>            :           &lt;&lt; layer_names_[layer_id] &lt;&lt; &quot;'; count mismatch.  Owner layer param &quot;
<span class="lineNum">     482 </span><span class="lineNoCov">          0 :           &lt;&lt; &quot;shape is &quot; &lt;&lt; owner_blob-&gt;shape_string() &lt;&lt; &quot;; sharing layer &quot;</span>
<span class="lineNum">     483 </span><span class="lineNoCov">          0 :           &lt;&lt; &quot;shape is &quot; &lt;&lt; this_blob-&gt;shape_string();</span>
<span class="lineNum">     484 </span>            :     } else {
<span class="lineNum">     485 </span>            :       // Strict dimension checking -- all dims must be the same.
<span class="lineNum">     486 </span><span class="lineNoCov">          0 :       CHECK(this_blob-&gt;shape() == owner_blob-&gt;shape())</span>
<span class="lineNum">     487 </span>            :           &lt;&lt; &quot;Cannot share param '&quot; &lt;&lt; param_name &lt;&lt; &quot;' owned by layer '&quot;
<span class="lineNum">     488 </span>            :           &lt;&lt; layer_names_[owner_layer_id] &lt;&lt; &quot;' with layer '&quot;
<span class="lineNum">     489 </span>            :           &lt;&lt; layer_names_[layer_id] &lt;&lt; &quot;'; shape mismatch.  Owner layer param &quot;
<span class="lineNum">     490 </span><span class="lineNoCov">          0 :           &lt;&lt; &quot;shape is &quot; &lt;&lt; owner_blob-&gt;shape_string() &lt;&lt; &quot;; sharing layer &quot;</span>
<span class="lineNum">     491 </span><span class="lineNoCov">          0 :           &lt;&lt; &quot;expects shape &quot; &lt;&lt; this_blob-&gt;shape_string();</span>
<span class="lineNum">     492 </span>            :     }
<span class="lineNum">     493 </span><span class="lineNoCov">          0 :     const int learnable_param_id = learnable_param_ids_[owner_net_param_id];</span>
<span class="lineNum">     494 </span><span class="lineNoCov">          0 :     learnable_param_ids_.push_back(learnable_param_id);</span>
<span class="lineNum">     495 </span><span class="lineNoCov">          0 :     if (param_spec-&gt;has_lr_mult()) {</span>
<span class="lineNum">     496 </span><span class="lineNoCov">          0 :       if (has_params_lr_[learnable_param_id]) {</span>
<span class="lineNum">     497 </span><span class="lineNoCov">          0 :         CHECK_EQ(param_spec-&gt;lr_mult(), params_lr_[learnable_param_id])</span>
<span class="lineNum">     498 </span>            :             &lt;&lt; &quot;Shared param '&quot; &lt;&lt; param_name &lt;&lt; &quot;' has mismatched lr_mult.&quot;;
<span class="lineNum">     499 </span>            :       } else {
<span class="lineNum">     500 </span>            :         has_params_lr_[learnable_param_id] = true;
<span class="lineNum">     501 </span><span class="lineNoCov">          0 :         params_lr_[learnable_param_id] = param_spec-&gt;lr_mult();</span>
<span class="lineNum">     502 </span>            :       }
<span class="lineNum">     503 </span>            :     }
<span class="lineNum">     504 </span><span class="lineNoCov">          0 :     if (param_spec-&gt;has_decay_mult()) {</span>
<span class="lineNum">     505 </span><span class="lineNoCov">          0 :       if (has_params_decay_[learnable_param_id]) {</span>
<span class="lineNum">     506 </span><span class="lineNoCov">          0 :         CHECK_EQ(param_spec-&gt;decay_mult(),</span>
<span class="lineNum">     507 </span>            :                  params_weight_decay_[learnable_param_id])
<span class="lineNum">     508 </span>            :             &lt;&lt; &quot;Shared param '&quot; &lt;&lt; param_name &lt;&lt; &quot;' has mismatched decay_mult.&quot;;
<span class="lineNum">     509 </span>            :       } else {
<span class="lineNum">     510 </span>            :         has_params_decay_[learnable_param_id] = true;
<span class="lineNum">     511 </span><span class="lineNoCov">          0 :         params_weight_decay_[learnable_param_id] = param_spec-&gt;decay_mult();</span>
<span class="lineNum">     512 </span>            :       }
<span class="lineNum">     513 </span>            :     }
<span class="lineNum">     514 </span>            :   }
<span class="lineNum">     515 </span><span class="lineCov">         16 : }</span>
<span class="lineNum">     516 </span>            : 
<span class="lineNum">     517 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     518 </span><span class="lineCov">      12101 : Dtype Net&lt;Dtype&gt;::ForwardFromTo(int start, int end) {</span>
<span class="lineNum">     519 </span><span class="lineCov">      12101 :   CHECK_GE(start, 0);</span>
<span class="lineNum">     520 </span><span class="lineCov">      36303 :   CHECK_LT(end, layers_.size());</span>
<span class="lineNum">     521 </span>            :   Dtype loss = 0;
<span class="lineNum">     522 </span><span class="lineCov">     242519 :   for (int i = start; i &lt;= end; ++i) {</span>
<span class="lineNum">     523 </span><span class="lineCov">     230418 :     for (int c = 0; c &lt; before_forward_.size(); ++c) {</span>
<span class="lineNum">     524 </span><span class="lineNoCov">          0 :       before_forward_[c]-&gt;run(i);</span>
<span class="lineNum">     525 </span>            :     }
<span class="lineNum">     526 </span><span class="lineCov">     230418 :     Dtype layer_loss = layers_[i]-&gt;Forward(bottom_vecs_[i], top_vecs_[i]);</span>
<span class="lineNum">     527 </span><span class="lineCov">     115209 :     loss += layer_loss;</span>
<span class="lineNum">     528 </span><span class="lineCov">     115209 :     if (debug_info_) { ForwardDebugInfo(i); }</span>
<span class="lineNum">     529 </span><span class="lineCov">     230418 :     for (int c = 0; c &lt; after_forward_.size(); ++c) {</span>
<span class="lineNum">     530 </span><span class="lineNoCov">          0 :       after_forward_[c]-&gt;run(i);</span>
<span class="lineNum">     531 </span>            :     }
<span class="lineNum">     532 </span>            :   }
<span class="lineNum">     533 </span><span class="lineCov">      12101 :   return loss;</span>
<span class="lineNum">     534 </span>            : }
<span class="lineNum">     535 </span>            : 
<span class="lineNum">     536 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     537 </span><span class="lineNoCov">          0 : Dtype Net&lt;Dtype&gt;::ForwardFrom(int start) {</span>
<span class="lineNum">     538 </span><span class="lineNoCov">          0 :   return ForwardFromTo(start, layers_.size() - 1);</span>
<span class="lineNum">     539 </span>            : }
<a name="540"><span class="lineNum">     540 </span>            : </a>
<span class="lineNum">     541 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     542 </span><span class="lineNoCov">          0 : Dtype Net&lt;Dtype&gt;::ForwardTo(int end) {</span>
<span class="lineNum">     543 </span><span class="lineNoCov">          0 :   return ForwardFromTo(0, end);</span>
<span class="lineNum">     544 </span>            : }
<span class="lineNum">     545 </span>            : 
<span class="lineNum">     546 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     547 </span><span class="lineCov">      12101 : const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; Net&lt;Dtype&gt;::Forward(Dtype* loss) {</span>
<span class="lineNum">     548 </span><span class="lineCov">      12101 :   if (loss != NULL) {</span>
<span class="lineNum">     549 </span><span class="lineCov">      12101 :     *loss = ForwardFromTo(0, layers_.size() - 1);</span>
<span class="lineNum">     550 </span>            :   } else {
<span class="lineNum">     551 </span><span class="lineNoCov">          0 :     ForwardFromTo(0, layers_.size() - 1);</span>
<span class="lineNum">     552 </span>            :   }
<span class="lineNum">     553 </span><span class="lineCov">      12101 :   return net_output_blobs_;</span>
<span class="lineNum">     554 </span>            : }
<span class="lineNum">     555 </span>            : 
<span class="lineNum">     556 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     557 </span><span class="lineNoCov">          0 : const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; Net&lt;Dtype&gt;::Forward(</span>
<span class="lineNum">     558 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt; &amp; bottom, Dtype* loss) {
<span class="lineNum">     559 </span><span class="lineNoCov">          0 :   LOG_EVERY_N(WARNING, 1000) &lt;&lt; &quot;DEPRECATED: Forward(bottom, loss) &quot;</span>
<span class="lineNum">     560 </span>            :       &lt;&lt; &quot;will be removed in a future version. Use Forward(loss).&quot;;
<span class="lineNum">     561 </span>            :   // Copy bottom to net bottoms
<span class="lineNum">     562 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; bottom.size(); ++i) {</span>
<span class="lineNum">     563 </span><span class="lineNoCov">          0 :     net_input_blobs_[i]-&gt;CopyFrom(*bottom[i]);</span>
<span class="lineNum">     564 </span>            :   }
<span class="lineNum">     565 </span><span class="lineNoCov">          0 :   return Forward(loss);</span>
<span class="lineNum">     566 </span>            : }
<span class="lineNum">     567 </span>            : 
<span class="lineNum">     568 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     569 </span><span class="lineCov">      10000 : void Net&lt;Dtype&gt;::BackwardFromTo(int start, int end) {</span>
<span class="lineNum">     570 </span><span class="lineCov">      10000 :   CHECK_GE(end, 0);</span>
<span class="lineNum">     571 </span><span class="lineCov">      30000 :   CHECK_LT(start, layers_.size());</span>
<span class="lineNum">     572 </span><span class="lineCov">     190000 :   for (int i = start; i &gt;= end; --i) {</span>
<span class="lineNum">     573 </span><span class="lineCov">     180000 :     for (int c = 0; c &lt; before_backward_.size(); ++c) {</span>
<span class="lineNum">     574 </span><span class="lineNoCov">          0 :       before_backward_[c]-&gt;run(i);</span>
<span class="lineNum">     575 </span>            :     }
<span class="lineNum">     576 </span><span class="lineCov">     180000 :     if (layer_need_backward_[i]) {</span>
<span class="lineNum">     577 </span><span class="lineCov">      80000 :       layers_[i]-&gt;Backward(</span>
<span class="lineNum">     578 </span>            :           top_vecs_[i], bottom_need_backward_[i], bottom_vecs_[i]);
<span class="lineNum">     579 </span><span class="lineCov">      80000 :       if (debug_info_) { BackwardDebugInfo(i); }</span>
<span class="lineNum">     580 </span>            :     }
<span class="lineNum">     581 </span><span class="lineCov">     180000 :     for (int c = 0; c &lt; after_backward_.size(); ++c) {</span>
<span class="lineNum">     582 </span><span class="lineNoCov">          0 :       after_backward_[c]-&gt;run(i);</span>
<span class="lineNum">     583 </span>            :     }
<span class="lineNum">     584 </span>            :   }
<span class="lineNum">     585 </span><span class="lineCov">      10000 : }</span>
<span class="lineNum">     586 </span>            : 
<span class="lineNum">     587 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     588 </span><span class="lineNoCov">          0 : void Net&lt;Dtype&gt;::ForwardDebugInfo(const int layer_id) {</span>
<span class="lineNum">     589 </span><span class="lineNoCov">          0 :   for (int top_id = 0; top_id &lt; top_vecs_[layer_id].size(); ++top_id) {</span>
<span class="lineNum">     590 </span><span class="lineNoCov">          0 :     const Blob&lt;Dtype&gt;&amp; blob = *top_vecs_[layer_id][top_id];</span>
<span class="lineNum">     591 </span><span class="lineNoCov">          0 :     const string&amp; blob_name = blob_names_[top_id_vecs_[layer_id][top_id]];</span>
<span class="lineNum">     592 </span><span class="lineNoCov">          0 :     const Dtype data_abs_val_mean = blob.asum_data() / blob.count();</span>
<span class="lineNum">     593 </span><span class="lineNoCov">          0 :     LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     594 </span>            :         &lt;&lt; &quot;    [Forward] &quot;
<span class="lineNum">     595 </span>            :         &lt;&lt; &quot;Layer &quot; &lt;&lt; layer_names_[layer_id]
<span class="lineNum">     596 </span>            :         &lt;&lt; &quot;, top blob &quot; &lt;&lt; blob_name
<span class="lineNum">     597 </span>            :         &lt;&lt; &quot; data: &quot; &lt;&lt; data_abs_val_mean;
<span class="lineNum">     598 </span>            :   }
<span class="lineNum">     599 </span><span class="lineNoCov">          0 :   for (int param_id = 0; param_id &lt; layers_[layer_id]-&gt;blobs().size();</span>
<span class="lineNum">     600 </span>            :        ++param_id) {
<span class="lineNum">     601 </span>            :     const Blob&lt;Dtype&gt;&amp; blob = *layers_[layer_id]-&gt;blobs()[param_id];
<span class="lineNum">     602 </span><span class="lineNoCov">          0 :     const int net_param_id = param_id_vecs_[layer_id][param_id];</span>
<span class="lineNum">     603 </span><span class="lineNoCov">          0 :     const string&amp; blob_name = param_display_names_[net_param_id];</span>
<span class="lineNum">     604 </span><span class="lineNoCov">          0 :     const Dtype data_abs_val_mean = blob.asum_data() / blob.count();</span>
<span class="lineNum">     605 </span><span class="lineNoCov">          0 :     LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     606 </span>            :         &lt;&lt; &quot;    [Forward] &quot;
<span class="lineNum">     607 </span>            :         &lt;&lt; &quot;Layer &quot; &lt;&lt; layer_names_[layer_id]
<span class="lineNum">     608 </span>            :         &lt;&lt; &quot;, param blob &quot; &lt;&lt; blob_name
<span class="lineNum">     609 </span>            :         &lt;&lt; &quot; data: &quot; &lt;&lt; data_abs_val_mean;
<span class="lineNum">     610 </span>            :   }
<span class="lineNum">     611 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     612 </span>            : 
<span class="lineNum">     613 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     614 </span><span class="lineNoCov">          0 : void Net&lt;Dtype&gt;::BackwardDebugInfo(const int layer_id) {</span>
<span class="lineNum">     615 </span><span class="lineNoCov">          0 :   const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom_vec = bottom_vecs_[layer_id];</span>
<span class="lineNum">     616 </span><span class="lineNoCov">          0 :   for (int bottom_id = 0; bottom_id &lt; bottom_vec.size(); ++bottom_id) {</span>
<span class="lineNum">     617 </span><span class="lineNoCov">          0 :     if (!bottom_need_backward_[layer_id][bottom_id]) { continue; }</span>
<span class="lineNum">     618 </span><span class="lineNoCov">          0 :     const Blob&lt;Dtype&gt;&amp; blob = *bottom_vec[bottom_id];</span>
<span class="lineNum">     619 </span><span class="lineNoCov">          0 :     const string&amp; blob_name = blob_names_[bottom_id_vecs_[layer_id][bottom_id]];</span>
<span class="lineNum">     620 </span><span class="lineNoCov">          0 :     const Dtype diff_abs_val_mean = blob.asum_diff() / blob.count();</span>
<span class="lineNum">     621 </span><span class="lineNoCov">          0 :     LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     622 </span>            :         &lt;&lt; &quot;    [Backward] &quot;
<span class="lineNum">     623 </span>            :         &lt;&lt; &quot;Layer &quot; &lt;&lt; layer_names_[layer_id]
<span class="lineNum">     624 </span>            :         &lt;&lt; &quot;, bottom blob &quot; &lt;&lt; blob_name
<span class="lineNum">     625 </span>            :         &lt;&lt; &quot; diff: &quot; &lt;&lt; diff_abs_val_mean;
<span class="lineNum">     626 </span>            :   }
<span class="lineNum">     627 </span><span class="lineNoCov">          0 :   for (int param_id = 0; param_id &lt; layers_[layer_id]-&gt;blobs().size();</span>
<span class="lineNum">     628 </span>            :        ++param_id) {
<span class="lineNum">     629 </span><span class="lineNoCov">          0 :     if (!layers_[layer_id]-&gt;param_propagate_down(param_id)) { continue; }</span>
<span class="lineNum">     630 </span>            :     const Blob&lt;Dtype&gt;&amp; blob = *layers_[layer_id]-&gt;blobs()[param_id];
<span class="lineNum">     631 </span><span class="lineNoCov">          0 :     const Dtype diff_abs_val_mean = blob.asum_diff() / blob.count();</span>
<span class="lineNum">     632 </span><span class="lineNoCov">          0 :     LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     633 </span>            :         &lt;&lt; &quot;    [Backward] &quot;
<span class="lineNum">     634 </span>            :         &lt;&lt; &quot;Layer &quot; &lt;&lt; layer_names_[layer_id]
<span class="lineNum">     635 </span><span class="lineNoCov">          0 :         &lt;&lt; &quot;, param blob &quot; &lt;&lt; param_id</span>
<span class="lineNum">     636 </span>            :         &lt;&lt; &quot; diff: &quot; &lt;&lt; diff_abs_val_mean;
<span class="lineNum">     637 </span>            :   }
<span class="lineNum">     638 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     639 </span>            : 
<span class="lineNum">     640 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     641 </span><span class="lineNoCov">          0 : void Net&lt;Dtype&gt;::UpdateDebugInfo(const int param_id) {</span>
<span class="lineNum">     642 </span><span class="lineNoCov">          0 :   const Blob&lt;Dtype&gt;&amp; blob = *params_[param_id];</span>
<span class="lineNum">     643 </span><span class="lineNoCov">          0 :   const int param_owner = param_owners_[param_id];</span>
<span class="lineNum">     644 </span><span class="lineNoCov">          0 :   const string&amp; layer_name = layer_names_[param_layer_indices_[param_id].first];</span>
<span class="lineNum">     645 </span>            :   const string&amp; param_display_name = param_display_names_[param_id];
<span class="lineNum">     646 </span><span class="lineNoCov">          0 :   const Dtype diff_abs_val_mean = blob.asum_diff() / blob.count();</span>
<span class="lineNum">     647 </span><span class="lineNoCov">          0 :   if (param_owner &lt; 0) {</span>
<span class="lineNum">     648 </span><span class="lineNoCov">          0 :     const Dtype data_abs_val_mean = blob.asum_data() / blob.count();</span>
<span class="lineNum">     649 </span><span class="lineNoCov">          0 :     LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     650 </span>            :         &lt;&lt; &quot;    [Update] Layer &quot; &lt;&lt; layer_name
<span class="lineNum">     651 </span>            :         &lt;&lt; &quot;, param &quot; &lt;&lt; param_display_name
<span class="lineNum">     652 </span>            :         &lt;&lt; &quot; data: &quot; &lt;&lt; data_abs_val_mean
<span class="lineNum">     653 </span>            :         &lt;&lt; &quot;; diff: &quot; &lt;&lt; diff_abs_val_mean;
<span class="lineNum">     654 </span>            :   } else {
<span class="lineNum">     655 </span>            :     const string&amp; owner_layer_name =
<span class="lineNum">     656 </span><span class="lineNoCov">          0 :         layer_names_[param_layer_indices_[param_owner].first];</span>
<span class="lineNum">     657 </span><span class="lineNoCov">          0 :     LOG_IF(INFO, Caffe::root_solver())</span>
<span class="lineNum">     658 </span>            :         &lt;&lt; &quot;    [Update] Layer &quot; &lt;&lt; layer_name
<span class="lineNum">     659 </span>            :         &lt;&lt; &quot;, param blob &quot; &lt;&lt; param_display_name
<span class="lineNum">     660 </span>            :         &lt;&lt; &quot; (owned by layer &quot; &lt;&lt; owner_layer_name &lt;&lt; &quot;, &quot; &lt;&lt; &quot;param &quot;
<span class="lineNum">     661 </span><span class="lineNoCov">          0 :         &lt;&lt; param_display_names_[param_owners_[param_id]] &lt;&lt; &quot;)&quot;</span>
<span class="lineNum">     662 </span>            :         &lt;&lt; &quot; diff: &quot; &lt;&lt; diff_abs_val_mean;
<span class="lineNum">     663 </span>            :   }
<span class="lineNum">     664 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     665 </span>            : 
<span class="lineNum">     666 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     667 </span><span class="lineCov">         21 : void Net&lt;Dtype&gt;::ShareTrainedLayersWith(const Net* other) {</span>
<span class="lineNum">     668 </span><span class="lineCov">         21 :   int num_source_layers = other-&gt;layers().size();</span>
<span class="lineNum">     669 </span><span class="lineCov">        399 :   for (int i = 0; i &lt; num_source_layers; ++i) {</span>
<span class="lineNum">     670 </span><span class="lineCov">        189 :     Layer&lt;Dtype&gt;* source_layer = other-&gt;layers()[i].get();</span>
<span class="lineNum">     671 </span>            :     const string&amp; source_layer_name = other-&gt;layer_names()[i];
<span class="lineNum">     672 </span>            :     int target_layer_id = 0;
<span class="lineNum">     673 </span><span class="lineCov">       4431 :     while (target_layer_id != layer_names_.size() &amp;&amp;</span>
<span class="lineNum">     674 </span>            :         layer_names_[target_layer_id] != source_layer_name) {
<span class="lineNum">     675 </span><span class="lineCov">        966 :       ++target_layer_id;</span>
<span class="lineNum">     676 </span>            :     }
<span class="lineNum">     677 </span><span class="lineCov">        189 :     if (target_layer_id == layer_names_.size()) {</span>
<span class="lineNum">     678 </span><span class="lineNoCov">          0 :       LOG(INFO) &lt;&lt; &quot;Ignoring source layer &quot; &lt;&lt; source_layer_name;</span>
<span class="lineNum">     679 </span><span class="lineNoCov">          0 :       continue;</span>
<span class="lineNum">     680 </span>            :     }
<span class="lineNum">     681 </span>            :     DLOG(INFO) &lt;&lt; &quot;Copying source layer &quot; &lt;&lt; source_layer_name;
<span class="lineNum">     682 </span>            :     vector&lt;shared_ptr&lt;Blob&lt;Dtype&gt; &gt; &gt;&amp; target_blobs =
<span class="lineNum">     683 </span>            :         layers_[target_layer_id]-&gt;blobs();
<span class="lineNum">     684 </span><span class="lineCov">        756 :     CHECK_EQ(target_blobs.size(), source_layer-&gt;blobs().size())</span>
<span class="lineNum">     685 </span>            :         &lt;&lt; &quot;Incompatible number of blobs for layer &quot; &lt;&lt; source_layer_name;
<span class="lineNum">     686 </span><span class="lineCov">        882 :     for (int j = 0; j &lt; target_blobs.size(); ++j) {</span>
<span class="lineNum">     687 </span>            :       Blob&lt;Dtype&gt;* source_blob = source_layer-&gt;blobs()[j].get();
<span class="lineNum">     688 </span><span class="lineCov">        504 :       CHECK(target_blobs[j]-&gt;shape() == source_blob-&gt;shape())</span>
<span class="lineNum">     689 </span><span class="lineNoCov">          0 :           &lt;&lt; &quot;Cannot share param &quot; &lt;&lt; j &lt;&lt; &quot; weights from layer '&quot;</span>
<span class="lineNum">     690 </span>            :           &lt;&lt; source_layer_name &lt;&lt; &quot;'; shape mismatch.  Source param shape is &quot;
<span class="lineNum">     691 </span><span class="lineCov">        168 :           &lt;&lt; source_blob-&gt;shape_string() &lt;&lt; &quot;; target param shape is &quot;</span>
<span class="lineNum">     692 </span><span class="lineCov">        168 :           &lt;&lt; target_blobs[j]-&gt;shape_string();</span>
<span class="lineNum">     693 </span><span class="lineCov">        168 :       target_blobs[j]-&gt;ShareData(*source_blob);</span>
<span class="lineNum">     694 </span>            :     }
<span class="lineNum">     695 </span>            :   }
<span class="lineNum">     696 </span><span class="lineCov">         21 : }</span>
<a name="697"><span class="lineNum">     697 </span>            : </a>
<span class="lineNum">     698 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     699 </span><span class="lineNoCov">          0 : void Net&lt;Dtype&gt;::BackwardFrom(int start) {</span>
<span class="lineNum">     700 </span><span class="lineNoCov">          0 :   BackwardFromTo(start, 0);</span>
<span class="lineNum">     701 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     702 </span>            : 
<span class="lineNum">     703 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     704 </span><span class="lineNoCov">          0 : void Net&lt;Dtype&gt;::BackwardTo(int end) {</span>
<span class="lineNum">     705 </span><span class="lineNoCov">          0 :   BackwardFromTo(layers_.size() - 1, end);</span>
<span class="lineNum">     706 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     707 </span>            : 
<span class="lineNum">     708 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     709 </span><span class="lineCov">      10000 : void Net&lt;Dtype&gt;::Backward() {</span>
<span class="lineNum">     710 </span><span class="lineCov">      10000 :   BackwardFromTo(layers_.size() - 1, 0);</span>
<span class="lineNum">     711 </span><span class="lineCov">      10000 :   if (debug_info_) {</span>
<span class="lineNum">     712 </span>            :     Dtype asum_data = 0, asum_diff = 0, sumsq_data = 0, sumsq_diff = 0;
<span class="lineNum">     713 </span><span class="lineNoCov">          0 :     for (int i = 0; i &lt; learnable_params_.size(); ++i) {</span>
<span class="lineNum">     714 </span><span class="lineNoCov">          0 :       asum_data += learnable_params_[i]-&gt;asum_data();</span>
<span class="lineNum">     715 </span><span class="lineNoCov">          0 :       asum_diff += learnable_params_[i]-&gt;asum_diff();</span>
<span class="lineNum">     716 </span><span class="lineNoCov">          0 :       sumsq_data += learnable_params_[i]-&gt;sumsq_data();</span>
<span class="lineNum">     717 </span><span class="lineNoCov">          0 :       sumsq_diff += learnable_params_[i]-&gt;sumsq_diff();</span>
<span class="lineNum">     718 </span>            :     }
<span class="lineNum">     719 </span><span class="lineNoCov">          0 :     const Dtype l2norm_data = std::sqrt(sumsq_data);</span>
<span class="lineNum">     720 </span><span class="lineNoCov">          0 :     const Dtype l2norm_diff = std::sqrt(sumsq_diff);</span>
<span class="lineNum">     721 </span><span class="lineNoCov">          0 :     LOG(ERROR) &lt;&lt; &quot;    [Backward] All net params (data, diff): &quot;</span>
<span class="lineNum">     722 </span>            :                &lt;&lt; &quot;L1 norm = (&quot; &lt;&lt; asum_data &lt;&lt; &quot;, &quot; &lt;&lt; asum_diff &lt;&lt; &quot;); &quot;
<span class="lineNum">     723 </span>            :                &lt;&lt; &quot;L2 norm = (&quot; &lt;&lt; l2norm_data &lt;&lt; &quot;, &quot; &lt;&lt; l2norm_diff &lt;&lt; &quot;)&quot;;
<span class="lineNum">     724 </span>            :   }
<span class="lineNum">     725 </span><span class="lineCov">      10000 : }</span>
<span class="lineNum">     726 </span>            : 
<span class="lineNum">     727 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     728 </span><span class="lineNoCov">          0 : void Net&lt;Dtype&gt;::Reshape() {</span>
<span class="lineNum">     729 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; layers_.size(); ++i) {</span>
<span class="lineNum">     730 </span><span class="lineNoCov">          0 :     layers_[i]-&gt;Reshape(bottom_vecs_[i], top_vecs_[i]);</span>
<span class="lineNum">     731 </span>            :   }
<span class="lineNum">     732 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     733 </span>            : 
<span class="lineNum">     734 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     735 </span><span class="lineNoCov">          0 : void Net&lt;Dtype&gt;::CopyTrainedLayersFrom(const NetParameter&amp; param) {</span>
<span class="lineNum">     736 </span>            :   int num_source_layers = param.layer_size();
<span class="lineNum">     737 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; num_source_layers; ++i) {</span>
<span class="lineNum">     738 </span>            :     const LayerParameter&amp; source_layer = param.layer(i);
<span class="lineNum">     739 </span>            :     const string&amp; source_layer_name = source_layer.name();
<span class="lineNum">     740 </span>            :     int target_layer_id = 0;
<span class="lineNum">     741 </span><span class="lineNoCov">          0 :     while (target_layer_id != layer_names_.size() &amp;&amp;</span>
<span class="lineNum">     742 </span>            :         layer_names_[target_layer_id] != source_layer_name) {
<span class="lineNum">     743 </span><span class="lineNoCov">          0 :       ++target_layer_id;</span>
<span class="lineNum">     744 </span>            :     }
<span class="lineNum">     745 </span><span class="lineNoCov">          0 :     if (target_layer_id == layer_names_.size()) {</span>
<span class="lineNum">     746 </span><span class="lineNoCov">          0 :       LOG(INFO) &lt;&lt; &quot;Ignoring source layer &quot; &lt;&lt; source_layer_name;</span>
<span class="lineNum">     747 </span><span class="lineNoCov">          0 :       continue;</span>
<span class="lineNum">     748 </span>            :     }
<span class="lineNum">     749 </span>            :     DLOG(INFO) &lt;&lt; &quot;Copying source layer &quot; &lt;&lt; source_layer_name;
<span class="lineNum">     750 </span>            :     vector&lt;shared_ptr&lt;Blob&lt;Dtype&gt; &gt; &gt;&amp; target_blobs =
<span class="lineNum">     751 </span>            :         layers_[target_layer_id]-&gt;blobs();
<span class="lineNum">     752 </span><span class="lineNoCov">          0 :     CHECK_EQ(target_blobs.size(), source_layer.blobs_size())</span>
<span class="lineNum">     753 </span>            :         &lt;&lt; &quot;Incompatible number of blobs for layer &quot; &lt;&lt; source_layer_name;
<span class="lineNum">     754 </span><span class="lineNoCov">          0 :     for (int j = 0; j &lt; target_blobs.size(); ++j) {</span>
<span class="lineNum">     755 </span><span class="lineNoCov">          0 :       if (!target_blobs[j]-&gt;ShapeEquals(source_layer.blobs(j))) {</span>
<span class="lineNum">     756 </span><span class="lineNoCov">          0 :         Blob&lt;Dtype&gt; source_blob;</span>
<span class="lineNum">     757 </span>            :         const bool kReshape = true;
<span class="lineNum">     758 </span><span class="lineNoCov">          0 :         source_blob.FromProto(source_layer.blobs(j), kReshape);</span>
<span class="lineNum">     759 </span><span class="lineNoCov">          0 :         LOG(FATAL) &lt;&lt; &quot;Cannot copy param &quot; &lt;&lt; j &lt;&lt; &quot; weights from layer '&quot;</span>
<span class="lineNum">     760 </span>            :             &lt;&lt; source_layer_name &lt;&lt; &quot;'; shape mismatch.  Source param shape is &quot;
<span class="lineNum">     761 </span><span class="lineNoCov">          0 :             &lt;&lt; source_blob.shape_string() &lt;&lt; &quot;; target param shape is &quot;</span>
<span class="lineNum">     762 </span><span class="lineNoCov">          0 :             &lt;&lt; target_blobs[j]-&gt;shape_string() &lt;&lt; &quot;. &quot;</span>
<span class="lineNum">     763 </span>            :             &lt;&lt; &quot;To learn this layer's parameters from scratch rather than &quot;
<span class="lineNum">     764 </span>            :             &lt;&lt; &quot;copying from a saved net, rename the layer.&quot;;
<span class="lineNum">     765 </span>            :       }
<span class="lineNum">     766 </span>            :       const bool kReshape = false;
<span class="lineNum">     767 </span><span class="lineNoCov">          0 :       target_blobs[j]-&gt;FromProto(source_layer.blobs(j), kReshape);</span>
<span class="lineNum">     768 </span>            :     }
<span class="lineNum">     769 </span>            :   }
<span class="lineNum">     770 </span><span class="lineNoCov">          0 : }</span>
<a name="771"><span class="lineNum">     771 </span>            : </a>
<span class="lineNum">     772 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     773 </span><span class="lineNoCov">          0 : void Net&lt;Dtype&gt;::CopyTrainedLayersFrom(const string&amp; trained_filename) {</span>
<span class="lineNum">     774 </span><span class="lineNoCov">          0 :   if (H5Fis_hdf5(trained_filename.c_str())) {</span>
<span class="lineNum">     775 </span><span class="lineNoCov">          0 :     CopyTrainedLayersFromHDF5(trained_filename);</span>
<span class="lineNum">     776 </span>            :   } else {
<span class="lineNum">     777 </span><span class="lineNoCov">          0 :     CopyTrainedLayersFromBinaryProto(trained_filename);</span>
<span class="lineNum">     778 </span>            :   }
<span class="lineNum">     779 </span><span class="lineNoCov">          0 : }</span>
<a name="780"><span class="lineNum">     780 </span>            : </a>
<span class="lineNum">     781 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     782 </span><span class="lineNoCov">          0 : void Net&lt;Dtype&gt;::CopyTrainedLayersFromBinaryProto(</span>
<span class="lineNum">     783 </span>            :     const string&amp; trained_filename) {
<span class="lineNum">     784 </span><span class="lineNoCov">          0 :   NetParameter param;</span>
<span class="lineNum">     785 </span><span class="lineNoCov">          0 :   ReadNetParamsFromBinaryFileOrDie(trained_filename, &amp;param);</span>
<span class="lineNum">     786 </span><span class="lineNoCov">          0 :   CopyTrainedLayersFrom(param);</span>
<span class="lineNum">     787 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     788 </span>            : 
<span class="lineNum">     789 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     790 </span><span class="lineNoCov">          0 : void Net&lt;Dtype&gt;::CopyTrainedLayersFromHDF5(const string&amp; trained_filename) {</span>
<span class="lineNum">     791 </span>            : #ifdef USE_HDF5
<span class="lineNum">     792 </span><span class="lineNoCov">          0 :   hid_t file_hid = H5Fopen(trained_filename.c_str(), H5F_ACC_RDONLY,</span>
<span class="lineNum">     793 </span><span class="lineNoCov">          0 :                            H5P_DEFAULT);</span>
<span class="lineNum">     794 </span><span class="lineNoCov">          0 :   CHECK_GE(file_hid, 0) &lt;&lt; &quot;Couldn't open &quot; &lt;&lt; trained_filename;</span>
<span class="lineNum">     795 </span><span class="lineNoCov">          0 :   hid_t data_hid = H5Gopen2(file_hid, &quot;data&quot;, H5P_DEFAULT);</span>
<span class="lineNum">     796 </span><span class="lineNoCov">          0 :   CHECK_GE(data_hid, 0) &lt;&lt; &quot;Error reading weights from &quot; &lt;&lt; trained_filename;</span>
<span class="lineNum">     797 </span><span class="lineNoCov">          0 :   int num_layers = hdf5_get_num_links(data_hid);</span>
<span class="lineNum">     798 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; num_layers; ++i) {</span>
<span class="lineNum">     799 </span><span class="lineNoCov">          0 :     string source_layer_name = hdf5_get_name_by_idx(data_hid, i);</span>
<span class="lineNum">     800 </span><span class="lineNoCov">          0 :     if (!layer_names_index_.count(source_layer_name)) {</span>
<span class="lineNum">     801 </span><span class="lineNoCov">          0 :       LOG(INFO) &lt;&lt; &quot;Ignoring source layer &quot; &lt;&lt; source_layer_name;</span>
<span class="lineNum">     802 </span>            :       continue;
<span class="lineNum">     803 </span>            :     }
<span class="lineNum">     804 </span><span class="lineNoCov">          0 :     int target_layer_id = layer_names_index_[source_layer_name];</span>
<span class="lineNum">     805 </span>            :     DLOG(INFO) &lt;&lt; &quot;Copying source layer &quot; &lt;&lt; source_layer_name;
<span class="lineNum">     806 </span>            :     vector&lt;shared_ptr&lt;Blob&lt;Dtype&gt; &gt; &gt;&amp; target_blobs =
<span class="lineNum">     807 </span><span class="lineNoCov">          0 :         layers_[target_layer_id]-&gt;blobs();</span>
<span class="lineNum">     808 </span>            :     hid_t layer_hid = H5Gopen2(data_hid, source_layer_name.c_str(),
<span class="lineNum">     809 </span><span class="lineNoCov">          0 :         H5P_DEFAULT);</span>
<span class="lineNum">     810 </span><span class="lineNoCov">          0 :     CHECK_GE(layer_hid, 0)</span>
<span class="lineNum">     811 </span>            :         &lt;&lt; &quot;Error reading weights from &quot; &lt;&lt; trained_filename;
<span class="lineNum">     812 </span>            :     // Check that source layer doesn't have more params than target layer
<span class="lineNum">     813 </span><span class="lineNoCov">          0 :     int num_source_params = hdf5_get_num_links(layer_hid);</span>
<span class="lineNum">     814 </span><span class="lineNoCov">          0 :     CHECK_LE(num_source_params, target_blobs.size())</span>
<span class="lineNum">     815 </span>            :         &lt;&lt; &quot;Incompatible number of blobs for layer &quot; &lt;&lt; source_layer_name;
<span class="lineNum">     816 </span><span class="lineNoCov">          0 :     for (int j = 0; j &lt; target_blobs.size(); ++j) {</span>
<span class="lineNum">     817 </span><span class="lineNoCov">          0 :       ostringstream oss;</span>
<span class="lineNum">     818 </span><span class="lineNoCov">          0 :       oss &lt;&lt; j;</span>
<span class="lineNum">     819 </span>            :       string dataset_name = oss.str();
<span class="lineNum">     820 </span><span class="lineNoCov">          0 :       int target_net_param_id = param_id_vecs_[target_layer_id][j];</span>
<span class="lineNum">     821 </span><span class="lineNoCov">          0 :       if (!H5Lexists(layer_hid, dataset_name.c_str(), H5P_DEFAULT)) {</span>
<span class="lineNum">     822 </span>            :         // Target param doesn't exist in source weights...
<span class="lineNum">     823 </span><span class="lineNoCov">          0 :         if (param_owners_[target_net_param_id] != -1) {</span>
<span class="lineNum">     824 </span>            :           // ...but it's weight-shared in target, so that's fine.
<span class="lineNum">     825 </span>            :           continue;
<span class="lineNum">     826 </span>            :         } else {
<span class="lineNum">     827 </span><span class="lineNoCov">          0 :           LOG(FATAL) &lt;&lt; &quot;Incompatible number of blobs for layer &quot;</span>
<span class="lineNum">     828 </span>            :               &lt;&lt; source_layer_name;
<span class="lineNum">     829 </span>            :         }
<span class="lineNum">     830 </span>            :       }
<span class="lineNum">     831 </span><span class="lineNoCov">          0 :       hdf5_load_nd_dataset(layer_hid, dataset_name.c_str(), 0, kMaxBlobAxes,</span>
<span class="lineNum">     832 </span>            :           target_blobs[j].get());
<span class="lineNum">     833 </span>            :     }
<span class="lineNum">     834 </span><span class="lineNoCov">          0 :     H5Gclose(layer_hid);</span>
<span class="lineNum">     835 </span>            :   }
<span class="lineNum">     836 </span><span class="lineNoCov">          0 :   H5Gclose(data_hid);</span>
<span class="lineNum">     837 </span><span class="lineNoCov">          0 :   H5Fclose(file_hid);</span>
<span class="lineNum">     838 </span>            : #else
<span class="lineNum">     839 </span>            :   LOG(FATAL) &lt;&lt; &quot;CopyTrainedLayersFromHDF5 requires hdf5;&quot;
<span class="lineNum">     840 </span>            :              &lt;&lt; &quot; compile with USE_HDF5.&quot;;
<span class="lineNum">     841 </span>            : #endif  // USE_HDF5
<span class="lineNum">     842 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     843 </span>            : 
<span class="lineNum">     844 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     845 </span><span class="lineCov">          2 : void Net&lt;Dtype&gt;::ToProto(NetParameter* param, bool write_diff) const {</span>
<span class="lineNum">     846 </span><span class="lineCov">          2 :   param-&gt;Clear();</span>
<span class="lineNum">     847 </span><span class="lineCov">          2 :   param-&gt;set_name(name_);</span>
<span class="lineNum">     848 </span>            :   // Add bottom and top
<span class="lineNum">     849 </span>            :   DLOG(INFO) &lt;&lt; &quot;Serializing &quot; &lt;&lt; layers_.size() &lt;&lt; &quot; layers&quot;;
<span class="lineNum">     850 </span><span class="lineCov">         58 :   for (int i = 0; i &lt; layers_.size(); ++i) {</span>
<span class="lineNum">     851 </span>            :     LayerParameter* layer_param = param-&gt;add_layer();
<span class="lineNum">     852 </span><span class="lineCov">         18 :     layers_[i]-&gt;ToProto(layer_param, write_diff);</span>
<span class="lineNum">     853 </span>            :   }
<span class="lineNum">     854 </span><span class="lineCov">          2 : }</span>
<span class="lineNum">     855 </span>            : 
<span class="lineNum">     856 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     857 </span><span class="lineNoCov">          0 : void Net&lt;Dtype&gt;::ToHDF5(const string&amp; filename, bool write_diff) const {</span>
<span class="lineNum">     858 </span>            : // This code is taken from https://github.com/sh1r0/caffe-android-lib
<span class="lineNum">     859 </span>            : #ifdef USE_HDF5
<span class="lineNum">     860 </span><span class="lineNoCov">          0 :   hid_t file_hid = H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT,</span>
<span class="lineNum">     861 </span><span class="lineNoCov">          0 :       H5P_DEFAULT);</span>
<span class="lineNum">     862 </span><span class="lineNoCov">          0 :   CHECK_GE(file_hid, 0)</span>
<span class="lineNum">     863 </span>            :       &lt;&lt; &quot;Couldn't open &quot; &lt;&lt; filename &lt;&lt; &quot; to save weights.&quot;;
<span class="lineNum">     864 </span>            :   hid_t data_hid = H5Gcreate2(file_hid, &quot;data&quot;, H5P_DEFAULT, H5P_DEFAULT,
<span class="lineNum">     865 </span><span class="lineNoCov">          0 :       H5P_DEFAULT);</span>
<span class="lineNum">     866 </span><span class="lineNoCov">          0 :   CHECK_GE(data_hid, 0) &lt;&lt; &quot;Error saving weights to &quot; &lt;&lt; filename &lt;&lt; &quot;.&quot;;</span>
<span class="lineNum">     867 </span>            :   hid_t diff_hid = -1;
<span class="lineNum">     868 </span><span class="lineNoCov">          0 :   if (write_diff) {</span>
<span class="lineNum">     869 </span><span class="lineNoCov">          0 :     diff_hid = H5Gcreate2(file_hid, &quot;diff&quot;, H5P_DEFAULT, H5P_DEFAULT,</span>
<span class="lineNum">     870 </span>            :         H5P_DEFAULT);
<span class="lineNum">     871 </span><span class="lineNoCov">          0 :     CHECK_GE(diff_hid, 0) &lt;&lt; &quot;Error saving weights to &quot; &lt;&lt; filename &lt;&lt; &quot;.&quot;;</span>
<span class="lineNum">     872 </span>            :   }
<span class="lineNum">     873 </span><span class="lineNoCov">          0 :   for (int layer_id = 0; layer_id &lt; layers_.size(); ++layer_id) {</span>
<span class="lineNum">     874 </span>            :     const LayerParameter&amp; layer_param = layers_[layer_id]-&gt;layer_param();
<span class="lineNum">     875 </span>            :     string layer_name = layer_param.name();
<span class="lineNum">     876 </span>            :     hid_t layer_data_hid = H5Gcreate2(data_hid, layer_name.c_str(),
<span class="lineNum">     877 </span><span class="lineNoCov">          0 :         H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);</span>
<span class="lineNum">     878 </span><span class="lineNoCov">          0 :     CHECK_GE(layer_data_hid, 0)</span>
<span class="lineNum">     879 </span>            :         &lt;&lt; &quot;Error saving weights to &quot; &lt;&lt; filename &lt;&lt; &quot;.&quot;;
<span class="lineNum">     880 </span>            :     hid_t layer_diff_hid = -1;
<span class="lineNum">     881 </span><span class="lineNoCov">          0 :     if (write_diff) {</span>
<span class="lineNum">     882 </span><span class="lineNoCov">          0 :       layer_diff_hid = H5Gcreate2(diff_hid, layer_name.c_str(),</span>
<span class="lineNum">     883 </span>            :           H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
<span class="lineNum">     884 </span><span class="lineNoCov">          0 :       CHECK_GE(layer_diff_hid, 0)</span>
<span class="lineNum">     885 </span>            :           &lt;&lt; &quot;Error saving weights to &quot; &lt;&lt; filename &lt;&lt; &quot;.&quot;;
<span class="lineNum">     886 </span>            :     }
<span class="lineNum">     887 </span><span class="lineNoCov">          0 :     int num_params = layers_[layer_id]-&gt;blobs().size();</span>
<span class="lineNum">     888 </span><span class="lineNoCov">          0 :     for (int param_id = 0; param_id &lt; num_params; ++param_id) {</span>
<span class="lineNum">     889 </span><span class="lineNoCov">          0 :       ostringstream dataset_name;</span>
<span class="lineNum">     890 </span><span class="lineNoCov">          0 :       dataset_name &lt;&lt; param_id;</span>
<span class="lineNum">     891 </span><span class="lineNoCov">          0 :       const int net_param_id = param_id_vecs_[layer_id][param_id];</span>
<span class="lineNum">     892 </span><span class="lineNoCov">          0 :       if (param_owners_[net_param_id] == -1) {</span>
<span class="lineNum">     893 </span>            :         // Only save params that own themselves
<span class="lineNum">     894 </span><span class="lineNoCov">          0 :         hdf5_save_nd_dataset&lt;Dtype&gt;(layer_data_hid, dataset_name.str(),</span>
<span class="lineNum">     895 </span>            :             *params_[net_param_id]);
<span class="lineNum">     896 </span>            :       }
<span class="lineNum">     897 </span><span class="lineNoCov">          0 :       if (write_diff) {</span>
<span class="lineNum">     898 </span>            :         // Write diffs regardless of weight-sharing
<span class="lineNum">     899 </span><span class="lineNoCov">          0 :         hdf5_save_nd_dataset&lt;Dtype&gt;(layer_diff_hid, dataset_name.str(),</span>
<span class="lineNum">     900 </span>            :             *params_[net_param_id], true);
<span class="lineNum">     901 </span>            :       }
<span class="lineNum">     902 </span>            :     }
<span class="lineNum">     903 </span><span class="lineNoCov">          0 :     H5Gclose(layer_data_hid);</span>
<span class="lineNum">     904 </span><span class="lineNoCov">          0 :     if (write_diff) {</span>
<span class="lineNum">     905 </span><span class="lineNoCov">          0 :       H5Gclose(layer_diff_hid);</span>
<span class="lineNum">     906 </span>            :     }
<span class="lineNum">     907 </span>            :   }
<span class="lineNum">     908 </span><span class="lineNoCov">          0 :   H5Gclose(data_hid);</span>
<span class="lineNum">     909 </span><span class="lineNoCov">          0 :   if (write_diff) {</span>
<span class="lineNum">     910 </span><span class="lineNoCov">          0 :     H5Gclose(diff_hid);</span>
<span class="lineNum">     911 </span>            :   }
<span class="lineNum">     912 </span><span class="lineNoCov">          0 :   H5Fclose(file_hid);</span>
<span class="lineNum">     913 </span>            : // This code is taken from https://github.com/sh1r0/caffe-android-lib
<span class="lineNum">     914 </span>            : #else
<span class="lineNum">     915 </span>            :   LOG(FATAL) &lt;&lt; &quot;ToHDF5 requires hdf5; compile with USE_HDF5.&quot;;
<span class="lineNum">     916 </span>            : #endif  // USE_HDF5
<span class="lineNum">     917 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     918 </span>            : 
<span class="lineNum">     919 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     920 </span><span class="lineCov">      10000 : void Net&lt;Dtype&gt;::Update() {</span>
<span class="lineNum">     921 </span><span class="lineCov">     260000 :   for (int i = 0; i &lt; learnable_params_.size(); ++i) {</span>
<span class="lineNum">     922 </span><span class="lineCov">      80000 :     learnable_params_[i]-&gt;Update();</span>
<span class="lineNum">     923 </span>            :   }
<span class="lineNum">     924 </span><span class="lineCov">      10000 : }</span>
<span class="lineNum">     925 </span>            : 
<span class="lineNum">     926 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     927 </span><span class="lineCov">      10000 : void Net&lt;Dtype&gt;::ClearParamDiffs() {</span>
<span class="lineNum">     928 </span><span class="lineCov">     260000 :   for (int i = 0; i &lt; learnable_params_.size(); ++i) {</span>
<span class="lineNum">     929 </span><span class="lineCov">      80000 :     Blob&lt;Dtype&gt;* blob = learnable_params_[i];</span>
<span class="lineNum">     930 </span><span class="lineCov">      80000 :     switch (Caffe::mode()) {</span>
<span class="lineNum">     931 </span>            :     case Caffe::CPU:
<span class="lineNum">     932 </span><span class="lineCov">      80000 :       caffe_set(blob-&gt;count(), static_cast&lt;Dtype&gt;(0),</span>
<span class="lineNum">     933 </span>            :                 blob-&gt;mutable_cpu_diff());
<span class="lineNum">     934 </span><span class="lineCov">      80000 :       break;</span>
<span class="lineNum">     935 </span>            :     case Caffe::GPU:
<span class="lineNum">     936 </span>            : #ifndef CPU_ONLY
<span class="lineNum">     937 </span>            :       caffe_gpu_set(blob-&gt;count(), static_cast&lt;Dtype&gt;(0),
<span class="lineNum">     938 </span>            :                     blob-&gt;mutable_gpu_diff());
<span class="lineNum">     939 </span>            : #else
<span class="lineNum">     940 </span><span class="lineNoCov">          0 :       NO_GPU;</span>
<span class="lineNum">     941 </span>            : #endif
<span class="lineNum">     942 </span>            :       break;
<span class="lineNum">     943 </span>            :     }
<span class="lineNum">     944 </span>            :   }
<span class="lineNum">     945 </span><span class="lineCov">      10000 : }</span>
<span class="lineNum">     946 </span>            : 
<span class="lineNum">     947 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     948 </span><span class="lineCov">          2 : void Net&lt;Dtype&gt;::ShareWeights() {</span>
<span class="lineNum">     949 </span><span class="lineCov">         52 :   for (int i = 0; i &lt; params_.size(); ++i) {</span>
<span class="lineNum">     950 </span><span class="lineCov">         16 :     if (param_owners_[i] &lt; 0) { continue; }</span>
<span class="lineNum">     951 </span><span class="lineNoCov">          0 :     params_[i]-&gt;ShareData(*params_[param_owners_[i]]);</span>
<span class="lineNum">     952 </span><span class="lineNoCov">          0 :     params_[i]-&gt;ShareDiff(*params_[param_owners_[i]]);</span>
<span class="lineNum">     953 </span>            :   }
<span class="lineNum">     954 </span><span class="lineCov">          2 : }</span>
<a name="955"><span class="lineNum">     955 </span>            : </a>
<span class="lineNum">     956 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     957 </span><span class="lineNoCov">          0 : bool Net&lt;Dtype&gt;::has_blob(const string&amp; blob_name) const {</span>
<span class="lineNum">     958 </span><span class="lineNoCov">          0 :   return blob_names_index_.find(blob_name) != blob_names_index_.end();</span>
<span class="lineNum">     959 </span>            : }
<span class="lineNum">     960 </span>            : 
<span class="lineNum">     961 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     962 </span><span class="lineNoCov">          0 : const shared_ptr&lt;Blob&lt;Dtype&gt; &gt; Net&lt;Dtype&gt;::blob_by_name(</span>
<span class="lineNum">     963 </span>            :     const string&amp; blob_name) const {
<span class="lineNum">     964 </span>            :   shared_ptr&lt;Blob&lt;Dtype&gt; &gt; blob_ptr;
<span class="lineNum">     965 </span><span class="lineNoCov">          0 :   if (has_blob(blob_name)) {</span>
<span class="lineNum">     966 </span><span class="lineNoCov">          0 :     blob_ptr = blobs_[blob_names_index_.find(blob_name)-&gt;second];</span>
<span class="lineNum">     967 </span>            :   } else {
<span class="lineNum">     968 </span><span class="lineNoCov">          0 :     blob_ptr.reset((Blob&lt;Dtype&gt;*)(NULL));</span>
<span class="lineNum">     969 </span><span class="lineNoCov">          0 :     LOG(WARNING) &lt;&lt; &quot;Unknown blob name &quot; &lt;&lt; blob_name;</span>
<span class="lineNum">     970 </span>            :   }
<span class="lineNum">     971 </span><span class="lineNoCov">          0 :   return blob_ptr;</span>
<span class="lineNum">     972 </span>            : }
<a name="973"><span class="lineNum">     973 </span>            : </a>
<span class="lineNum">     974 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     975 </span><span class="lineNoCov">          0 : bool Net&lt;Dtype&gt;::has_layer(const string&amp; layer_name) const {</span>
<span class="lineNum">     976 </span><span class="lineNoCov">          0 :   return layer_names_index_.find(layer_name) != layer_names_index_.end();</span>
<span class="lineNum">     977 </span>            : }
<span class="lineNum">     978 </span>            : 
<span class="lineNum">     979 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     980 </span><span class="lineNoCov">          0 : const shared_ptr&lt;Layer&lt;Dtype&gt; &gt; Net&lt;Dtype&gt;::layer_by_name(</span>
<span class="lineNum">     981 </span>            :     const string&amp; layer_name) const {
<span class="lineNum">     982 </span>            :   shared_ptr&lt;Layer&lt;Dtype&gt; &gt; layer_ptr;
<span class="lineNum">     983 </span><span class="lineNoCov">          0 :   if (has_layer(layer_name)) {</span>
<span class="lineNum">     984 </span><span class="lineNoCov">          0 :     layer_ptr = layers_[layer_names_index_.find(layer_name)-&gt;second];</span>
<span class="lineNum">     985 </span>            :   } else {
<span class="lineNum">     986 </span><span class="lineNoCov">          0 :     layer_ptr.reset((Layer&lt;Dtype&gt;*)(NULL));</span>
<span class="lineNum">     987 </span><span class="lineNoCov">          0 :     LOG(WARNING) &lt;&lt; &quot;Unknown layer name &quot; &lt;&lt; layer_name;</span>
<span class="lineNum">     988 </span>            :   }
<span class="lineNum">     989 </span><span class="lineNoCov">          0 :   return layer_ptr;</span>
<span class="lineNum">     990 </span>            : }
<span class="lineNum">     991 </span>            : 
<a name="992"><span class="lineNum">     992 </span>            : INSTANTIATE_CLASS(Net);</a>
<span class="lineNum">     993 </span>            : 
<span class="lineNum">     994 </span><span class="lineCov">          2 : }  // namespace caffe</span>
</pre>
      </td>
    </tr>
  </table>
  <br>

  <!-- Report footer: LCOV generator attribution. Attribute values quoted per
       HTML conventions; generator link upgraded to https. -->
  <table width="100%" border="0" cellspacing="0" cellpadding="0">
    <tr><td class="ruler"><img src="../../glass.png" width="3" height="3" alt=""></td></tr>
    <tr><td class="versionInfo">Generated by: <a href="https://ltp.sourceforge.net/coverage/lcov.php" target="_parent">LCOV version 1.12</a></td></tr>
  </table>
  <br>

</body>
</html>
