<!doctype html>

<html lang="en">

<head>
  <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
  <title>LCOV - code analysis - src/caffe/blob.cpp</title>
  <link rel="stylesheet" type="text/css" href="../../gcov.css">
</head>

<body>

  <table width="100%" border="0" cellspacing="0" cellpadding="0">
    <tr><td class="title">LCOV - code coverage report</td></tr>
    <tr><td class="ruler"><img src="../../glass.png" width="3" height="3" alt=""></td></tr>

    <tr>
      <td width="100%">
        <table cellpadding="1" border="0" width="100%">
          <tr>
            <td width="10%" class="headerItem">Current view:</td>
            <td width="35%" class="headerValue"><a href="../../index.html">top level</a> - <a href="index.html">src/caffe</a> - blob.cpp<span style="font-size: 80%;"> (source / <a href="blob.cpp.func-sort-c.html">functions</a>)</span></td>
            <td width="5%"></td>
            <td width="15%"></td>
            <td width="10%" class="headerCovTableHead">Hit</td>
            <td width="10%" class="headerCovTableHead">Total</td>
            <td width="15%" class="headerCovTableHead">Coverage</td>
          </tr>
          <tr>
            <td class="headerItem">Test:</td>
            <td class="headerValue">code analysis</td>
            <td></td>
            <td class="headerItem">Lines:</td>
            <td class="headerCovTableEntry">74</td>
            <td class="headerCovTableEntry">257</td>
            <td class="headerCovTableEntryLo">28.8 %</td>
          </tr>
          <tr>
            <td class="headerItem">Date:</td>
            <td class="headerValue">2020-09-11 22:25:26</td>
            <td></td>
            <td class="headerItem">Functions:</td>
            <td class="headerCovTableEntry">17</td>
            <td class="headerCovTableEntry">119</td>
            <td class="headerCovTableEntryLo">14.3 %</td>
          </tr>
          <tr>
            <td class="headerItem">Legend:</td>
            <td class="headerValueLeg">Lines:
              <span class="coverLegendCov">hit</span>
              <span class="coverLegendNoCov">not hit</span>
            </td>
            <td></td>
          </tr>
          <tr><td><img src="../../glass.png" width="3" height="3" alt=""></td></tr>
        </table>
      </td>
    </tr>

    <tr><td class="ruler"><img src="../../glass.png" width="3" height="3" alt=""></td></tr>
  </table>

  <table cellpadding="0" cellspacing="0" border="0">
    <tr>
      <td><br></td>
    </tr>
    <tr>
      <td>
<pre class="sourceHeading">          Line data    Source code</pre>
<pre class="source">
<a name="1"><span class="lineNum">       1 </span>            : #include &lt;climits&gt;</a>
<span class="lineNum">       2 </span>            : #include &lt;vector&gt;
<span class="lineNum">       3 </span>            : 
<span class="lineNum">       4 </span>            : #include &quot;caffe/blob.hpp&quot;
<span class="lineNum">       5 </span>            : #include &quot;caffe/common.hpp&quot;
<span class="lineNum">       6 </span>            : #include &quot;caffe/syncedmem.hpp&quot;
<span class="lineNum">       7 </span>            : #include &quot;caffe/util/math_functions.hpp&quot;
<span class="lineNum">       8 </span>            : 
<span class="lineNum">       9 </span>            : namespace caffe {
<a name="10"><span class="lineNum">      10 </span>            : </a>
<span class="lineNum">      11 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      12 </span><span class="lineCov">        404 : void Blob&lt;Dtype&gt;::Reshape(const int num, const int channels, const int height,</span>
<span class="lineNum">      13 </span>            :     const int width) {
<span class="lineNum">      14 </span><span class="lineCov">        404 :   vector&lt;int&gt; shape(4);</span>
<span class="lineNum">      15 </span><span class="lineCov">        404 :   shape[0] = num;</span>
<span class="lineNum">      16 </span><span class="lineCov">        404 :   shape[1] = channels;</span>
<span class="lineNum">      17 </span><span class="lineCov">        404 :   shape[2] = height;</span>
<span class="lineNum">      18 </span><span class="lineCov">        404 :   shape[3] = width;</span>
<span class="lineNum">      19 </span><span class="lineCov">        404 :   Reshape(shape);</span>
<span class="lineNum">      20 </span><span class="lineCov">        404 : }</span>
<a name="21"><span class="lineNum">      21 </span>            : </a>
<span class="lineNum">      22 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      23 </span><span class="lineCov">       3362 : void Blob&lt;Dtype&gt;::Reshape(const vector&lt;int&gt;&amp; shape) {</span>
<span class="lineNum">      24 </span><span class="lineCov">      13448 :   CHECK_LE(shape.size(), kMaxBlobAxes);</span>
<span class="lineNum">      25 </span><span class="lineCov">       3362 :   count_ = 1;</span>
<span class="lineNum">      26 </span><span class="lineCov">       3362 :   shape_.resize(shape.size());</span>
<span class="lineNum">      27 </span><span class="lineCov">       6672 :   if (!shape_data_ || shape_data_-&gt;size() &lt; shape.size() * sizeof(int)) {</span>
<span class="lineNum">      28 </span><span class="lineCov">         52 :     shape_data_.reset(new SyncedMemory(shape.size() * sizeof(int)));</span>
<span class="lineNum">      29 </span>            :   }
<span class="lineNum">      30 </span><span class="lineCov">       3362 :   int* shape_data = static_cast&lt;int*&gt;(shape_data_-&gt;mutable_cpu_data());</span>
<span class="lineNum">      31 </span><span class="lineCov">      28429 :   for (int i = 0; i &lt; shape.size(); ++i) {</span>
<span class="lineNum">      32 </span><span class="lineCov">      14470 :     CHECK_GE(shape[i], 0);</span>
<span class="lineNum">      33 </span><span class="lineCov">       7235 :     if (count_ != 0) {</span>
<span class="lineNum">      34 </span><span class="lineCov">      21705 :       CHECK_LE(shape[i], INT_MAX / count_) &lt;&lt; &quot;blob size exceeds INT_MAX&quot;;</span>
<span class="lineNum">      35 </span>            :     }
<span class="lineNum">      36 </span><span class="lineCov">       7235 :     count_ *= shape[i];</span>
<span class="lineNum">      37 </span><span class="lineCov">       7235 :     shape_[i] = shape[i];</span>
<span class="lineNum">      38 </span><span class="lineCov">       7235 :     shape_data[i] = shape[i];</span>
<span class="lineNum">      39 </span>            :   }
<span class="lineNum">      40 </span><span class="lineCov">       3362 :   if (count_ &gt; capacity_) {</span>
<span class="lineNum">      41 </span><span class="lineCov">         52 :     capacity_ = count_;</span>
<span class="lineNum">      42 </span><span class="lineCov">         52 :     data_.reset(new SyncedMemory(capacity_ * sizeof(Dtype)));</span>
<span class="lineNum">      43 </span><span class="lineCov">         52 :     diff_.reset(new SyncedMemory(capacity_ * sizeof(Dtype)));</span>
<span class="lineNum">      44 </span>            :   }
<span class="lineNum">      45 </span><span class="lineCov">       3362 : }</span>
<a name="46"><span class="lineNum">      46 </span>            : </a>
<span class="lineNum">      47 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      48 </span><span class="lineNoCov">          0 : void Blob&lt;Dtype&gt;::Reshape(const BlobShape&amp; shape) {</span>
<span class="lineNum">      49 </span><span class="lineNoCov">          0 :   CHECK_LE(shape.dim_size(), kMaxBlobAxes);</span>
<span class="lineNum">      50 </span><span class="lineNoCov">          0 :   vector&lt;int&gt; shape_vec(shape.dim_size());</span>
<span class="lineNum">      51 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; shape.dim_size(); ++i) {</span>
<span class="lineNum">      52 </span><span class="lineNoCov">          0 :     shape_vec[i] = shape.dim(i);</span>
<span class="lineNum">      53 </span>            :   }
<span class="lineNum">      54 </span><span class="lineNoCov">          0 :   Reshape(shape_vec);</span>
<span class="lineNum">      55 </span><span class="lineNoCov">          0 : }</span>
<a name="56"><span class="lineNum">      56 </span>            : </a>
<span class="lineNum">      57 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      58 </span><span class="lineCov">        907 : void Blob&lt;Dtype&gt;::ReshapeLike(const Blob&lt;Dtype&gt;&amp; other) {</span>
<span class="lineNum">      59 </span><span class="lineCov">        907 :   Reshape(other.shape());</span>
<span class="lineNum">      60 </span><span class="lineCov">        907 : }</span>
<a name="61"><span class="lineNum">      61 </span>            : </a>
<span class="lineNum">      62 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      63 </span><span class="lineNoCov">          0 : Blob&lt;Dtype&gt;::Blob(const int num, const int channels, const int height,</span>
<span class="lineNum">      64 </span>            :     const int width)
<span class="lineNum">      65 </span>            :   // capacity_ must be initialized before calling Reshape
<span class="lineNum">      66 </span><span class="lineNoCov">          0 :   : capacity_(0) {</span>
<span class="lineNum">      67 </span><span class="lineNoCov">          0 :   Reshape(num, channels, height, width);</span>
<span class="lineNum">      68 </span><span class="lineNoCov">          0 : }</span>
<a name="69"><span class="lineNum">      69 </span>            : </a>
<span class="lineNum">      70 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      71 </span><span class="lineCov">          8 : Blob&lt;Dtype&gt;::Blob(const vector&lt;int&gt;&amp; shape)</span>
<span class="lineNum">      72 </span>            :   // capacity_ must be initialized before calling Reshape
<span class="lineNum">      73 </span><span class="lineCov">          8 :   : capacity_(0) {</span>
<span class="lineNum">      74 </span><span class="lineCov">          8 :   Reshape(shape);</span>
<span class="lineNum">      75 </span><span class="lineCov">          8 : }</span>
<a name="76"><span class="lineNum">      76 </span>            : </a>
<span class="lineNum">      77 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      78 </span><span class="lineNoCov">          0 : const int* Blob&lt;Dtype&gt;::gpu_shape() const {</span>
<span class="lineNum">      79 </span><span class="lineNoCov">          0 :   CHECK(shape_data_);</span>
<span class="lineNum">      80 </span><span class="lineNoCov">          0 :   return (const int*)shape_data_-&gt;gpu_data();</span>
<span class="lineNum">      81 </span>            : }
<a name="82"><span class="lineNum">      82 </span>            : </a>
<span class="lineNum">      83 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      84 </span><span class="lineCov">     283208 : const Dtype* Blob&lt;Dtype&gt;::cpu_data() const {</span>
<span class="lineNum">      85 </span><span class="lineCov">     566416 :   CHECK(data_);</span>
<span class="lineNum">      86 </span><span class="lineCov">     283208 :   return (const Dtype*)data_-&gt;cpu_data();</span>
<span class="lineNum">      87 </span>            : }
<a name="88"><span class="lineNum">      88 </span>            : </a>
<span class="lineNum">      89 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      90 </span><span class="lineCov">      10500 : void Blob&lt;Dtype&gt;::set_cpu_data(Dtype* data) {</span>
<span class="lineNum">      91 </span><span class="lineCov">      21000 :   CHECK(data);</span>
<span class="lineNum">      92 </span>            :   // Make sure CPU and GPU sizes remain equal
<span class="lineNum">      93 </span><span class="lineCov">      10500 :   size_t size = count_ * sizeof(Dtype);</span>
<span class="lineNum">      94 </span><span class="lineCov">      10500 :   if (data_-&gt;size() != size) {</span>
<span class="lineNum">      95 </span><span class="lineNoCov">          0 :     data_.reset(new SyncedMemory(size));</span>
<span class="lineNum">      96 </span><span class="lineNoCov">          0 :     diff_.reset(new SyncedMemory(size));</span>
<span class="lineNum">      97 </span>            :   }
<span class="lineNum">      98 </span><span class="lineCov">      10500 :   data_-&gt;set_cpu_data(data);</span>
<span class="lineNum">      99 </span><span class="lineCov">      10500 : }</span>
<a name="100"><span class="lineNum">     100 </span>            : </a>
<span class="lineNum">     101 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     102 </span><span class="lineNoCov">          0 : const Dtype* Blob&lt;Dtype&gt;::gpu_data() const {</span>
<span class="lineNum">     103 </span><span class="lineNoCov">          0 :   CHECK(data_);</span>
<span class="lineNum">     104 </span><span class="lineNoCov">          0 :   return (const Dtype*)data_-&gt;gpu_data();</span>
<span class="lineNum">     105 </span>            : }
<a name="106"><span class="lineNum">     106 </span>            : </a>
<span class="lineNum">     107 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     108 </span><span class="lineNoCov">          0 : void Blob&lt;Dtype&gt;::set_gpu_data(Dtype* data) {</span>
<span class="lineNum">     109 </span><span class="lineNoCov">          0 :   CHECK(data);</span>
<span class="lineNum">     110 </span>            :   // Make sure CPU and GPU sizes remain equal
<span class="lineNum">     111 </span><span class="lineNoCov">          0 :   size_t size = count_ * sizeof(Dtype);</span>
<span class="lineNum">     112 </span><span class="lineNoCov">          0 :   if (data_-&gt;size() != size) {</span>
<span class="lineNum">     113 </span><span class="lineNoCov">          0 :     data_.reset(new SyncedMemory(size));</span>
<span class="lineNum">     114 </span><span class="lineNoCov">          0 :     diff_.reset(new SyncedMemory(size));</span>
<span class="lineNum">     115 </span>            :   }
<span class="lineNum">     116 </span><span class="lineNoCov">          0 :   data_-&gt;set_gpu_data(data);</span>
<span class="lineNum">     117 </span><span class="lineNoCov">          0 : }</span>
<a name="118"><span class="lineNum">     118 </span>            : </a>
<span class="lineNum">     119 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     120 </span><span class="lineCov">        200 : const Dtype* Blob&lt;Dtype&gt;::cpu_diff() const {</span>
<span class="lineNum">     121 </span><span class="lineCov">        400 :   CHECK(diff_);</span>
<span class="lineNum">     122 </span><span class="lineCov">        200 :   return (const Dtype*)diff_-&gt;cpu_data();</span>
<span class="lineNum">     123 </span>            : }
<a name="124"><span class="lineNum">     124 </span>            : </a>
<span class="lineNum">     125 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     126 </span><span class="lineNoCov">          0 : const Dtype* Blob&lt;Dtype&gt;::gpu_diff() const {</span>
<span class="lineNum">     127 </span><span class="lineNoCov">          0 :   CHECK(diff_);</span>
<span class="lineNum">     128 </span><span class="lineNoCov">          0 :   return (const Dtype*)diff_-&gt;gpu_data();</span>
<span class="lineNum">     129 </span>            : }
<a name="130"><span class="lineNum">     130 </span>            : </a>
<span class="lineNum">     131 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     132 </span><span class="lineCov">      53229 : Dtype* Blob&lt;Dtype&gt;::mutable_cpu_data() {</span>
<span class="lineNum">     133 </span><span class="lineCov">     106458 :   CHECK(data_);</span>
<span class="lineNum">     134 </span><span class="lineCov">      53229 :   return static_cast&lt;Dtype*&gt;(data_-&gt;mutable_cpu_data());</span>
<span class="lineNum">     135 </span>            : }
<a name="136"><span class="lineNum">     136 </span>            : </a>
<span class="lineNum">     137 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     138 </span><span class="lineNoCov">          0 : Dtype* Blob&lt;Dtype&gt;::mutable_gpu_data() {</span>
<span class="lineNum">     139 </span><span class="lineNoCov">          0 :   CHECK(data_);</span>
<span class="lineNum">     140 </span><span class="lineNoCov">          0 :   return static_cast&lt;Dtype*&gt;(data_-&gt;mutable_gpu_data());</span>
<span class="lineNum">     141 </span>            : }
<a name="142"><span class="lineNum">     142 </span>            : </a>
<span class="lineNum">     143 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     144 </span><span class="lineCov">          2 : Dtype* Blob&lt;Dtype&gt;::mutable_cpu_diff() {</span>
<span class="lineNum">     145 </span><span class="lineCov">          4 :   CHECK(diff_);</span>
<span class="lineNum">     146 </span><span class="lineCov">          2 :   return static_cast&lt;Dtype*&gt;(diff_-&gt;mutable_cpu_data());</span>
<span class="lineNum">     147 </span>            : }
<a name="148"><span class="lineNum">     148 </span>            : </a>
<span class="lineNum">     149 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     150 </span><span class="lineNoCov">          0 : Dtype* Blob&lt;Dtype&gt;::mutable_gpu_diff() {</span>
<span class="lineNum">     151 </span><span class="lineNoCov">          0 :   CHECK(diff_);</span>
<span class="lineNum">     152 </span><span class="lineNoCov">          0 :   return static_cast&lt;Dtype*&gt;(diff_-&gt;mutable_gpu_data());</span>
<span class="lineNum">     153 </span>            : }
<a name="154"><span class="lineNum">     154 </span>            : </a>
<span class="lineNum">     155 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     156 </span><span class="lineCov">        400 : void Blob&lt;Dtype&gt;::ShareData(const Blob&amp; other) {</span>
<span class="lineNum">     157 </span><span class="lineCov">        800 :   CHECK_EQ(count_, other.count());</span>
<span class="lineNum">     158 </span><span class="lineCov">        400 :   data_ = other.data();</span>
<span class="lineNum">     159 </span><span class="lineCov">        400 : }</span>
<a name="160"><span class="lineNum">     160 </span>            : </a>
<span class="lineNum">     161 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     162 </span><span class="lineNoCov">          0 : void Blob&lt;Dtype&gt;::ShareDiff(const Blob&amp; other) {</span>
<span class="lineNum">     163 </span><span class="lineNoCov">          0 :   CHECK_EQ(count_, other.count());</span>
<span class="lineNum">     164 </span><span class="lineNoCov">          0 :   diff_ = other.diff();</span>
<span class="lineNum">     165 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     166 </span>            : 
<span class="lineNum">     167 </span>            : // The &quot;update&quot; method is used for parameter blobs in a Net, which are stored
<a name="168"><span class="lineNum">     168 </span>            : // as Blob&lt;float&gt; or Blob&lt;double&gt; -- hence we do not define it for</a>
<a name="169"><span class="lineNum">     169 </span>            : // Blob&lt;int&gt; or Blob&lt;unsigned int&gt;.</a>
<span class="lineNum">     170 </span><span class="lineNoCov">          0 : template &lt;&gt; void Blob&lt;unsigned int&gt;::Update() { NOT_IMPLEMENTED; }</span>
<span class="lineNum">     171 </span><span class="lineNoCov">          0 : template &lt;&gt; void Blob&lt;int&gt;::Update() { NOT_IMPLEMENTED; }</span>
<a name="172"><span class="lineNum">     172 </span>            : </a>
<span class="lineNum">     173 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     174 </span><span class="lineNoCov">          0 : void Blob&lt;Dtype&gt;::Update() {</span>
<span class="lineNum">     175 </span>            :   // We will perform update based on where the data is located.
<span class="lineNum">     176 </span><span class="lineNoCov">          0 :   switch (data_-&gt;head()) {</span>
<span class="lineNum">     177 </span>            :   case SyncedMemory::HEAD_AT_CPU:
<span class="lineNum">     178 </span>            :     // perform computation on CPU
<span class="lineNum">     179 </span><span class="lineNoCov">          0 :     caffe_axpy&lt;Dtype&gt;(count_, Dtype(-1),</span>
<span class="lineNum">     180 </span><span class="lineNoCov">          0 :         static_cast&lt;const Dtype*&gt;(diff_-&gt;cpu_data()),</span>
<span class="lineNum">     181 </span><span class="lineNoCov">          0 :         static_cast&lt;Dtype*&gt;(data_-&gt;mutable_cpu_data()));</span>
<span class="lineNum">     182 </span>            :     break;
<span class="lineNum">     183 </span>            :   case SyncedMemory::HEAD_AT_GPU:
<span class="lineNum">     184 </span>            :   case SyncedMemory::SYNCED:
<span class="lineNum">     185 </span>            : #ifndef CPU_ONLY
<span class="lineNum">     186 </span>            :     // perform computation on GPU
<span class="lineNum">     187 </span>            :     caffe_gpu_axpy&lt;Dtype&gt;(count_, Dtype(-1),
<span class="lineNum">     188 </span>            :         static_cast&lt;const Dtype*&gt;(diff_-&gt;gpu_data()),
<span class="lineNum">     189 </span>            :         static_cast&lt;Dtype*&gt;(data_-&gt;mutable_gpu_data()));
<span class="lineNum">     190 </span>            : #else
<span class="lineNum">     191 </span><span class="lineNoCov">          0 :     NO_GPU;</span>
<span class="lineNum">     192 </span>            : #endif
<span class="lineNum">     193 </span>            :     break;
<span class="lineNum">     194 </span>            :   default:
<span class="lineNum">     195 </span><span class="lineNoCov">          0 :     LOG(FATAL) &lt;&lt; &quot;Syncedmem not initialized.&quot;;</span>
<span class="lineNum">     196 </span>            :   }
<a name="197"><span class="lineNum">     197 </span><span class="lineNoCov">          0 : }</span></a>
<span class="lineNum">     198 </span>            : 
<span class="lineNum">     199 </span><span class="lineNoCov">          0 : template &lt;&gt; unsigned int Blob&lt;unsigned int&gt;::asum_data() const {</span>
<span class="lineNum">     200 </span><span class="lineNoCov">          0 :   NOT_IMPLEMENTED;</span>
<span class="lineNum">     201 </span>            :   return 0;
<a name="202"><span class="lineNum">     202 </span>            : }</a>
<span class="lineNum">     203 </span>            : 
<span class="lineNum">     204 </span><span class="lineNoCov">          0 : template &lt;&gt; int Blob&lt;int&gt;::asum_data() const {</span>
<span class="lineNum">     205 </span><span class="lineNoCov">          0 :   NOT_IMPLEMENTED;</span>
<span class="lineNum">     206 </span>            :   return 0;
<span class="lineNum">     207 </span>            : }
<a name="208"><span class="lineNum">     208 </span>            : </a>
<span class="lineNum">     209 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     210 </span><span class="lineNoCov">          0 : Dtype Blob&lt;Dtype&gt;::asum_data() const {</span>
<span class="lineNum">     211 </span><span class="lineNoCov">          0 :   if (!data_) { return 0; }</span>
<span class="lineNum">     212 </span><span class="lineNoCov">          0 :   switch (data_-&gt;head()) {</span>
<span class="lineNum">     213 </span>            :   case SyncedMemory::HEAD_AT_CPU:
<span class="lineNum">     214 </span><span class="lineNoCov">          0 :     return caffe_cpu_asum(count_, cpu_data());</span>
<span class="lineNum">     215 </span>            :   case SyncedMemory::HEAD_AT_GPU:
<span class="lineNum">     216 </span>            :   case SyncedMemory::SYNCED:
<span class="lineNum">     217 </span>            : #ifndef CPU_ONLY
<span class="lineNum">     218 </span>            :   {
<span class="lineNum">     219 </span>            :     Dtype asum;
<span class="lineNum">     220 </span>            :     caffe_gpu_asum(count_, gpu_data(), &amp;asum);
<span class="lineNum">     221 </span>            :     return asum;
<span class="lineNum">     222 </span>            :   }
<span class="lineNum">     223 </span>            : #else
<span class="lineNum">     224 </span><span class="lineNoCov">          0 :     NO_GPU;</span>
<span class="lineNum">     225 </span>            : #endif
<span class="lineNum">     226 </span>            :   case SyncedMemory::UNINITIALIZED:
<span class="lineNum">     227 </span>            :     return 0;
<span class="lineNum">     228 </span>            :   default:
<span class="lineNum">     229 </span><span class="lineNoCov">          0 :     LOG(FATAL) &lt;&lt; &quot;Unknown SyncedMemory head state: &quot; &lt;&lt; data_-&gt;head();</span>
<span class="lineNum">     230 </span>            :   }
<span class="lineNum">     231 </span>            :   return 0;
<a name="232"><span class="lineNum">     232 </span>            : }</a>
<span class="lineNum">     233 </span>            : 
<span class="lineNum">     234 </span><span class="lineNoCov">          0 : template &lt;&gt; unsigned int Blob&lt;unsigned int&gt;::asum_diff() const {</span>
<span class="lineNum">     235 </span><span class="lineNoCov">          0 :   NOT_IMPLEMENTED;</span>
<span class="lineNum">     236 </span>            :   return 0;
<a name="237"><span class="lineNum">     237 </span>            : }</a>
<span class="lineNum">     238 </span>            : 
<span class="lineNum">     239 </span><span class="lineNoCov">          0 : template &lt;&gt; int Blob&lt;int&gt;::asum_diff() const {</span>
<span class="lineNum">     240 </span><span class="lineNoCov">          0 :   NOT_IMPLEMENTED;</span>
<span class="lineNum">     241 </span>            :   return 0;
<span class="lineNum">     242 </span>            : }
<a name="243"><span class="lineNum">     243 </span>            : </a>
<span class="lineNum">     244 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     245 </span><span class="lineNoCov">          0 : Dtype Blob&lt;Dtype&gt;::asum_diff() const {</span>
<span class="lineNum">     246 </span><span class="lineNoCov">          0 :   if (!diff_) { return 0; }</span>
<span class="lineNum">     247 </span><span class="lineNoCov">          0 :   switch (diff_-&gt;head()) {</span>
<span class="lineNum">     248 </span>            :   case SyncedMemory::HEAD_AT_CPU:
<span class="lineNum">     249 </span><span class="lineNoCov">          0 :     return caffe_cpu_asum(count_, cpu_diff());</span>
<span class="lineNum">     250 </span>            :   case SyncedMemory::HEAD_AT_GPU:
<span class="lineNum">     251 </span>            :   case SyncedMemory::SYNCED:
<span class="lineNum">     252 </span>            : #ifndef CPU_ONLY
<span class="lineNum">     253 </span>            :   {
<span class="lineNum">     254 </span>            :     Dtype asum;
<span class="lineNum">     255 </span>            :     caffe_gpu_asum(count_, gpu_diff(), &amp;asum);
<span class="lineNum">     256 </span>            :     return asum;
<span class="lineNum">     257 </span>            :   }
<span class="lineNum">     258 </span>            : #else
<span class="lineNum">     259 </span><span class="lineNoCov">          0 :     NO_GPU;</span>
<span class="lineNum">     260 </span>            : #endif
<span class="lineNum">     261 </span>            :   case SyncedMemory::UNINITIALIZED:
<span class="lineNum">     262 </span>            :     return 0;
<span class="lineNum">     263 </span>            :   default:
<span class="lineNum">     264 </span><span class="lineNoCov">          0 :     LOG(FATAL) &lt;&lt; &quot;Unknown SyncedMemory head state: &quot; &lt;&lt; diff_-&gt;head();</span>
<span class="lineNum">     265 </span>            :   }
<span class="lineNum">     266 </span>            :   return 0;
<a name="267"><span class="lineNum">     267 </span>            : }</a>
<span class="lineNum">     268 </span>            : 
<span class="lineNum">     269 </span><span class="lineNoCov">          0 : template &lt;&gt; unsigned int Blob&lt;unsigned int&gt;::sumsq_data() const {</span>
<span class="lineNum">     270 </span><span class="lineNoCov">          0 :   NOT_IMPLEMENTED;</span>
<span class="lineNum">     271 </span>            :   return 0;
<a name="272"><span class="lineNum">     272 </span>            : }</a>
<span class="lineNum">     273 </span>            : 
<span class="lineNum">     274 </span><span class="lineNoCov">          0 : template &lt;&gt; int Blob&lt;int&gt;::sumsq_data() const {</span>
<span class="lineNum">     275 </span><span class="lineNoCov">          0 :   NOT_IMPLEMENTED;</span>
<span class="lineNum">     276 </span>            :   return 0;
<span class="lineNum">     277 </span>            : }
<a name="278"><span class="lineNum">     278 </span>            : </a>
<span class="lineNum">     279 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     280 </span><span class="lineNoCov">          0 : Dtype Blob&lt;Dtype&gt;::sumsq_data() const {</span>
<span class="lineNum">     281 </span>            :   Dtype sumsq;
<span class="lineNum">     282 </span>            :   const Dtype* data;
<span class="lineNum">     283 </span><span class="lineNoCov">          0 :   if (!data_) { return 0; }</span>
<span class="lineNum">     284 </span><span class="lineNoCov">          0 :   switch (data_-&gt;head()) {</span>
<span class="lineNum">     285 </span>            :   case SyncedMemory::HEAD_AT_CPU:
<span class="lineNum">     286 </span><span class="lineNoCov">          0 :     data = cpu_data();</span>
<span class="lineNum">     287 </span><span class="lineNoCov">          0 :     sumsq = caffe_cpu_dot(count_, data, data);</span>
<span class="lineNum">     288 </span>            :     break;
<span class="lineNum">     289 </span>            :   case SyncedMemory::HEAD_AT_GPU:
<span class="lineNum">     290 </span>            :   case SyncedMemory::SYNCED:
<span class="lineNum">     291 </span>            : #ifndef CPU_ONLY
<span class="lineNum">     292 </span>            :     data = gpu_data();
<span class="lineNum">     293 </span>            :     caffe_gpu_dot(count_, data, data, &amp;sumsq);
<span class="lineNum">     294 </span>            : #else
<span class="lineNum">     295 </span><span class="lineNoCov">          0 :     NO_GPU;</span>
<span class="lineNum">     296 </span>            : #endif
<span class="lineNum">     297 </span>            :     break;
<span class="lineNum">     298 </span>            :   case SyncedMemory::UNINITIALIZED:
<span class="lineNum">     299 </span>            :     return 0;
<span class="lineNum">     300 </span>            :   default:
<span class="lineNum">     301 </span><span class="lineNoCov">          0 :     LOG(FATAL) &lt;&lt; &quot;Unknown SyncedMemory head state: &quot; &lt;&lt; data_-&gt;head();</span>
<span class="lineNum">     302 </span>            :   }
<span class="lineNum">     303 </span><span class="lineNoCov">          0 :   return sumsq;</span>
<a name="304"><span class="lineNum">     304 </span>            : }</a>
<span class="lineNum">     305 </span>            : 
<span class="lineNum">     306 </span><span class="lineNoCov">          0 : template &lt;&gt; unsigned int Blob&lt;unsigned int&gt;::sumsq_diff() const {</span>
<span class="lineNum">     307 </span><span class="lineNoCov">          0 :   NOT_IMPLEMENTED;</span>
<span class="lineNum">     308 </span>            :   return 0;
<a name="309"><span class="lineNum">     309 </span>            : }</a>
<span class="lineNum">     310 </span>            : 
<span class="lineNum">     311 </span><span class="lineNoCov">          0 : template &lt;&gt; int Blob&lt;int&gt;::sumsq_diff() const {</span>
<span class="lineNum">     312 </span><span class="lineNoCov">          0 :   NOT_IMPLEMENTED;</span>
<span class="lineNum">     313 </span>            :   return 0;
<span class="lineNum">     314 </span>            : }
<a name="315"><span class="lineNum">     315 </span>            : </a>
<span class="lineNum">     316 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     317 </span><span class="lineNoCov">          0 : Dtype Blob&lt;Dtype&gt;::sumsq_diff() const {</span>
<span class="lineNum">     318 </span>            :   Dtype sumsq;
<span class="lineNum">     319 </span>            :   const Dtype* diff;
<span class="lineNum">     320 </span><span class="lineNoCov">          0 :   if (!diff_) { return 0; }</span>
<span class="lineNum">     321 </span><span class="lineNoCov">          0 :   switch (diff_-&gt;head()) {</span>
<span class="lineNum">     322 </span>            :   case SyncedMemory::HEAD_AT_CPU:
<span class="lineNum">     323 </span><span class="lineNoCov">          0 :     diff = cpu_diff();</span>
<span class="lineNum">     324 </span><span class="lineNoCov">          0 :     sumsq = caffe_cpu_dot(count_, diff, diff);</span>
<span class="lineNum">     325 </span>            :     break;
<span class="lineNum">     326 </span>            :   case SyncedMemory::HEAD_AT_GPU:
<span class="lineNum">     327 </span>            :   case SyncedMemory::SYNCED:
<span class="lineNum">     328 </span>            : #ifndef CPU_ONLY
<span class="lineNum">     329 </span>            :     diff = gpu_diff();
<span class="lineNum">     330 </span>            :     caffe_gpu_dot(count_, diff, diff, &amp;sumsq);
<span class="lineNum">     331 </span>            :     break;
<span class="lineNum">     332 </span>            : #else
<span class="lineNum">     333 </span><span class="lineNoCov">          0 :     NO_GPU;</span>
<span class="lineNum">     334 </span>            : #endif
<span class="lineNum">     335 </span>            :   case SyncedMemory::UNINITIALIZED:
<span class="lineNum">     336 </span>            :     return 0;
<span class="lineNum">     337 </span>            :   default:
<span class="lineNum">     338 </span><span class="lineNoCov">          0 :     LOG(FATAL) &lt;&lt; &quot;Unknown SyncedMemory head state: &quot; &lt;&lt; data_-&gt;head();</span>
<span class="lineNum">     339 </span>            :   }
<span class="lineNum">     340 </span><span class="lineNoCov">          0 :   return sumsq;</span>
<a name="341"><span class="lineNum">     341 </span>            : }</a>
<span class="lineNum">     342 </span>            : 
<span class="lineNum">     343 </span><span class="lineNoCov">          0 : template &lt;&gt; void Blob&lt;unsigned int&gt;::scale_data(unsigned int scale_factor) {</span>
<span class="lineNum">     344 </span><span class="lineNoCov">          0 :   NOT_IMPLEMENTED;</span>
<a name="345"><span class="lineNum">     345 </span>            : }</a>
<span class="lineNum">     346 </span>            : 
<span class="lineNum">     347 </span><span class="lineNoCov">          0 : template &lt;&gt; void Blob&lt;int&gt;::scale_data(int scale_factor) {</span>
<span class="lineNum">     348 </span><span class="lineNoCov">          0 :   NOT_IMPLEMENTED;</span>
<span class="lineNum">     349 </span>            : }
<a name="350"><span class="lineNum">     350 </span>            : </a>
<span class="lineNum">     351 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     352 </span><span class="lineNoCov">          0 : void Blob&lt;Dtype&gt;::scale_data(Dtype scale_factor) {</span>
<span class="lineNum">     353 </span>            :   Dtype* data;
<span class="lineNum">     354 </span><span class="lineNoCov">          0 :   if (!data_) { return; }</span>
<span class="lineNum">     355 </span><span class="lineNoCov">          0 :   switch (data_-&gt;head()) {</span>
<span class="lineNum">     356 </span>            :   case SyncedMemory::HEAD_AT_CPU:
<span class="lineNum">     357 </span><span class="lineNoCov">          0 :     data = mutable_cpu_data();</span>
<span class="lineNum">     358 </span><span class="lineNoCov">          0 :     caffe_scal(count_, scale_factor, data);</span>
<span class="lineNum">     359 </span><span class="lineNoCov">          0 :     return;</span>
<span class="lineNum">     360 </span>            :   case SyncedMemory::HEAD_AT_GPU:
<span class="lineNum">     361 </span>            :   case SyncedMemory::SYNCED:
<span class="lineNum">     362 </span>            : #ifndef CPU_ONLY
<span class="lineNum">     363 </span>            :     data = mutable_gpu_data();
<span class="lineNum">     364 </span>            :     caffe_gpu_scal(count_, scale_factor, data);
<span class="lineNum">     365 </span>            :     return;
<span class="lineNum">     366 </span>            : #else
<span class="lineNum">     367 </span><span class="lineNoCov">          0 :     NO_GPU;</span>
<span class="lineNum">     368 </span>            : #endif
<span class="lineNum">     369 </span>            :   case SyncedMemory::UNINITIALIZED:
<span class="lineNum">     370 </span>            :     return;
<span class="lineNum">     371 </span>            :   default:
<span class="lineNum">     372 </span><span class="lineNoCov">          0 :     LOG(FATAL) &lt;&lt; &quot;Unknown SyncedMemory head state: &quot; &lt;&lt; data_-&gt;head();</span>
<span class="lineNum">     373 </span>            :   }
<a name="374"><span class="lineNum">     374 </span>            : }</a>
<span class="lineNum">     375 </span>            : 
<span class="lineNum">     376 </span><span class="lineNoCov">          0 : template &lt;&gt; void Blob&lt;unsigned int&gt;::scale_diff(unsigned int scale_factor) {</span>
<span class="lineNum">     377 </span><span class="lineNoCov">          0 :   NOT_IMPLEMENTED;</span>
<a name="378"><span class="lineNum">     378 </span>            : }</a>
<span class="lineNum">     379 </span>            : 
<span class="lineNum">     380 </span><span class="lineNoCov">          0 : template &lt;&gt; void Blob&lt;int&gt;::scale_diff(int scale_factor) {</span>
<span class="lineNum">     381 </span><span class="lineNoCov">          0 :   NOT_IMPLEMENTED;</span>
<span class="lineNum">     382 </span>            : }
<a name="383"><span class="lineNum">     383 </span>            : </a>
<span class="lineNum">     384 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     385 </span><span class="lineNoCov">          0 : void Blob&lt;Dtype&gt;::scale_diff(Dtype scale_factor) {</span>
<span class="lineNum">     386 </span>            :   Dtype* diff;
<span class="lineNum">     387 </span><span class="lineNoCov">          0 :   if (!diff_) { return; }</span>
<span class="lineNum">     388 </span><span class="lineNoCov">          0 :   switch (diff_-&gt;head()) {</span>
<span class="lineNum">     389 </span>            :   case SyncedMemory::HEAD_AT_CPU:
<span class="lineNum">     390 </span><span class="lineNoCov">          0 :     diff = mutable_cpu_diff();</span>
<span class="lineNum">     391 </span><span class="lineNoCov">          0 :     caffe_scal(count_, scale_factor, diff);</span>
<span class="lineNum">     392 </span><span class="lineNoCov">          0 :     return;</span>
<span class="lineNum">     393 </span>            :   case SyncedMemory::HEAD_AT_GPU:
<span class="lineNum">     394 </span>            :   case SyncedMemory::SYNCED:
<span class="lineNum">     395 </span>            : #ifndef CPU_ONLY
<span class="lineNum">     396 </span>            :     diff = mutable_gpu_diff();
<span class="lineNum">     397 </span>            :     caffe_gpu_scal(count_, scale_factor, diff);
<span class="lineNum">     398 </span>            :     return;
<span class="lineNum">     399 </span>            : #else
<span class="lineNum">     400 </span><span class="lineNoCov">          0 :     NO_GPU;</span>
<span class="lineNum">     401 </span>            : #endif
<span class="lineNum">     402 </span>            :   case SyncedMemory::UNINITIALIZED:
<span class="lineNum">     403 </span>            :     return;
<span class="lineNum">     404 </span>            :   default:
<span class="lineNum">     405 </span><span class="lineNoCov">          0 :     LOG(FATAL) &lt;&lt; &quot;Unknown SyncedMemory head state: &quot; &lt;&lt; diff_-&gt;head();</span>
<span class="lineNum">     406 </span>            :   }
<span class="lineNum">     407 </span>            : }
<a name="408"><span class="lineNum">     408 </span>            : </a>
<span class="lineNum">     409 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     410 </span><span class="lineCov">         16 : bool Blob&lt;Dtype&gt;::ShapeEquals(const BlobProto&amp; other) {</span>
<span class="lineNum">     411 </span><span class="lineCov">         64 :   if (other.has_num() || other.has_channels() ||</span>
<span class="lineNum">     412 </span>            :       other.has_height() || other.has_width()) {
<span class="lineNum">     413 </span>            :     // Using deprecated 4D Blob dimensions --
<span class="lineNum">     414 </span>            :     // shape is (num, channels, height, width).
<span class="lineNum">     415 </span>            :     // Note: we do not use the normal Blob::num(), Blob::channels(), etc.
<span class="lineNum">     416 </span>            :     // methods as these index from the beginning of the blob shape, where legacy
<span class="lineNum">     417 </span>            :     // parameter blobs were indexed from the end of the blob shape (e.g., bias
<span class="lineNum">     418 </span>            :     // Blob shape (1 x 1 x 1 x N), IP layer weight Blob shape (1 x 1 x M x N)).
<span class="lineNum">     419 </span>            :     return shape_.size() &lt;= 4 &amp;&amp;
<span class="lineNum">     420 </span><span class="lineNoCov">          0 :            LegacyShape(-4) == other.num() &amp;&amp;</span>
<span class="lineNum">     421 </span><span class="lineNoCov">          0 :            LegacyShape(-3) == other.channels() &amp;&amp;</span>
<span class="lineNum">     422 </span><span class="lineNoCov">          0 :            LegacyShape(-2) == other.height() &amp;&amp;</span>
<span class="lineNum">     423 </span><span class="lineNoCov">          0 :            LegacyShape(-1) == other.width();</span>
<span class="lineNum">     424 </span>            :   }
<span class="lineNum">     425 </span><span class="lineCov">         32 :   vector&lt;int&gt; other_shape(other.shape().dim_size());</span>
<span class="lineNum">     426 </span><span class="lineCov">         80 :   for (int i = 0; i &lt; other.shape().dim_size(); ++i) {</span>
<span class="lineNum">     427 </span><span class="lineCov">         64 :     other_shape[i] = other.shape().dim(i);</span>
<span class="lineNum">     428 </span>            :   }
<span class="lineNum">     429 </span><span class="lineCov">         16 :   return shape_ == other_shape;</span>
<span class="lineNum">     430 </span>            : }
<a name="431"><span class="lineNum">     431 </span>            : </a>
<span class="lineNum">     432 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     433 </span><span class="lineNoCov">          0 : void Blob&lt;Dtype&gt;::CopyFrom(const Blob&amp; source, bool copy_diff, bool reshape) {</span>
<span class="lineNum">     434 </span><span class="lineNoCov">          0 :   if (source.count() != count_ || source.shape() != shape_) {</span>
<span class="lineNum">     435 </span><span class="lineNoCov">          0 :     if (reshape) {</span>
<span class="lineNum">     436 </span><span class="lineNoCov">          0 :       ReshapeLike(source);</span>
<span class="lineNum">     437 </span>            :     } else {
<span class="lineNum">     438 </span><span class="lineNoCov">          0 :       LOG(FATAL) &lt;&lt; &quot;Trying to copy blobs of different sizes.&quot;;</span>
<span class="lineNum">     439 </span>            :     }
<span class="lineNum">     440 </span>            :   }
<span class="lineNum">     441 </span><span class="lineNoCov">          0 :   switch (Caffe::mode()) {</span>
<span class="lineNum">     442 </span>            :   case Caffe::GPU:
<span class="lineNum">     443 </span><span class="lineNoCov">          0 :     if (copy_diff) {</span>
<span class="lineNum">     444 </span><span class="lineNoCov">          0 :       caffe_copy(count_, source.gpu_diff(),</span>
<span class="lineNum">     445 </span><span class="lineNoCov">          0 :           static_cast&lt;Dtype*&gt;(diff_-&gt;mutable_gpu_data()));</span>
<span class="lineNum">     446 </span>            :     } else {
<span class="lineNum">     447 </span><span class="lineNoCov">          0 :       caffe_copy(count_, source.gpu_data(),</span>
<span class="lineNum">     448 </span><span class="lineNoCov">          0 :           static_cast&lt;Dtype*&gt;(data_-&gt;mutable_gpu_data()));</span>
<span class="lineNum">     449 </span>            :     }
<span class="lineNum">     450 </span>            :     break;
<span class="lineNum">     451 </span>            :   case Caffe::CPU:
<span class="lineNum">     452 </span><span class="lineNoCov">          0 :     if (copy_diff) {</span>
<span class="lineNum">     453 </span><span class="lineNoCov">          0 :       caffe_copy(count_, source.cpu_diff(),</span>
<span class="lineNum">     454 </span><span class="lineNoCov">          0 :           static_cast&lt;Dtype*&gt;(diff_-&gt;mutable_cpu_data()));</span>
<span class="lineNum">     455 </span>            :     } else {
<span class="lineNum">     456 </span><span class="lineNoCov">          0 :       caffe_copy(count_, source.cpu_data(),</span>
<span class="lineNum">     457 </span><span class="lineNoCov">          0 :           static_cast&lt;Dtype*&gt;(data_-&gt;mutable_cpu_data()));</span>
<span class="lineNum">     458 </span>            :     }
<span class="lineNum">     459 </span>            :     break;
<span class="lineNum">     460 </span>            :   default:
<span class="lineNum">     461 </span><span class="lineNoCov">          0 :     LOG(FATAL) &lt;&lt; &quot;Unknown caffe mode.&quot;;</span>
<span class="lineNum">     462 </span>            :   }
<span class="lineNum">     463 </span><span class="lineNoCov">          0 : }</span>
<a name="464"><span class="lineNum">     464 </span>            : </a>
<span class="lineNum">     465 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     466 </span><span class="lineCov">          8 : void Blob&lt;Dtype&gt;::FromProto(const BlobProto&amp; proto, bool reshape) {</span>
<span class="lineNum">     467 </span><span class="lineCov">          8 :   if (reshape) {</span>
<span class="lineNum">     468 </span>            :     vector&lt;int&gt; shape;
<span class="lineNum">     469 </span><span class="lineNoCov">          0 :     if (proto.has_num() || proto.has_channels() ||</span>
<span class="lineNum">     470 </span>            :         proto.has_height() || proto.has_width()) {
<span class="lineNum">     471 </span>            :       // Using deprecated 4D Blob dimensions --
<span class="lineNum">     472 </span>            :       // shape is (num, channels, height, width).
<span class="lineNum">     473 </span><span class="lineNoCov">          0 :       shape.resize(4);</span>
<span class="lineNum">     474 </span><span class="lineNoCov">          0 :       shape[0] = proto.num();</span>
<span class="lineNum">     475 </span><span class="lineNoCov">          0 :       shape[1] = proto.channels();</span>
<span class="lineNum">     476 </span><span class="lineNoCov">          0 :       shape[2] = proto.height();</span>
<span class="lineNum">     477 </span><span class="lineNoCov">          0 :       shape[3] = proto.width();</span>
<span class="lineNum">     478 </span>            :     } else {
<span class="lineNum">     479 </span><span class="lineNoCov">          0 :       shape.resize(proto.shape().dim_size());</span>
<span class="lineNum">     480 </span><span class="lineNoCov">          0 :       for (int i = 0; i &lt; proto.shape().dim_size(); ++i) {</span>
<span class="lineNum">     481 </span><span class="lineNoCov">          0 :         shape[i] = proto.shape().dim(i);</span>
<span class="lineNum">     482 </span>            :       }
<span class="lineNum">     483 </span>            :     }
<span class="lineNum">     484 </span><span class="lineNoCov">          0 :     Reshape(shape);</span>
<span class="lineNum">     485 </span>            :   } else {
<span class="lineNum">     486 </span><span class="lineCov">         16 :     CHECK(ShapeEquals(proto)) &lt;&lt; &quot;shape mismatch (reshape not set)&quot;;</span>
<span class="lineNum">     487 </span>            :   }
<span class="lineNum">     488 </span>            :   // copy data
<span class="lineNum">     489 </span><span class="lineCov">          8 :   Dtype* data_vec = mutable_cpu_data();</span>
<span class="lineNum">     490 </span><span class="lineCov">          8 :   if (proto.double_data_size() &gt; 0) {</span>
<span class="lineNum">     491 </span><span class="lineNoCov">          0 :     CHECK_EQ(count_, proto.double_data_size());</span>
<span class="lineNum">     492 </span><span class="lineNoCov">          0 :     for (int i = 0; i &lt; count_; ++i) {</span>
<span class="lineNum">     493 </span><span class="lineNoCov">          0 :       data_vec[i] = proto.double_data(i);</span>
<span class="lineNum">     494 </span>            :     }
<span class="lineNum">     495 </span>            :   } else {
<span class="lineNum">     496 </span><span class="lineCov">         16 :     CHECK_EQ(count_, proto.data_size());</span>
<span class="lineNum">     497 </span><span class="lineCov">     862168 :     for (int i = 0; i &lt; count_; ++i) {</span>
<span class="lineNum">     498 </span><span class="lineCov">     862160 :       data_vec[i] = proto.data(i);</span>
<span class="lineNum">     499 </span>            :     }
<span class="lineNum">     500 </span>            :   }
<span class="lineNum">     501 </span><span class="lineCov">          8 :   if (proto.double_diff_size() &gt; 0) {</span>
<span class="lineNum">     502 </span><span class="lineNoCov">          0 :     CHECK_EQ(count_, proto.double_diff_size());</span>
<span class="lineNum">     503 </span><span class="lineNoCov">          0 :     Dtype* diff_vec = mutable_cpu_diff();</span>
<span class="lineNum">     504 </span><span class="lineNoCov">          0 :     for (int i = 0; i &lt; count_; ++i) {</span>
<span class="lineNum">     505 </span><span class="lineNoCov">          0 :       diff_vec[i] = proto.double_diff(i);</span>
<span class="lineNum">     506 </span>            :     }
<span class="lineNum">     507 </span><span class="lineCov">          8 :   } else if (proto.diff_size() &gt; 0) {</span>
<span class="lineNum">     508 </span><span class="lineNoCov">          0 :     CHECK_EQ(count_, proto.diff_size());</span>
<span class="lineNum">     509 </span><span class="lineNoCov">          0 :     Dtype* diff_vec = mutable_cpu_diff();</span>
<span class="lineNum">     510 </span><span class="lineNoCov">          0 :     for (int i = 0; i &lt; count_; ++i) {</span>
<span class="lineNum">     511 </span><span class="lineNoCov">          0 :       diff_vec[i] = proto.diff(i);</span>
<span class="lineNum">     512 </span>            :     }
<span class="lineNum">     513 </span>            :   }
<span class="lineNum">     514 </span><span class="lineCov">          8 : }</span>
<a name="515"><span class="lineNum">     515 </span>            : </a>
<span class="lineNum">     516 </span>            : template &lt;&gt;
<span class="lineNum">     517 </span><span class="lineNoCov">          0 : void Blob&lt;double&gt;::ToProto(BlobProto* proto, bool write_diff) const {</span>
<span class="lineNum">     518 </span>            :   proto-&gt;clear_shape();
<span class="lineNum">     519 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; shape_.size(); ++i) {</span>
<span class="lineNum">     520 </span><span class="lineNoCov">          0 :     proto-&gt;mutable_shape()-&gt;add_dim(shape_[i]);</span>
<span class="lineNum">     521 </span>            :   }
<span class="lineNum">     522 </span>            :   proto-&gt;clear_double_data();
<span class="lineNum">     523 </span>            :   proto-&gt;clear_double_diff();
<span class="lineNum">     524 </span><span class="lineNoCov">          0 :   const double* data_vec = cpu_data();</span>
<span class="lineNum">     525 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; count_; ++i) {</span>
<span class="lineNum">     526 </span><span class="lineNoCov">          0 :     proto-&gt;add_double_data(data_vec[i]);</span>
<span class="lineNum">     527 </span>            :   }
<span class="lineNum">     528 </span><span class="lineNoCov">          0 :   if (write_diff) {</span>
<span class="lineNum">     529 </span><span class="lineNoCov">          0 :     const double* diff_vec = cpu_diff();</span>
<span class="lineNum">     530 </span><span class="lineNoCov">          0 :     for (int i = 0; i &lt; count_; ++i) {</span>
<span class="lineNum">     531 </span><span class="lineNoCov">          0 :       proto-&gt;add_double_diff(diff_vec[i]);</span>
<span class="lineNum">     532 </span>            :     }
<span class="lineNum">     533 </span>            :   }
<span class="lineNum">     534 </span><span class="lineNoCov">          0 : }</span>
<a name="535"><span class="lineNum">     535 </span>            : </a>
<span class="lineNum">     536 </span>            : template &lt;&gt;
<span class="lineNum">     537 </span><span class="lineNoCov">          0 : void Blob&lt;float&gt;::ToProto(BlobProto* proto, bool write_diff) const {</span>
<span class="lineNum">     538 </span>            :   proto-&gt;clear_shape();
<span class="lineNum">     539 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; shape_.size(); ++i) {</span>
<span class="lineNum">     540 </span><span class="lineNoCov">          0 :     proto-&gt;mutable_shape()-&gt;add_dim(shape_[i]);</span>
<span class="lineNum">     541 </span>            :   }
<span class="lineNum">     542 </span>            :   proto-&gt;clear_data();
<span class="lineNum">     543 </span>            :   proto-&gt;clear_diff();
<span class="lineNum">     544 </span><span class="lineNoCov">          0 :   const float* data_vec = cpu_data();</span>
<span class="lineNum">     545 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; count_; ++i) {</span>
<span class="lineNum">     546 </span><span class="lineNoCov">          0 :     proto-&gt;add_data(data_vec[i]);</span>
<span class="lineNum">     547 </span>            :   }
<span class="lineNum">     548 </span><span class="lineNoCov">          0 :   if (write_diff) {</span>
<span class="lineNum">     549 </span><span class="lineNoCov">          0 :     const float* diff_vec = cpu_diff();</span>
<span class="lineNum">     550 </span><span class="lineNoCov">          0 :     for (int i = 0; i &lt; count_; ++i) {</span>
<span class="lineNum">     551 </span><span class="lineNoCov">          0 :       proto-&gt;add_diff(diff_vec[i]);</span>
<span class="lineNum">     552 </span>            :     }
<span class="lineNum">     553 </span>            :   }
<span class="lineNum">     554 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     555 </span>            : 
<span class="lineNum">     556 </span>            : INSTANTIATE_CLASS(Blob);
<span class="lineNum">     557 </span>            : template class Blob&lt;int&gt;;
<a name="558"><span class="lineNum">     558 </span>            : template class Blob&lt;unsigned int&gt;;</a>
<span class="lineNum">     559 </span>            : 
<span class="lineNum">     560 </span><span class="lineCov">          2 : }  // namespace caffe</span>
<span class="lineNum">     561 </span>            : 
</pre>
      </td>
    </tr>
  </table>
  <br>

  <table width="100%" border=0 cellspacing=0 cellpadding=0>
    <tr><td class="ruler"><img src="../../glass.png" width=3 height=3 alt=""></td></tr>
    <tr><td class="versionInfo">Generated by: <a href="http://ltp.sourceforge.net/coverage/lcov.php" target="_parent">LCOV version 1.12</a></td></tr>
  </table>
  <br>

</body>
</html>
