<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">

<html lang="en">

<head>
  <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
  <title>LCOV - code analysis - src/caffe/layers/inner_product_layer.cpp</title>
  <link rel="stylesheet" type="text/css" href="../../../gcov.css">
</head>

<body>

  <table width="100%" border=0 cellspacing=0 cellpadding=0>
    <tr><td class="title">LCOV - code coverage report</td></tr>
    <tr><td class="ruler"><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>

    <tr>
      <td width="100%">
        <table cellpadding=1 border=0 width="100%">
          <tr>
            <td width="10%" class="headerItem">Current view:</td>
            <td width="35%" class="headerValue"><a href="../../../index.html">top level</a> - <a href="index.html">src/caffe/layers</a> - inner_product_layer.cpp<span style="font-size: 80%;"> (source / <a href="inner_product_layer.cpp.func-sort-c.html">functions</a>)</span></td>
            <td width="5%"></td>
            <td width="15%"></td>
            <td width="10%" class="headerCovTableHead">Hit</td>
            <td width="10%" class="headerCovTableHead">Total</td>
            <td width="15%" class="headerCovTableHead">Coverage</td>
          </tr>
          <tr>
            <td class="headerItem">Test:</td>
            <td class="headerValue">code analysis</td>
            <td></td>
            <td class="headerItem">Lines:</td>
            <td class="headerCovTableEntry">48</td>
            <td class="headerCovTableEntry">69</td>
            <td class="headerCovTableEntryLo">69.6 %</td>
          </tr>
          <tr>
            <td class="headerItem">Date:</td>
            <td class="headerValue">2020-09-11 22:25:26</td>
            <td></td>
            <td class="headerItem">Functions:</td>
            <td class="headerCovTableEntry">6</td>
            <td class="headerCovTableEntry">16</td>
            <td class="headerCovTableEntryLo">37.5 %</td>
          </tr>
          <tr>
            <td class="headerItem">Legend:</td>
            <td class="headerValueLeg">Lines:
              <span class="coverLegendCov">hit</span>
              <span class="coverLegendNoCov">not hit</span>
            </td>
            <td></td>
          </tr>
          <tr><td><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>
        </table>
      </td>
    </tr>

    <tr><td class="ruler"><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>
  </table>

  <table cellpadding=0 cellspacing=0 border=0>
    <tr>
      <td><br></td>
    </tr>
    <tr>
      <td>
<pre class="sourceHeading">          Line data    Source code</pre>
<pre class="source">
<a name="1"><span class="lineNum">       1 </span>            : #include &lt;vector&gt;</a>
<span class="lineNum">       2 </span>            : 
<span class="lineNum">       3 </span>            : #include &quot;caffe/filler.hpp&quot;
<span class="lineNum">       4 </span>            : #include &quot;caffe/layers/inner_product_layer.hpp&quot;
<span class="lineNum">       5 </span>            : #include &quot;caffe/util/math_functions.hpp&quot;
<span class="lineNum">       6 </span>            : 
<span class="lineNum">       7 </span>            : namespace caffe {
<span class="lineNum">       8 </span>            : 
<span class="lineNum">       9 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      10 </span><span class="lineCov">          2 : void InnerProductLayer&lt;Dtype&gt;::LayerSetUp(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,</span>
<span class="lineNum">      11 </span>            :       const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">      12 </span><span class="lineCov">          2 :   const int num_output = this-&gt;layer_param_.inner_product_param().num_output();</span>
<span class="lineNum">      13 </span><span class="lineCov">          2 :   bias_term_ = this-&gt;layer_param_.inner_product_param().bias_term();</span>
<span class="lineNum">      14 </span><span class="lineCov">          2 :   transpose_ = this-&gt;layer_param_.inner_product_param().transpose();</span>
<span class="lineNum">      15 </span><span class="lineCov">          2 :   N_ = num_output;</span>
<span class="lineNum">      16 </span>            :   const int axis = bottom[0]-&gt;CanonicalAxisIndex(
<span class="lineNum">      17 </span><span class="lineCov">          2 :       this-&gt;layer_param_.inner_product_param().axis());</span>
<span class="lineNum">      18 </span>            :   // Dimensions starting from &quot;axis&quot; are &quot;flattened&quot; into a single
<span class="lineNum">      19 </span>            :   // length K_ vector. For example, if bottom[0]'s shape is (N, C, H, W),
<span class="lineNum">      20 </span>            :   // and axis == 1, N inner products with dimension CHW are performed.
<span class="lineNum">      21 </span><span class="lineCov">          4 :   K_ = bottom[0]-&gt;count(axis);</span>
<span class="lineNum">      22 </span>            :   // Check if we need to set up the weights
<span class="lineNum">      23 </span><span class="lineCov">          2 :   if (this-&gt;blobs_.size() &gt; 0) {</span>
<span class="lineNum">      24 </span><span class="lineNoCov">          0 :     LOG(INFO) &lt;&lt; &quot;Skipping parameter initialization&quot;;</span>
<span class="lineNum">      25 </span>            :   } else {
<span class="lineNum">      26 </span><span class="lineCov">          2 :     if (bias_term_) {</span>
<span class="lineNum">      27 </span><span class="lineCov">          4 :       this-&gt;blobs_.resize(2);</span>
<span class="lineNum">      28 </span>            :     } else {
<span class="lineNum">      29 </span><span class="lineNoCov">          0 :       this-&gt;blobs_.resize(1);</span>
<span class="lineNum">      30 </span>            :     }
<span class="lineNum">      31 </span>            :     // Initialize the weights
<span class="lineNum">      32 </span><span class="lineCov">          2 :     vector&lt;int&gt; weight_shape(2);</span>
<span class="lineNum">      33 </span><span class="lineCov">          2 :     if (transpose_) {</span>
<span class="lineNum">      34 </span><span class="lineNoCov">          0 :       weight_shape[0] = K_;</span>
<span class="lineNum">      35 </span><span class="lineNoCov">          0 :       weight_shape[1] = N_;</span>
<span class="lineNum">      36 </span>            :     } else {
<span class="lineNum">      37 </span><span class="lineCov">          2 :       weight_shape[0] = N_;</span>
<span class="lineNum">      38 </span><span class="lineCov">          2 :       weight_shape[1] = K_;</span>
<span class="lineNum">      39 </span>            :     }
<span class="lineNum">      40 </span><span class="lineCov">          2 :     this-&gt;blobs_[0].reset(new Blob&lt;Dtype&gt;(weight_shape));</span>
<span class="lineNum">      41 </span>            :     // fill the weights
<span class="lineNum">      42 </span>            :     shared_ptr&lt;Filler&lt;Dtype&gt; &gt; weight_filler(GetFiller&lt;Dtype&gt;(
<span class="lineNum">      43 </span><span class="lineCov">          2 :         this-&gt;layer_param_.inner_product_param().weight_filler()));</span>
<span class="lineNum">      44 </span><span class="lineCov">          2 :     weight_filler-&gt;Fill(this-&gt;blobs_[0].get());</span>
<span class="lineNum">      45 </span>            :     // If necessary, initialize and fill the bias term
<span class="lineNum">      46 </span><span class="lineCov">          2 :     if (bias_term_) {</span>
<span class="lineNum">      47 </span><span class="lineCov">          2 :       vector&lt;int&gt; bias_shape(1, N_);</span>
<span class="lineNum">      48 </span><span class="lineCov">          4 :       this-&gt;blobs_[1].reset(new Blob&lt;Dtype&gt;(bias_shape));</span>
<span class="lineNum">      49 </span>            :       shared_ptr&lt;Filler&lt;Dtype&gt; &gt; bias_filler(GetFiller&lt;Dtype&gt;(
<span class="lineNum">      50 </span><span class="lineCov">          2 :           this-&gt;layer_param_.inner_product_param().bias_filler()));</span>
<span class="lineNum">      51 </span><span class="lineCov">          2 :       bias_filler-&gt;Fill(this-&gt;blobs_[1].get());</span>
<span class="lineNum">      52 </span>            :     }
<span class="lineNum">      53 </span>            :   }  // parameter initialization
<span class="lineNum">      54 </span><span class="lineCov">          2 :   this-&gt;param_propagate_down_.resize(this-&gt;blobs_.size(), true);</span>
<span class="lineNum">      55 </span><span class="lineCov">          2 : }</span>
<span class="lineNum">      56 </span>            : 
<span class="lineNum">      57 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      58 </span><span class="lineCov">        202 : void InnerProductLayer&lt;Dtype&gt;::Reshape(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,</span>
<span class="lineNum">      59 </span>            :       const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">      60 </span>            :   // Figure out the dimensions
<span class="lineNum">      61 </span>            :   const int axis = bottom[0]-&gt;CanonicalAxisIndex(
<span class="lineNum">      62 </span><span class="lineCov">        202 :       this-&gt;layer_param_.inner_product_param().axis());</span>
<span class="lineNum">      63 </span><span class="lineCov">        202 :   const int new_K = bottom[0]-&gt;count(axis);</span>
<span class="lineNum">      64 </span><span class="lineCov">        404 :   CHECK_EQ(K_, new_K)</span>
<span class="lineNum">      65 </span>            :       &lt;&lt; &quot;Input size incompatible with inner product parameters.&quot;;
<span class="lineNum">      66 </span>            :   // The first &quot;axis&quot; dimensions are independent inner products; the total
<span class="lineNum">      67 </span>            :   // number of these is M_, the product over these dimensions.
<span class="lineNum">      68 </span><span class="lineCov">        202 :   M_ = bottom[0]-&gt;count(0, axis);</span>
<span class="lineNum">      69 </span>            :   // The top shape will be the bottom shape with the flattened axes dropped,
<span class="lineNum">      70 </span>            :   // and replaced by a single axis with dimension num_output (N_).
<span class="lineNum">      71 </span><span class="lineCov">        404 :   vector&lt;int&gt; top_shape = bottom[0]-&gt;shape();</span>
<span class="lineNum">      72 </span><span class="lineCov">        202 :   top_shape.resize(axis + 1);</span>
<span class="lineNum">      73 </span><span class="lineCov">        404 :   top_shape[axis] = N_;</span>
<span class="lineNum">      74 </span><span class="lineCov">        202 :   top[0]-&gt;Reshape(top_shape);</span>
<span class="lineNum">      75 </span>            :   // Set up the bias multiplier
<span class="lineNum">      76 </span><span class="lineCov">        202 :   if (bias_term_) {</span>
<span class="lineNum">      77 </span><span class="lineCov">        202 :     vector&lt;int&gt; bias_shape(1, M_);</span>
<span class="lineNum">      78 </span><span class="lineCov">        202 :     bias_multiplier_.Reshape(bias_shape);</span>
<span class="lineNum">      79 </span><span class="lineCov">        202 :     caffe_set(M_, Dtype(1), bias_multiplier_.mutable_cpu_data());</span>
<span class="lineNum">      80 </span>            :   }
<span class="lineNum">      81 </span><span class="lineCov">        202 : }</span>
<a name="82"><span class="lineNum">      82 </span>            : </a>
<span class="lineNum">      83 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      84 </span><span class="lineCov">        200 : void InnerProductLayer&lt;Dtype&gt;::Forward_cpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,</span>
<span class="lineNum">      85 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">      86 </span><span class="lineCov">        200 :   const Dtype* bottom_data = bottom[0]-&gt;cpu_data();</span>
<span class="lineNum">      87 </span><span class="lineCov">        200 :   Dtype* top_data = top[0]-&gt;mutable_cpu_data();</span>
<span class="lineNum">      88 </span><span class="lineCov">        200 :   const Dtype* weight = this-&gt;blobs_[0]-&gt;cpu_data();</span>
<span class="lineNum">      89 </span><span class="lineCov">        200 :   caffe_cpu_gemm&lt;Dtype&gt;(CblasNoTrans, transpose_ ? CblasNoTrans : CblasTrans,</span>
<span class="lineNum">      90 </span>            :       M_, N_, K_, (Dtype)1.,
<span class="lineNum">      91 </span>            :       bottom_data, weight, (Dtype)0., top_data);
<span class="lineNum">      92 </span><span class="lineCov">        200 :   if (bias_term_) {</span>
<span class="lineNum">      93 </span><span class="lineCov">        200 :     caffe_cpu_gemm&lt;Dtype&gt;(CblasNoTrans, CblasNoTrans, M_, N_, 1, (Dtype)1.,</span>
<span class="lineNum">      94 </span>            :         bias_multiplier_.cpu_data(),
<span class="lineNum">      95 </span>            :         this-&gt;blobs_[1]-&gt;cpu_data(), (Dtype)1., top_data);
<span class="lineNum">      96 </span>            :   }
<span class="lineNum">      97 </span><span class="lineCov">        200 : }</span>
<a name="98"><span class="lineNum">      98 </span>            : </a>
<span class="lineNum">      99 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     100 </span><span class="lineNoCov">          0 : void InnerProductLayer&lt;Dtype&gt;::Backward_cpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top,</span>
<span class="lineNum">     101 </span>            :     const vector&lt;bool&gt;&amp; propagate_down,
<span class="lineNum">     102 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom) {
<span class="lineNum">     103 </span><span class="lineNoCov">          0 :   if (this-&gt;param_propagate_down_[0]) {</span>
<span class="lineNum">     104 </span><span class="lineNoCov">          0 :     const Dtype* top_diff = top[0]-&gt;cpu_diff();</span>
<span class="lineNum">     105 </span><span class="lineNoCov">          0 :     const Dtype* bottom_data = bottom[0]-&gt;cpu_data();</span>
<span class="lineNum">     106 </span>            :     // Gradient with respect to weight
<span class="lineNum">     107 </span><span class="lineNoCov">          0 :     if (transpose_) {</span>
<span class="lineNum">     108 </span><span class="lineNoCov">          0 :       caffe_cpu_gemm&lt;Dtype&gt;(CblasTrans, CblasNoTrans,</span>
<span class="lineNum">     109 </span>            :           K_, N_, M_,
<span class="lineNum">     110 </span>            :           (Dtype)1., bottom_data, top_diff,
<span class="lineNum">     111 </span>            :           (Dtype)1., this-&gt;blobs_[0]-&gt;mutable_cpu_diff());
<span class="lineNum">     112 </span>            :     } else {
<span class="lineNum">     113 </span><span class="lineNoCov">          0 :       caffe_cpu_gemm&lt;Dtype&gt;(CblasTrans, CblasNoTrans,</span>
<span class="lineNum">     114 </span>            :           N_, K_, M_,
<span class="lineNum">     115 </span>            :           (Dtype)1., top_diff, bottom_data,
<span class="lineNum">     116 </span>            :           (Dtype)1., this-&gt;blobs_[0]-&gt;mutable_cpu_diff());
<span class="lineNum">     117 </span>            :     }
<span class="lineNum">     118 </span>            :   }
<span class="lineNum">     119 </span><span class="lineNoCov">          0 :   if (bias_term_ &amp;&amp; this-&gt;param_propagate_down_[1]) {</span>
<span class="lineNum">     120 </span><span class="lineNoCov">          0 :     const Dtype* top_diff = top[0]-&gt;cpu_diff();</span>
<span class="lineNum">     121 </span>            :     // Gradient with respect to bias
<span class="lineNum">     122 </span><span class="lineNoCov">          0 :     caffe_cpu_gemv&lt;Dtype&gt;(CblasTrans, M_, N_, (Dtype)1., top_diff,</span>
<span class="lineNum">     123 </span>            :         bias_multiplier_.cpu_data(), (Dtype)1.,
<span class="lineNum">     124 </span>            :         this-&gt;blobs_[1]-&gt;mutable_cpu_diff());
<span class="lineNum">     125 </span>            :   }
<span class="lineNum">     126 </span><span class="lineNoCov">          0 :   if (propagate_down[0]) {</span>
<span class="lineNum">     127 </span><span class="lineNoCov">          0 :     const Dtype* top_diff = top[0]-&gt;cpu_diff();</span>
<span class="lineNum">     128 </span>            :     // Gradient with respect to bottom data
<span class="lineNum">     129 </span><span class="lineNoCov">          0 :     if (transpose_) {</span>
<span class="lineNum">     130 </span><span class="lineNoCov">          0 :       caffe_cpu_gemm&lt;Dtype&gt;(CblasNoTrans, CblasTrans,</span>
<span class="lineNum">     131 </span>            :           M_, K_, N_,
<span class="lineNum">     132 </span>            :           (Dtype)1., top_diff, this-&gt;blobs_[0]-&gt;cpu_data(),
<span class="lineNum">     133 </span>            :           (Dtype)0., bottom[0]-&gt;mutable_cpu_diff());
<span class="lineNum">     134 </span>            :     } else {
<span class="lineNum">     135 </span><span class="lineNoCov">          0 :       caffe_cpu_gemm&lt;Dtype&gt;(CblasNoTrans, CblasNoTrans,</span>
<span class="lineNum">     136 </span>            :           M_, K_, N_,
<span class="lineNum">     137 </span>            :           (Dtype)1., top_diff, this-&gt;blobs_[0]-&gt;cpu_data(),
<span class="lineNum">     138 </span>            :           (Dtype)0., bottom[0]-&gt;mutable_cpu_diff());
<span class="lineNum">     139 </span>            :     }
<span class="lineNum">     140 </span>            :   }
<span class="lineNum">     141 </span><span class="lineNoCov">          0 : }</span>
<a name="142"><span class="lineNum">     142 </span>            : </a>
<span class="lineNum">     143 </span>            : #ifdef CPU_ONLY
<span class="lineNum">     144 </span><span class="lineNoCov">          0 : STUB_GPU(InnerProductLayer);</span>
<span class="lineNum">     145 </span>            : #endif
<span class="lineNum">     146 </span>            : 
<span class="lineNum">     147 </span>            : INSTANTIATE_CLASS(InnerProductLayer);
<a name="148"><span class="lineNum">     148 </span><span class="lineCov">          7 : REGISTER_LAYER_CLASS(InnerProduct);</span></a>
<span class="lineNum">     149 </span>            : 
<span class="lineNum">     150 </span><span class="lineCov">          3 : }  // namespace caffe</span>
</pre>
      </td>
    </tr>
  </table>
  <br>

  <table width="100%" border=0 cellspacing=0 cellpadding=0>
    <tr><td class="ruler"><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>
    <tr><td class="versionInfo">Generated by: <a href="https://ltp.sourceforge.net/coverage/lcov.php" target="_parent">LCOV version 1.12</a></td></tr>
  </table>
  <br>

</body>
</html>
