<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<!-- NOTE(review): Machine-generated LCOV coverage page (see footer: LCOV 1.12).
     The HTML 4.01 Transitional doctype is kept on purpose: the layout uses
     spacer images (glass.png) inside table cells, which render as intended in
     the "almost standards" mode this doctype triggers; the HTML5 doctype
     would switch to full standards mode and could shift the ruler rows. -->

<html lang="en">

<head>
  <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
  <title>LCOV - code analysis - src/caffe/layers/softmax_layer.cpp</title>
  <!-- Shared stylesheet at the report root; presumably emitted by genhtml
       alongside this page (path is three directory levels up). -->
  <link rel="stylesheet" type="text/css" href="../../../gcov.css">
</head>

<body>

  <!-- Summary header: current view breadcrumb, test name, report date, and
       the line/function coverage counters for this file.
       Fixes over generated output: all attribute values quoted (required for
       validation and consistent parsing) and the stray indentation inside the
       Legend cell normalized. Classes, text, and cell structure are unchanged,
       so gcov.css styling and the rendered layout are identical. -->
  <table width="100%" border="0" cellspacing="0" cellpadding="0">
    <tr><td class="title">LCOV - code coverage report</td></tr>
    <tr><td class="ruler"><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>

    <tr>
      <td width="100%">
        <table cellpadding="1" border="0" width="100%">
          <tr>
            <td width="10%" class="headerItem">Current view:</td>
            <td width="35%" class="headerValue"><a href="../../../index.html">top level</a> - <a href="index.html">src/caffe/layers</a> - softmax_layer.cpp<span style="font-size: 80%;"> (source / <a href="softmax_layer.cpp.func-sort-c.html">functions</a>)</span></td>
            <td width="5%"></td>
            <td width="15%"></td>
            <td width="10%" class="headerCovTableHead">Hit</td>
            <td width="10%" class="headerCovTableHead">Total</td>
            <td width="15%" class="headerCovTableHead">Coverage</td>
          </tr>
          <tr>
            <td class="headerItem">Test:</td>
            <td class="headerValue">code analysis</td>
            <td></td>
            <td class="headerItem">Lines:</td>
            <td class="headerCovTableEntry">34</td>
            <td class="headerCovTableEntry">51</td>
            <td class="headerCovTableEntryLo">66.7 %</td>
          </tr>
          <tr>
            <td class="headerItem">Date:</td>
            <td class="headerValue">2020-09-11 22:25:26</td>
            <td></td>
            <td class="headerItem">Functions:</td>
            <td class="headerCovTableEntry">3</td>
            <td class="headerCovTableEntry">11</td>
            <td class="headerCovTableEntryLo">27.3 %</td>
          </tr>
          <tr>
            <td class="headerItem">Legend:</td>
            <td class="headerValueLeg">Lines:
              <span class="coverLegendCov">hit</span>
              <span class="coverLegendNoCov">not hit</span>
            </td>
            <td></td>
          </tr>
          <tr><td><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>
        </table>
      </td>
    </tr>

    <tr><td class="ruler"><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>
  </table>

  <!-- Annotated source listing for src/caffe/layers/softmax_layer.cpp.
       Each source line is one row of the <pre class="source"> block: a
       <span class="lineNum"> with the line number, and for instrumented
       lines a <span class="lineCov"> (executed, with hit count) or
       <span class="lineNoCov"> (never executed, count 0). The <a name="N">
       anchors are jump targets — presumably linked from the functions page;
       verify against softmax_layer.cpp.func-sort-c.html.
       The listing is verbatim generated output: whitespace inside <pre> is
       significant, so do not reformat or hand-edit it. -->
  <table cellpadding=0 cellspacing=0 border=0>
    <tr>
      <td><br></td>
    </tr>
    <tr>
      <td>
<pre class="sourceHeading">          Line data    Source code</pre>
<pre class="source">
<a name="1"><span class="lineNum">       1 </span>            : #include &lt;algorithm&gt;</a>
<span class="lineNum">       2 </span>            : #include &lt;vector&gt;
<span class="lineNum">       3 </span>            : 
<span class="lineNum">       4 </span>            : #include &quot;caffe/layers/softmax_layer.hpp&quot;
<span class="lineNum">       5 </span>            : #include &quot;caffe/util/math_functions.hpp&quot;
<span class="lineNum">       6 </span>            : 
<span class="lineNum">       7 </span>            : namespace caffe {
<span class="lineNum">       8 </span>            : 
<span class="lineNum">       9 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      10 </span><span class="lineCov">        202 : void SoftmaxLayer&lt;Dtype&gt;::Reshape(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,</span>
<span class="lineNum">      11 </span>            :       const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">      12 </span><span class="lineCov">        202 :   softmax_axis_ =</span>
<span class="lineNum">      13 </span>            :       bottom[0]-&gt;CanonicalAxisIndex(this-&gt;layer_param_.softmax_param().axis());
<span class="lineNum">      14 </span><span class="lineCov">        202 :   top[0]-&gt;ReshapeLike(*bottom[0]);</span>
<span class="lineNum">      15 </span><span class="lineCov">        404 :   vector&lt;int&gt; mult_dims(1, bottom[0]-&gt;shape(softmax_axis_));</span>
<span class="lineNum">      16 </span><span class="lineCov">        202 :   sum_multiplier_.Reshape(mult_dims);</span>
<span class="lineNum">      17 </span><span class="lineCov">        202 :   Dtype* multiplier_data = sum_multiplier_.mutable_cpu_data();</span>
<span class="lineNum">      18 </span><span class="lineCov">        202 :   caffe_set(sum_multiplier_.count(), Dtype(1), multiplier_data);</span>
<span class="lineNum">      19 </span><span class="lineCov">        202 :   outer_num_ = bottom[0]-&gt;count(0, softmax_axis_);</span>
<span class="lineNum">      20 </span><span class="lineCov">        404 :   inner_num_ = bottom[0]-&gt;count(softmax_axis_ + 1);</span>
<span class="lineNum">      21 </span><span class="lineCov">        404 :   vector&lt;int&gt; scale_dims = bottom[0]-&gt;shape();</span>
<span class="lineNum">      22 </span><span class="lineCov">        404 :   scale_dims[softmax_axis_] = 1;</span>
<span class="lineNum">      23 </span><span class="lineCov">        202 :   scale_.Reshape(scale_dims);</span>
<span class="lineNum">      24 </span><span class="lineCov">        202 : }</span>
<span class="lineNum">      25 </span>            : 
<span class="lineNum">      26 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      27 </span><span class="lineCov">        100 : void SoftmaxLayer&lt;Dtype&gt;::Forward_cpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,</span>
<span class="lineNum">      28 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">      29 </span><span class="lineCov">        100 :   const Dtype* bottom_data = bottom[0]-&gt;cpu_data();</span>
<span class="lineNum">      30 </span><span class="lineCov">        100 :   Dtype* top_data = top[0]-&gt;mutable_cpu_data();</span>
<span class="lineNum">      31 </span><span class="lineCov">        100 :   Dtype* scale_data = scale_.mutable_cpu_data();</span>
<span class="lineNum">      32 </span><span class="lineCov">        100 :   int channels = bottom[0]-&gt;shape(softmax_axis_);</span>
<span class="lineNum">      33 </span><span class="lineCov">        100 :   int dim = bottom[0]-&gt;count() / outer_num_;</span>
<span class="lineNum">      34 </span><span class="lineCov">        100 :   caffe_copy(bottom[0]-&gt;count(), bottom_data, top_data);</span>
<span class="lineNum">      35 </span>            :   // We need to subtract the max to avoid numerical issues, compute the exp,
<span class="lineNum">      36 </span>            :   // and then normalize.
<span class="lineNum">      37 </span><span class="lineCov">      20100 :   for (int i = 0; i &lt; outer_num_; ++i) {</span>
<span class="lineNum">      38 </span>            :     // initialize scale_data to the first plane
<span class="lineNum">      39 </span><span class="lineCov">      10000 :     caffe_copy(inner_num_, bottom_data + i * dim, scale_data);</span>
<span class="lineNum">      40 </span><span class="lineCov">     210000 :     for (int j = 0; j &lt; channels; j++) {</span>
<span class="lineNum">      41 </span><span class="lineCov">     300000 :       for (int k = 0; k &lt; inner_num_; k++) {</span>
<span class="lineNum">      42 </span><span class="lineCov">     200000 :         scale_data[k] = std::max(scale_data[k],</span>
<span class="lineNum">      43 </span><span class="lineCov">     100000 :             bottom_data[i * dim + j * inner_num_ + k]);</span>
<span class="lineNum">      44 </span>            :       }
<span class="lineNum">      45 </span>            :     }
<span class="lineNum">      46 </span>            :     // subtraction
<span class="lineNum">      47 </span><span class="lineCov">      10000 :     caffe_cpu_gemm&lt;Dtype&gt;(CblasNoTrans, CblasNoTrans, channels, inner_num_,</span>
<span class="lineNum">      48 </span>            :         1, -1., sum_multiplier_.cpu_data(), scale_data, 1., top_data);
<span class="lineNum">      49 </span>            :     // exponentiation
<span class="lineNum">      50 </span><span class="lineCov">      10000 :     caffe_exp&lt;Dtype&gt;(dim, top_data, top_data);</span>
<span class="lineNum">      51 </span>            :     // sum after exp
<span class="lineNum">      52 </span><span class="lineCov">      10000 :     caffe_cpu_gemv&lt;Dtype&gt;(CblasTrans, channels, inner_num_, 1.,</span>
<span class="lineNum">      53 </span>            :         top_data, sum_multiplier_.cpu_data(), 0., scale_data);
<span class="lineNum">      54 </span>            :     // division
<span class="lineNum">      55 </span><span class="lineCov">     210000 :     for (int j = 0; j &lt; channels; j++) {</span>
<span class="lineNum">      56 </span><span class="lineCov">     100000 :       caffe_div(inner_num_, top_data, scale_data, top_data);</span>
<span class="lineNum">      57 </span><span class="lineCov">     100000 :       top_data += inner_num_;</span>
<span class="lineNum">      58 </span>            :     }
<span class="lineNum">      59 </span>            :   }
<span class="lineNum">      60 </span><span class="lineCov">        100 : }</span>
<span class="lineNum">      61 </span>            : 
<span class="lineNum">      62 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      63 </span><span class="lineNoCov">          0 : void SoftmaxLayer&lt;Dtype&gt;::Backward_cpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top,</span>
<span class="lineNum">      64 </span>            :     const vector&lt;bool&gt;&amp; propagate_down,
<span class="lineNum">      65 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom) {
<span class="lineNum">      66 </span><span class="lineNoCov">          0 :   const Dtype* top_diff = top[0]-&gt;cpu_diff();</span>
<span class="lineNum">      67 </span><span class="lineNoCov">          0 :   const Dtype* top_data = top[0]-&gt;cpu_data();</span>
<span class="lineNum">      68 </span><span class="lineNoCov">          0 :   Dtype* bottom_diff = bottom[0]-&gt;mutable_cpu_diff();</span>
<span class="lineNum">      69 </span><span class="lineNoCov">          0 :   Dtype* scale_data = scale_.mutable_cpu_data();</span>
<span class="lineNum">      70 </span><span class="lineNoCov">          0 :   int channels = top[0]-&gt;shape(softmax_axis_);</span>
<span class="lineNum">      71 </span><span class="lineNoCov">          0 :   int dim = top[0]-&gt;count() / outer_num_;</span>
<span class="lineNum">      72 </span><span class="lineNoCov">          0 :   caffe_copy(top[0]-&gt;count(), top_diff, bottom_diff);</span>
<span class="lineNum">      73 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; outer_num_; ++i) {</span>
<span class="lineNum">      74 </span>            :     // compute dot(top_diff, top_data) and subtract them from the bottom diff
<span class="lineNum">      75 </span><span class="lineNoCov">          0 :     for (int k = 0; k &lt; inner_num_; ++k) {</span>
<span class="lineNum">      76 </span><span class="lineNoCov">          0 :       scale_data[k] = caffe_cpu_strided_dot&lt;Dtype&gt;(channels,</span>
<span class="lineNum">      77 </span><span class="lineNoCov">          0 :           bottom_diff + i * dim + k, inner_num_,</span>
<span class="lineNum">      78 </span><span class="lineNoCov">          0 :           top_data + i * dim + k, inner_num_);</span>
<span class="lineNum">      79 </span>            :     }
<span class="lineNum">      80 </span>            :     // subtraction
<span class="lineNum">      81 </span><span class="lineNoCov">          0 :     caffe_cpu_gemm&lt;Dtype&gt;(CblasNoTrans, CblasNoTrans, channels, inner_num_, 1,</span>
<span class="lineNum">      82 </span>            :         -1., sum_multiplier_.cpu_data(), scale_data, 1., bottom_diff + i * dim);
<span class="lineNum">      83 </span>            :   }
<span class="lineNum">      84 </span>            :   // elementwise multiplication
<span class="lineNum">      85 </span><span class="lineNoCov">          0 :   caffe_mul(top[0]-&gt;count(), bottom_diff, top_data, bottom_diff);</span>
<span class="lineNum">      86 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">      87 </span>            : 
<a name="88"><span class="lineNum">      88 </span>            : </a>
<span class="lineNum">      89 </span>            : #ifdef CPU_ONLY
<span class="lineNum">      90 </span><span class="lineNoCov">          0 : STUB_GPU(SoftmaxLayer);</span>
<span class="lineNum">      91 </span>            : #endif
<span class="lineNum">      92 </span>            : 
<a name="93"><span class="lineNum">      93 </span>            : INSTANTIATE_CLASS(SoftmaxLayer);</a>
<span class="lineNum">      94 </span>            : 
<span class="lineNum">      95 </span><span class="lineCov">          2 : }  // namespace caffe</span>
</pre>
      </td>
    </tr>
  </table>
  <br>

  <!-- Report footer: closing ruler line plus LCOV version/attribution link.
       Fix over generated output: attribute values quoted (HTML best practice;
       identical rendering). -->
  <table width="100%" border="0" cellspacing="0" cellpadding="0">
    <tr><td class="ruler"><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>
    <tr><td class="versionInfo">Generated by: <a href="http://ltp.sourceforge.net/coverage/lcov.php" target="_parent">LCOV version 1.12</a></td></tr>
  </table>
  <br>

</body>
</html>
