<!DOCTYPE html>

<html lang="en">

<head>
  <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
  <title>LCOV - code analysis - src/caffe/layers/lrn_layer.cpp</title>
  <link rel="stylesheet" type="text/css" href="../../../gcov.css">
</head>

<body>

  <table width="100%" border=0 cellspacing=0 cellpadding=0>
    <tr><td class="title">LCOV - code coverage report</td></tr>
    <tr><td class="ruler"><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>

    <tr>
      <td width="100%">
        <table cellpadding=1 border=0 width="100%">
          <tr>
            <td width="10%" class="headerItem">Current view:</td>
            <td width="35%" class="headerValue"><a href="../../../index.html">top level</a> - <a href="index.html">src/caffe/layers</a> - lrn_layer.cpp<span style="font-size: 80%;"> (source / <a href="lrn_layer.cpp.func-sort-c.html">functions</a>)</span></td>
            <td width="5%"></td>
            <td width="15%"></td>
            <td width="10%" class="headerCovTableHead">Hit</td>
            <td width="10%" class="headerCovTableHead">Total</td>
            <td width="15%" class="headerCovTableHead">Coverage</td>
          </tr>
          <tr>
            <td class="headerItem">Test:</td>
            <td class="headerValue">code analysis</td>
            <td></td>
            <td class="headerItem">Lines:</td>
            <td class="headerCovTableEntry">1</td>
            <td class="headerCovTableEntry">161</td>
            <td class="headerCovTableEntryLo">0.6 %</td>
          </tr>
          <tr>
            <td class="headerItem">Date:</td>
            <td class="headerValue">2020-09-11 22:50:33</td>
            <td></td>
            <td class="headerItem">Functions:</td>
            <td class="headerCovTableEntry">1</td>
            <td class="headerCovTableEntry">25</td>
            <td class="headerCovTableEntryLo">4.0 %</td>
          </tr>
          <tr>
            <td class="headerItem">Legend:</td>
            <td class="headerValueLeg">            Lines:
            <span class="coverLegendCov">hit</span>
            <span class="coverLegendNoCov">not hit</span>
</td>
            <td></td>
          </tr>
          <tr><td><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>
        </table>
      </td>
    </tr>

    <tr><td class="ruler"><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>
  </table>

  <table cellpadding=0 cellspacing=0 border=0>
    <tr>
      <td><br></td>
    </tr>
    <tr>
      <td>
<pre class="sourceHeading">          Line data    Source code</pre>
<pre class="source">
<a name="1"><span class="lineNum">       1 </span>            : #include &lt;vector&gt;</a>
<span class="lineNum">       2 </span>            : 
<span class="lineNum">       3 </span>            : #include &quot;caffe/layers/lrn_layer.hpp&quot;
<span class="lineNum">       4 </span>            : #include &quot;caffe/util/math_functions.hpp&quot;
<span class="lineNum">       5 </span>            : 
<span class="lineNum">       6 </span>            : namespace caffe {
<a name="7"><span class="lineNum">       7 </span>            : </a>
<span class="lineNum">       8 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">       9 </span><span class="lineNoCov">          0 : void LRNLayer&lt;Dtype&gt;::LayerSetUp(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,</span>
<span class="lineNum">      10 </span>            :       const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">      11 </span><span class="lineNoCov">          0 :   size_ = this-&gt;layer_param_.lrn_param().local_size();</span>
<span class="lineNum">      12 </span><span class="lineNoCov">          0 :   CHECK_EQ(size_ % 2, 1) &lt;&lt; &quot;LRN only supports odd values for local_size&quot;;</span>
<span class="lineNum">      13 </span><span class="lineNoCov">          0 :   pre_pad_ = (size_ - 1) / 2;</span>
<span class="lineNum">      14 </span><span class="lineNoCov">          0 :   alpha_ = this-&gt;layer_param_.lrn_param().alpha();</span>
<span class="lineNum">      15 </span><span class="lineNoCov">          0 :   beta_ = this-&gt;layer_param_.lrn_param().beta();</span>
<span class="lineNum">      16 </span><span class="lineNoCov">          0 :   k_ = this-&gt;layer_param_.lrn_param().k();</span>
<span class="lineNum">      17 </span><span class="lineNoCov">          0 :   if (this-&gt;layer_param_.lrn_param().norm_region() ==</span>
<span class="lineNum">      18 </span>            :       LRNParameter_NormRegion_WITHIN_CHANNEL) {
<span class="lineNum">      19 </span>            :     // Set up split_layer_ to use inputs in the numerator and denominator.
<span class="lineNum">      20 </span>            :     split_top_vec_.clear();
<span class="lineNum">      21 </span><span class="lineNoCov">          0 :     split_top_vec_.push_back(&amp;product_input_);</span>
<span class="lineNum">      22 </span><span class="lineNoCov">          0 :     split_top_vec_.push_back(&amp;square_input_);</span>
<span class="lineNum">      23 </span><span class="lineNoCov">          0 :     LayerParameter split_param;</span>
<span class="lineNum">      24 </span><span class="lineNoCov">          0 :     split_layer_.reset(new SplitLayer&lt;Dtype&gt;(split_param));</span>
<span class="lineNum">      25 </span><span class="lineNoCov">          0 :     split_layer_-&gt;SetUp(bottom, split_top_vec_);</span>
<span class="lineNum">      26 </span>            :     // Set up square_layer_ to square the inputs.
<span class="lineNum">      27 </span>            :     square_bottom_vec_.clear();
<span class="lineNum">      28 </span>            :     square_top_vec_.clear();
<span class="lineNum">      29 </span><span class="lineNoCov">          0 :     square_bottom_vec_.push_back(&amp;square_input_);</span>
<span class="lineNum">      30 </span><span class="lineNoCov">          0 :     square_top_vec_.push_back(&amp;square_output_);</span>
<span class="lineNum">      31 </span><span class="lineNoCov">          0 :     LayerParameter square_param;</span>
<span class="lineNum">      32 </span><span class="lineNoCov">          0 :     square_param.mutable_power_param()-&gt;set_power(Dtype(2));</span>
<span class="lineNum">      33 </span><span class="lineNoCov">          0 :     square_layer_.reset(new PowerLayer&lt;Dtype&gt;(square_param));</span>
<span class="lineNum">      34 </span><span class="lineNoCov">          0 :     square_layer_-&gt;SetUp(square_bottom_vec_, square_top_vec_);</span>
<span class="lineNum">      35 </span>            :     // Set up pool_layer_ to sum over square neighborhoods of the input.
<span class="lineNum">      36 </span>            :     pool_top_vec_.clear();
<span class="lineNum">      37 </span><span class="lineNoCov">          0 :     pool_top_vec_.push_back(&amp;pool_output_);</span>
<span class="lineNum">      38 </span><span class="lineNoCov">          0 :     LayerParameter pool_param;</span>
<span class="lineNum">      39 </span><span class="lineNoCov">          0 :     pool_param.mutable_pooling_param()-&gt;set_pool(</span>
<span class="lineNum">      40 </span>            :         PoolingParameter_PoolMethod_AVE);
<span class="lineNum">      41 </span><span class="lineNoCov">          0 :     pool_param.mutable_pooling_param()-&gt;set_pad(pre_pad_);</span>
<span class="lineNum">      42 </span><span class="lineNoCov">          0 :     pool_param.mutable_pooling_param()-&gt;set_kernel_size(size_);</span>
<span class="lineNum">      43 </span><span class="lineNoCov">          0 :     pool_layer_.reset(new PoolingLayer&lt;Dtype&gt;(pool_param));</span>
<span class="lineNum">      44 </span><span class="lineNoCov">          0 :     pool_layer_-&gt;SetUp(square_top_vec_, pool_top_vec_);</span>
<span class="lineNum">      45 </span>            :     // Set up power_layer_ to compute (1 + alpha_/N^2 s)^-beta_, where s is
<span class="lineNum">      46 </span>            :     // the sum of a squared neighborhood (the output of pool_layer_).
<span class="lineNum">      47 </span>            :     power_top_vec_.clear();
<span class="lineNum">      48 </span><span class="lineNoCov">          0 :     power_top_vec_.push_back(&amp;power_output_);</span>
<span class="lineNum">      49 </span><span class="lineNoCov">          0 :     LayerParameter power_param;</span>
<span class="lineNum">      50 </span><span class="lineNoCov">          0 :     power_param.mutable_power_param()-&gt;set_power(-beta_);</span>
<span class="lineNum">      51 </span><span class="lineNoCov">          0 :     power_param.mutable_power_param()-&gt;set_scale(alpha_);</span>
<span class="lineNum">      52 </span><span class="lineNoCov">          0 :     power_param.mutable_power_param()-&gt;set_shift(Dtype(1));</span>
<span class="lineNum">      53 </span><span class="lineNoCov">          0 :     power_layer_.reset(new PowerLayer&lt;Dtype&gt;(power_param));</span>
<span class="lineNum">      54 </span><span class="lineNoCov">          0 :     power_layer_-&gt;SetUp(pool_top_vec_, power_top_vec_);</span>
<span class="lineNum">      55 </span>            :     // Set up a product_layer_ to compute outputs by multiplying inputs by the
<span class="lineNum">      56 </span>            :     // inverse denominator computed by the power layer.
<span class="lineNum">      57 </span>            :     product_bottom_vec_.clear();
<span class="lineNum">      58 </span><span class="lineNoCov">          0 :     product_bottom_vec_.push_back(&amp;product_input_);</span>
<span class="lineNum">      59 </span><span class="lineNoCov">          0 :     product_bottom_vec_.push_back(&amp;power_output_);</span>
<span class="lineNum">      60 </span><span class="lineNoCov">          0 :     LayerParameter product_param;</span>
<span class="lineNum">      61 </span><span class="lineNoCov">          0 :     EltwiseParameter* eltwise_param = product_param.mutable_eltwise_param();</span>
<span class="lineNum">      62 </span>            :     eltwise_param-&gt;set_operation(EltwiseParameter_EltwiseOp_PROD);
<span class="lineNum">      63 </span><span class="lineNoCov">          0 :     product_layer_.reset(new EltwiseLayer&lt;Dtype&gt;(product_param));</span>
<span class="lineNum">      64 </span><span class="lineNoCov">          0 :     product_layer_-&gt;SetUp(product_bottom_vec_, top);</span>
<span class="lineNum">      65 </span>            :   }
<span class="lineNum">      66 </span><span class="lineNoCov">          0 : }</span>
<a name="67"><span class="lineNum">      67 </span>            : </a>
<span class="lineNum">      68 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      69 </span><span class="lineNoCov">          0 : void LRNLayer&lt;Dtype&gt;::Reshape(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,</span>
<span class="lineNum">      70 </span>            :       const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">      71 </span><span class="lineNoCov">          0 :   CHECK_EQ(4, bottom[0]-&gt;num_axes()) &lt;&lt; &quot;Input must have 4 axes, &quot;</span>
<span class="lineNum">      72 </span>            :       &lt;&lt; &quot;corresponding to (num, channels, height, width)&quot;;
<span class="lineNum">      73 </span><span class="lineNoCov">          0 :   num_ = bottom[0]-&gt;num();</span>
<span class="lineNum">      74 </span><span class="lineNoCov">          0 :   channels_ = bottom[0]-&gt;channels();</span>
<span class="lineNum">      75 </span><span class="lineNoCov">          0 :   height_ = bottom[0]-&gt;height();</span>
<span class="lineNum">      76 </span><span class="lineNoCov">          0 :   width_ = bottom[0]-&gt;width();</span>
<span class="lineNum">      77 </span><span class="lineNoCov">          0 :   switch (this-&gt;layer_param_.lrn_param().norm_region()) {</span>
<span class="lineNum">      78 </span>            :   case LRNParameter_NormRegion_ACROSS_CHANNELS:
<span class="lineNum">      79 </span><span class="lineNoCov">          0 :     top[0]-&gt;Reshape(num_, channels_, height_, width_);</span>
<span class="lineNum">      80 </span><span class="lineNoCov">          0 :     scale_.Reshape(num_, channels_, height_, width_);</span>
<span class="lineNum">      81 </span><span class="lineNoCov">          0 :     break;</span>
<span class="lineNum">      82 </span>            :   case LRNParameter_NormRegion_WITHIN_CHANNEL:
<span class="lineNum">      83 </span><span class="lineNoCov">          0 :     split_layer_-&gt;Reshape(bottom, split_top_vec_);</span>
<span class="lineNum">      84 </span><span class="lineNoCov">          0 :     square_layer_-&gt;Reshape(square_bottom_vec_, square_top_vec_);</span>
<span class="lineNum">      85 </span><span class="lineNoCov">          0 :     pool_layer_-&gt;Reshape(square_top_vec_, pool_top_vec_);</span>
<span class="lineNum">      86 </span><span class="lineNoCov">          0 :     power_layer_-&gt;Reshape(pool_top_vec_, power_top_vec_);</span>
<span class="lineNum">      87 </span><span class="lineNoCov">          0 :     product_layer_-&gt;Reshape(product_bottom_vec_, top);</span>
<span class="lineNum">      88 </span><span class="lineNoCov">          0 :     break;</span>
<span class="lineNum">      89 </span>            :   }
<span class="lineNum">      90 </span><span class="lineNoCov">          0 : }</span>
<a name="91"><span class="lineNum">      91 </span>            : </a>
<span class="lineNum">      92 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      93 </span><span class="lineNoCov">          0 : void LRNLayer&lt;Dtype&gt;::Forward_cpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,</span>
<span class="lineNum">      94 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">      95 </span><span class="lineNoCov">          0 :   switch (this-&gt;layer_param_.lrn_param().norm_region()) {</span>
<span class="lineNum">      96 </span>            :   case LRNParameter_NormRegion_ACROSS_CHANNELS:
<span class="lineNum">      97 </span><span class="lineNoCov">          0 :     CrossChannelForward_cpu(bottom, top);</span>
<span class="lineNum">      98 </span><span class="lineNoCov">          0 :     break;</span>
<span class="lineNum">      99 </span>            :   case LRNParameter_NormRegion_WITHIN_CHANNEL:
<span class="lineNum">     100 </span><span class="lineNoCov">          0 :     WithinChannelForward(bottom, top);</span>
<span class="lineNum">     101 </span><span class="lineNoCov">          0 :     break;</span>
<span class="lineNum">     102 </span>            :   default:
<span class="lineNum">     103 </span><span class="lineNoCov">          0 :     LOG(FATAL) &lt;&lt; &quot;Unknown normalization region.&quot;;</span>
<span class="lineNum">     104 </span>            :   }
<span class="lineNum">     105 </span><span class="lineNoCov">          0 : }</span>
<a name="106"><span class="lineNum">     106 </span>            : </a>
<span class="lineNum">     107 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     108 </span><span class="lineNoCov">          0 : void LRNLayer&lt;Dtype&gt;::CrossChannelForward_cpu(</span>
<span class="lineNum">     109 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom, const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">     110 </span><span class="lineNoCov">          0 :   const Dtype* bottom_data = bottom[0]-&gt;cpu_data();</span>
<span class="lineNum">     111 </span><span class="lineNoCov">          0 :   Dtype* top_data = top[0]-&gt;mutable_cpu_data();</span>
<span class="lineNum">     112 </span><span class="lineNoCov">          0 :   Dtype* scale_data = scale_.mutable_cpu_data();</span>
<span class="lineNum">     113 </span>            :   // start with the constant value
<span class="lineNum">     114 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; scale_.count(); ++i) {</span>
<span class="lineNum">     115 </span><span class="lineNoCov">          0 :     scale_data[i] = k_;</span>
<span class="lineNum">     116 </span>            :   }
<span class="lineNum">     117 </span><span class="lineNoCov">          0 :   Blob&lt;Dtype&gt; padded_square(1, channels_ + size_ - 1, height_, width_);</span>
<span class="lineNum">     118 </span><span class="lineNoCov">          0 :   Dtype* padded_square_data = padded_square.mutable_cpu_data();</span>
<span class="lineNum">     119 </span><span class="lineNoCov">          0 :   caffe_set(padded_square.count(), Dtype(0), padded_square_data);</span>
<span class="lineNum">     120 </span><span class="lineNoCov">          0 :   Dtype alpha_over_size = alpha_ / size_;</span>
<span class="lineNum">     121 </span>            :   // go through the images
<span class="lineNum">     122 </span><span class="lineNoCov">          0 :   for (int n = 0; n &lt; num_; ++n) {</span>
<span class="lineNum">     123 </span>            :     // compute the padded square
<span class="lineNum">     124 </span><span class="lineNoCov">          0 :     caffe_sqr(channels_ * height_ * width_,</span>
<span class="lineNum">     125 </span><span class="lineNoCov">          0 :         bottom_data + bottom[0]-&gt;offset(n),</span>
<span class="lineNum">     126 </span><span class="lineNoCov">          0 :         padded_square_data + padded_square.offset(0, pre_pad_));</span>
<span class="lineNum">     127 </span>            :     // Create the first channel scale
<span class="lineNum">     128 </span><span class="lineNoCov">          0 :     for (int c = 0; c &lt; size_; ++c) {</span>
<span class="lineNum">     129 </span><span class="lineNoCov">          0 :       caffe_axpy&lt;Dtype&gt;(height_ * width_, alpha_over_size,</span>
<span class="lineNum">     130 </span><span class="lineNoCov">          0 :           padded_square_data + padded_square.offset(0, c),</span>
<span class="lineNum">     131 </span><span class="lineNoCov">          0 :           scale_data + scale_.offset(n, 0));</span>
<span class="lineNum">     132 </span>            :     }
<span class="lineNum">     133 </span><span class="lineNoCov">          0 :     for (int c = 1; c &lt; channels_; ++c) {</span>
<span class="lineNum">     134 </span>            :       // copy previous scale
<span class="lineNum">     135 </span><span class="lineNoCov">          0 :       caffe_copy&lt;Dtype&gt;(height_ * width_,</span>
<span class="lineNum">     136 </span><span class="lineNoCov">          0 :           scale_data + scale_.offset(n, c - 1),</span>
<span class="lineNum">     137 </span><span class="lineNoCov">          0 :           scale_data + scale_.offset(n, c));</span>
<span class="lineNum">     138 </span>            :       // add head
<span class="lineNum">     139 </span><span class="lineNoCov">          0 :       caffe_axpy&lt;Dtype&gt;(height_ * width_, alpha_over_size,</span>
<span class="lineNum">     140 </span><span class="lineNoCov">          0 :           padded_square_data + padded_square.offset(0, c + size_ - 1),</span>
<span class="lineNum">     141 </span><span class="lineNoCov">          0 :           scale_data + scale_.offset(n, c));</span>
<span class="lineNum">     142 </span>            :       // subtract tail
<span class="lineNum">     143 </span><span class="lineNoCov">          0 :       caffe_axpy&lt;Dtype&gt;(height_ * width_, -alpha_over_size,</span>
<span class="lineNum">     144 </span><span class="lineNoCov">          0 :           padded_square_data + padded_square.offset(0, c - 1),</span>
<span class="lineNum">     145 </span><span class="lineNoCov">          0 :           scale_data + scale_.offset(n, c));</span>
<span class="lineNum">     146 </span>            :     }
<span class="lineNum">     147 </span>            :   }
<span class="lineNum">     148 </span>            : 
<span class="lineNum">     149 </span>            :   // In the end, compute output
<span class="lineNum">     150 </span><span class="lineNoCov">          0 :   caffe_powx&lt;Dtype&gt;(scale_.count(), scale_data, -beta_, top_data);</span>
<span class="lineNum">     151 </span><span class="lineNoCov">          0 :   caffe_mul&lt;Dtype&gt;(scale_.count(), top_data, bottom_data, top_data);</span>
<span class="lineNum">     152 </span><span class="lineNoCov">          0 : }</span>
<a name="153"><span class="lineNum">     153 </span>            : </a>
<span class="lineNum">     154 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     155 </span><span class="lineNoCov">          0 : void LRNLayer&lt;Dtype&gt;::WithinChannelForward(</span>
<span class="lineNum">     156 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom, const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">     157 </span><span class="lineNoCov">          0 :   split_layer_-&gt;Forward(bottom, split_top_vec_);</span>
<span class="lineNum">     158 </span><span class="lineNoCov">          0 :   square_layer_-&gt;Forward(square_bottom_vec_, square_top_vec_);</span>
<span class="lineNum">     159 </span><span class="lineNoCov">          0 :   pool_layer_-&gt;Forward(square_top_vec_, pool_top_vec_);</span>
<span class="lineNum">     160 </span><span class="lineNoCov">          0 :   power_layer_-&gt;Forward(pool_top_vec_, power_top_vec_);</span>
<span class="lineNum">     161 </span><span class="lineNoCov">          0 :   product_layer_-&gt;Forward(product_bottom_vec_, top);</span>
<span class="lineNum">     162 </span><span class="lineNoCov">          0 : }</span>
<a name="163"><span class="lineNum">     163 </span>            : </a>
<span class="lineNum">     164 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     165 </span><span class="lineNoCov">          0 : void LRNLayer&lt;Dtype&gt;::Backward_cpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top,</span>
<span class="lineNum">     166 </span>            :     const vector&lt;bool&gt;&amp; propagate_down, const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom) {
<span class="lineNum">     167 </span><span class="lineNoCov">          0 :   switch (this-&gt;layer_param_.lrn_param().norm_region()) {</span>
<span class="lineNum">     168 </span>            :   case LRNParameter_NormRegion_ACROSS_CHANNELS:
<span class="lineNum">     169 </span><span class="lineNoCov">          0 :     CrossChannelBackward_cpu(top, propagate_down, bottom);</span>
<span class="lineNum">     170 </span><span class="lineNoCov">          0 :     break;</span>
<span class="lineNum">     171 </span>            :   case LRNParameter_NormRegion_WITHIN_CHANNEL:
<span class="lineNum">     172 </span><span class="lineNoCov">          0 :     WithinChannelBackward(top, propagate_down, bottom);</span>
<span class="lineNum">     173 </span><span class="lineNoCov">          0 :     break;</span>
<span class="lineNum">     174 </span>            :   default:
<span class="lineNum">     175 </span><span class="lineNoCov">          0 :     LOG(FATAL) &lt;&lt; &quot;Unknown normalization region.&quot;;</span>
<span class="lineNum">     176 </span>            :   }
<span class="lineNum">     177 </span><span class="lineNoCov">          0 : }</span>
<a name="178"><span class="lineNum">     178 </span>            : </a>
<span class="lineNum">     179 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     180 </span><span class="lineNoCov">          0 : void LRNLayer&lt;Dtype&gt;::CrossChannelBackward_cpu(</span>
<span class="lineNum">     181 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top, const vector&lt;bool&gt;&amp; propagate_down,
<span class="lineNum">     182 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom) {
<span class="lineNum">     183 </span><span class="lineNoCov">          0 :   const Dtype* top_diff = top[0]-&gt;cpu_diff();</span>
<span class="lineNum">     184 </span><span class="lineNoCov">          0 :   const Dtype* top_data = top[0]-&gt;cpu_data();</span>
<span class="lineNum">     185 </span><span class="lineNoCov">          0 :   const Dtype* bottom_data = bottom[0]-&gt;cpu_data();</span>
<span class="lineNum">     186 </span><span class="lineNoCov">          0 :   const Dtype* scale_data = scale_.cpu_data();</span>
<span class="lineNum">     187 </span><span class="lineNoCov">          0 :   Dtype* bottom_diff = bottom[0]-&gt;mutable_cpu_diff();</span>
<span class="lineNum">     188 </span><span class="lineNoCov">          0 :   Blob&lt;Dtype&gt; padded_ratio(1, channels_ + size_ - 1, height_, width_);</span>
<span class="lineNum">     189 </span><span class="lineNoCov">          0 :   Blob&lt;Dtype&gt; accum_ratio(1, 1, height_, width_);</span>
<span class="lineNum">     190 </span><span class="lineNoCov">          0 :   Dtype* padded_ratio_data = padded_ratio.mutable_cpu_data();</span>
<span class="lineNum">     191 </span><span class="lineNoCov">          0 :   Dtype* accum_ratio_data = accum_ratio.mutable_cpu_data();</span>
<span class="lineNum">     192 </span>            :   // We hack a little bit by using the diff() to store an additional result
<span class="lineNum">     193 </span><span class="lineNoCov">          0 :   Dtype* accum_ratio_times_bottom = accum_ratio.mutable_cpu_diff();</span>
<span class="lineNum">     194 </span><span class="lineNoCov">          0 :   caffe_set(padded_ratio.count(), Dtype(0), padded_ratio_data);</span>
<span class="lineNum">     195 </span><span class="lineNoCov">          0 :   Dtype cache_ratio_value = 2. * alpha_ * beta_ / size_;</span>
<span class="lineNum">     196 </span>            : 
<span class="lineNum">     197 </span><span class="lineNoCov">          0 :   caffe_powx&lt;Dtype&gt;(scale_.count(), scale_data, -beta_, bottom_diff);</span>
<span class="lineNum">     198 </span><span class="lineNoCov">          0 :   caffe_mul&lt;Dtype&gt;(scale_.count(), top_diff, bottom_diff, bottom_diff);</span>
<span class="lineNum">     199 </span>            : 
<span class="lineNum">     200 </span>            :   // go through individual data
<span class="lineNum">     201 </span><span class="lineNoCov">          0 :   int inverse_pre_pad = size_ - (size_ + 1) / 2;</span>
<span class="lineNum">     202 </span><span class="lineNoCov">          0 :   for (int n = 0; n &lt; num_; ++n) {</span>
<span class="lineNum">     203 </span><span class="lineNoCov">          0 :     int block_offset = scale_.offset(n);</span>
<span class="lineNum">     204 </span>            :     // first, compute diff_i * y_i / s_i
<span class="lineNum">     205 </span><span class="lineNoCov">          0 :     caffe_mul&lt;Dtype&gt;(channels_ * height_ * width_,</span>
<span class="lineNum">     206 </span>            :         top_diff + block_offset, top_data + block_offset,
<span class="lineNum">     207 </span><span class="lineNoCov">          0 :         padded_ratio_data + padded_ratio.offset(0, inverse_pre_pad));</span>
<span class="lineNum">     208 </span><span class="lineNoCov">          0 :     caffe_div&lt;Dtype&gt;(channels_ * height_ * width_,</span>
<span class="lineNum">     209 </span><span class="lineNoCov">          0 :         padded_ratio_data + padded_ratio.offset(0, inverse_pre_pad),</span>
<span class="lineNum">     210 </span>            :         scale_data + block_offset,
<span class="lineNum">     211 </span><span class="lineNoCov">          0 :         padded_ratio_data + padded_ratio.offset(0, inverse_pre_pad));</span>
<span class="lineNum">     212 </span>            :     // Now, compute the accumulated ratios and the bottom diff
<span class="lineNum">     213 </span><span class="lineNoCov">          0 :     caffe_set(accum_ratio.count(), Dtype(0), accum_ratio_data);</span>
<span class="lineNum">     214 </span><span class="lineNoCov">          0 :     for (int c = 0; c &lt; size_ - 1; ++c) {</span>
<span class="lineNum">     215 </span><span class="lineNoCov">          0 :       caffe_axpy&lt;Dtype&gt;(height_ * width_, 1.,</span>
<span class="lineNum">     216 </span><span class="lineNoCov">          0 :           padded_ratio_data + padded_ratio.offset(0, c), accum_ratio_data);</span>
<span class="lineNum">     217 </span>            :     }
<span class="lineNum">     218 </span><span class="lineNoCov">          0 :     for (int c = 0; c &lt; channels_; ++c) {</span>
<span class="lineNum">     219 </span><span class="lineNoCov">          0 :       caffe_axpy&lt;Dtype&gt;(height_ * width_, 1.,</span>
<span class="lineNum">     220 </span><span class="lineNoCov">          0 :           padded_ratio_data + padded_ratio.offset(0, c + size_ - 1),</span>
<span class="lineNum">     221 </span>            :           accum_ratio_data);
<span class="lineNum">     222 </span>            :       // compute bottom diff
<span class="lineNum">     223 </span><span class="lineNoCov">          0 :       caffe_mul&lt;Dtype&gt;(height_ * width_,</span>
<span class="lineNum">     224 </span><span class="lineNoCov">          0 :           bottom_data + top[0]-&gt;offset(n, c),</span>
<span class="lineNum">     225 </span>            :           accum_ratio_data, accum_ratio_times_bottom);
<span class="lineNum">     226 </span><span class="lineNoCov">          0 :       caffe_axpy&lt;Dtype&gt;(height_ * width_, -cache_ratio_value,</span>
<span class="lineNum">     227 </span><span class="lineNoCov">          0 :           accum_ratio_times_bottom, bottom_diff + top[0]-&gt;offset(n, c));</span>
<span class="lineNum">     228 </span><span class="lineNoCov">          0 :       caffe_axpy&lt;Dtype&gt;(height_ * width_, -1.,</span>
<span class="lineNum">     229 </span><span class="lineNoCov">          0 :           padded_ratio_data + padded_ratio.offset(0, c), accum_ratio_data);</span>
<span class="lineNum">     230 </span>            :     }
<span class="lineNum">     231 </span>            :   }
<span class="lineNum">     232 </span><span class="lineNoCov">          0 : }</span>
<a name="233"><span class="lineNum">     233 </span>            : </a>
<span class="lineNum">     234 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     235 </span><span class="lineNoCov">          0 : void LRNLayer&lt;Dtype&gt;::WithinChannelBackward(</span>
<span class="lineNum">     236 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top, const vector&lt;bool&gt;&amp; propagate_down,
<span class="lineNum">     237 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom) {
<span class="lineNum">     238 </span><span class="lineNoCov">          0 :   if (propagate_down[0]) {</span>
<span class="lineNum">     239 </span><span class="lineNoCov">          0 :     vector&lt;bool&gt; product_propagate_down(2, true);</span>
<span class="lineNum">     240 </span><span class="lineNoCov">          0 :     product_layer_-&gt;Backward(top, product_propagate_down, product_bottom_vec_);</span>
<span class="lineNum">     241 </span><span class="lineNoCov">          0 :     power_layer_-&gt;Backward(power_top_vec_, propagate_down, pool_top_vec_);</span>
<span class="lineNum">     242 </span><span class="lineNoCov">          0 :     pool_layer_-&gt;Backward(pool_top_vec_, propagate_down, square_top_vec_);</span>
<span class="lineNum">     243 </span><span class="lineNoCov">          0 :     square_layer_-&gt;Backward(square_top_vec_, propagate_down,</span>
<span class="lineNum">     244 </span>            :                             square_bottom_vec_);
<span class="lineNum">     245 </span><span class="lineNoCov">          0 :     split_layer_-&gt;Backward(split_top_vec_, propagate_down, bottom);</span>
<span class="lineNum">     246 </span>            :   }
<span class="lineNum">     247 </span><span class="lineNoCov">          0 : }</span>
<a name="248"><span class="lineNum">     248 </span>            : </a>
<a name="249"><span class="lineNum">     249 </span>            : #ifdef CPU_ONLY</a>
<a name="250"><span class="lineNum">     250 </span><span class="lineNoCov">          0 : STUB_GPU(LRNLayer);</span></a>
<span class="lineNum">     251 </span><span class="lineNoCov">          0 : STUB_GPU_FORWARD(LRNLayer, CrossChannelForward);</span>
<span class="lineNum">     252 </span><span class="lineNoCov">          0 : STUB_GPU_BACKWARD(LRNLayer, CrossChannelBackward);</span>
<span class="lineNum">     253 </span>            : #endif
<span class="lineNum">     254 </span>            : 
<a name="255"><span class="lineNum">     255 </span>            : INSTANTIATE_CLASS(LRNLayer);</a>
<span class="lineNum">     256 </span>            : 
<span class="lineNum">     257 </span><span class="lineCov">          2 : }  // namespace caffe</span>
</pre>
      </td>
    </tr>
  </table>
  <br>

  <table width="100%" border=0 cellspacing=0 cellpadding=0>
    <tr><td class="ruler"><img src="../../../glass.png" width=3 height=3 alt=""></td></tr>
    <tr><td class="versionInfo">Generated by: <a href="http://ltp.sourceforge.net/coverage/lcov.php" target="_parent">LCOV version 1.12</a></td></tr>
  </table>
  <br>

</body>
</html>
