<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">

<html lang="en">

<head>
  <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
  <title>LCOV - code analysis - src/caffe/layers/infogain_loss_layer.cpp</title>
  <link rel="stylesheet" type="text/css" href="../../../gcov.css">
</head>

<body>

  <table width="100%" border="0" cellspacing="0" cellpadding="0">
    <tr><td class="title">LCOV - code coverage report</td></tr>
    <tr><td class="ruler"><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>

    <tr>
      <td width="100%">
        <table cellpadding="1" border="0" width="100%">
          <tr>
            <td width="10%" class="headerItem">Current view:</td>
            <td width="35%" class="headerValue"><a href="../../../index.html">top level</a> - <a href="index.html">src/caffe/layers</a> - infogain_loss_layer.cpp<span style="font-size: 80%;"> (source / <a href="infogain_loss_layer.cpp.func-sort-c.html">functions</a>)</span></td>
            <td width="5%"></td>
            <td width="15%"></td>
            <td width="10%" class="headerCovTableHead">Hit</td>
            <td width="10%" class="headerCovTableHead">Total</td>
            <td width="15%" class="headerCovTableHead">Coverage</td>
          </tr>
          <tr>
            <td class="headerItem">Test:</td>
            <td class="headerValue">code analysis</td>
            <td></td>
            <td class="headerItem">Lines:</td>
            <td class="headerCovTableEntry">2</td>
            <td class="headerCovTableEntry">112</td>
            <td class="headerCovTableEntryLo">1.8 %</td>
          </tr>
          <tr>
            <td class="headerItem">Date:</td>
            <td class="headerValue">2020-09-11 22:50:33</td>
            <td></td>
            <td class="headerItem">Functions:</td>
            <td class="headerCovTableEntry">2</td>
            <td class="headerCovTableEntry">16</td>
            <td class="headerCovTableEntryLo">12.5 %</td>
          </tr>
          <tr>
            <td class="headerItem">Legend:</td>
            <td class="headerValueLeg">            Lines:
            <span class="coverLegendCov">hit</span>
            <span class="coverLegendNoCov">not hit</span>
</td>
            <td></td>
          </tr>
          <tr><td><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>
        </table>
      </td>
    </tr>

    <tr><td class="ruler"><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>
  </table>

  <table cellpadding="0" cellspacing="0" border="0">
    <tr>
      <td><br></td>
    </tr>
    <tr>
      <td>
<pre class="sourceHeading">          Line data    Source code</pre>
<pre class="source">
<a name="1"><span class="lineNum">       1 </span>            : #include &lt;algorithm&gt;</a>
<span class="lineNum">       2 </span>            : #include &lt;cmath&gt;
<span class="lineNum">       3 </span>            : #include &lt;vector&gt;
<span class="lineNum">       4 </span>            : 
<span class="lineNum">       5 </span>            : #include &quot;caffe/layers/infogain_loss_layer.hpp&quot;
<span class="lineNum">       6 </span>            : #include &quot;caffe/util/io.hpp&quot;  // for bolb reading of matrix H
<span class="lineNum">       7 </span>            : #include &quot;caffe/util/math_functions.hpp&quot;
<span class="lineNum">       8 </span>            : 
<span class="lineNum">       9 </span>            : namespace caffe {
<span class="lineNum">      10 </span>            : 
<span class="lineNum">      11 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      12 </span><span class="lineNoCov">          0 : void InfogainLossLayer&lt;Dtype&gt;::LayerSetUp(</span>
<span class="lineNum">      13 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom, const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">      14 </span><span class="lineNoCov">          0 :   LossLayer&lt;Dtype&gt;::LayerSetUp(bottom, top);</span>
<span class="lineNum">      15 </span>            :   // internal softmax layer
<span class="lineNum">      16 </span><span class="lineNoCov">          0 :   LayerParameter softmax_layer_param(this-&gt;layer_param_);</span>
<span class="lineNum">      17 </span><span class="lineNoCov">          0 :   SoftmaxParameter* softmax_param = softmax_layer_param.mutable_softmax_param();</span>
<span class="lineNum">      18 </span>            :   softmax_param-&gt;set_axis(this-&gt;layer_param_.infogain_loss_param().axis());
<span class="lineNum">      19 </span><span class="lineNoCov">          0 :   softmax_layer_param.set_type(&quot;Softmax&quot;);</span>
<span class="lineNum">      20 </span>            :   softmax_layer_param.clear_loss_weight();
<span class="lineNum">      21 </span>            :   softmax_layer_param.add_loss_weight(1);
<span class="lineNum">      22 </span><span class="lineNoCov">          0 :   softmax_layer_ = LayerRegistry&lt;Dtype&gt;::CreateLayer(softmax_layer_param);</span>
<span class="lineNum">      23 </span>            :   softmax_bottom_vec_.clear();
<span class="lineNum">      24 </span><span class="lineNoCov">          0 :   softmax_bottom_vec_.push_back(bottom[0]);</span>
<span class="lineNum">      25 </span>            :   softmax_top_vec_.clear();
<span class="lineNum">      26 </span><span class="lineNoCov">          0 :   softmax_top_vec_.push_back(&amp;prob_);</span>
<span class="lineNum">      27 </span><span class="lineNoCov">          0 :   softmax_layer_-&gt;SetUp(softmax_bottom_vec_, softmax_top_vec_);</span>
<span class="lineNum">      28 </span>            : 
<span class="lineNum">      29 </span>            :   // ignore label
<span class="lineNum">      30 </span><span class="lineNoCov">          0 :   has_ignore_label_ =</span>
<span class="lineNum">      31 </span>            :     this-&gt;layer_param_.loss_param().has_ignore_label();
<span class="lineNum">      32 </span><span class="lineNoCov">          0 :   if (has_ignore_label_) {</span>
<span class="lineNum">      33 </span><span class="lineNoCov">          0 :     ignore_label_ = this-&gt;layer_param_.loss_param().ignore_label();</span>
<span class="lineNum">      34 </span>            :   }
<span class="lineNum">      35 </span>            :   // normalization
<span class="lineNum">      36 </span><span class="lineNoCov">          0 :   CHECK(!this-&gt;layer_param_.loss_param().has_normalize())</span>
<span class="lineNum">      37 </span>            :     &lt;&lt; &quot;normalize is deprecated. use \&quot;normalization\&quot;&quot;;
<span class="lineNum">      38 </span><span class="lineNoCov">          0 :   normalization_ = this-&gt;layer_param_.loss_param().normalization();</span>
<span class="lineNum">      39 </span>            :   // matrix H
<span class="lineNum">      40 </span><span class="lineNoCov">          0 :   if (bottom.size() &lt; 3) {</span>
<span class="lineNum">      41 </span><span class="lineNoCov">          0 :     CHECK(this-&gt;layer_param_.infogain_loss_param().has_source())</span>
<span class="lineNum">      42 </span>            :         &lt;&lt; &quot;Infogain matrix source must be specified.&quot;;
<span class="lineNum">      43 </span><span class="lineNoCov">          0 :     BlobProto blob_proto;</span>
<span class="lineNum">      44 </span>            :     ReadProtoFromBinaryFile(
<span class="lineNum">      45 </span>            :       this-&gt;layer_param_.infogain_loss_param().source(), &amp;blob_proto);
<span class="lineNum">      46 </span><span class="lineNoCov">          0 :     infogain_.FromProto(blob_proto);</span>
<span class="lineNum">      47 </span>            :   }
<span class="lineNum">      48 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">      49 </span>            : 
<span class="lineNum">      50 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      51 </span><span class="lineNoCov">          0 : void InfogainLossLayer&lt;Dtype&gt;::Reshape(</span>
<span class="lineNum">      52 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom, const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">      53 </span><span class="lineNoCov">          0 :   LossLayer&lt;Dtype&gt;::Reshape(bottom, top);</span>
<span class="lineNum">      54 </span><span class="lineNoCov">          0 :   softmax_layer_-&gt;Reshape(softmax_bottom_vec_, softmax_top_vec_);</span>
<span class="lineNum">      55 </span><span class="lineNoCov">          0 :   infogain_axis_ =</span>
<span class="lineNum">      56 </span>            :     bottom[0]-&gt;CanonicalAxisIndex(
<span class="lineNum">      57 </span>            :       this-&gt;layer_param_.infogain_loss_param().axis());
<span class="lineNum">      58 </span><span class="lineNoCov">          0 :   outer_num_ = bottom[0]-&gt;count(0, infogain_axis_);</span>
<span class="lineNum">      59 </span><span class="lineNoCov">          0 :   inner_num_ = bottom[0]-&gt;count(infogain_axis_ + 1);</span>
<span class="lineNum">      60 </span><span class="lineNoCov">          0 :   CHECK_EQ(outer_num_ * inner_num_, bottom[1]-&gt;count())</span>
<span class="lineNum">      61 </span>            :       &lt;&lt; &quot;Number of labels must match number of predictions; &quot;
<span class="lineNum">      62 </span>            :       &lt;&lt; &quot;e.g., if infogain axis == 1 and prediction shape is (N, C, H, W), &quot;
<span class="lineNum">      63 </span>            :       &lt;&lt; &quot;label count (number of labels) must be N*H*W, &quot;
<span class="lineNum">      64 </span>            :       &lt;&lt; &quot;with integer values in {0, 1, ..., C-1}.&quot;;
<span class="lineNum">      65 </span><span class="lineNoCov">          0 :   num_labels_ = bottom[0]-&gt;shape(infogain_axis_);</span>
<span class="lineNum">      66 </span>            :   Blob&lt;Dtype&gt;* infogain = NULL;
<span class="lineNum">      67 </span><span class="lineNoCov">          0 :   if (bottom.size() &lt; 3) {</span>
<span class="lineNum">      68 </span><span class="lineNoCov">          0 :     infogain = &amp;infogain_;</span>
<span class="lineNum">      69 </span>            :   } else {
<span class="lineNum">      70 </span><span class="lineNoCov">          0 :     infogain = bottom[2];</span>
<span class="lineNum">      71 </span>            :   }
<span class="lineNum">      72 </span><span class="lineNoCov">          0 :   CHECK_EQ(infogain-&gt;count(), num_labels_*num_labels_);</span>
<span class="lineNum">      73 </span><span class="lineNoCov">          0 :   sum_rows_H_.Reshape(vector&lt;int&gt;(1, num_labels_));</span>
<span class="lineNum">      74 </span><span class="lineNoCov">          0 :   if (bottom.size() == 2) {</span>
<span class="lineNum">      75 </span>            :     // H is provided as a parameter and will not change. sum rows once
<span class="lineNum">      76 </span><span class="lineNoCov">          0 :     sum_rows_of_H(infogain);</span>
<span class="lineNum">      77 </span>            :   }
<span class="lineNum">      78 </span><span class="lineNoCov">          0 :   if (top.size() &gt;= 2) {</span>
<span class="lineNum">      79 </span>            :     // softmax output
<span class="lineNum">      80 </span><span class="lineNoCov">          0 :     top[1]-&gt;ReshapeLike(*bottom[0]);</span>
<span class="lineNum">      81 </span>            :   }
<span class="lineNum">      82 </span><span class="lineNoCov">          0 : }</span>
<a name="83"><span class="lineNum">      83 </span>            : </a>
<span class="lineNum">      84 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">      85 </span><span class="lineNoCov">          0 : Dtype InfogainLossLayer&lt;Dtype&gt;::get_normalizer(</span>
<span class="lineNum">      86 </span>            :     LossParameter_NormalizationMode normalization_mode, int valid_count) {
<span class="lineNum">      87 </span>            :   Dtype normalizer;
<span class="lineNum">      88 </span><span class="lineNoCov">          0 :   switch (normalization_mode) {</span>
<span class="lineNum">      89 </span>            :     case LossParameter_NormalizationMode_FULL:
<span class="lineNum">      90 </span><span class="lineNoCov">          0 :       normalizer = Dtype(outer_num_ * inner_num_);</span>
<span class="lineNum">      91 </span><span class="lineNoCov">          0 :       break;</span>
<span class="lineNum">      92 </span>            :     case LossParameter_NormalizationMode_VALID:
<span class="lineNum">      93 </span><span class="lineNoCov">          0 :       if (valid_count == -1) {</span>
<span class="lineNum">      94 </span><span class="lineNoCov">          0 :         normalizer = Dtype(outer_num_ * inner_num_);</span>
<span class="lineNum">      95 </span>            :       } else {
<span class="lineNum">      96 </span><span class="lineNoCov">          0 :         normalizer = Dtype(valid_count);</span>
<span class="lineNum">      97 </span>            :       }
<span class="lineNum">      98 </span>            :       break;
<span class="lineNum">      99 </span>            :     case LossParameter_NormalizationMode_BATCH_SIZE:
<span class="lineNum">     100 </span><span class="lineNoCov">          0 :       normalizer = Dtype(outer_num_);</span>
<span class="lineNum">     101 </span><span class="lineNoCov">          0 :       break;</span>
<span class="lineNum">     102 </span>            :     case LossParameter_NormalizationMode_NONE:
<span class="lineNum">     103 </span><span class="lineNoCov">          0 :       normalizer = Dtype(1);</span>
<span class="lineNum">     104 </span><span class="lineNoCov">          0 :       break;</span>
<span class="lineNum">     105 </span>            :     default:
<span class="lineNum">     106 </span><span class="lineNoCov">          0 :       LOG(FATAL) &lt;&lt; &quot;Unknown normalization mode: &quot;</span>
<span class="lineNum">     107 </span>            :           &lt;&lt; LossParameter_NormalizationMode_Name(normalization_mode);
<span class="lineNum">     108 </span>            :   }
<span class="lineNum">     109 </span>            :   // Some users will have no labels for some examples in order to 'turn off' a
<span class="lineNum">     110 </span>            :   // particular loss in a multi-task setup. The max prevents NaNs in that case.
<span class="lineNum">     111 </span><span class="lineNoCov">          0 :   return std::max(Dtype(1.0), normalizer);</span>
<span class="lineNum">     112 </span>            : }
<a name="113"><span class="lineNum">     113 </span>            : </a>
<span class="lineNum">     114 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     115 </span><span class="lineNoCov">          0 : void InfogainLossLayer&lt;Dtype&gt;::sum_rows_of_H(const Blob&lt;Dtype&gt;* H) {</span>
<span class="lineNum">     116 </span><span class="lineNoCov">          0 :   CHECK_EQ(H-&gt;count(), num_labels_*num_labels_)</span>
<span class="lineNum">     117 </span><span class="lineNoCov">          0 :     &lt;&lt; &quot;H must be &quot; &lt;&lt; num_labels_ &lt;&lt; &quot;x&quot; &lt;&lt; num_labels_;</span>
<span class="lineNum">     118 </span><span class="lineNoCov">          0 :   const Dtype* infogain_mat = H-&gt;cpu_data();</span>
<span class="lineNum">     119 </span><span class="lineNoCov">          0 :   Dtype* sum = sum_rows_H_.mutable_cpu_data();</span>
<span class="lineNum">     120 </span><span class="lineNoCov">          0 :   for ( int row = 0; row &lt; num_labels_ ; row++ ) {</span>
<span class="lineNum">     121 </span><span class="lineNoCov">          0 :     sum[row] = 0;</span>
<span class="lineNum">     122 </span><span class="lineNoCov">          0 :     for ( int col = 0; col &lt; num_labels_ ; col++ ) {</span>
<span class="lineNum">     123 </span><span class="lineNoCov">          0 :       sum[row] += infogain_mat[row*num_labels_+col];</span>
<span class="lineNum">     124 </span>            :     }
<span class="lineNum">     125 </span>            :   }
<span class="lineNum">     126 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     127 </span>            : 
<span class="lineNum">     128 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     129 </span><span class="lineNoCov">          0 : void InfogainLossLayer&lt;Dtype&gt;::Forward_cpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom,</span>
<span class="lineNum">     130 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top) {
<span class="lineNum">     131 </span>            :   // The forward pass computes the softmax prob values.
<span class="lineNum">     132 </span><span class="lineNoCov">          0 :   softmax_layer_-&gt;Forward(softmax_bottom_vec_, softmax_top_vec_);</span>
<span class="lineNum">     133 </span><span class="lineNoCov">          0 :   const Dtype* prob_data = prob_.cpu_data();</span>
<span class="lineNum">     134 </span><span class="lineNoCov">          0 :   const Dtype* bottom_label = bottom[1]-&gt;cpu_data();</span>
<span class="lineNum">     135 </span>            :   const Dtype* infogain_mat = NULL;
<span class="lineNum">     136 </span><span class="lineNoCov">          0 :   if (bottom.size() &lt; 3) {</span>
<span class="lineNum">     137 </span><span class="lineNoCov">          0 :     infogain_mat = infogain_.cpu_data();</span>
<span class="lineNum">     138 </span>            :   } else {
<span class="lineNum">     139 </span><span class="lineNoCov">          0 :     infogain_mat = bottom[2]-&gt;cpu_data();</span>
<span class="lineNum">     140 </span>            :   }
<span class="lineNum">     141 </span>            :   int count = 0;
<span class="lineNum">     142 </span>            :   Dtype loss = 0;
<span class="lineNum">     143 </span><span class="lineNoCov">          0 :   for (int i = 0; i &lt; outer_num_; ++i) {</span>
<span class="lineNum">     144 </span><span class="lineNoCov">          0 :     for (int j = 0; j &lt; inner_num_; j++) {</span>
<span class="lineNum">     145 </span>            :       const int label_value =
<span class="lineNum">     146 </span><span class="lineNoCov">          0 :         static_cast&lt;int&gt;(bottom_label[i * inner_num_ + j]);</span>
<span class="lineNum">     147 </span><span class="lineNoCov">          0 :       if (has_ignore_label_ &amp;&amp; label_value == ignore_label_) {</span>
<span class="lineNum">     148 </span>            :         continue;
<span class="lineNum">     149 </span>            :       }
<span class="lineNum">     150 </span>            :       DCHECK_GE(label_value, 0);
<span class="lineNum">     151 </span>            :       DCHECK_LT(label_value, num_labels_);
<span class="lineNum">     152 </span><span class="lineNoCov">          0 :       for (int l = 0; l &lt; num_labels_; l++) {</span>
<span class="lineNum">     153 </span><span class="lineNoCov">          0 :         loss -= infogain_mat[label_value * num_labels_ + l] *</span>
<span class="lineNum">     154 </span><span class="lineNoCov">          0 :           log(std::max(</span>
<span class="lineNum">     155 </span><span class="lineNoCov">          0 :                 prob_data[i * inner_num_*num_labels_ + l * inner_num_ + j],</span>
<span class="lineNum">     156 </span>            :                 Dtype(kLOG_THRESHOLD)));
<span class="lineNum">     157 </span>            :       }
<span class="lineNum">     158 </span><span class="lineNoCov">          0 :       ++count;</span>
<span class="lineNum">     159 </span>            :     }
<span class="lineNum">     160 </span>            :   }
<span class="lineNum">     161 </span><span class="lineNoCov">          0 :   top[0]-&gt;mutable_cpu_data()[0] = loss / get_normalizer(normalization_, count);</span>
<span class="lineNum">     162 </span><span class="lineNoCov">          0 :   if (top.size() == 2) {</span>
<span class="lineNum">     163 </span><span class="lineNoCov">          0 :     top[1]-&gt;ShareData(prob_);</span>
<span class="lineNum">     164 </span>            :   }
<span class="lineNum">     165 </span><span class="lineNoCov">          0 : }</span>
<span class="lineNum">     166 </span>            : 
<span class="lineNum">     167 </span>            : template &lt;typename Dtype&gt;
<span class="lineNum">     168 </span><span class="lineNoCov">          0 : void InfogainLossLayer&lt;Dtype&gt;::Backward_cpu(const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; top,</span>
<span class="lineNum">     169 </span>            :     const vector&lt;bool&gt;&amp; propagate_down,
<span class="lineNum">     170 </span>            :     const vector&lt;Blob&lt;Dtype&gt;*&gt;&amp; bottom) {
<span class="lineNum">     171 </span><span class="lineNoCov">          0 :   if (propagate_down[1]) {</span>
<span class="lineNum">     172 </span><span class="lineNoCov">          0 :     LOG(FATAL) &lt;&lt; this-&gt;type()</span>
<span class="lineNum">     173 </span>            :                &lt;&lt; &quot; Layer cannot backpropagate to label inputs.&quot;;
<span class="lineNum">     174 </span>            :   }
<span class="lineNum">     175 </span><span class="lineNoCov">          0 :   if (propagate_down.size() &gt; 2 &amp;&amp; propagate_down[2]) {</span>
<span class="lineNum">     176 </span><span class="lineNoCov">          0 :     LOG(FATAL) &lt;&lt; this-&gt;type()</span>
<span class="lineNum">     177 </span>            :                &lt;&lt; &quot; Layer cannot backpropagate to infogain inputs.&quot;;
<span class="lineNum">     178 </span>            :   }
<span class="lineNum">     179 </span><span class="lineNoCov">          0 :   if (propagate_down[0]) {</span>
<span class="lineNum">     180 </span><span class="lineNoCov">          0 :     const Dtype* prob_data = prob_.cpu_data();</span>
<span class="lineNum">     181 </span><span class="lineNoCov">          0 :     const Dtype* bottom_label = bottom[1]-&gt;cpu_data();</span>
<span class="lineNum">     182 </span>            :     const Dtype* infogain_mat = NULL;
<span class="lineNum">     183 </span><span class="lineNoCov">          0 :     if (bottom.size() &lt; 3) {</span>
<span class="lineNum">     184 </span><span class="lineNoCov">          0 :       infogain_mat = infogain_.cpu_data();</span>
<span class="lineNum">     185 </span>            :     } else {
<span class="lineNum">     186 </span><span class="lineNoCov">          0 :       infogain_mat = bottom[2]-&gt;cpu_data();</span>
<span class="lineNum">     187 </span>            :       // H is provided as a &quot;bottom&quot; and might change. sum rows every time.
<span class="lineNum">     188 </span><span class="lineNoCov">          0 :       sum_rows_of_H(bottom[2]);</span>
<span class="lineNum">     189 </span>            :     }
<span class="lineNum">     190 </span><span class="lineNoCov">          0 :     const Dtype* sum_rows_H = sum_rows_H_.cpu_data();</span>
<span class="lineNum">     191 </span><span class="lineNoCov">          0 :     Dtype* bottom_diff = bottom[0]-&gt;mutable_cpu_diff();</span>
<span class="lineNum">     192 </span><span class="lineNoCov">          0 :     const int dim = bottom[0]-&gt;count() / outer_num_;</span>
<span class="lineNum">     193 </span>            :     int count = 0;
<span class="lineNum">     194 </span><span class="lineNoCov">          0 :     for (int i = 0; i &lt; outer_num_; ++i) {</span>
<span class="lineNum">     195 </span><span class="lineNoCov">          0 :       for (int j = 0; j &lt; inner_num_; ++j) {</span>
<span class="lineNum">     196 </span>            :         const int label_value =
<span class="lineNum">     197 </span><span class="lineNoCov">          0 :           static_cast&lt;int&gt;(bottom_label[i * inner_num_ + j]);</span>
<span class="lineNum">     198 </span>            :         DCHECK_GE(label_value, 0);
<span class="lineNum">     199 </span>            :         DCHECK_LT(label_value, num_labels_);
<span class="lineNum">     200 </span><span class="lineNoCov">          0 :         if (has_ignore_label_ &amp;&amp; label_value == ignore_label_) {</span>
<span class="lineNum">     201 </span><span class="lineNoCov">          0 :           for (int l = 0; l &lt; num_labels_; ++l) {</span>
<span class="lineNum">     202 </span><span class="lineNoCov">          0 :             bottom_diff[i * dim + l * inner_num_ + j] = 0;</span>
<span class="lineNum">     203 </span>            :           }
<span class="lineNum">     204 </span>            :         } else {
<span class="lineNum">     205 </span><span class="lineNoCov">          0 :           for (int l = 0; l &lt; num_labels_; ++l) {</span>
<span class="lineNum">     206 </span><span class="lineNoCov">          0 :             bottom_diff[i * dim + l * inner_num_ + j] =</span>
<span class="lineNum">     207 </span><span class="lineNoCov">          0 :                prob_data[i*dim + l*inner_num_ + j]*sum_rows_H[label_value]</span>
<span class="lineNum">     208 </span><span class="lineNoCov">          0 :                - infogain_mat[label_value * num_labels_ + l];</span>
<span class="lineNum">     209 </span>            :           }
<span class="lineNum">     210 </span><span class="lineNoCov">          0 :           ++count;</span>
<span class="lineNum">     211 </span>            :         }
<span class="lineNum">     212 </span>            :       }
<span class="lineNum">     213 </span>            :     }
<span class="lineNum">     214 </span>            :     // Scale gradient
<span class="lineNum">     215 </span><span class="lineNoCov">          0 :     Dtype loss_weight = top[0]-&gt;cpu_diff()[0] /</span>
<span class="lineNum">     216 </span><span class="lineNoCov">          0 :                         get_normalizer(normalization_, count);</span>
<span class="lineNum">     217 </span><span class="lineNoCov">          0 :     caffe_scal(bottom[0]-&gt;count(), loss_weight, bottom_diff);</span>
<span class="lineNum">     218 </span>            :   }
<span class="lineNum">     219 </span><span class="lineNoCov">          0 : }</span>
<a name="220"><span class="lineNum">     220 </span>            : </a>
<a name="221"><span class="lineNum">     221 </span>            : INSTANTIATE_CLASS(InfogainLossLayer);</a>
<span class="lineNum">     222 </span><span class="lineCov">          3 : REGISTER_LAYER_CLASS(InfogainLoss);</span>
<span class="lineNum">     223 </span><span class="lineCov">          3 : }  // namespace caffe</span>
</pre>
      </td>
    </tr>
  </table>
  <br>

  <table width="100%" border="0" cellspacing="0" cellpadding="0">
    <tr><td class="ruler"><img src="../../../glass.png" width="3" height="3" alt=""></td></tr>
    <tr><td class="versionInfo">Generated by: <a href="https://ltp.sourceforge.net/coverage/lcov.php" target="_parent">LCOV version 1.12</a></td></tr>
  </table>
  <br>

</body>
</html>
