<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.13"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<title>Caffe: caffe::EuclideanLossLayer&lt; Dtype &gt; Class Template Reference</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/searchdata.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
 <tbody>
 <tr style="height: 56px;">
  <td id="projectalign" style="padding-left: 0.5em;">
   <div id="projectname">Caffe
   </div>
  </td>
 </tr>
 </tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.13 -->
<script type="text/javascript">
/* Create the global search-box widget used by the menu and the
   MSearchSelectWindow/MSearchResultsWindow handlers below.
   NOTE(review): SearchBox is defined in search/search.js (loaded above);
   arguments appear to be (element id, results path, server-side flag,
   placeholder label) -- confirm against search.js. */
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
<script type="text/javascript" src="menudata.js"></script>
<script type="text/javascript" src="menu.js"></script>
<script type="text/javascript">
/* Once the DOM is ready, build the top navigation menu (initMenu from
   menu.js, fed by menudata.js) and then initialize the search UI.
   NOTE(review): the inner $(document).ready() is redundant inside
   $(function(){...}) -- both fire at DOM-ready -- but it is preserved
   verbatim as emitted by Doxygen 1.8.13. */
$(function() {
  initMenu('',true,false,'search.php','Search');
  $(document).ready(function() { init_search(); });
});
</script>
<div id="main-nav"></div>
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
     onmouseover="return searchBox.OnSearchSelectShow()"
     onmouseout="return searchBox.OnSearchSelectHide()"
     onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>

<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
        name="MSearchResults" id="MSearchResults" title="Search results">
</iframe>
</div>

<div id="nav-path" class="navpath">
  <ul>
<li class="navelem"><a class="el" href="namespacecaffe.html">caffe</a></li><li class="navelem"><a class="el" href="classcaffe_1_1EuclideanLossLayer.html">EuclideanLossLayer</a></li>  </ul>
</div>
</div><!-- top -->
<div class="header">
  <div class="summary">
<a href="#pub-methods">Public Member Functions</a> &#124;
<a href="#pro-methods">Protected Member Functions</a> &#124;
<a href="#pro-attribs">Protected Attributes</a> &#124;
<a href="classcaffe_1_1EuclideanLossLayer-members.html">List of all members</a>  </div>
  <div class="headertitle">
<div class="title">caffe::EuclideanLossLayer&lt; Dtype &gt; Class Template Reference</div>  </div>
</div><!--header-->
<div class="contents">

<p>Computes the Euclidean (L2) loss <img class="formulaInl" alt="$ E = \frac{1}{2N} \sum\limits_{n=1}^N \left| \left| \hat{y}_n - y_n \right| \right|_2^2 $" src="form_74.png"/> for real-valued regression tasks.  
 <a href="classcaffe_1_1EuclideanLossLayer.html#details">More...</a></p>

<p><code>#include &lt;<a class="el" href="euclidean__loss__layer_8hpp_source.html">euclidean_loss_layer.hpp</a>&gt;</code></p>
<div class="dynheader">
Inheritance diagram for caffe::EuclideanLossLayer&lt; Dtype &gt;:</div>
<div class="dyncontent">
 <div class="center">
  <img src="classcaffe_1_1EuclideanLossLayer.png" usemap="#caffe::EuclideanLossLayer_3C_20Dtype_20_3E_map" alt="Inheritance diagram for caffe::EuclideanLossLayer"/>
  <map id="caffe::EuclideanLossLayer_3C_20Dtype_20_3E_map" name="caffe::EuclideanLossLayer_3C_20Dtype_20_3E_map">
<area href="classcaffe_1_1LossLayer.html" title="An interface for Layers that take two Blobs as input – usually (1) predictions and (2) ground-truth ..." alt="caffe::LossLayer&lt; Dtype &gt;" shape="rect" coords="0,56,219,80"/>
<area href="classcaffe_1_1Layer.html" title="An interface for the units of computation which can be composed into a Net. " alt="caffe::Layer&lt; Dtype &gt;" shape="rect" coords="0,0,219,24"/>
</map>
 </div></div>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pub-methods"></a>
Public Member Functions</h2></td></tr>
<tr class="memitem:aea3a6d5454ee1a0db7cdb6c59bcfc5c8"><td class="memItemLeft" align="right" valign="top"><a id="aea3a6d5454ee1a0db7cdb6c59bcfc5c8"></a>
&#160;</td><td class="memItemRight" valign="bottom"><b>EuclideanLossLayer</b> (const LayerParameter &amp;param)</td></tr>
<tr class="separator:aea3a6d5454ee1a0db7cdb6c59bcfc5c8"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a9cbe90ea0130c31bd5b9419a1bbaa555"><td class="memItemLeft" align="right" valign="top">virtual void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1EuclideanLossLayer.html#a9cbe90ea0130c31bd5b9419a1bbaa555">Reshape</a> (const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;bottom, const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;top)</td></tr>
<tr class="memdesc:a9cbe90ea0130c31bd5b9419a1bbaa555"><td class="mdescLeft">&#160;</td><td class="mdescRight">Adjust the shapes of top blobs and internal buffers to accommodate the shapes of the bottom blobs.  <a href="#a9cbe90ea0130c31bd5b9419a1bbaa555">More...</a><br /></td></tr>
<tr class="separator:a9cbe90ea0130c31bd5b9419a1bbaa555"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:afe52ebbfc18c5cc36e0bdc35445c82e5"><td class="memItemLeft" align="right" valign="top"><a id="afe52ebbfc18c5cc36e0bdc35445c82e5"></a>
virtual const char *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1EuclideanLossLayer.html#afe52ebbfc18c5cc36e0bdc35445c82e5">type</a> () const</td></tr>
<tr class="memdesc:afe52ebbfc18c5cc36e0bdc35445c82e5"><td class="mdescLeft">&#160;</td><td class="mdescRight">Returns the layer type. <br /></td></tr>
<tr class="separator:afe52ebbfc18c5cc36e0bdc35445c82e5"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a76dd3fde9f09cb9840f05ee035b5a2c5"><td class="memItemLeft" align="right" valign="top">virtual bool&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1EuclideanLossLayer.html#a76dd3fde9f09cb9840f05ee035b5a2c5">AllowForceBackward</a> (const int bottom_index) const</td></tr>
<tr class="separator:a76dd3fde9f09cb9840f05ee035b5a2c5"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="inherit_header pub_methods_classcaffe_1_1LossLayer"><td colspan="2" onclick="javascript:toggleInherit('pub_methods_classcaffe_1_1LossLayer')"><img src="closed.png" alt="-"/>&#160;Public Member Functions inherited from <a class="el" href="classcaffe_1_1LossLayer.html">caffe::LossLayer&lt; Dtype &gt;</a></td></tr>
<tr class="memitem:a16e133050e2d97c6f024ea74e3ba4ead inherit pub_methods_classcaffe_1_1LossLayer"><td class="memItemLeft" align="right" valign="top"><a id="a16e133050e2d97c6f024ea74e3ba4ead"></a>
&#160;</td><td class="memItemRight" valign="bottom"><b>LossLayer</b> (const LayerParameter &amp;param)</td></tr>
<tr class="separator:a16e133050e2d97c6f024ea74e3ba4ead inherit pub_methods_classcaffe_1_1LossLayer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:aa6fc7c2e90be66f1c1f0683637c949da inherit pub_methods_classcaffe_1_1LossLayer"><td class="memItemLeft" align="right" valign="top">virtual void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1LossLayer.html#aa6fc7c2e90be66f1c1f0683637c949da">LayerSetUp</a> (const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;bottom, const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;top)</td></tr>
<tr class="memdesc:aa6fc7c2e90be66f1c1f0683637c949da inherit pub_methods_classcaffe_1_1LossLayer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Does layer-specific setup: your layer should implement this function as well as Reshape.  <a href="classcaffe_1_1LossLayer.html#aa6fc7c2e90be66f1c1f0683637c949da">More...</a><br /></td></tr>
<tr class="separator:aa6fc7c2e90be66f1c1f0683637c949da inherit pub_methods_classcaffe_1_1LossLayer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:af1620064baefb711e2c767bdc92b6fb1 inherit pub_methods_classcaffe_1_1LossLayer"><td class="memItemLeft" align="right" valign="top">virtual int&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1LossLayer.html#af1620064baefb711e2c767bdc92b6fb1">ExactNumBottomBlobs</a> () const</td></tr>
<tr class="memdesc:af1620064baefb711e2c767bdc92b6fb1 inherit pub_methods_classcaffe_1_1LossLayer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Returns the exact number of bottom blobs required by the layer, or -1 if no exact number is required.  <a href="classcaffe_1_1LossLayer.html#af1620064baefb711e2c767bdc92b6fb1">More...</a><br /></td></tr>
<tr class="separator:af1620064baefb711e2c767bdc92b6fb1 inherit pub_methods_classcaffe_1_1LossLayer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:ae98a9942cdb1c67e09d45cc2d876618e inherit pub_methods_classcaffe_1_1LossLayer"><td class="memItemLeft" align="right" valign="top"><a id="ae98a9942cdb1c67e09d45cc2d876618e"></a>
virtual bool&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1LossLayer.html#ae98a9942cdb1c67e09d45cc2d876618e">AutoTopBlobs</a> () const</td></tr>
<tr class="memdesc:ae98a9942cdb1c67e09d45cc2d876618e inherit pub_methods_classcaffe_1_1LossLayer"><td class="mdescLeft">&#160;</td><td class="mdescRight">For convenience and backwards compatibility, instruct the <a class="el" href="classcaffe_1_1Net.html" title="Connects Layers together into a directed acyclic graph (DAG) specified by a NetParameter. ">Net</a> to automatically allocate a single top <a class="el" href="classcaffe_1_1Blob.html" title="A wrapper around SyncedMemory holders serving as the basic computational unit through which Layers...">Blob</a> for LossLayers, into which they output their singleton loss, (even if the user didn't specify one in the prototxt, etc.). <br /></td></tr>
<tr class="separator:ae98a9942cdb1c67e09d45cc2d876618e inherit pub_methods_classcaffe_1_1LossLayer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:aa5d5ab714a14082f5343dc9c49025b23 inherit pub_methods_classcaffe_1_1LossLayer"><td class="memItemLeft" align="right" valign="top">virtual int&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1LossLayer.html#aa5d5ab714a14082f5343dc9c49025b23">ExactNumTopBlobs</a> () const</td></tr>
<tr class="memdesc:aa5d5ab714a14082f5343dc9c49025b23 inherit pub_methods_classcaffe_1_1LossLayer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Returns the exact number of top blobs required by the layer, or -1 if no exact number is required.  <a href="classcaffe_1_1LossLayer.html#aa5d5ab714a14082f5343dc9c49025b23">More...</a><br /></td></tr>
<tr class="separator:aa5d5ab714a14082f5343dc9c49025b23 inherit pub_methods_classcaffe_1_1LossLayer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="inherit_header pub_methods_classcaffe_1_1Layer"><td colspan="2" onclick="javascript:toggleInherit('pub_methods_classcaffe_1_1Layer')"><img src="closed.png" alt="-"/>&#160;Public Member Functions inherited from <a class="el" href="classcaffe_1_1Layer.html">caffe::Layer&lt; Dtype &gt;</a></td></tr>
<tr class="memitem:a7b4e4ccea08c7b8b15acc6829d5735f6 inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#a7b4e4ccea08c7b8b15acc6829d5735f6">Layer</a> (const LayerParameter &amp;param)</td></tr>
<tr class="separator:a7b4e4ccea08c7b8b15acc6829d5735f6 inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a18d6bfdb535ab8e96a971dec4ae39a84 inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#a18d6bfdb535ab8e96a971dec4ae39a84">SetUp</a> (const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;bottom, const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;top)</td></tr>
<tr class="memdesc:a18d6bfdb535ab8e96a971dec4ae39a84 inherit pub_methods_classcaffe_1_1Layer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Implements common layer setup functionality.  <a href="classcaffe_1_1Layer.html#a18d6bfdb535ab8e96a971dec4ae39a84">More...</a><br /></td></tr>
<tr class="separator:a18d6bfdb535ab8e96a971dec4ae39a84 inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:ab57d272dabe8c709d2a785eebe72ca57 inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">Dtype&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#ab57d272dabe8c709d2a785eebe72ca57">Forward</a> (const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;bottom, const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;top)</td></tr>
<tr class="memdesc:ab57d272dabe8c709d2a785eebe72ca57 inherit pub_methods_classcaffe_1_1Layer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Given the bottom blobs, compute the top blobs and the loss.  <a href="classcaffe_1_1Layer.html#ab57d272dabe8c709d2a785eebe72ca57">More...</a><br /></td></tr>
<tr class="separator:ab57d272dabe8c709d2a785eebe72ca57 inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a183d343f5183a4762307f2c5e6ed1e12 inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#a183d343f5183a4762307f2c5e6ed1e12">Backward</a> (const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;top, const vector&lt; bool &gt; &amp;propagate_down, const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;bottom)</td></tr>
<tr class="memdesc:a183d343f5183a4762307f2c5e6ed1e12 inherit pub_methods_classcaffe_1_1Layer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Given the top blob error gradients, compute the bottom blob error gradients.  <a href="classcaffe_1_1Layer.html#a183d343f5183a4762307f2c5e6ed1e12">More...</a><br /></td></tr>
<tr class="separator:a183d343f5183a4762307f2c5e6ed1e12 inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:aaf4524ce8641a30a8a4784aee1b2b4c8 inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top"><a id="aaf4524ce8641a30a8a4784aee1b2b4c8"></a>
vector&lt; shared_ptr&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; &gt; &gt; &amp;&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#aaf4524ce8641a30a8a4784aee1b2b4c8">blobs</a> ()</td></tr>
<tr class="memdesc:aaf4524ce8641a30a8a4784aee1b2b4c8 inherit pub_methods_classcaffe_1_1Layer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Returns the vector of learnable parameter blobs. <br /></td></tr>
<tr class="separator:aaf4524ce8641a30a8a4784aee1b2b4c8 inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:adff82274f146e2b6922d0ebac2aaf215 inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top"><a id="adff82274f146e2b6922d0ebac2aaf215"></a>
const LayerParameter &amp;&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#adff82274f146e2b6922d0ebac2aaf215">layer_param</a> () const</td></tr>
<tr class="memdesc:adff82274f146e2b6922d0ebac2aaf215 inherit pub_methods_classcaffe_1_1Layer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Returns the layer parameter. <br /></td></tr>
<tr class="separator:adff82274f146e2b6922d0ebac2aaf215 inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a4a1754828dda22cc8daa2f63377f3579 inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top"><a id="a4a1754828dda22cc8daa2f63377f3579"></a>
virtual void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#a4a1754828dda22cc8daa2f63377f3579">ToProto</a> (LayerParameter *param, bool write_diff=false)</td></tr>
<tr class="memdesc:a4a1754828dda22cc8daa2f63377f3579 inherit pub_methods_classcaffe_1_1Layer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Writes the layer parameter to a protocol buffer. <br /></td></tr>
<tr class="separator:a4a1754828dda22cc8daa2f63377f3579 inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a899410336f30821644c8bd6c69a070c9 inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top"><a id="a899410336f30821644c8bd6c69a070c9"></a>
Dtype&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#a899410336f30821644c8bd6c69a070c9">loss</a> (const int top_index) const</td></tr>
<tr class="memdesc:a899410336f30821644c8bd6c69a070c9 inherit pub_methods_classcaffe_1_1Layer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Returns the scalar loss associated with a top blob at a given index. <br /></td></tr>
<tr class="separator:a899410336f30821644c8bd6c69a070c9 inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a899b09f4b91ada8545b3a43ee91e0d69 inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top"><a id="a899b09f4b91ada8545b3a43ee91e0d69"></a>
void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#a899b09f4b91ada8545b3a43ee91e0d69">set_loss</a> (const int top_index, const Dtype value)</td></tr>
<tr class="memdesc:a899b09f4b91ada8545b3a43ee91e0d69 inherit pub_methods_classcaffe_1_1Layer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Sets the loss associated with a top blob at a given index. <br /></td></tr>
<tr class="separator:a899b09f4b91ada8545b3a43ee91e0d69 inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:aca3cb2bafaefda5d4760aaebd0b72def inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">virtual int&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#aca3cb2bafaefda5d4760aaebd0b72def">MinBottomBlobs</a> () const</td></tr>
<tr class="memdesc:aca3cb2bafaefda5d4760aaebd0b72def inherit pub_methods_classcaffe_1_1Layer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Returns the minimum number of bottom blobs required by the layer, or -1 if no minimum number is required.  <a href="classcaffe_1_1Layer.html#aca3cb2bafaefda5d4760aaebd0b72def">More...</a><br /></td></tr>
<tr class="separator:aca3cb2bafaefda5d4760aaebd0b72def inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:af8bdc989053e0363ab032026b46de7c3 inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">virtual int&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#af8bdc989053e0363ab032026b46de7c3">MaxBottomBlobs</a> () const</td></tr>
<tr class="memdesc:af8bdc989053e0363ab032026b46de7c3 inherit pub_methods_classcaffe_1_1Layer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Returns the maximum number of bottom blobs required by the layer, or -1 if no maximum number is required.  <a href="classcaffe_1_1Layer.html#af8bdc989053e0363ab032026b46de7c3">More...</a><br /></td></tr>
<tr class="separator:af8bdc989053e0363ab032026b46de7c3 inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:ab9e4c8d642e413948b131d851a8462a4 inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">virtual int&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#ab9e4c8d642e413948b131d851a8462a4">MinTopBlobs</a> () const</td></tr>
<tr class="memdesc:ab9e4c8d642e413948b131d851a8462a4 inherit pub_methods_classcaffe_1_1Layer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Returns the minimum number of top blobs required by the layer, or -1 if no minimum number is required.  <a href="classcaffe_1_1Layer.html#ab9e4c8d642e413948b131d851a8462a4">More...</a><br /></td></tr>
<tr class="separator:ab9e4c8d642e413948b131d851a8462a4 inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:ac6c03df0b6e40e776c94001e19994a2e inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">virtual int&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#ac6c03df0b6e40e776c94001e19994a2e">MaxTopBlobs</a> () const</td></tr>
<tr class="memdesc:ac6c03df0b6e40e776c94001e19994a2e inherit pub_methods_classcaffe_1_1Layer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Returns the maximum number of top blobs required by the layer, or -1 if no maximum number is required.  <a href="classcaffe_1_1Layer.html#ac6c03df0b6e40e776c94001e19994a2e">More...</a><br /></td></tr>
<tr class="separator:ac6c03df0b6e40e776c94001e19994a2e inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:af452a938bc7596f9b5e9900c8dc4ab3d inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">virtual bool&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#af452a938bc7596f9b5e9900c8dc4ab3d">EqualNumBottomTopBlobs</a> () const</td></tr>
<tr class="memdesc:af452a938bc7596f9b5e9900c8dc4ab3d inherit pub_methods_classcaffe_1_1Layer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Returns true if the layer requires an equal number of bottom and top blobs.  <a href="classcaffe_1_1Layer.html#af452a938bc7596f9b5e9900c8dc4ab3d">More...</a><br /></td></tr>
<tr class="separator:af452a938bc7596f9b5e9900c8dc4ab3d inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a1a3708013b0231e71d725252e10ce6e3 inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">bool&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#a1a3708013b0231e71d725252e10ce6e3">param_propagate_down</a> (const int param_id)</td></tr>
<tr class="memdesc:a1a3708013b0231e71d725252e10ce6e3 inherit pub_methods_classcaffe_1_1Layer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Specifies whether the layer should compute gradients w.r.t. a parameter at a particular index given by param_id.  <a href="classcaffe_1_1Layer.html#a1a3708013b0231e71d725252e10ce6e3">More...</a><br /></td></tr>
<tr class="separator:a1a3708013b0231e71d725252e10ce6e3 inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a9a6fcb843803ed556f0a69cc2864379b inherit pub_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top"><a id="a9a6fcb843803ed556f0a69cc2864379b"></a>
void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#a9a6fcb843803ed556f0a69cc2864379b">set_param_propagate_down</a> (const int param_id, const bool value)</td></tr>
<tr class="memdesc:a9a6fcb843803ed556f0a69cc2864379b inherit pub_methods_classcaffe_1_1Layer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Sets whether the layer should compute gradients w.r.t. a parameter at a particular index given by param_id. <br /></td></tr>
<tr class="separator:a9a6fcb843803ed556f0a69cc2864379b inherit pub_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pro-methods"></a>
Protected Member Functions</h2></td></tr>
<tr class="memitem:a3bc5a947caadac1a352a89b08720c7e7"><td class="memItemLeft" align="right" valign="top">virtual void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1EuclideanLossLayer.html#a3bc5a947caadac1a352a89b08720c7e7">Forward_cpu</a> (const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;bottom, const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;top)</td></tr>
<tr class="memdesc:a3bc5a947caadac1a352a89b08720c7e7"><td class="mdescLeft">&#160;</td><td class="mdescRight">Computes the Euclidean (L2) loss <img class="formulaInl" alt="$ E = \frac{1}{2N} \sum\limits_{n=1}^N \left| \left| \hat{y}_n - y_n \right| \right|_2^2 $" src="form_74.png"/> for real-valued regression tasks.  <a href="#a3bc5a947caadac1a352a89b08720c7e7">More...</a><br /></td></tr>
<tr class="separator:a3bc5a947caadac1a352a89b08720c7e7"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a35f3694e9dd7e1920e26158e761fd8a0"><td class="memItemLeft" align="right" valign="top"><a id="a35f3694e9dd7e1920e26158e761fd8a0"></a>
virtual void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1EuclideanLossLayer.html#a35f3694e9dd7e1920e26158e761fd8a0">Forward_gpu</a> (const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;bottom, const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;top)</td></tr>
<tr class="memdesc:a35f3694e9dd7e1920e26158e761fd8a0"><td class="mdescLeft">&#160;</td><td class="mdescRight">Using the GPU device, compute the layer output. Fall back to <a class="el" href="classcaffe_1_1EuclideanLossLayer.html#a3bc5a947caadac1a352a89b08720c7e7" title="Computes the Euclidean (L2) loss  for real-valued regression tasks. ">Forward_cpu()</a> if unavailable. <br /></td></tr>
<tr class="separator:a35f3694e9dd7e1920e26158e761fd8a0"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a577f5dad9889d01461348a4c89277089"><td class="memItemLeft" align="right" valign="top">virtual void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1EuclideanLossLayer.html#a577f5dad9889d01461348a4c89277089">Backward_cpu</a> (const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;top, const vector&lt; bool &gt; &amp;propagate_down, const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;bottom)</td></tr>
<tr class="memdesc:a577f5dad9889d01461348a4c89277089"><td class="mdescLeft">&#160;</td><td class="mdescRight">Computes the Euclidean error gradient w.r.t. the inputs.  <a href="#a577f5dad9889d01461348a4c89277089">More...</a><br /></td></tr>
<tr class="separator:a577f5dad9889d01461348a4c89277089"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:aa22a0ef4e0ab9d6a77be67c3d967b648"><td class="memItemLeft" align="right" valign="top"><a id="aa22a0ef4e0ab9d6a77be67c3d967b648"></a>
virtual void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1EuclideanLossLayer.html#aa22a0ef4e0ab9d6a77be67c3d967b648">Backward_gpu</a> (const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;top, const vector&lt; bool &gt; &amp;propagate_down, const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;bottom)</td></tr>
<tr class="memdesc:aa22a0ef4e0ab9d6a77be67c3d967b648"><td class="mdescLeft">&#160;</td><td class="mdescRight">Using the GPU device, compute the gradients for any parameters and for the bottom blobs if propagate_down is true. Fall back to <a class="el" href="classcaffe_1_1EuclideanLossLayer.html#a577f5dad9889d01461348a4c89277089" title="Computes the Euclidean error gradient w.r.t. the inputs. ">Backward_cpu()</a> if unavailable. <br /></td></tr>
<tr class="separator:aa22a0ef4e0ab9d6a77be67c3d967b648"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="inherit_header pro_methods_classcaffe_1_1Layer"><td colspan="2" onclick="javascript:toggleInherit('pro_methods_classcaffe_1_1Layer')"><img src="closed.png" alt="-"/>&#160;Protected Member Functions inherited from <a class="el" href="classcaffe_1_1Layer.html">caffe::Layer&lt; Dtype &gt;</a></td></tr>
<tr class="memitem:a55c8036130225fbc874a986bdf4b27e2 inherit pro_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">virtual void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#a55c8036130225fbc874a986bdf4b27e2">CheckBlobCounts</a> (const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;bottom, const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;top)</td></tr>
<tr class="separator:a55c8036130225fbc874a986bdf4b27e2 inherit pro_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a04eb2a3d1d59c64cd64c233217d5d6fc inherit pro_methods_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#a04eb2a3d1d59c64cd64c233217d5d6fc">SetLossWeights</a> (const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;top)</td></tr>
<tr class="separator:a04eb2a3d1d59c64cd64c233217d5d6fc inherit pro_methods_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pro-attribs"></a>
Protected Attributes</h2></td></tr>
<tr class="memitem:a47ec68365879c820f9e18e456f93376a"><td class="memItemLeft" align="right" valign="top"><a id="a47ec68365879c820f9e18e456f93376a"></a>
<a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt;&#160;</td><td class="memItemRight" valign="bottom"><b>diff_</b></td></tr>
<tr class="separator:a47ec68365879c820f9e18e456f93376a"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="inherit_header pro_attribs_classcaffe_1_1Layer"><td colspan="2" onclick="javascript:toggleInherit('pro_attribs_classcaffe_1_1Layer')"><img src="closed.png" alt="-"/>&#160;Protected Attributes inherited from <a class="el" href="classcaffe_1_1Layer.html">caffe::Layer&lt; Dtype &gt;</a></td></tr>
<tr class="memitem:a7ed12bb2df25c887e41d7ea9557fc701 inherit pro_attribs_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">LayerParameter&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#a7ed12bb2df25c887e41d7ea9557fc701">layer_param_</a></td></tr>
<tr class="separator:a7ed12bb2df25c887e41d7ea9557fc701 inherit pro_attribs_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a1d04ad7f595a82a1c811f102d68b8a19 inherit pro_attribs_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">Phase&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#a1d04ad7f595a82a1c811f102d68b8a19">phase_</a></td></tr>
<tr class="separator:a1d04ad7f595a82a1c811f102d68b8a19 inherit pro_attribs_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a8073fcf2c139b47eb99ce71b346b1321 inherit pro_attribs_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">vector&lt; shared_ptr&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; &gt; &gt;&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#a8073fcf2c139b47eb99ce71b346b1321">blobs_</a></td></tr>
<tr class="separator:a8073fcf2c139b47eb99ce71b346b1321 inherit pro_attribs_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:acd4a05def9ff3b42ad72404210613ef7 inherit pro_attribs_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">vector&lt; bool &gt;&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#acd4a05def9ff3b42ad72404210613ef7">param_propagate_down_</a></td></tr>
<tr class="separator:acd4a05def9ff3b42ad72404210613ef7 inherit pro_attribs_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:af6d347229a139500994e7a926c680486 inherit pro_attribs_classcaffe_1_1Layer"><td class="memItemLeft" align="right" valign="top">vector&lt; Dtype &gt;&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classcaffe_1_1Layer.html#af6d347229a139500994e7a926c680486">loss_</a></td></tr>
<tr class="separator:af6d347229a139500994e7a926c680486 inherit pro_attribs_classcaffe_1_1Layer"><td class="memSeparator" colspan="2">&#160;</td></tr>
</table>
<a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2>
<div class="textblock"><h3>template&lt;typename Dtype&gt;<br />
class caffe::EuclideanLossLayer&lt; Dtype &gt;</h3>

<p>Computes the Euclidean (L2) loss <img class="formulaInl" alt="$ E = \frac{1}{2N} \sum\limits_{n=1}^N \left| \left| \hat{y}_n - y_n \right| \right|_2^2 $" src="form_74.png"/> for real-valued regression tasks. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">bottom</td><td>input <a class="el" href="classcaffe_1_1Blob.html" title="A wrapper around SyncedMemory holders serving as the basic computational unit through which Layers...">Blob</a> vector (length 2)<ol type="1">
<li><img class="formulaInl" alt="$ (N \times C \times H \times W) $" src="form_10.png"/> the predictions <img class="formulaInl" alt="$ \hat{y} \in [-\infty, +\infty]$" src="form_75.png"/></li>
<li><img class="formulaInl" alt="$ (N \times C \times H \times W) $" src="form_10.png"/> the targets <img class="formulaInl" alt="$ y \in [-\infty, +\infty]$" src="form_76.png"/> </li>
</ol>
</td></tr>
    <tr><td class="paramname">top</td><td>output <a class="el" href="classcaffe_1_1Blob.html" title="A wrapper around SyncedMemory holders serving as the basic computational unit through which Layers...">Blob</a> vector (length 1)<ol type="1">
<li><img class="formulaInl" alt="$ (1 \times 1 \times 1 \times 1) $" src="form_26.png"/> the computed Euclidean loss: <img class="formulaInl" alt="$ E = \frac{1}{2N} \sum\limits_{n=1}^N \left| \left| \hat{y}_n - y_n \right| \right|_2^2 $" src="form_77.png"/></li>
</ol>
</td></tr>
  </table>
  </dd>
</dl>
<p>This can be used for least-squares regression tasks. An <a class="el" href="classcaffe_1_1InnerProductLayer.html" title="Also known as a &quot;fully-connected&quot; layer, computes an inner product with a set of learned weights...">InnerProductLayer</a> input to a <a class="el" href="classcaffe_1_1EuclideanLossLayer.html" title="Computes the Euclidean (L2) loss  for real-valued regression tasks. ">EuclideanLossLayer</a> exactly formulates a linear least squares regression problem. With non-zero weight decay the problem becomes one of ridge regression &ndash; see src/caffe/test/test_sgd_solver.cpp for a concrete example wherein we check that the gradients computed for a <a class="el" href="classcaffe_1_1Net.html" title="Connects Layers together into a directed acyclic graph (DAG) specified by a NetParameter. ">Net</a> with exactly this structure match hand-computed gradient formulas for ridge regression.</p>
<p>(Note: <a class="el" href="classcaffe_1_1Caffe.html">Caffe</a>, and SGD in general, is certainly <b>not</b> the best way to solve linear least squares problems! We use it only as an instructive example.) </p>
</div><h2 class="groupheader">Member Function Documentation</h2>
<a id="a76dd3fde9f09cb9840f05ee035b5a2c5"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a76dd3fde9f09cb9840f05ee035b5a2c5">&#9670;&nbsp;</a></span>AllowForceBackward()</h2>

<div class="memitem">
<div class="memproto">
<div class="memtemplate">
template&lt;typename Dtype &gt; </div>
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">virtual bool <a class="el" href="classcaffe_1_1EuclideanLossLayer.html">caffe::EuclideanLossLayer</a>&lt; Dtype &gt;::AllowForceBackward </td>
          <td>(</td>
          <td class="paramtype">const int&#160;</td>
          <td class="paramname"><em>bottom_index</em></td><td>)</td>
          <td> const</td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span><span class="mlabel">virtual</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">
<p>Unlike most loss layers, in the <a class="el" href="classcaffe_1_1EuclideanLossLayer.html" title="Computes the Euclidean (L2) loss  for real-valued regression tasks. ">EuclideanLossLayer</a> we can backpropagate to both inputs &ndash; override to return true and always allow force_backward. </p>

<p>Reimplemented from <a class="el" href="classcaffe_1_1LossLayer.html#a36d35155bfe0de53a79c517f33759612">caffe::LossLayer&lt; Dtype &gt;</a>.</p>

</div>
</div>
<a id="a577f5dad9889d01461348a4c89277089"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a577f5dad9889d01461348a4c89277089">&#9670;&nbsp;</a></span>Backward_cpu()</h2>

<div class="memitem">
<div class="memproto">
<div class="memtemplate">
template&lt;typename Dtype &gt; </div>
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">void <a class="el" href="classcaffe_1_1EuclideanLossLayer.html">caffe::EuclideanLossLayer</a>&lt; Dtype &gt;::Backward_cpu </td>
          <td>(</td>
          <td class="paramtype">const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;&#160;</td>
          <td class="paramname"><em>top</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const vector&lt; bool &gt; &amp;&#160;</td>
          <td class="paramname"><em>propagate_down</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;&#160;</td>
          <td class="paramname"><em>bottom</em>&#160;</td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">protected</span><span class="mlabel">virtual</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Computes the Euclidean error gradient w.r.t. the inputs. </p>
<p>Unlike other children of <a class="el" href="classcaffe_1_1LossLayer.html" title="An interface for Layers that take two Blobs as input – usually (1) predictions and (2) ground-truth ...">LossLayer</a>, <a class="el" href="classcaffe_1_1EuclideanLossLayer.html" title="Computes the Euclidean (L2) loss  for real-valued regression tasks. ">EuclideanLossLayer</a> <b>can</b> compute gradients with respect to the label inputs bottom[1] (but it will still do so only if propagate_down[1] is set &ndash; e.g. because the labels are produced by learnable parameters &ndash; or if force_backward is set). In fact, this layer is "commutative" &ndash; the result is the same regardless of the order of the two bottoms.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">top</td><td>output <a class="el" href="classcaffe_1_1Blob.html" title="A wrapper around SyncedMemory holders serving as the basic computational unit through which Layers...">Blob</a> vector (length 1), providing the error gradient with respect to the outputs<ol type="1">
<li><img class="formulaInl" alt="$ (1 \times 1 \times 1 \times 1) $" src="form_26.png"/> This <a class="el" href="classcaffe_1_1Blob.html" title="A wrapper around SyncedMemory holders serving as the basic computational unit through which Layers...">Blob</a>'s diff will simply contain the loss_weight* <img class="formulaInl" alt="$ \lambda $" src="form_57.png"/>, as <img class="formulaInl" alt="$ \lambda $" src="form_57.png"/> is the coefficient of this layer's output <img class="formulaInl" alt="$\ell_i$" src="form_58.png"/> in the overall <a class="el" href="classcaffe_1_1Net.html" title="Connects Layers together into a directed acyclic graph (DAG) specified by a NetParameter. ">Net</a> loss <img class="formulaInl" alt="$ E = \lambda_i \ell_i + \mbox{other loss terms}$" src="form_59.png"/>; hence <img class="formulaInl" alt="$ \frac{\partial E}{\partial \ell_i} = \lambda_i $" src="form_60.png"/>. (*Assuming that this top <a class="el" href="classcaffe_1_1Blob.html" title="A wrapper around SyncedMemory holders serving as the basic computational unit through which Layers...">Blob</a> is not used as a bottom (input) by any other layer of the <a class="el" href="classcaffe_1_1Net.html" title="Connects Layers together into a directed acyclic graph (DAG) specified by a NetParameter. ">Net</a>.) </li>
</ol>
</td></tr>
    <tr><td class="paramname">propagate_down</td><td>see <a class="el" href="classcaffe_1_1Layer.html#a183d343f5183a4762307f2c5e6ed1e12" title="Given the top blob error gradients, compute the bottom blob error gradients. ">Layer::Backward</a>. </td></tr>
    <tr><td class="paramname">bottom</td><td>input <a class="el" href="classcaffe_1_1Blob.html" title="A wrapper around SyncedMemory holders serving as the basic computational unit through which Layers...">Blob</a> vector (length 2)<ol type="1">
<li><img class="formulaInl" alt="$ (N \times C \times H \times W) $" src="form_10.png"/> the predictions <img class="formulaInl" alt="$\hat{y}$" src="form_78.png"/>; Backward fills their diff with gradients <img class="formulaInl" alt="$ \frac{\partial E}{\partial \hat{y}_n} = \frac{1}{N} (\hat{y}_n - y_n) $" src="form_79.png"/> if propagate_down[0]</li>
<li><img class="formulaInl" alt="$ (N \times C \times H \times W) $" src="form_10.png"/> the targets <img class="formulaInl" alt="$y$" src="form_80.png"/>; Backward fills their diff with gradients <img class="formulaInl" alt="$ \frac{\partial E}{\partial y_n} = \frac{1}{N} (y_n - \hat{y}_n) $" src="form_81.png"/> if propagate_down[1] </li>
</ol>
</td></tr>
  </table>
  </dd>
</dl>

<p>Implements <a class="el" href="classcaffe_1_1Layer.html#a75c9b2a321dc713e0eaef530d02dc37f">caffe::Layer&lt; Dtype &gt;</a>.</p>

</div>
</div>
<a id="a3bc5a947caadac1a352a89b08720c7e7"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a3bc5a947caadac1a352a89b08720c7e7">&#9670;&nbsp;</a></span>Forward_cpu()</h2>

<div class="memitem">
<div class="memproto">
<div class="memtemplate">
template&lt;typename Dtype &gt; </div>
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">void <a class="el" href="classcaffe_1_1EuclideanLossLayer.html">caffe::EuclideanLossLayer</a>&lt; Dtype &gt;::Forward_cpu </td>
          <td>(</td>
          <td class="paramtype">const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;&#160;</td>
          <td class="paramname"><em>bottom</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;&#160;</td>
          <td class="paramname"><em>top</em>&#160;</td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">protected</span><span class="mlabel">virtual</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Computes the Euclidean (L2) loss <img class="formulaInl" alt="$ E = \frac{1}{2N} \sum\limits_{n=1}^N \left| \left| \hat{y}_n - y_n \right| \right|_2^2 $" src="form_74.png"/> for real-valued regression tasks. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">bottom</td><td>input <a class="el" href="classcaffe_1_1Blob.html" title="A wrapper around SyncedMemory holders serving as the basic computational unit through which Layers...">Blob</a> vector (length 2)<ol type="1">
<li><img class="formulaInl" alt="$ (N \times C \times H \times W) $" src="form_10.png"/> the predictions <img class="formulaInl" alt="$ \hat{y} \in [-\infty, +\infty]$" src="form_75.png"/></li>
<li><img class="formulaInl" alt="$ (N \times C \times H \times W) $" src="form_10.png"/> the targets <img class="formulaInl" alt="$ y \in [-\infty, +\infty]$" src="form_76.png"/> </li>
</ol>
</td></tr>
    <tr><td class="paramname">top</td><td>output <a class="el" href="classcaffe_1_1Blob.html" title="A wrapper around SyncedMemory holders serving as the basic computational unit through which Layers...">Blob</a> vector (length 1)<ol type="1">
<li><img class="formulaInl" alt="$ (1 \times 1 \times 1 \times 1) $" src="form_26.png"/> the computed Euclidean loss: <img class="formulaInl" alt="$ E = \frac{1}{2N} \sum\limits_{n=1}^N \left| \left| \hat{y}_n - y_n \right| \right|_2^2 $" src="form_77.png"/></li>
</ol>
</td></tr>
  </table>
  </dd>
</dl>
<p>This can be used for least-squares regression tasks. An <a class="el" href="classcaffe_1_1InnerProductLayer.html" title="Also known as a &quot;fully-connected&quot; layer, computes an inner product with a set of learned weights...">InnerProductLayer</a> input to a <a class="el" href="classcaffe_1_1EuclideanLossLayer.html" title="Computes the Euclidean (L2) loss  for real-valued regression tasks. ">EuclideanLossLayer</a> exactly formulates a linear least squares regression problem. With non-zero weight decay the problem becomes one of ridge regression &ndash; see src/caffe/test/test_sgd_solver.cpp for a concrete example wherein we check that the gradients computed for a <a class="el" href="classcaffe_1_1Net.html" title="Connects Layers together into a directed acyclic graph (DAG) specified by a NetParameter. ">Net</a> with exactly this structure match hand-computed gradient formulas for ridge regression.</p>
<p>(Note: <a class="el" href="classcaffe_1_1Caffe.html">Caffe</a>, and SGD in general, is certainly <b>not</b> the best way to solve linear least squares problems! We use it only as an instructive example.) </p>

<p>Implements <a class="el" href="classcaffe_1_1Layer.html#a576ac6a60b1e99fe383831f52a6cea77">caffe::Layer&lt; Dtype &gt;</a>.</p>

</div>
</div>
<a id="a9cbe90ea0130c31bd5b9419a1bbaa555"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a9cbe90ea0130c31bd5b9419a1bbaa555">&#9670;&nbsp;</a></span>Reshape()</h2>

<div class="memitem">
<div class="memproto">
<div class="memtemplate">
template&lt;typename Dtype &gt; </div>
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">void <a class="el" href="classcaffe_1_1EuclideanLossLayer.html">caffe::EuclideanLossLayer</a>&lt; Dtype &gt;::Reshape </td>
          <td>(</td>
          <td class="paramtype">const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;&#160;</td>
          <td class="paramname"><em>bottom</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const vector&lt; <a class="el" href="classcaffe_1_1Blob.html">Blob</a>&lt; Dtype &gt; *&gt; &amp;&#160;</td>
          <td class="paramname"><em>top</em>&#160;</td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">virtual</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Adjust the shapes of top blobs and internal buffers to accommodate the shapes of the bottom blobs. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">bottom</td><td>the input blobs, with the requested input shapes </td></tr>
    <tr><td class="paramname">top</td><td>the top blobs, which should be reshaped as needed</td></tr>
  </table>
  </dd>
</dl>
<p>This method should reshape top blobs as needed according to the shapes of the bottom (input) blobs, as well as reshaping any internal buffers and making any other necessary adjustments so that the layer can accommodate the bottom blobs. </p>

<p>Reimplemented from <a class="el" href="classcaffe_1_1LossLayer.html#abf00412194f5413ea9468ee44b0d986f">caffe::LossLayer&lt; Dtype &gt;</a>.</p>

</div>
</div>
<hr/>The documentation for this class was generated from the following files:<ul>
<li>include/caffe/layers/<a class="el" href="euclidean__loss__layer_8hpp_source.html">euclidean_loss_layer.hpp</a></li>
<li>src/caffe/layers/euclidean_loss_layer.cpp</li>
</ul>
</div><!-- contents -->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated on Thu Aug 3 2017 23:11:23 for Caffe by &#160;<a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/>
</a> 1.8.13
</small></address>
</body>
</html>
