<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "https://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en-US">
<head>
<meta http-equiv="Content-Type" content="application/xhtml+xml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=edge"/>
<meta name="generator" content="Doxygen 1.12.0"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<title>NeuZephyr: nz::nodes::io::OutputNode Class Reference</title>
<link rel="icon" href="NZ_logo2.png" type="image/png" />
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="resize.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
 <tbody>
 <tr id="projectrow">
  <td id="projectlogo"><img alt="Logo" src="NZ_logo2.png"/></td>
  <td id="projectalign">
   <div id="projectname">NeuZephyr
   </div>
   <div id="projectbrief">Simple DL Framework</div>
  </td>
 </tr>
 </tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.12.0 -->
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&amp;dn=expat.txt MIT */
/* Run once the DOM is ready: initialize Doxygen's code-folding widget.
   Argument 0 is presumably the initial fold level — confirm against dynsections.js. */
jQuery(function() {
  codefold.init(0);
});
/* @license-end */
</script>
  <div id="navrow1" class="tabs">
    <ul class="tablist">
      <li><a href="index.html"><span>Main&#160;Page</span></a></li>
      <li><a href="pages.html"><span>Related&#160;Pages</span></a></li>
      <li><a href="namespaces.html"><span>Namespaces</span></a></li>
      <li class="current"><a href="annotated.html"><span>Classes</span></a></li>
      <li><a href="files.html"><span>Files</span></a></li>
    </ul>
  </div>
  <div id="navrow2" class="tabs2">
    <ul class="tablist">
      <li><a href="annotated.html"><span>Class&#160;List</span></a></li>
      <li><a href="classes.html"><span>Class&#160;Index</span></a></li>
      <li><a href="inherits.html"><span>Class&#160;Hierarchy</span></a></li>
      <li><a href="functions.html"><span>Class&#160;Members</span></a></li>
    </ul>
  </div>
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&amp;dn=expat.txt MIT */
/* Run once the DOM is ready: set up the resizable layout from resize.js.
   The false argument presumably disables the nav-tree pane — confirm against resize.js. */
jQuery(function() {
  initResizable(false);
});
/* @license-end */
</script>
<div id="nav-path" class="navpath">
  <ul>
<li class="navelem"><b>nz</b></li><li class="navelem"><a class="el" href="namespacenz_1_1nodes.html">nodes</a></li><li class="navelem"><a class="el" href="namespacenz_1_1nodes_1_1io.html">io</a></li><li class="navelem"><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html">OutputNode</a></li>  </ul>
</div>
</div><!-- top -->
<div id="doc-content">
<div class="header">
  <div class="summary">
<a href="#pub-methods">Public Member Functions</a> &#124;
<a href="classnz_1_1nodes_1_1io_1_1_output_node-members.html">List of all members</a>  </div>
  <div class="headertitle"><div class="title">nz::nodes::io::OutputNode Class Reference</div></div>
</div><!--header-->
<div class="contents">

<p>Base class for loss function nodes in a computational graph.  
 <a href="#details">More...</a></p>
<div class="dynheader">
Inheritance diagram for nz::nodes::io::OutputNode:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1nodes_1_1io_1_1_output_node__inherit__graph.png" border="0" usemap="#anz_1_1nodes_1_1io_1_1_output_node_inherit__map" alt="Inheritance graph"/></div>
<map name="anz_1_1nodes_1_1io_1_1_output_node_inherit__map" id="anz_1_1nodes_1_1io_1_1_output_node_inherit__map">
<area shape="rect" title="Base class for loss function nodes in a computational graph." alt="" coords="123,80,297,107"/>
<area shape="rect" href="classnz_1_1nodes_1_1loss_1_1_binary_cross_entropy_node.html" title="Represents the Binary Cross&#45;Entropy (BCE) loss function node in a computational graph." alt="" coords="5,155,196,197"/>
<area shape="poly" title=" " alt="" coords="182,118,130,156,127,152,179,114"/>
<area shape="rect" href="classnz_1_1nodes_1_1loss_1_1_mean_squared_error_node.html" title="Represents the Mean Squared Error (MSE) loss function node in a computational graph." alt="" coords="220,155,418,197"/>
<area shape="poly" title=" " alt="" coords="240,114,293,152,290,156,237,118"/>
<area shape="rect" href="classnz_1_1nodes_1_1_node.html" title="Base class for nodes in a neural network or computational graph." alt="" coords="151,5,268,32"/>
<area shape="poly" title=" " alt="" coords="212,48,212,80,207,80,207,48"/>
</map>
<center><span class="legend">[<a href="graph_legend.html">legend</a>]</span></center></div>
<div class="dynheader">
Collaboration diagram for nz::nodes::io::OutputNode:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1nodes_1_1io_1_1_output_node__coll__graph.png" border="0" usemap="#anz_1_1nodes_1_1io_1_1_output_node_coll__map" alt="Collaboration graph"/></div>
<map name="anz_1_1nodes_1_1io_1_1_output_node_coll__map" id="anz_1_1nodes_1_1io_1_1_output_node_coll__map">
<area shape="rect" title="Base class for loss function nodes in a computational graph." alt="" coords="5,80,180,107"/>
<area shape="rect" href="classnz_1_1nodes_1_1_node.html" title="Base class for nodes in a neural network or computational graph." alt="" coords="34,5,151,32"/>
<area shape="poly" title=" " alt="" coords="95,48,95,80,90,80,90,48"/>
</map>
<center><span class="legend">[<a href="graph_legend.html">legend</a>]</span></center></div>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a id="pub-methods" name="pub-methods"></a>
Public Member Functions</h2></td></tr>
<tr class="memitem:a98af165dc12d16d812708c3cdc9097b2" id="r_a98af165dc12d16d812708c3cdc9097b2"><td class="memItemLeft" align="right" valign="top">&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a98af165dc12d16d812708c3cdc9097b2">OutputNode</a> (<a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *input)</td></tr>
<tr class="memdesc:a98af165dc12d16d812708c3cdc9097b2"><td class="mdescLeft">&#160;</td><td class="mdescRight">Constructor to initialize an <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> with a given input node.  <br /></td></tr>
<tr class="separator:a98af165dc12d16d812708c3cdc9097b2"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a1c05ec6cdbddef105a20c400d0515471" id="r_a1c05ec6cdbddef105a20c400d0515471"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a1c05ec6cdbddef105a20c400d0515471">forward</a> () override</td></tr>
<tr class="memdesc:a1c05ec6cdbddef105a20c400d0515471"><td class="mdescLeft">&#160;</td><td class="mdescRight">Forward pass for the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code>.  <br /></td></tr>
<tr class="separator:a1c05ec6cdbddef105a20c400d0515471"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a2f76355b646a9c9f1a0972ad87f6a260" id="r_a2f76355b646a9c9f1a0972ad87f6a260"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a2f76355b646a9c9f1a0972ad87f6a260">backward</a> () override</td></tr>
<tr class="memdesc:a2f76355b646a9c9f1a0972ad87f6a260"><td class="mdescLeft">&#160;</td><td class="mdescRight">Backward pass for the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code>.  <br /></td></tr>
<tr class="separator:a2f76355b646a9c9f1a0972ad87f6a260"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a7ac1292b280afcd86b31853b1275c1c4" id="r_a7ac1292b280afcd86b31853b1275c1c4"><td class="memItemLeft" align="right" valign="top">Tensor::value_type&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a7ac1292b280afcd86b31853b1275c1c4">getLoss</a> () const</td></tr>
<tr class="memdesc:a7ac1292b280afcd86b31853b1275c1c4"><td class="mdescLeft">&#160;</td><td class="mdescRight">Retrieves the loss value stored in the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code>.  <br /></td></tr>
<tr class="separator:a7ac1292b280afcd86b31853b1275c1c4"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:ac340bd5a932808333e08e8bf24d53039" id="r_ac340bd5a932808333e08e8bf24d53039"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#ac340bd5a932808333e08e8bf24d53039">print</a> (std::ostream &amp;os) const override</td></tr>
<tr class="memdesc:ac340bd5a932808333e08e8bf24d53039"><td class="mdescLeft">&#160;</td><td class="mdescRight">Prints the type, data, gradient, and loss of the node.  <br /></td></tr>
<tr class="separator:ac340bd5a932808333e08e8bf24d53039"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="inherit_header pub_methods_classnz_1_1nodes_1_1_node"><td colspan="2" onclick="javascript:dynsection.toggleInherit('pub_methods_classnz_1_1nodes_1_1_node')"><img src="closed.png" alt="-"/>&#160;Public Member Functions inherited from <a class="el" href="classnz_1_1nodes_1_1_node.html">nz::nodes::Node</a></td></tr>
<tr class="memitem:a9b85913e12422bb4ac2fff483427bb47 inherit pub_methods_classnz_1_1nodes_1_1_node" id="r_a9b85913e12422bb4ac2fff483427bb47"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classnz_1_1nodes_1_1_node.html#a9b85913e12422bb4ac2fff483427bb47">dataInject</a> (Tensor::value_type *data, bool grad=false) const</td></tr>
<tr class="memdesc:a9b85913e12422bb4ac2fff483427bb47 inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="mdescLeft">&#160;</td><td class="mdescRight">Injects data into a relevant tensor object, optionally setting its gradient requirement.  <br /></td></tr>
<tr class="separator:a9b85913e12422bb4ac2fff483427bb47 inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a609f1730085dd1d31e0ddcbbae48a065 inherit pub_methods_classnz_1_1nodes_1_1_node" id="r_a609f1730085dd1d31e0ddcbbae48a065"><td class="memTemplParams" colspan="2">template&lt;typename Iterator &gt; </td></tr>
<tr class="memitem:a609f1730085dd1d31e0ddcbbae48a065 inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="memTemplItemLeft" align="right" valign="top">void&#160;</td><td class="memTemplItemRight" valign="bottom"><a class="el" href="classnz_1_1nodes_1_1_node.html#a609f1730085dd1d31e0ddcbbae48a065">dataInject</a> (Iterator begin, Iterator end, const bool grad=false) const</td></tr>
<tr class="memdesc:a609f1730085dd1d31e0ddcbbae48a065 inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="mdescLeft">&#160;</td><td class="mdescRight">Injects data from an iterator range into the output tensor of the InputNode, optionally setting its gradient requirement.  <br /></td></tr>
<tr class="separator:a609f1730085dd1d31e0ddcbbae48a065 inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:af8b4bab3271df92ca1f0914f7a97b1e8 inherit pub_methods_classnz_1_1nodes_1_1_node" id="r_af8b4bab3271df92ca1f0914f7a97b1e8"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classnz_1_1nodes_1_1_node.html#af8b4bab3271df92ca1f0914f7a97b1e8">dataInject</a> (const std::initializer_list&lt; Tensor::value_type &gt; &amp;data, bool grad=false) const</td></tr>
<tr class="memdesc:af8b4bab3271df92ca1f0914f7a97b1e8 inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="mdescLeft">&#160;</td><td class="mdescRight">Injects data from a std::initializer_list into the output tensor of the <a class="el" href="classnz_1_1nodes_1_1_node.html" title="Base class for nodes in a neural network or computational graph.">Node</a>, optionally setting its gradient requirement.  <br /></td></tr>
<tr class="separator:af8b4bab3271df92ca1f0914f7a97b1e8 inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="memSeparator" colspan="2">&#160;</td></tr>
</table>
<a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2>
<div class="textblock"><p>Base class for loss function nodes in a computational graph. </p>
<p>The <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> class serves as the base class for all nodes representing loss functions in a neural network. It connects to the output of a node that produces the final result, and it computes the loss based on that result. During the forward pass, it simply copies the output of the input node, and during the backward pass, it sets the gradient of the output tensor to 1, effectively marking the end of the gradient flow.</p>
<p>The <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> class is used as a parent class for more specific loss function nodes (such as Mean Squared Error or Cross-Entropy loss), which can further extend its functionality to compute the actual loss and update the <code>loss</code> member.</p>
<p>Key features:</p><ul>
<li><b>Loss Calculation</b>: The <code>loss</code> member variable holds the value of the computed loss. Specific loss functions can update this value by extending the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> class.</li>
<li><b>Forward Pass</b>: The <code><a class="el" href="#a1c05ec6cdbddef105a20c400d0515471" title="Forward pass for the OutputNode.">forward()</a></code> method simply sets the <code>output</code> member to the output of the input node.</li>
<li><b>Backward Pass</b>: The <code><a class="el" href="#a2f76355b646a9c9f1a0972ad87f6a260" title="Backward pass for the OutputNode.">backward()</a></code> method sets the gradient of the output tensor to 1, which marks the start of gradient propagation for the backward pass.</li>
<li><b>Loss Access</b>: The <code><a class="el" href="#a7ac1292b280afcd86b31853b1275c1c4" title="Retrieves the loss value stored in the OutputNode.">getLoss()</a></code> method provides access to the loss value stored in the <code>loss</code> member.</li>
</ul>
<p>This class is part of the <code><a class="el" href="namespacenz_1_1nodes.html" title="Contains classes and functionality for nodes in a neural network or computational graph.">nz::nodes</a></code> namespace, and it is designed to be extended for implementing various loss functions.</p>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> class does not perform any specific loss computation. It is intended to be a base class for more specific loss function nodes that compute and track the actual loss.</li>
<li>The backward pass simply sets the gradient to 1, which is appropriate for the output layer of a neural network where the loss gradient is propagated back.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md88"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a> input({3, 3}, <span class="keyword">true</span>);  <span class="comment">// Create an input node with shape {3, 3} and requires gradients</span></div>
<div class="line">input.output-&gt;fill(1.0f);  <span class="comment">// Fill the input tensor with 1.0</span></div>
<div class="line"> </div>
<div class="line"><a class="code hl_class" href="classnz_1_1nodes_1_1io_1_1_output_node.html">OutputNode</a> output(&amp;input);  <span class="comment">// Create an OutputNode and pass the input node as the source</span></div>
<div class="line">output.forward();  <span class="comment">// Forward pass: output now points to the input node&#39;s output</span></div>
<div class="line">output.backward();  <span class="comment">// Backward pass: set the gradient of the input node&#39;s output to 1</span></div>
<div class="line"> </div>
<div class="line">std::cout &lt;&lt; <span class="stringliteral">&quot;Loss: &quot;</span> &lt;&lt; output.getLoss() &lt;&lt; std::endl;  <span class="comment">// Access the loss value (which is 0 initially)</span></div>
<div class="ttc" id="aclassnz_1_1nodes_1_1io_1_1_input_node_html"><div class="ttname"><a href="classnz_1_1nodes_1_1io_1_1_input_node.html">nz::nodes::io::InputNode</a></div><div class="ttdoc">Represents an input node in a computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_nodes_8cuh_source.html#l00437">Nodes.cuh:437</a></div></div>
<div class="ttc" id="aclassnz_1_1nodes_1_1io_1_1_output_node_html"><div class="ttname"><a href="classnz_1_1nodes_1_1io_1_1_output_node.html">nz::nodes::io::OutputNode</a></div><div class="ttdoc">Base class for loss function nodes in a computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_nodes_8cuh_source.html#l00683">Nodes.cuh:683</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="#a1c05ec6cdbddef105a20c400d0515471" title="Forward pass for the OutputNode.">forward()</a> for the <a class="el" href="#a1c05ec6cdbddef105a20c400d0515471" title="Forward pass for the OutputNode.">forward</a> pass computation method. </dd>
<dd>
<a class="el" href="#a2f76355b646a9c9f1a0972ad87f6a260" title="Backward pass for the OutputNode.">backward()</a> for the <a class="el" href="#a2f76355b646a9c9f1a0972ad87f6a260" title="Backward pass for the OutputNode.">backward</a> pass gradient propagation method.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/11/29 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_nodes_8cuh_source.html#l00683">683</a> of file <a class="el" href="_nodes_8cuh_source.html">Nodes.cuh</a>.</p>
</div><h2 class="groupheader">Constructor &amp; Destructor Documentation</h2>
<a id="a98af165dc12d16d812708c3cdc9097b2" name="a98af165dc12d16d812708c3cdc9097b2"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a98af165dc12d16d812708c3cdc9097b2">&#9670;&#160;</a></span>OutputNode()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">nz::nodes::io::OutputNode::OutputNode </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *</td>          <td class="paramname"><span class="paramname"><em>input</em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">explicit</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Constructor to initialize an <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> with a given input node. </p>
<p>This constructor initializes an <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> by accepting an input node. The <code>output</code> of this node will be set to the <code>output</code> of the provided input node during the forward pass. The <code>loss</code> is initialized to <code>0</code>, and the <code>type</code> is set to <code>"Output"</code>.</p>
<p>The <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> class is designed to represent the output layer of a neural network, and it serves as the base class for loss function nodes. The <code><a class="el" href="#a1c05ec6cdbddef105a20c400d0515471" title="Forward pass for the OutputNode.">forward()</a></code> and <code><a class="el" href="#a2f76355b646a9c9f1a0972ad87f6a260" title="Backward pass for the OutputNode.">backward()</a></code> methods will be responsible for propagating data and gradients, respectively.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">input</td><td>A pointer to the <code><a class="el" href="classnz_1_1nodes_1_1_node.html" title="Base class for nodes in a neural network or computational graph.">Node</a></code> that serves as the input to the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code>. The <code>output</code> of this node will be used as the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code>'s output.</td></tr>
  </table>
  </dd>
</dl>
<p>This constructor sets up the node with a reference to its input, allowing the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> to pass data from its input node and compute the loss during the forward and backward passes.</p>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html" title="Represents an input node in a computational graph.">InputNode</a></code> or any other node that provides the final output of the network can be passed to this constructor.</li>
<li>The <code>loss</code> member is initialized to <code>0</code> and can be updated by specific loss function implementations in derived classes.</li>
</ul>
</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/11/29 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_nodes_8cu_source.html#l00052">52</a> of file <a class="el" href="_nodes_8cu_source.html">Nodes.cu</a>.</p>

</div>
</div>
<h2 class="groupheader">Member Function Documentation</h2>
<a id="a2f76355b646a9c9f1a0972ad87f6a260" name="a2f76355b646a9c9f1a0972ad87f6a260"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a2f76355b646a9c9f1a0972ad87f6a260">&#9670;&#160;</a></span>backward()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">void nz::nodes::io::OutputNode::backward </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">override</span><span class="mlabel">virtual</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Backward pass for the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code>. </p>
<p>The <code><a class="el" href="#a2f76355b646a9c9f1a0972ad87f6a260" title="Backward pass for the OutputNode.">backward()</a></code> method for the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> sets the gradient of the output tensor to 1. If the input tensor of the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> requires gradients (i.e., it is part of the model parameters), the gradient of the input tensor is set to 1. This is a standard operation in the backward pass for the output layer, as it marks the start of the gradient propagation in the network.</p>
<p>This method does not perform any gradient calculations for the output node itself. Instead, it ensures that the gradient of the input node’s output is set to 1, which is necessary for the backpropagation process in the neural network.</p>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The <code><a class="el" href="#a2f76355b646a9c9f1a0972ad87f6a260" title="Backward pass for the OutputNode.">backward()</a></code> method simply fills the gradient of the input tensor with 1. This is because the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> represents the output layer, where the gradient is typically set to 1 as the starting point of backpropagation.</li>
<li>This method ensures that the gradient for the input node’s <code>output</code> is available for further propagation through the network during the backward pass.</li>
</ul>
</dd></dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="#a1c05ec6cdbddef105a20c400d0515471" title="Forward pass for the OutputNode.">forward()</a> for the <a class="el" href="#a1c05ec6cdbddef105a20c400d0515471" title="Forward pass for the OutputNode.">forward</a> pass computation method.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/11/29 </dd></dl>

<p>Implements <a class="el" href="classnz_1_1nodes_1_1_node.html#a0a9ecbaa3d790ba38e8218aca7837fd0">nz::nodes::Node</a>.</p>

<p class="definition">Definition at line <a class="el" href="_nodes_8cu_source.html#l00062">62</a> of file <a class="el" href="_nodes_8cu_source.html">Nodes.cu</a>.</p>

</div>
</div>
<a id="a1c05ec6cdbddef105a20c400d0515471" name="a1c05ec6cdbddef105a20c400d0515471"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a1c05ec6cdbddef105a20c400d0515471">&#9670;&#160;</a></span>forward()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">void nz::nodes::io::OutputNode::forward </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">override</span><span class="mlabel">virtual</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Forward pass for the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code>. </p>
<p>The <code><a class="el" href="#a1c05ec6cdbddef105a20c400d0515471" title="Forward pass for the OutputNode.">forward()</a></code> method for the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> sets the <code>output</code> member of the node to be the same as the <code>output</code> of its input node. This effectively passes the output from the input node to the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> without any modification. Since the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> does not perform any computation itself, it simply relays the input node's output during the forward pass, making it equivalent to its input node's output.</p>
<p>This method is typically used in the context of a neural network, where the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> represents the final layer, and it connects the output of the network to the loss function for loss computation and backpropagation.</p>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The <code><a class="el" href="#a1c05ec6cdbddef105a20c400d0515471" title="Forward pass for the OutputNode.">forward()</a></code> method does not alter the data from the input node; it merely sets the <code>output</code> of the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> to be the same as the input node's <code>output</code>.</li>
<li>This method is implemented as part of the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code> class to conform to the interface defined by its base class <code><a class="el" href="classnz_1_1nodes_1_1_node.html" title="Base class for nodes in a neural network or computational graph.">Node</a></code>.</li>
</ul>
</dd></dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="#a2f76355b646a9c9f1a0972ad87f6a260" title="Backward pass for the OutputNode.">backward()</a> for the <a class="el" href="#a2f76355b646a9c9f1a0972ad87f6a260" title="Backward pass for the OutputNode.">backward</a> pass gradient propagation method.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/11/29 </dd></dl>

<p>Implements <a class="el" href="classnz_1_1nodes_1_1_node.html#a8a828c2e91a4aa2a9ab7b94554e4685b">nz::nodes::Node</a>.</p>

<p class="definition">Definition at line <a class="el" href="_nodes_8cu_source.html#l00058">58</a> of file <a class="el" href="_nodes_8cu_source.html">Nodes.cu</a>.</p>

</div>
</div>
<a id="a7ac1292b280afcd86b31853b1275c1c4" name="a7ac1292b280afcd86b31853b1275c1c4"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a7ac1292b280afcd86b31853b1275c1c4">&#9670;&#160;</a></span>getLoss()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">Tensor::value_type nz::nodes::io::OutputNode::getLoss </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td> const</td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">nodiscard</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Retrieves the loss value stored in the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code>. </p>
<p>The <code><a class="el" href="#a7ac1292b280afcd86b31853b1275c1c4" title="Retrieves the loss value stored in the OutputNode.">getLoss()</a></code> method returns the value of the loss that is stored in the <code>loss</code> member of the <code><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">OutputNode</a></code>. This value is typically updated by a derived class (e.g., a specific loss function class like Mean Squared Error or Cross-Entropy Loss) during the forward pass. The <code>loss</code> represents the discrepancy between the predicted output and the actual target output in the context of a neural network.</p>
<p>The <code><a class="el" href="#a7ac1292b280afcd86b31853b1275c1c4" title="Retrieves the loss value stored in the OutputNode.">getLoss()</a></code> function provides access to the computed loss value, which is essential for monitoring the network’s performance during training and optimization.</p>
<dl class="section return"><dt>Returns</dt><dd>The current loss value stored in the <code>loss</code> member, which is of type <code>Tensor::value_type</code>.</dd></dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>This method does not modify the loss value; it simply returns the current stored loss value.</li>
<li>The actual loss computation happens in the derived class, such as <code>MeanSquaredErrorNode</code> or <code>BinaryCrossEntropyNode</code>.</li>
</ul>
</dd></dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="#a1c05ec6cdbddef105a20c400d0515471" title="Forward pass for the OutputNode.">forward()</a>, the forward-pass computation method in which the <code>loss</code> value is typically updated.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/11/29 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_nodes_8cu_source.html#l00068">68</a> of file <a class="el" href="_nodes_8cu_source.html">Nodes.cu</a>.</p>

</div>
</div>
<a id="ac340bd5a932808333e08e8bf24d53039" name="ac340bd5a932808333e08e8bf24d53039"></a>
<h2 class="memtitle"><span class="permalink"><a href="#ac340bd5a932808333e08e8bf24d53039">&#9670;&#160;</a></span>print()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">void nz::nodes::io::OutputNode::print </td>
          <td>(</td>
          <td class="paramtype">std::ostream &amp;</td>          <td class="paramname"><span class="paramname"><em>os</em></span></td><td>)</td>
          <td> const</td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">override</span><span class="mlabel">virtual</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Prints the type, data, gradient, and loss of the node. </p>
<p>The <code><a class="el" href="#ac340bd5a932808333e08e8bf24d53039" title="Prints the type, data, gradient, and loss of the node.">print()</a></code> method outputs the information about the node, including its type, the tensor data stored in the node's output, the corresponding gradient, and the loss value (if available). This is useful for debugging and inspecting the state of nodes in a computational graph or during training, allowing for easy visualization of the node's content, gradients, and any associated loss.</p>
<p>The method outputs the following details:</p><ul>
<li><b>Type</b>: The type of the node (e.g., the operation it represents, such as "MatrixMul", "ReLU", etc.).</li>
<li><b>Data</b>: The tensor data stored in the node's <code>output</code> tensor.</li>
<li><b>Gradient</b>: If the node has a computed gradient, it is also displayed, providing insights into the gradient values that are being backpropagated through the network during training.</li>
<li><b>Loss</b>: The loss value associated with the node (if applicable). This value can be used to track the error or discrepancy during the forward-backward pass in training.</li>
</ul>
<p>This method is primarily used for debugging and monitoring the state of tensors, gradients, and loss, making it easier to inspect how the data, gradients, and error values flow through the network.</p>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The <code>output</code> tensor should contain both the data and the gradient information, and both are printed when this method is called.</li>
<li>The <code>loss</code> value will only be printed if it is associated with the node. If the node does not have a loss value, this field may be omitted.</li>
<li>This method is typically used during development or debugging phases and should not be used in performance-critical code as it involves printing potentially large amounts of data.</li>
</ul>
</dd></dl>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">os</td><td>The output stream (e.g., <code>std::cout</code>) to which the node's information will be printed.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/11/29 </dd></dl>

<p>Reimplemented from <a class="el" href="classnz_1_1nodes_1_1_node.html#a687ee9c34eb61f8f28caa201ca42696e">nz::nodes::Node</a>.</p>

<p class="definition">Definition at line <a class="el" href="_nodes_8cu_source.html#l00072">72</a> of file <a class="el" href="_nodes_8cu_source.html">Nodes.cu</a>.</p>
<div class="dynheader">
Here is the call graph for this function:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1nodes_1_1io_1_1_output_node_ac340bd5a932808333e08e8bf24d53039_cgraph.png" border="0" usemap="#aclassnz_1_1nodes_1_1io_1_1_output_node_ac340bd5a932808333e08e8bf24d53039_cgraph" alt=""/></div>
<map name="aclassnz_1_1nodes_1_1io_1_1_output_node_ac340bd5a932808333e08e8bf24d53039_cgraph" id="aclassnz_1_1nodes_1_1io_1_1_output_node_ac340bd5a932808333e08e8bf24d53039_cgraph">
<area shape="rect" title="Prints the type, data, gradient, and loss of the node." alt="" coords="5,5,180,48"/>
<area shape="rect" href="classnz_1_1nodes_1_1_node.html#a687ee9c34eb61f8f28caa201ca42696e" title="Prints the type, data, and gradient of the node." alt="" coords="228,13,378,40"/>
<area shape="poly" title=" " alt="" coords="180,24,212,24,212,29,180,29"/>
</map>
</div>

</div>
</div>
<hr/>The documentation for this class was generated from the following files:<ul>
<li>D:/Users/Mgepahmge/Documents/C Program/NeuZephyr/include/NeuZephyr/<a class="el" href="_nodes_8cuh_source.html">Nodes.cuh</a></li>
<li>D:/Users/Mgepahmge/Documents/C Program/NeuZephyr/src/<a class="el" href="_nodes_8cu_source.html">Nodes.cu</a></li>
</ul>
</div><!-- contents -->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated by&#160;<a href="https://www.doxygen.org/index.html"><img class="footer" src="doxygen.svg" width="104" height="31" alt="doxygen"/></a> 1.12.0
</small></address>
</div><!-- doc-content -->
</body>
</html>
