<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "https://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=11"/>
<meta name="generator" content="Doxygen 1.12.0"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<title>NeuZephyr: nz::graph::ComputeGraph Class Reference</title>
<link rel="icon" href="NZ_logo2.png" type="image/png" />
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="resize.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
 <tbody>
 <tr id="projectrow">
  <td id="projectlogo"><img alt="Logo" src="NZ_logo2.png"/></td>
  <td id="projectalign">
   <div id="projectname">NeuZephyr
   </div>
   <div id="projectbrief">Simple DL Framework</div>
  </td>
 </tr>
 </tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.12.0 -->
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&amp;dn=expat.txt MIT */
/* On DOM ready, initialise Doxygen's code-folding widget with all
   foldable sections at their default (expanded) state. */
jQuery(function () {
  codefold.init(0);
});
/* @license-end */
</script>
  <div id="navrow1" class="tabs">
    <ul class="tablist">
      <li><a href="index.html"><span>Main&#160;Page</span></a></li>
      <li><a href="pages.html"><span>Related&#160;Pages</span></a></li>
      <li><a href="namespaces.html"><span>Namespaces</span></a></li>
      <li class="current"><a href="annotated.html"><span>Classes</span></a></li>
      <li><a href="files.html"><span>Files</span></a></li>
    </ul>
  </div>
  <div id="navrow2" class="tabs2">
    <ul class="tablist">
      <li><a href="annotated.html"><span>Class&#160;List</span></a></li>
      <li><a href="classes.html"><span>Class&#160;Index</span></a></li>
      <li><a href="inherits.html"><span>Class&#160;Hierarchy</span></a></li>
      <li><a href="functions.html"><span>Class&#160;Members</span></a></li>
    </ul>
  </div>
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&amp;dn=expat.txt MIT */
/* On DOM ready, activate the draggable splitter for the side
   navigation panel; `false` selects the left-to-right layout. */
jQuery(function () {
  initResizable(false);
});
/* @license-end */
</script>
<div id="nav-path" class="navpath">
  <ul>
<li class="navelem"><b>nz</b></li><li class="navelem"><a class="el" href="namespacenz_1_1graph.html">graph</a></li><li class="navelem"><a class="el" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a></li>  </ul>
</div>
</div><!-- top -->
<div id="doc-content">
<div class="header">
  <div class="summary">
<a href="#friends">Friends</a> &#124;
<a href="classnz_1_1graph_1_1_compute_graph-members.html">List of all members</a>  </div>
  <div class="headertitle"><div class="title">nz::graph::ComputeGraph Class Reference</div></div>
</div><!--header-->
<div class="contents">

<p>Represents a computational graph, which manages nodes and the computation flow.  
 <a href="#details">More...</a></p>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a id="pub-methods" name="pub-methods"></a>
Public Member Functions</h2></td></tr>
<tr><td colspan="2"><div class="groupHeader">Constructors and Destructors</div></td></tr>
<tr class="memitem:a328a263c572540a330ba650651fb7722" id="r_a328a263c572540a330ba650651fb7722"><td class="memItemLeft" align="right" valign="top">&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a328a263c572540a330ba650651fb7722">ComputeGraph</a> ()=default</td></tr>
<tr class="memdesc:a328a263c572540a330ba650651fb7722"><td class="mdescLeft">&#160;</td><td class="mdescRight">Default constructor for the <a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a> class.  <br /></td></tr>
<tr class="separator:a328a263c572540a330ba650651fb7722"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a6397db45a249af6389dd3f7548b5aa0a" id="r_a6397db45a249af6389dd3f7548b5aa0a"><td class="memItemLeft" align="right" valign="top">&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a6397db45a249af6389dd3f7548b5aa0a">~ComputeGraph</a> ()=default</td></tr>
<tr class="memdesc:a6397db45a249af6389dd3f7548b5aa0a"><td class="mdescLeft">&#160;</td><td class="mdescRight">Destructor for the <a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a> class.  <br /></td></tr>
<tr class="separator:a6397db45a249af6389dd3f7548b5aa0a"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr><td colspan="2"><div class="groupHeader">Graph Builders</div></td></tr>
<tr class="memitem:a75a4b72c93448c8617a7af41cc471897" id="r_a75a4b72c93448c8617a7af41cc471897"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a75a4b72c93448c8617a7af41cc471897">addInput</a> (const <a class="el" href="classnz_1_1data_1_1_dimension.html">Tensor::shape_type</a> &amp;shape, bool requires_grad=false, const std::string &amp;name=&quot;default&quot;)</td></tr>
<tr class="memdesc:a75a4b72c93448c8617a7af41cc471897"><td class="mdescLeft">&#160;</td><td class="mdescRight">Adds an input node to the computational graph.  <br /></td></tr>
<tr class="separator:a75a4b72c93448c8617a7af41cc471897"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a86b54a014876f245d64c524dc70075ea" id="r_a86b54a014876f245d64c524dc70075ea"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a86b54a014876f245d64c524dc70075ea">addInput</a> (const <a class="el" href="classnz_1_1data_1_1_tensor.html">Tensor</a> &amp;tensor, const std::string &amp;name=&quot;default&quot;)</td></tr>
<tr class="memdesc:a86b54a014876f245d64c524dc70075ea"><td class="mdescLeft">&#160;</td><td class="mdescRight">Adds an input node to the computational graph using a Tensor.  <br /></td></tr>
<tr class="separator:a86b54a014876f245d64c524dc70075ea"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a8ab70380135ecdeb3652b76c2bf2766c" id="r_a8ab70380135ecdeb3652b76c2bf2766c"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a8ab70380135ecdeb3652b76c2bf2766c">addInput</a> (<a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a> *input, const std::string &amp;name=&quot;default&quot;)</td></tr>
<tr class="memdesc:a8ab70380135ecdeb3652b76c2bf2766c"><td class="mdescLeft">&#160;</td><td class="mdescRight">Adds an existing <code>InputNode</code> to the computational graph.  <br /></td></tr>
<tr class="separator:a8ab70380135ecdeb3652b76c2bf2766c"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:ab75d30dc8133099118f60bdd07284f01" id="r_ab75d30dc8133099118f60bdd07284f01"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#ab75d30dc8133099118f60bdd07284f01">addInput</a> (const <a class="el" href="classnz_1_1data_1_1_dimension.html">Tensor::shape_type</a> &amp;shape, Tensor::value_type *data, bool requires_grad, bool host, const std::string &amp;name=&quot;default&quot;)</td></tr>
<tr class="memdesc:ab75d30dc8133099118f60bdd07284f01"><td class="mdescLeft">&#160;</td><td class="mdescRight">Adds an input node to the computation graph and returns a pointer to the newly created InputNode.  <br /></td></tr>
<tr class="separator:ab75d30dc8133099118f60bdd07284f01"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a3ef35c2707abf965e7badf00850fb747" id="r_a3ef35c2707abf965e7badf00850fb747"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a3ef35c2707abf965e7badf00850fb747">addInput</a> (const <a class="el" href="classnz_1_1data_1_1_dimension.html">Tensor::shape_type</a> &amp;shape, const std::initializer_list&lt; Tensor::value_type &gt; &amp;data, bool requires_grad, const std::string &amp;name=&quot;default&quot;)</td></tr>
<tr class="memdesc:a3ef35c2707abf965e7badf00850fb747"><td class="mdescLeft">&#160;</td><td class="mdescRight">Adds an InputNode to the <a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a> using a std::initializer_list for data and returns a pointer to the created node.  <br /></td></tr>
<tr class="separator:a3ef35c2707abf965e7badf00850fb747"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a5ce9a271dcea6cc2b9490cd815ee53ed" id="r_a5ce9a271dcea6cc2b9490cd815ee53ed"><td class="memTemplParams" colspan="2">template&lt;typename NodeType &gt; </td></tr>
<tr class="memitem:a5ce9a271dcea6cc2b9490cd815ee53ed"><td class="memTemplItemLeft" align="right" valign="top">NodeType *&#160;</td><td class="memTemplItemRight" valign="bottom"><a class="el" href="#a5ce9a271dcea6cc2b9490cd815ee53ed">addNode</a> (NodeType *node, const std::string &amp;name=&quot;default&quot;)</td></tr>
<tr class="memdesc:a5ce9a271dcea6cc2b9490cd815ee53ed"><td class="mdescLeft">&#160;</td><td class="mdescRight">Adds a node of any type to the computational graph.  <br /></td></tr>
<tr class="separator:a5ce9a271dcea6cc2b9490cd815ee53ed"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a21a839e5d155c752db1f2130812bc787" id="r_a21a839e5d155c752db1f2130812bc787"><td class="memTemplParams" colspan="2">template&lt;typename... Args&gt; </td></tr>
<tr class="memitem:a21a839e5d155c752db1f2130812bc787"><td class="memTemplItemLeft" align="right" valign="top"><a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *&#160;</td><td class="memTemplItemRight" valign="bottom"><a class="el" href="#a21a839e5d155c752db1f2130812bc787">addNode</a> (const std::string &amp;type, const std::string &amp;input1, const std::string &amp;input2, const std::string &amp;name=&quot;default&quot;, Args... args)</td></tr>
<tr class="memdesc:a21a839e5d155c752db1f2130812bc787"><td class="mdescLeft">&#160;</td><td class="mdescRight">Adds a node to the computational graph based on the provided node type and inputs.  <br /></td></tr>
<tr class="separator:a21a839e5d155c752db1f2130812bc787"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a47cde0bdd9c961d55bfd16c9adb8810e" id="r_a47cde0bdd9c961d55bfd16c9adb8810e"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html">OutputNode</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a47cde0bdd9c961d55bfd16c9adb8810e">addOutput</a> (<a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html">OutputNode</a> *node, const std::string &amp;name=&quot;default&quot;)</td></tr>
<tr class="memdesc:a47cde0bdd9c961d55bfd16c9adb8810e"><td class="mdescLeft">&#160;</td><td class="mdescRight">Adds an output node to the computational graph.  <br /></td></tr>
<tr class="separator:a47cde0bdd9c961d55bfd16c9adb8810e"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr><td colspan="2"><div class="groupHeader">Modifiers</div></td></tr>
<tr class="memitem:a5960e75d631200994c0a2f78f58674dd" id="r_a5960e75d631200994c0a2f78f58674dd"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a5960e75d631200994c0a2f78f58674dd">topologicalSort</a> ()</td></tr>
<tr class="memdesc:a5960e75d631200994c0a2f78f58674dd"><td class="mdescLeft">&#160;</td><td class="mdescRight">Performs topological sorting on the computational graph.  <br /></td></tr>
<tr class="separator:a5960e75d631200994c0a2f78f58674dd"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a3f94019acc205a47726c0b0797b0e631" id="r_a3f94019acc205a47726c0b0797b0e631"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a3f94019acc205a47726c0b0797b0e631">zeroGrad</a> () const</td></tr>
<tr class="memdesc:a3f94019acc205a47726c0b0797b0e631"><td class="mdescLeft">&#160;</td><td class="mdescRight">Resets the gradients of all nodes in the computational graph.  <br /></td></tr>
<tr class="separator:a3f94019acc205a47726c0b0797b0e631"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a856ffb66fa42357ab1a12f9f74d2aba4" id="r_a856ffb66fa42357ab1a12f9f74d2aba4"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a856ffb66fa42357ab1a12f9f74d2aba4">randomize</a> (const std::string &amp;name, unsigned long long seed=0)</td></tr>
<tr class="memdesc:a856ffb66fa42357ab1a12f9f74d2aba4"><td class="mdescLeft">&#160;</td><td class="mdescRight">Randomizes the output tensor of a specified node in the computational graph.  <br /></td></tr>
<tr class="separator:a856ffb66fa42357ab1a12f9f74d2aba4"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a8d767679a2ff3fcb75a863e7ff808170" id="r_a8d767679a2ff3fcb75a863e7ff808170"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a8d767679a2ff3fcb75a863e7ff808170">randomize</a> (const <a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *node, unsigned long long seed=0)</td></tr>
<tr class="memdesc:a8d767679a2ff3fcb75a863e7ff808170"><td class="mdescLeft">&#160;</td><td class="mdescRight">Randomizes the output tensor of a specified node in the computational graph.  <br /></td></tr>
<tr class="separator:a8d767679a2ff3fcb75a863e7ff808170"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a90e4737107c4ec96a1a95022daa6de2f" id="r_a90e4737107c4ec96a1a95022daa6de2f"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a90e4737107c4ec96a1a95022daa6de2f">randomizeAll</a> () const</td></tr>
<tr class="memdesc:a90e4737107c4ec96a1a95022daa6de2f"><td class="mdescLeft">&#160;</td><td class="mdescRight">Randomizes the output tensors of all input nodes in the computational graph.  <br /></td></tr>
<tr class="separator:a90e4737107c4ec96a1a95022daa6de2f"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:ae732145241d1da547219a3e8b95a04ee" id="r_ae732145241d1da547219a3e8b95a04ee"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#ae732145241d1da547219a3e8b95a04ee">fill</a> (const std::string &amp;name, Tensor::value_type val)</td></tr>
<tr class="memdesc:ae732145241d1da547219a3e8b95a04ee"><td class="mdescLeft">&#160;</td><td class="mdescRight">Fills the output tensor of a specified node with a given value.  <br /></td></tr>
<tr class="separator:ae732145241d1da547219a3e8b95a04ee"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a0f7e2b25f6b75998e0e912320607f6e8" id="r_a0f7e2b25f6b75998e0e912320607f6e8"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a0f7e2b25f6b75998e0e912320607f6e8">fill</a> (const <a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *node, Tensor::value_type val)</td></tr>
<tr class="memdesc:a0f7e2b25f6b75998e0e912320607f6e8"><td class="mdescLeft">&#160;</td><td class="mdescRight">Fills the output tensor of a specified node with a given value.  <br /></td></tr>
<tr class="separator:a0f7e2b25f6b75998e0e912320607f6e8"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:adf21c05c4515016d34d5fc7d639b9e40" id="r_adf21c05c4515016d34d5fc7d639b9e40"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#adf21c05c4515016d34d5fc7d639b9e40">fillAll</a> (Tensor::value_type val) const</td></tr>
<tr class="memdesc:adf21c05c4515016d34d5fc7d639b9e40"><td class="mdescLeft">&#160;</td><td class="mdescRight">Fills the output tensors of all input nodes with a given value.  <br /></td></tr>
<tr class="separator:adf21c05c4515016d34d5fc7d639b9e40"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a8b3498c50429d631b07b5906e7455614" id="r_a8b3498c50429d631b07b5906e7455614"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a8b3498c50429d631b07b5906e7455614">setInput</a> (const std::string &amp;name, Tensor::value_type *data)</td></tr>
<tr class="memdesc:a8b3498c50429d631b07b5906e7455614"><td class="mdescLeft">&#160;</td><td class="mdescRight">Sets the input data for a specified node in the computational graph.  <br /></td></tr>
<tr class="separator:a8b3498c50429d631b07b5906e7455614"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:aa32640d8edb36f42e6093cd70037b5f3" id="r_aa32640d8edb36f42e6093cd70037b5f3"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#aa32640d8edb36f42e6093cd70037b5f3">setInput</a> (const <a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *node, Tensor::value_type *data)</td></tr>
<tr class="memdesc:aa32640d8edb36f42e6093cd70037b5f3"><td class="mdescLeft">&#160;</td><td class="mdescRight">Sets the input data for a specified node in the computational graph using a node pointer.  <br /></td></tr>
<tr class="separator:aa32640d8edb36f42e6093cd70037b5f3"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr><td colspan="2"><div class="groupHeader">Getters</div></td></tr>
<tr class="memitem:aede9d05b6c8e7394ea730b5ffea42164" id="r_aede9d05b6c8e7394ea730b5ffea42164"><td class="memItemLeft" align="right" valign="top">bool&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#aede9d05b6c8e7394ea730b5ffea42164">isSorted</a> () const</td></tr>
<tr class="memdesc:aede9d05b6c8e7394ea730b5ffea42164"><td class="mdescLeft">&#160;</td><td class="mdescRight">Checks whether the computational graph has been topologically sorted.  <br /></td></tr>
<tr class="separator:aede9d05b6c8e7394ea730b5ffea42164"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a75f918fb756d5f7155ee49092ce713e1" id="r_a75f918fb756d5f7155ee49092ce713e1"><td class="memItemLeft" align="right" valign="top">Tensor::value_type *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a75f918fb756d5f7155ee49092ce713e1">getOutput</a> () const</td></tr>
<tr class="memdesc:a75f918fb756d5f7155ee49092ce713e1"><td class="mdescLeft">&#160;</td><td class="mdescRight">Retrieves the output data of the first output node in the computational graph.  <br /></td></tr>
<tr class="separator:a75f918fb756d5f7155ee49092ce713e1"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:aef02d3290fb92b142be346f2fdb125c5" id="r_aef02d3290fb92b142be346f2fdb125c5"><td class="memItemLeft" align="right" valign="top">Tensor::value_type *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#aef02d3290fb92b142be346f2fdb125c5">getOutputHost</a> () const</td></tr>
<tr class="memdesc:aef02d3290fb92b142be346f2fdb125c5"><td class="mdescLeft">&#160;</td><td class="mdescRight">Retrieves the output data of the first output node in the computational graph and copies it to host memory.  <br /></td></tr>
<tr class="separator:aef02d3290fb92b142be346f2fdb125c5"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a69d293299714e489ce8cc2dba1e5a9a5" id="r_a69d293299714e489ce8cc2dba1e5a9a5"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html">OutputNode</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a69d293299714e489ce8cc2dba1e5a9a5">getOutputNode</a> () const</td></tr>
<tr class="memdesc:a69d293299714e489ce8cc2dba1e5a9a5"><td class="mdescLeft">&#160;</td><td class="mdescRight">Retrieves the first output node in the computational graph.  <br /></td></tr>
<tr class="separator:a69d293299714e489ce8cc2dba1e5a9a5"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a3a53a15c04e85da4e6aa0a209fbee4c2" id="r_a3a53a15c04e85da4e6aa0a209fbee4c2"><td class="memItemLeft" align="right" valign="top">Tensor::value_type&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a3a53a15c04e85da4e6aa0a209fbee4c2">getLoss</a> () const</td></tr>
<tr class="memdesc:a3a53a15c04e85da4e6aa0a209fbee4c2"><td class="mdescLeft">&#160;</td><td class="mdescRight">Retrieves the loss value from the first output node in the computational graph.  <br /></td></tr>
<tr class="separator:a3a53a15c04e85da4e6aa0a209fbee4c2"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a1d54fe96c53fec967c8a20e1ac79ceed" id="r_a1d54fe96c53fec967c8a20e1ac79ceed"><td class="memItemLeft" align="right" valign="top">std::ostream &amp;&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a1d54fe96c53fec967c8a20e1ac79ceed">print</a> (std::ostream &amp;os)</td></tr>
<tr class="memdesc:a1d54fe96c53fec967c8a20e1ac79ceed"><td class="mdescLeft">&#160;</td><td class="mdescRight">Prints the details of the computational graph to the provided output stream.  <br /></td></tr>
<tr class="separator:a1d54fe96c53fec967c8a20e1ac79ceed"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a5ae0d765ed40c4973b54dc20a5cddf61" id="r_a5ae0d765ed40c4973b54dc20a5cddf61"><td class="memItemLeft" align="right" valign="top"><a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a5ae0d765ed40c4973b54dc20a5cddf61">operator[]</a> (const std::string &amp;name)</td></tr>
<tr class="memdesc:a5ae0d765ed40c4973b54dc20a5cddf61"><td class="mdescLeft">&#160;</td><td class="mdescRight">Retrieves the node associated with the given name in the computational graph.  <br /></td></tr>
<tr class="separator:a5ae0d765ed40c4973b54dc20a5cddf61"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a1e5d98f57514c6f7a539de2776f48c5d" id="r_a1e5d98f57514c6f7a539de2776f48c5d"><td class="memItemLeft" align="right" valign="top">std::string&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a1e5d98f57514c6f7a539de2776f48c5d">nodesList</a> ()</td></tr>
<tr class="memdesc:a1e5d98f57514c6f7a539de2776f48c5d"><td class="mdescLeft">&#160;</td><td class="mdescRight">Generates a formatted string representing the list of nodes in the <a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a>.  <br /></td></tr>
<tr class="separator:a1e5d98f57514c6f7a539de2776f48c5d"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr><td colspan="2"><div class="groupHeader">Computation</div></td></tr>
<tr class="memitem:a0c5a596f38e6d7e8c1c78e19b9b56167" id="r_a0c5a596f38e6d7e8c1c78e19b9b56167"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a0c5a596f38e6d7e8c1c78e19b9b56167">forward</a> ()</td></tr>
<tr class="memdesc:a0c5a596f38e6d7e8c1c78e19b9b56167"><td class="mdescLeft">&#160;</td><td class="mdescRight">Performs forward propagation on the computational graph.  <br /></td></tr>
<tr class="separator:a0c5a596f38e6d7e8c1c78e19b9b56167"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:adfcad402fe85084ddbb4f8b6415adc05" id="r_adfcad402fe85084ddbb4f8b6415adc05"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#adfcad402fe85084ddbb4f8b6415adc05">backward</a> ()</td></tr>
<tr class="memdesc:adfcad402fe85084ddbb4f8b6415adc05"><td class="mdescLeft">&#160;</td><td class="mdescRight">Performs backward propagation on the computational graph.  <br /></td></tr>
<tr class="separator:adfcad402fe85084ddbb4f8b6415adc05"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:aee306895a3078a0b09d2a4bdf4843200" id="r_aee306895a3078a0b09d2a4bdf4843200"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#aee306895a3078a0b09d2a4bdf4843200">update</a> (<a class="el" href="classnz_1_1opt_1_1_optimizer.html">Optimizer</a> *optimizer) const</td></tr>
<tr class="memdesc:aee306895a3078a0b09d2a4bdf4843200"><td class="mdescLeft">&#160;</td><td class="mdescRight">Updates the parameters of the nodes that require gradients using the provided optimizer.  <br /></td></tr>
<tr class="separator:aee306895a3078a0b09d2a4bdf4843200"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr><td colspan="2"><div class="groupHeader">File Managers</div></td></tr>
<tr class="memitem:a6aa07603223ea714d74fc884218cb50f" id="r_a6aa07603223ea714d74fc884218cb50f"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a6aa07603223ea714d74fc884218cb50f">save</a> (const std::string &amp;path)</td></tr>
<tr class="memdesc:a6aa07603223ea714d74fc884218cb50f"><td class="mdescLeft">&#160;</td><td class="mdescRight">Saves the current computational graph to a JSON file.  <br /></td></tr>
<tr class="separator:a6aa07603223ea714d74fc884218cb50f"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a9748d859d952864bfa498a79c9fb394a" id="r_a9748d859d952864bfa498a79c9fb394a"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a9748d859d952864bfa498a79c9fb394a">load</a> (const std::string &amp;path)</td></tr>
<tr class="memdesc:a9748d859d952864bfa498a79c9fb394a"><td class="mdescLeft">&#160;</td><td class="mdescRight">Loads a computational graph from a JSON file.  <br /></td></tr>
<tr class="separator:a9748d859d952864bfa498a79c9fb394a"><td class="memSeparator" colspan="2">&#160;</td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a id="friends" name="friends"></a>
Friends</h2></td></tr>
<tr class="memitem:a25cf602885275406f8fe60a5077308a8" id="r_a25cf602885275406f8fe60a5077308a8"><td class="memItemLeft" align="right" valign="top">DL_API std::ostream &amp;&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a25cf602885275406f8fe60a5077308a8">operator&lt;&lt;</a> (std::ostream &amp;os, <a class="el" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> &amp;graph)</td></tr>
<tr class="memdesc:a25cf602885275406f8fe60a5077308a8"><td class="mdescLeft">&#160;</td><td class="mdescRight">Overloads the stream insertion operator to print the details of the computational graph.  <br /></td></tr>
<tr class="separator:a25cf602885275406f8fe60a5077308a8"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a11f567efba2c857e64a88c411c5e6c54" id="r_a11f567efba2c857e64a88c411c5e6c54"><td class="memItemLeft" align="right" valign="top">DL_API void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a11f567efba2c857e64a88c411c5e6c54">CreateNode</a> (<a class="el" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> *graph, const std::string &amp;type, const std::string &amp;name, std::vector&lt; int &gt; pre, const <a class="el" href="classnz_1_1data_1_1_dimension.html">Tensor::shape_type</a> &amp;shape, float *data, bool requires_grad, float *grad)</td></tr>
<tr class="memdesc:a11f567efba2c857e64a88c411c5e6c54"><td class="mdescLeft">&#160;</td><td class="mdescRight">Creates and adds a node to the computational graph based on the specified type.  <br /></td></tr>
<tr class="separator:a11f567efba2c857e64a88c411c5e6c54"><td class="memSeparator" colspan="2">&#160;</td></tr>
</table>
<a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2>
<div class="textblock"><p>Represents a computational graph, which manages nodes and the computation flow. </p>
<p>The <code><a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a></code> class is responsible for creating, managing, and computing the flow of nodes in a neural network or any other computational graph. It handles the addition of input and output nodes, as well as performing forward and backward passes through the graph. It also supports gradient updates, randomization of node values, and node management such as saving, loading, and setting node values.</p>
<p>Key features:</p><ul>
<li><b>Graph Management</b>: The class manages a list of nodes, input nodes, output nodes, and ensures that nodes are added and connected properly.</li>
<li><b>Forward and Backward Passes</b>: The <code><a class="el" href="#a0c5a596f38e6d7e8c1c78e19b9b56167" title="Performs forward propagation on the computational graph.">forward()</a></code> and <code><a class="el" href="#adfcad402fe85084ddbb4f8b6415adc05" title="Performs backward propagation on the computational graph.">backward()</a></code> methods execute the forward and backward computation across all nodes in the graph.</li>
<li><b>Topological Sort</b>: The <code><a class="el" href="#a5960e75d631200994c0a2f78f58674dd" title="Performs topological sorting on the computational graph.">topologicalSort()</a></code> method sorts the nodes in a way that respects their dependencies, ensuring that computation happens in the correct order.</li>
<li><b>Randomization and Initialization</b>: The graph supports random initialization of node values via the <code><a class="el" href="#a856ffb66fa42357ab1a12f9f74d2aba4" title="Randomizes the output tensor of a specified node in the computational graph.">randomize()</a></code> and <code><a class="el" href="#a90e4737107c4ec96a1a95022daa6de2f" title="Randomizes the output tensors of all input nodes in the computational graph.">randomizeAll()</a></code> methods.</li>
<li><b>Gradient Zeroing</b>: The <code><a class="el" href="#a3f94019acc205a47726c0b0797b0e631" title="Resets the gradients of all nodes in the computational graph.">zeroGrad()</a></code> method zeros the gradients of all nodes in the graph.</li>
<li><b>Saving and Loading</b>: The <code><a class="el" href="#a6aa07603223ea714d74fc884218cb50f" title="Saves the current computational graph to a JSON file.">save()</a></code> and <code><a class="el" href="#a9748d859d952864bfa498a79c9fb394a" title="Loads a computational graph from a JSON file.">load()</a></code> methods allow for the persistence of the graph’s state.</li>
</ul>
<p>This class is designed to be used in a computational graph where nodes represent various mathematical operations and tensors represent the data that flows through the graph.</p>
<h2><a class="anchor" id="autotoc_md0"></a>
Supported node types</h2>
<table class="markdownTable">
<tr class="markdownTableHead">
<th class="markdownTableHeadNone">Type   </th><th class="markdownTableHeadNone">Reference    </th></tr>
<tr class="markdownTableRowOdd">
<td class="markdownTableBodyNone">Input   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html" title="Represents an input node in a computational graph.">nz::nodes::io::InputNode</a>    </td></tr>
<tr class="markdownTableRowEven">
<td class="markdownTableBodyNone">Output   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">nz::nodes::io::OutputNode</a>    </td></tr>
<tr class="markdownTableRowOdd">
<td class="markdownTableBodyNone">Add   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_add_node.html" title="Represents a node that performs element-wise addition between two input tensors.">nz::nodes::calc::AddNode</a>    </td></tr>
<tr class="markdownTableRowEven">
<td class="markdownTableBodyNone">MatMul   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_mat_mul_node.html" title="Represents a matrix multiplication operation node in a computational graph.">nz::nodes::calc::MatMulNode</a>    </td></tr>
<tr class="markdownTableRowOdd">
<td class="markdownTableBodyNone">ScalarMul   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_scalar_mul_node.html" title="Represents a scalar multiplication operation node in a computational graph.">nz::nodes::calc::ScalarMulNode</a>    </td></tr>
<tr class="markdownTableRowEven">
<td class="markdownTableBodyNone">ScalarDiv   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_scalar_div_node.html" title="Represents a scalar division operation node in a computational graph.">nz::nodes::calc::ScalarDivNode</a>    </td></tr>
<tr class="markdownTableRowOdd">
<td class="markdownTableBodyNone">ScalarAdd   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_scalar_add_node.html" title="Represents a scalar addition operation node in a computational graph.">nz::nodes::calc::ScalarAddNode</a>    </td></tr>
<tr class="markdownTableRowEven">
<td class="markdownTableBodyNone">ScalarSub   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_scalar_sub_node.html" title="Represents a scalar subtraction operation node in a computational graph.">nz::nodes::calc::ScalarSubNode</a>    </td></tr>
<tr class="markdownTableRowOdd">
<td class="markdownTableBodyNone">Sub   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_sub_node.html" title="Represents a subtraction operation node in a computational graph.">nz::nodes::calc::SubNode</a>    </td></tr>
<tr class="markdownTableRowEven">
<td class="markdownTableBodyNone">ReLU   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_re_l_u_node.html" title="Represents a Rectified Linear Unit (ReLU) operation node in a computational graph.">nz::nodes::calc::ReLUNode</a>    </td></tr>
<tr class="markdownTableRowOdd">
<td class="markdownTableBodyNone">Sigmoid   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_sigmoid_node.html" title="Represents a Sigmoid activation function node in a computational graph.">nz::nodes::calc::SigmoidNode</a>    </td></tr>
<tr class="markdownTableRowEven">
<td class="markdownTableBodyNone">Tanh   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_tanh_node.html" title="Represents a hyperbolic tangent (tanh) activation function node in a computational graph.">nz::nodes::calc::TanhNode</a>    </td></tr>
<tr class="markdownTableRowOdd">
<td class="markdownTableBodyNone">LeakyReLU   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_leaky_re_l_u_node.html" title="Represents a Leaky Rectified Linear Unit (LeakyReLU) activation function node in a computational grap...">nz::nodes::calc::LeakyReLUNode</a>    </td></tr>
<tr class="markdownTableRowEven">
<td class="markdownTableBodyNone">Swish   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_swish_node.html" title="Represents a Swish activation function node in a computational graph.">nz::nodes::calc::SwishNode</a>    </td></tr>
<tr class="markdownTableRowOdd">
<td class="markdownTableBodyNone">ELU   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_e_l_u_node.html" title="Represents an Exponential Linear Unit (ELU) activation function node in a computational graph.">nz::nodes::calc::ELUNode</a>    </td></tr>
<tr class="markdownTableRowEven">
<td class="markdownTableBodyNone">HardSigmoid   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node.html" title="Represents a Hard Sigmoid activation function node in a computational graph.">nz::nodes::calc::HardSigmoidNode</a>    </td></tr>
<tr class="markdownTableRowOdd">
<td class="markdownTableBodyNone">HardSwish   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_hard_swish_node.html" title="Represents a Hard Swish activation function node in a computational graph.">nz::nodes::calc::HardSwishNode</a>    </td></tr>
<tr class="markdownTableRowEven">
<td class="markdownTableBodyNone">Softmax   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_softmax_node.html" title="Implements the Softmax activation function as a node in a neural network computational graph.">nz::nodes::calc::SoftmaxNode</a>    </td></tr>
<tr class="markdownTableRowOdd">
<td class="markdownTableBodyNone">MeanSquaredError   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1loss_1_1_mean_squared_error_node.html" title="Represents the Mean Squared Error (MSE) loss function node in a computational graph.">nz::nodes::loss::MeanSquaredErrorNode</a>    </td></tr>
<tr class="markdownTableRowEven">
<td class="markdownTableBodyNone">BinaryCrossEntropy   </td><td class="markdownTableBodyNone"><a class="el" href="classnz_1_1nodes_1_1loss_1_1_binary_cross_entropy_node.html" title="Represents the Binary Cross-Entropy (BCE) loss function node in a computational graph.">nz::nodes::loss::BinaryCrossEntropyNode</a>   </td></tr>
</table>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The graph handles nodes by their names. Each node is stored in a node roster, allowing for easy lookup.</li>
<li>The nodes should be connected properly for the forward and backward passes to work correctly.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md1"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><span class="comment">// Create Graph (Method 1)</span></div>
<div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">graph::ComputeGraph</a> graph;</div>
<div class="line"> </div>
<div class="line"><span class="keyword">auto</span>* input1 = graph.<a class="code hl_function" href="#a75a4b72c93448c8617a7af41cc471897">addInput</a>({3, 4}, <span class="keyword">false</span>, <span class="stringliteral">&quot;Input&quot;</span>);  <span class="comment">// Add input data</span></div>
<div class="line"><span class="keyword">auto</span>* input2 = graph.<a class="code hl_function" href="#a75a4b72c93448c8617a7af41cc471897">addInput</a>({4, 3}, <span class="keyword">true</span>, <span class="stringliteral">&quot;Weight&quot;</span>);</div>
<div class="line"><span class="keyword">auto</span>* input3 = graph.<a class="code hl_function" href="#a75a4b72c93448c8617a7af41cc471897">addInput</a>({3, 3}, <span class="keyword">false</span>, <span class="stringliteral">&quot;Label&quot;</span>);</div>
<div class="line"> </div>
<div class="line">nodes::calc::MatMulNode matmul(input1, input2); <span class="comment">// Add Computation nodes</span></div>
<div class="line">graph.<a class="code hl_function" href="#a5ce9a271dcea6cc2b9490cd815ee53ed">addNode</a>(&amp;matmul, <span class="stringliteral">&quot;MatMul&quot;</span>);</div>
<div class="line">nodes::calc::ReLUNode relu(&amp;matmul);</div>
<div class="line">graph.<a class="code hl_function" href="#a5ce9a271dcea6cc2b9490cd815ee53ed">addNode</a>(&amp;relu, <span class="stringliteral">&quot;ReLU&quot;</span>);</div>
<div class="line"> </div>
<div class="line">nodes::loss::MeanSquaredErrorNode loss(&amp;relu, input3); <span class="comment">// Add loss function</span></div>
<div class="line">graph.<a class="code hl_function" href="#a47cde0bdd9c961d55bfd16c9adb8810e">addOutput</a>(&amp;loss, <span class="stringliteral">&quot;Loss&quot;</span>);</div>
<div class="line"> </div>
<div class="line">graph.<a class="code hl_function" href="#a90e4737107c4ec96a1a95022daa6de2f">randomizeAll</a>(); <span class="comment">// init data</span></div>
<div class="line"> </div>
<div class="line"><span class="comment">// Create graph (Method 2)</span></div>
<div class="line">graph::ComputeGraph graph;</div>
<div class="line"> </div>
<div class="line">graph.<a class="code hl_function" href="#a75a4b72c93448c8617a7af41cc471897">addInput</a>({3, 4}, <span class="keyword">false</span>, <span class="stringliteral">&quot;Input&quot;</span>);</div>
<div class="line">graph.addInput({4, 3}, <span class="keyword">true</span>, <span class="stringliteral">&quot;Weight&quot;</span>);</div>
<div class="line">graph.addInput({3, 3}, <span class="keyword">false</span>, <span class="stringliteral">&quot;Label&quot;</span>);</div>
<div class="line"> </div>
<div class="line">graph.addNode(<span class="stringliteral">&quot;MatMul&quot;</span>, <span class="stringliteral">&quot;Input&quot;</span>, <span class="stringliteral">&quot;Weight&quot;</span>, <span class="stringliteral">&quot;MatMul&quot;</span>);</div>
<div class="line">graph.addNode(<span class="stringliteral">&quot;ReLU&quot;</span>, <span class="stringliteral">&quot;MatMul&quot;</span>, <span class="stringliteral">&quot;&quot;</span>, <span class="stringliteral">&quot;ReLU&quot;</span>);</div>
<div class="line">graph.addNode(<span class="stringliteral">&quot;MeanSquaredError&quot;</span>, <span class="stringliteral">&quot;ReLU&quot;</span>, <span class="stringliteral">&quot;Label&quot;</span>);</div>
<div class="line"> </div>
<div class="line">graph.randomizeAll();</div>
<div class="line"> </div>
<div class="line"><span class="comment">// Perform forward and backward passes</span></div>
<div class="line">graph.<a class="code hl_function" href="#a0c5a596f38e6d7e8c1c78e19b9b56167">forward</a>();</div>
<div class="line">graph.backward();</div>
<div class="line">std::cout &lt;&lt; graph &lt;&lt; std::endl; <span class="comment">// Print result</span></div>
<div class="line"> </div>
<div class="line"><span class="comment">// Update weights</span></div>
<div class="line">opt::SGD optimizer(0.01); <span class="comment">// Create optimizer</span></div>
<div class="line">graph.update(&amp;optimizer); <span class="comment">// Update weights</span></div>
<div class="line"> </div>
<div class="line">graph.forward();</div>
<div class="line">std::cout &lt;&lt; graph &lt;&lt; std::endl;</div>
<div class="line"> </div>
<div class="line"><span class="comment">// Save model</span></div>
<div class="line">graph.save(<span class="stringliteral">&quot;model.json&quot;</span>);</div>
<div class="line"> </div>
<div class="line"><span class="comment">// Load model</span></div>
<div class="line">graph::ComputeGraph graph;</div>
<div class="line">graph.load(<span class="stringliteral">&quot;model.json&quot;</span>);</div>
<div class="line">graph.forward();</div>
<div class="line">graph.backward();</div>
<div class="line">opt::Adam optimizer(0.01, 0.9, 0.99);</div>
<div class="line">graph.update(&amp;optimizer);</div>
<div class="line">graph.forward();</div>
<div class="line">std::cout &lt;&lt; graph &lt;&lt; std::endl;</div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html"><div class="ttname"><a href="classnz_1_1graph_1_1_compute_graph.html">nz::graph::ComputeGraph</a></div><div class="ttdoc">Represents a computational graph, which manages nodes and the computation flow.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cuh_source.html#l00224">ComputeGraph.cuh:224</a></div></div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a47cde0bdd9c961d55bfd16c9adb8810e"><div class="ttname"><a href="#a47cde0bdd9c961d55bfd16c9adb8810e">nz::graph::ComputeGraph::addOutput</a></div><div class="ttdeci">OutputNode * addOutput(OutputNode *node, const std::string &amp;name=&quot;default&quot;)</div><div class="ttdoc">Adds an output node to the computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00389">ComputeGraph.cu:389</a></div></div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a5ce9a271dcea6cc2b9490cd815ee53ed"><div class="ttname"><a href="#a5ce9a271dcea6cc2b9490cd815ee53ed">nz::graph::ComputeGraph::addNode</a></div><div class="ttdeci">NodeType * addNode(NodeType *node, const std::string &amp;name=&quot;default&quot;)</div><div class="ttdoc">Adds a node of any type to the computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cuh_source.html#l00511">ComputeGraph.cuh:511</a></div></div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a75a4b72c93448c8617a7af41cc471897"><div class="ttname"><a href="#a75a4b72c93448c8617a7af41cc471897">nz::graph::ComputeGraph::addInput</a></div><div class="ttdeci">InputNode * addInput(const Tensor::shape_type &amp;shape, bool requires_grad=false, const std::string &amp;name=&quot;default&quot;)</div><div class="ttdoc">Adds an input node to the computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00302">ComputeGraph.cu:302</a></div></div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a90e4737107c4ec96a1a95022daa6de2f"><div class="ttname"><a href="#a90e4737107c4ec96a1a95022daa6de2f">nz::graph::ComputeGraph::randomizeAll</a></div><div class="ttdeci">void randomizeAll() const</div><div class="ttdoc">Randomizes the output tensors of all input nodes in the computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00458">ComputeGraph.cu:458</a></div></div>
<div class="ttc" id="aclassnz_1_1nodes_1_1io_1_1_input_node_html_a4ba34603676c094723409d9e6b770976"><div class="ttname"><a href="classnz_1_1nodes_1_1io_1_1_input_node.html#a4ba34603676c094723409d9e6b770976">nz::nodes::io::InputNode::forward</a></div><div class="ttdeci">void forward() override</div><div class="ttdoc">Forward pass for the InputNode.</div><div class="ttdef"><b>Definition</b> <a href="_nodes_8cu_source.html#l00046">Nodes.cu:46</a></div></div>
<div class="ttc" id="anamespacenz_1_1krnl_html_a2b9ab840eeb0e74f4b78277a046b3a07"><div class="ttname"><a href="namespacenz_1_1krnl.html#a2b9ab840eeb0e74f4b78277a046b3a07">nz::krnl::Adam</a></div><div class="ttdeci">void Adam(dim3 gridDim, dim3 blockDim, float *data, float *m, float *v, float *grad, float lr, float beta1, float beta2, float eps, int t, unsigned long long n)</div><div class="ttdoc">Kernel function to apply Adam optimization.</div><div class="ttdef"><b>Definition</b> <a href="_operation_kernels_8cu_source.html#l00768">OperationKernels.cu:768</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="#a0c5a596f38e6d7e8c1c78e19b9b56167" title="Performs forward propagation on the computational graph.">forward()</a> for the <a class="el" href="#a0c5a596f38e6d7e8c1c78e19b9b56167" title="Performs forward propagation on the computational graph.">forward</a> pass computation method. </dd>
<dd>
<a class="el" href="#adfcad402fe85084ddbb4f8b6415adc05" title="Performs backward propagation on the computational graph.">backward()</a> for the <a class="el" href="#adfcad402fe85084ddbb4f8b6415adc05" title="Performs backward propagation on the computational graph.">backward</a> pass gradient propagation method.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cuh_source.html#l00224">224</a> of file <a class="el" href="_compute_graph_8cuh_source.html">ComputeGraph.cuh</a>.</p>
</div><h2 class="groupheader">Constructor &amp; Destructor Documentation</h2>
<a id="a328a263c572540a330ba650651fb7722" name="a328a263c572540a330ba650651fb7722"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a328a263c572540a330ba650651fb7722">&#9670;&#160;</a></span>ComputeGraph()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">nz::graph::ComputeGraph::ComputeGraph </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">default</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Default constructor for the <a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a> class. </p>
<p>This constructor initializes the <code><a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a></code> object. It sets up all internal data structures, such as the lists for nodes, input nodes, output nodes, and the node roster. The graph is initially empty and requires the addition of nodes and connections to form a complete computational graph.</p>
<p>The constructor doesn't require any arguments and doesn't allocate any resources other than those necessary for the internal structure of the graph.</p>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The constructor does not perform any computations or node additions. It merely initializes the empty graph.</li>
</ul>
</dd></dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="#a6397db45a249af6389dd3f7548b5aa0a" title="Destructor for the ComputeGraph class.">~ComputeGraph()</a> for the destructor that cleans up the resources used by the <a class="el" href="namespacenz_1_1graph.html" title="Contains classes and functions for managing and executing computation graphs in deep learning workflo...">graph</a>.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

</div>
</div>
<a id="a6397db45a249af6389dd3f7548b5aa0a" name="a6397db45a249af6389dd3f7548b5aa0a"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a6397db45a249af6389dd3f7548b5aa0a">&#9670;&#160;</a></span>~ComputeGraph()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">nz::graph::ComputeGraph::~ComputeGraph </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">default</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Destructor for the <a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a> class. </p>
<p>The destructor ensures that any resources allocated by the <code><a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a></code> object, such as the nodes and their associated data, are properly cleaned up when the object is destroyed. It performs any necessary memory deallocation or resource release.</p>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The destructor does not need to manually delete each node in the graph, as the nodes are typically managed by smart pointers, or the graph is otherwise not responsible for their memory deallocation.</li>
</ul>
</dd></dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="#a328a263c572540a330ba650651fb7722" title="Default constructor for the ComputeGraph class.">ComputeGraph()</a> for the constructor that initializes the <a class="el" href="namespacenz_1_1graph.html" title="Contains classes and functions for managing and executing computation graphs in deep learning workflo...">graph</a>.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

</div>
</div>
<h2 class="groupheader">Member Function Documentation</h2>
<a id="a86b54a014876f245d64c524dc70075ea" name="a86b54a014876f245d64c524dc70075ea"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a86b54a014876f245d64c524dc70075ea">&#9670;&#160;</a></span>addInput() <span class="overload">[1/5]</span></h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a> * nz::graph::ComputeGraph::addInput </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="classnz_1_1data_1_1_tensor.html">Tensor</a> &amp;</td>          <td class="paramname"><span class="paramname"><em>tensor</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>name</em></span><span class="paramdefsep"> = </span><span class="paramdefval">&quot;default&quot;</span>&#160;)</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Adds an input node to the computational graph using a Tensor. </p>
<p>This method creates a new <code>InputNode</code> using the provided <code>Tensor</code> and adds it to the <code><a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a></code> object. The newly created node is added to both the <code>inputNodes</code> and <code>nodes</code> lists. Its name is stored in the <code>nodeRoster</code> and <code>nodeRosterReverse</code> maps, allowing for easy lookup by name. If no name is provided, a unique name is generated.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">tensor</td><td>The <code>Tensor</code> object that represents the data for the input node. </td></tr>
    <tr><td class="paramname">name</td><td>The name of the input node. If "default", a unique name is generated. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>A pointer to the newly created <code>InputNode</code>.</dd></dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>If the <code>name</code> is "default", a unique name will be generated for the input node by concatenating the node's type and a reference counter.</li>
<li>The node is added to both <code>inputNodes</code> and <code>nodes</code>, ensuring it is part of the computational graph.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md3"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1data_1_1_tensor.html">Tensor</a> input_tensor({3, 3}, <span class="keyword">true</span>);  <span class="comment">// Example tensor</span></div>
<div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><a class="code hl_class" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a>* input = graph.addInput(input_tensor, <span class="stringliteral">&quot;input_node&quot;</span>);</div>
<div class="ttc" id="aclassnz_1_1data_1_1_tensor_html"><div class="ttname"><a href="classnz_1_1data_1_1_tensor.html">nz::data::Tensor</a></div><div class="ttdoc">A class for representing and manipulating multidimensional arrays (tensors) in GPU memory.</div><div class="ttdef"><b>Definition</b> <a href="_tensor_8cuh_source.html#l00134">Tensor.cuh:134</a></div></div>
<div class="ttc" id="aclassnz_1_1nodes_1_1io_1_1_input_node_html"><div class="ttname"><a href="classnz_1_1nodes_1_1io_1_1_input_node.html">nz::nodes::io::InputNode</a></div><div class="ttdoc">Represents an input node in a computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_nodes_8cuh_source.html#l00437">Nodes.cuh:437</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html" title="Represents an input node in a computational graph.">nodes::io::InputNode</a> for the class definition of the input node.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00319">319</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a75a4b72c93448c8617a7af41cc471897" name="a75a4b72c93448c8617a7af41cc471897"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a75a4b72c93448c8617a7af41cc471897">&#9670;&#160;</a></span>addInput() <span class="overload">[2/5]</span></h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a> * nz::graph::ComputeGraph::addInput </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="classnz_1_1data_1_1_dimension.html">Tensor::shape_type</a> &amp;</td>          <td class="paramname"><span class="paramname"><em>shape</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">bool</td>          <td class="paramname"><span class="paramname"><em>requires_grad</em></span><span class="paramdefsep"> = </span><span class="paramdefval">false</span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>name</em></span><span class="paramdefsep"> = </span><span class="paramdefval">&quot;default&quot;</span>&#160;)</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Adds an input node to the computational graph. </p>
<p>This method creates a new <code>InputNode</code> with the specified shape and gradient requirements and adds it to the <code><a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a></code> object. The newly created node is also added to the <code>inputNodes</code> list, and its name is recorded in the <code>nodeRoster</code> and <code>nodeRosterReverse</code> maps for easy access by name.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">shape</td><td>The shape of the input tensor for the node. </td></tr>
    <tr><td class="paramname">requires_grad</td><td>A boolean indicating whether the input node requires gradients for backpropagation. </td></tr>
    <tr><td class="paramname">name</td><td>The name of the input node. If not provided, a default name is generated. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>A pointer to the newly created <code>InputNode</code>.</dd></dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>If the name is "default", a unique name is generated for the input node using the node's type and a reference counter.</li>
<li>The node is added to both <code>inputNodes</code> and <code>nodes</code>, ensuring it is part of the overall computational graph.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md2"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><a class="code hl_class" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a>* input = graph.<a class="code hl_function" href="#a75a4b72c93448c8617a7af41cc471897">addInput</a>({3, 3}, <span class="keyword">true</span>, <span class="stringliteral">&quot;input_node&quot;</span>);</div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html" title="Represents an input node in a computational graph.">nodes::io::InputNode</a> for the class definition of the input node.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00302">302</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a3ef35c2707abf965e7badf00850fb747" name="a3ef35c2707abf965e7badf00850fb747"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a3ef35c2707abf965e7badf00850fb747">&#9670;&#160;</a></span>addInput() <span class="overload">[3/5]</span></h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a> * nz::graph::ComputeGraph::addInput </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="classnz_1_1data_1_1_dimension.html">Tensor::shape_type</a> &amp;</td>          <td class="paramname"><span class="paramname"><em>shape</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::initializer_list&lt; Tensor::value_type &gt; &amp;</td>          <td class="paramname"><span class="paramname"><em>data</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">bool</td>          <td class="paramname"><span class="paramname"><em>requires_grad</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>name</em></span><span class="paramdefsep"> = </span><span class="paramdefval">&quot;default&quot;</span>&#160;)</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Adds an InputNode to the <a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a> using a std::initializer_list for data and returns a pointer to the created node. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">shape</td><td>A reference to the shape of the tensor associated with the InputNode (host-to-device). Defines the dimensions of the input tensor. </td></tr>
    <tr><td class="paramname">data</td><td>A std::initializer_list containing the initial values for the tensor (host-to-device). </td></tr>
    <tr><td class="paramname">requires_grad</td><td>A boolean indicating whether the tensor of the InputNode should require gradient computation. </td></tr>
    <tr><td class="paramname">name</td><td>A string representing the name of the InputNode. If set to "default", a unique name will be generated.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>A pointer to the newly created InputNode.</dd></dl>
<p>This function is responsible for creating and adding an InputNode to the <a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a>. Memory management: It uses the <code>new</code> operator to allocate memory for the InputNode, and the <a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a> takes ownership of this memory. The memory should be deallocated when the <a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a> is destroyed.</p>
<p>Exception handling: This function does not explicitly catch exceptions. If memory allocation for the InputNode fails (<code>new</code> throws <code>std::bad_alloc</code>), or if there are issues with the <code>shape</code> or <code>data</code> passed to the InputNode constructor, the exceptions will propagate to the caller.</p>
<p>It interacts with the <code>nodes</code>, <code>inputNodes</code>, <code>nodeRoster</code>, and <code>nodeRosterReverse</code> members of the <a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a> to manage the list of nodes and the mapping between node names and pointers.</p>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::bad_alloc</td><td>If memory allocation for the InputNode fails.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>Ensure that the <code>shape</code> and <code>data</code> are compatible with the requirements of the InputNode constructor.</li>
<li>If the <code>name</code> is "default", a unique name will be generated based on the node type and a reference counter.</li>
<li>The time complexity of this function is O(1) for node creation and O(log m) for insertion into the <code>nodeRoster</code> map, where m is the number of nodes in the graph.</li>
</ul>
</dd></dl>
<dl class="section warning"><dt>Warning</dt><dd><ul>
<li>Do not delete the returned pointer as the <a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a> takes ownership of the InputNode.</li>
</ul>
</dd></dl>
<div class="fragment">
<div class="line"><span class="preprocessor">#include &lt;vector&gt;</span></div>
<div class="line"> </div>
<div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line">shape_type shape = {2, 2};</div>
<div class="line"><a class="code hl_class" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a>* inputNode = graph.addInput(shape, {1.0f, 2.0f, 3.0f, 4.0f}, <span class="keyword">true</span>, <span class="stringliteral">&quot;my_input&quot;</span>);</div>
</div><!-- fragment --> 
<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00370">370</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="ab75d30dc8133099118f60bdd07284f01" name="ab75d30dc8133099118f60bdd07284f01"></a>
<h2 class="memtitle"><span class="permalink"><a href="#ab75d30dc8133099118f60bdd07284f01">&#9670;&#160;</a></span>addInput() <span class="overload">[4/5]</span></h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a> * nz::graph::ComputeGraph::addInput </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="classnz_1_1data_1_1_dimension.html">Tensor::shape_type</a> &amp;</td>          <td class="paramname"><span class="paramname"><em>shape</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">Tensor::value_type *</td>          <td class="paramname"><span class="paramname"><em>data</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">bool</td>          <td class="paramname"><span class="paramname"><em>requires_grad</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">bool</td>          <td class="paramname"><span class="paramname"><em>host</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>name</em></span><span class="paramdefsep"> = </span><span class="paramdefval">&quot;default&quot;</span>&#160;)</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Adds an input node to the computation graph and returns a pointer to the newly created InputNode. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">shape</td><td>A reference to the shape of the input tensor (host-to-device). This defines the dimensions of the tensor associated with the input node. </td></tr>
    <tr><td class="paramname">data</td><td>A pointer to the initial data for the input tensor (host-to-device). It can be nullptr if no initial data is provided. </td></tr>
    <tr><td class="paramname">requires_grad</td><td>A boolean indicating whether the input tensor should require gradient computation. </td></tr>
    <tr><td class="paramname">host</td><td>A boolean indicating whether the tensor data is stored on the host. </td></tr>
    <tr><td class="paramname">name</td><td>A string representing the name of the input node. If set to "default", a unique name will be generated.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>A pointer to the newly created InputNode.</dd></dl>
<p>This function is used to add an input node to the computation graph. Memory management: It dynamically allocates memory for the InputNode using the <code>new</code> operator. The caller does not need to free this memory directly as the graph takes ownership of the node. When the graph is destroyed, it should be responsible for deallocating the memory of all its nodes.</p>
<p>Exception handling: This function does not explicitly catch exceptions. If memory allocation for the InputNode fails (<code>new</code> throws <code>std::bad_alloc</code>), the exception will propagate to the caller. Also, if there are issues with the provided <code>shape</code> or <code>data</code>, the constructor of InputNode may throw relevant exceptions.</p>
<p>This function interacts with the <code>nodes</code>, <code>inputNodes</code>, <code>nodeRoster</code>, and <code>nodeRosterReverse</code> members of the <code><a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a></code> class to manage the list of nodes and the mapping between node names and pointers.</p>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::bad_alloc</td><td>If memory allocation for the InputNode fails.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>Ensure that the <code>shape</code> and <code>data</code> are valid and compatible with the requirements of the InputNode constructor.</li>
<li>If the <code>name</code> is set to "default", a unique name will be generated based on the node type and a reference counter.</li>
<li>The time complexity of this function is O(1) for the node creation and O(log m) for the insertion into the <code>nodeRoster</code> map, where m is the number of nodes in the graph.</li>
</ul>
</dd></dl>
<dl class="section warning"><dt>Warning</dt><dd><ul>
<li>The caller should not delete the returned pointer as the graph takes ownership of the node.</li>
</ul>
</dd></dl>
<div class="fragment">
<div class="line"><span class="preprocessor">#include &lt;vector&gt;</span></div>
<div class="line"> </div>
<div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line">shape_type shape = {2, 2};</div>
<div class="line">value_type data[] = {1.0f, 2.0f, 3.0f, 4.0f};</div>
<div class="line">InputNode* inputNode = graph.<a class="code hl_function" href="#a75a4b72c93448c8617a7af41cc471897">addInput</a>(shape, data, <span class="keyword">true</span>, <span class="keyword">true</span>, <span class="stringliteral">&quot;my_input&quot;</span>);</div>
</div><!-- fragment --> 
<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00352">352</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a8ab70380135ecdeb3652b76c2bf2766c" name="a8ab70380135ecdeb3652b76c2bf2766c"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a8ab70380135ecdeb3652b76c2bf2766c">&#9670;&#160;</a></span>addInput() <span class="overload">[5/5]</span></h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a> * nz::graph::ComputeGraph::addInput </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a> *</td>          <td class="paramname"><span class="paramname"><em>input</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>name</em></span><span class="paramdefsep"> = </span><span class="paramdefval">&quot;default&quot;</span>&#160;)</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Adds an existing <code>InputNode</code> to the computational graph. </p>
<p>This method adds an already created <code>InputNode</code> to the <code><a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a></code>. The node is pushed into both the <code>nodes</code> and <code>inputNodes</code> lists. If a name is provided, the node is stored in the <code>nodeRoster</code> and <code>nodeRosterReverse</code> maps with the given name. If the name is <code>"default"</code>, a unique name is generated for the node.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">input</td><td>A pointer to the existing <code>InputNode</code> to be added to the graph. </td></tr>
    <tr><td class="paramname">name</td><td>The name of the input node. If "default", a unique name is generated for the node. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>A pointer to the added <code>InputNode</code>.</dd></dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>If the <code>name</code> is <code>"default"</code>, a unique name is generated by concatenating the node's type and a reference counter.</li>
<li>The node is added to both the <code>inputNodes</code> and <code>nodes</code> lists, ensuring it becomes part of the computational graph.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md4"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><a class="code hl_class" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a>* input = <span class="keyword">new</span> <a class="code hl_class" href="classnz_1_1nodes_1_1io_1_1_input_node.html">InputNode</a>({3, 3}, <span class="keyword">true</span>);</div>
<div class="line">graph.<a class="code hl_function" href="#a75a4b72c93448c8617a7af41cc471897">addInput</a>(input, <span class="stringliteral">&quot;input_node&quot;</span>);</div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1nodes_1_1io_1_1_input_node.html" title="Represents an input node in a computational graph.">nodes::io::InputNode</a> for more details on the input node class.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00336">336</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a21a839e5d155c752db1f2130812bc787" name="a21a839e5d155c752db1f2130812bc787"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a21a839e5d155c752db1f2130812bc787">&#9670;&#160;</a></span>addNode() <span class="overload">[1/2]</span></h2>

<div class="memitem">
<div class="memproto">
<div class="memtemplate">
template&lt;typename... Args&gt; </div>
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> * nz::graph::ComputeGraph::addNode </td>
          <td>(</td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>type</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>input1</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>input2</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>name</em></span><span class="paramdefsep"> = </span><span class="paramdefval">&quot;default&quot;</span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">Args...</td>          <td class="paramname"><span class="paramname"><em>args</em></span>&#160;)</td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Adds a node to the computational graph based on the provided node type and inputs. </p>
<p>This method adds a node to the computational graph based on the specified node type (e.g., "Input", "Output", "Add", "MatMul", etc.). It also ensures that the required input nodes are present in the graph. Depending on the node type, the method creates and adds the corresponding node to the graph and returns a pointer to the added node. If the node type or input nodes are invalid, an exception will be thrown.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">type</td><td>The type of the node to be added. It can be one of the following: "Input", "Output", "Add", "MatMul", "Sub", "ReLU", "Sigmoid", "Tanh", "LeakyReLU", "Swish", "ELU", "HardSigmoid", "HardSwish", "Softmax", "MeanSquaredError", "BinaryCrossEntropy", "ScalarAdd", "ScalarSub", "ScalarMul", "ScalarDiv". </td></tr>
    <tr><td class="paramname">input1</td><td>The name of the first input node. The exact meaning depends on the node type. </td></tr>
    <tr><td class="paramname">input2</td><td>The name of the second input node (if required by the node type). </td></tr>
    <tr><td class="paramname">name</td><td>The name of the node to be added. If <code>"default"</code>, a unique name is generated. The default value is <code>"default"</code>. </td></tr>
    <tr><td class="paramname">args</td><td>Additional arguments required for some node types (e.g., parameters for LeakyReLU, ELU, etc.).</td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>A pointer to the newly added node.</dd></dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>If any required input node is not found, or if an unsupported node type is provided.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>For "Input" and "Scalar" nodes, the method will print a warning and return <code>nullptr</code> because these nodes cannot be added using this method.</li>
<li>If the specified node type requires specific input nodes, the method checks if the input nodes exist in the graph before proceeding.</li>
<li>The method supports variable arguments (<code>Args... args</code>) for nodes like LeakyReLU, ELU, and others that may require additional parameters.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md6"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><a class="code hl_class" href="classnz_1_1nodes_1_1_node.html">Node</a>* <a class="code hl_function" href="#a5ce9a271dcea6cc2b9490cd815ee53ed">addNode</a> = graph.<a class="code hl_function" href="#a5ce9a271dcea6cc2b9490cd815ee53ed">addNode</a>(<span class="stringliteral">&quot;Add&quot;</span>, <span class="stringliteral">&quot;input1&quot;</span>, <span class="stringliteral">&quot;input2&quot;</span>, <span class="stringliteral">&quot;add_node&quot;</span>);</div>
<div class="line"><span class="keywordflow">if</span> (<a class="code hl_function" href="#a5ce9a271dcea6cc2b9490cd815ee53ed">addNode</a> != <span class="keyword">nullptr</span>) {</div>
<div class="line">    <span class="comment">// Use the addNode pointer here</span></div>
<div class="line">}</div>
<div class="ttc" id="aclassnz_1_1nodes_1_1_node_html"><div class="ttname"><a href="classnz_1_1nodes_1_1_node.html">nz::nodes::Node</a></div><div class="ttdoc">Base class for nodes in a neural network or computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_nodes_8cuh_source.html#l00114">Nodes.cuh:114</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1nodes_1_1_node.html" title="Base class for nodes in a neural network or computational graph.">nodes::Node</a> for the base class of all node types. </dd>
<dd>
<a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">nodes::io::OutputNode</a>, <a class="el" href="classnz_1_1nodes_1_1calc_1_1_add_node.html" title="Represents a node that performs element-wise addition between two input tensors.">nodes::calc::AddNode</a>, <a class="el" href="classnz_1_1nodes_1_1calc_1_1_mat_mul_node.html" title="Represents a matrix multiplication operation node in a computational graph.">nodes::calc::MatMulNode</a>, etc., for the specific node classes that are created.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cuh_source.html#l00571">571</a> of file <a class="el" href="_compute_graph_8cuh_source.html">ComputeGraph.cuh</a>.</p>

</div>
</div>
<a id="a5ce9a271dcea6cc2b9490cd815ee53ed" name="a5ce9a271dcea6cc2b9490cd815ee53ed"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a5ce9a271dcea6cc2b9490cd815ee53ed">&#9670;&#160;</a></span>addNode() <span class="overload">[2/2]</span></h2>

<div class="memitem">
<div class="memproto">
<div class="memtemplate">
template&lt;typename NodeType &gt; </div>
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">NodeType * nz::graph::ComputeGraph::addNode </td>
          <td>(</td>
          <td class="paramtype">NodeType *</td>          <td class="paramname"><span class="paramname"><em>node</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>name</em></span><span class="paramdefsep"> = </span><span class="paramdefval">&quot;default&quot;</span>&#160;)</td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Adds a node of any type to the computational graph. </p>
<p>This template method allows adding a node of any type derived from the <code>Node</code> class to the computational graph. The node is added to the <code>nodes</code> list and optionally assigned a name. If the name is <code>"default"</code>, a unique name is generated using the node's type and a reference counter.</p>
<dl class="tparams"><dt>Template Parameters</dt><dd>
  <table class="tparams">
    <tr><td class="paramname">NodeType</td><td>The type of the node, which must be derived from <code>Node</code>. This allows the method to work with different types of nodes (e.g., <code>InputNode</code>, <code>OutputNode</code>, etc.).</td></tr>
  </table>
  </dd>
</dl>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">node</td><td>A pointer to the node to be added to the graph. </td></tr>
    <tr><td class="paramname">name</td><td>The name of the node. If <code>"default"</code>, a unique name will be generated. The default value is <code>"default"</code>.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>A pointer to the added node.</dd></dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>If the <code>name</code> is <code>"default"</code>, the method generates a unique name for the node by concatenating the node's type and a reference counter (e.g., <code>Input_1</code>, <code>Add_2</code>).</li>
<li>The node is added to both the <code>nodes</code> list (for graph traversal) and the <code>nodeRoster</code>/<code>nodeRosterReverse</code> maps (for name-based access).</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md5"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="keyword">auto</span>* node = <span class="keyword">new</span> <a class="code hl_class" href="classnz_1_1nodes_1_1calc_1_1_add_node.html">AddNode</a>(&amp;input1, &amp;input2);</div>
<div class="line">graph.<a class="code hl_function" href="#a5ce9a271dcea6cc2b9490cd815ee53ed">addNode</a>(node, <span class="stringliteral">&quot;Add&quot;</span>);</div>
<div class="ttc" id="aclassnz_1_1nodes_1_1calc_1_1_add_node_html"><div class="ttname"><a href="classnz_1_1nodes_1_1calc_1_1_add_node.html">nz::nodes::calc::AddNode</a></div><div class="ttdoc">Represents a node that performs element-wise addition between two input tensors.</div><div class="ttdef"><b>Definition</b> <a href="_nodes_8cuh_source.html#l00917">Nodes.cuh:917</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1nodes_1_1_node.html" title="Base class for nodes in a neural network or computational graph.">nodes::Node</a> for the base node class.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cuh_source.html#l00511">511</a> of file <a class="el" href="_compute_graph_8cuh_source.html">ComputeGraph.cuh</a>.</p>

</div>
</div>
<a id="a47cde0bdd9c961d55bfd16c9adb8810e" name="a47cde0bdd9c961d55bfd16c9adb8810e"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a47cde0bdd9c961d55bfd16c9adb8810e">&#9670;&#160;</a></span>addOutput()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html">OutputNode</a> * nz::graph::ComputeGraph::addOutput </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html">OutputNode</a> *</td>          <td class="paramname"><span class="paramname"><em>node</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>name</em></span><span class="paramdefsep"> = </span><span class="paramdefval">&quot;default&quot;</span>&#160;)</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Adds an output node to the computational graph. </p>
<p>This method adds an <code>OutputNode</code> to the computational graph. It takes an <code>OutputNode</code> pointer and an optional name. The node is added to both the <code>nodes</code> list and the <code>outputNodes</code> list. If the name is <code>"default"</code>, a unique name is generated using the node's type and a reference counter.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">node</td><td>A pointer to the <code>OutputNode</code> to be added to the graph. </td></tr>
    <tr><td class="paramname">name</td><td>The name of the node. If <code>"default"</code>, a unique name will be generated. The default value is <code>"default"</code>.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>A pointer to the added <code>OutputNode</code>.</dd></dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>If the <code>name</code> is <code>"default"</code>, the method generates a unique name for the node by concatenating the node's type and a reference counter (e.g., <code>Output_1</code>, <code>Output_2</code>).</li>
<li>The node is added to both the <code>nodes</code> list and the <code>outputNodes</code> list for graph traversal and output handling.</li>
<li>The node's name is also stored in the <code>nodeRoster</code> and <code>nodeRosterReverse</code> maps.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md7"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"> </div>
<div class="line"><span class="comment">// Add basic output node</span></div>
<div class="line"><span class="keyword">auto</span>* outputNode = <span class="keyword">new</span> <a class="code hl_class" href="classnz_1_1nodes_1_1io_1_1_output_node.html">OutputNode</a>(inputNode); <span class="comment">// assuming inputNode is a valid InputNode pointer</span></div>
<div class="line">graph.<a class="code hl_function" href="#a47cde0bdd9c961d55bfd16c9adb8810e">addOutput</a>(outputNode, <span class="stringliteral">&quot;output&quot;</span>);</div>
<div class="line"> </div>
<div class="line"><span class="comment">// Add loss function node (renamed to avoid redeclaring outputNode and reusing the name &quot;output&quot;)</span></div>
<div class="line"><span class="keyword">auto</span>* lossNode = <span class="keyword">new</span> <a class="code hl_class" href="classnz_1_1nodes_1_1loss_1_1_mean_squared_error_node.html">MeanSquaredErrorNode</a>(inputNode1, inputNode2);</div>
<div class="line">graph.<a class="code hl_function" href="#a47cde0bdd9c961d55bfd16c9adb8810e">addOutput</a>(lossNode, <span class="stringliteral">&quot;loss_output&quot;</span>);</div>
<div class="ttc" id="aclassnz_1_1nodes_1_1io_1_1_output_node_html"><div class="ttname"><a href="classnz_1_1nodes_1_1io_1_1_output_node.html">nz::nodes::io::OutputNode</a></div><div class="ttdoc">Base class for loss function nodes in a computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_nodes_8cuh_source.html#l00683">Nodes.cuh:683</a></div></div>
<div class="ttc" id="aclassnz_1_1nodes_1_1loss_1_1_mean_squared_error_node_html"><div class="ttname"><a href="classnz_1_1nodes_1_1loss_1_1_mean_squared_error_node.html">nz::nodes::loss::MeanSquaredErrorNode</a></div><div class="ttdoc">Represents the Mean Squared Error (MSE) loss function node in a computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_nodes_8cuh_source.html#l04804">Nodes.cuh:4804</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">nodes::io::OutputNode</a> for the output node class.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00389">389</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="adfcad402fe85084ddbb4f8b6415adc05" name="adfcad402fe85084ddbb4f8b6415adc05"></a>
<h2 class="memtitle"><span class="permalink"><a href="#adfcad402fe85084ddbb4f8b6415adc05">&#9670;&#160;</a></span>backward()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void nz::graph::ComputeGraph::backward </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Performs backward propagation on the computational graph. </p>
<p>This method performs backward propagation starting from the output node(s) in the computational graph. The method first checks if the graph is sorted. If the graph is not sorted, a runtime error is thrown. If the graph is sorted, the backward propagation is performed by iterating over the nodes in reverse topological order (i.e., from outputs to inputs). Each node’s <code><a class="el" href="#adfcad402fe85084ddbb4f8b6415adc05" title="Performs backward propagation on the computational graph.">backward()</a></code> method is called to compute gradients with respect to its inputs.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">None</td><td></td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>None</dd></dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>If the graph is not sorted or if there are multiple output nodes or no output node.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>If the graph is not sorted, a runtime error is thrown because backward propagation relies on the topological order of the nodes. Sorting ensures that each node’s gradient can be computed in the correct order.</li>
<li>If the graph has exactly one output node, the method proceeds with backward propagation in reverse topological order (from the output node back to the input nodes).</li>
<li>If the graph has no output nodes or multiple output nodes, a runtime error is thrown.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md27"></a>
Why Not Automatically Sort the Graph?</h3>
<p>The <code><a class="el" href="#adfcad402fe85084ddbb4f8b6415adc05" title="Performs backward propagation on the computational graph.">backward()</a></code> method does not automatically sort the graph because backward propagation must correspond to a previously completed forward pass. A forward pass determines the order of operations and ensures that the graph is in a valid state for backward propagation. Automatically sorting the graph would interfere with this flow, as backward propagation is dependent on the state of the graph after forward propagation. Hence, the graph is only processed for backward propagation if it has been sorted and the forward pass has already occurred.</p>
<h3><a class="anchor" id="autotoc_md28"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="comment">// Assuming nodes are added and the graph is sorted...</span></div>
<div class="line">graph.<a class="code hl_function" href="#a0c5a596f38e6d7e8c1c78e19b9b56167">forward</a>();  <span class="comment">// Perform forward propagation first</span></div>
<div class="line">graph.<a class="code hl_function" href="#adfcad402fe85084ddbb4f8b6415adc05">backward</a>(); <span class="comment">// Perform backward propagation after forward pass</span></div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a0c5a596f38e6d7e8c1c78e19b9b56167"><div class="ttname"><a href="#a0c5a596f38e6d7e8c1c78e19b9b56167">nz::graph::ComputeGraph::forward</a></div><div class="ttdeci">void forward()</div><div class="ttdoc">Performs forward propagation on the computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00405">ComputeGraph.cu:405</a></div></div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_adfcad402fe85084ddbb4f8b6415adc05"><div class="ttname"><a href="#adfcad402fe85084ddbb4f8b6415adc05">nz::graph::ComputeGraph::backward</a></div><div class="ttdeci">void backward()</div><div class="ttdoc">Performs backward propagation on the computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00414">ComputeGraph.cu:414</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="#aede9d05b6c8e7394ea730b5ffea42164" title="Checks whether the computational graph has been topologically sorted.">isSorted()</a> for the method that checks if the <a class="el" href="namespacenz_1_1graph.html" title="Contains classes and functions for managing and executing computation graphs in deep learning workflo...">graph</a> is sorted. </dd>
<dd>
<a class="el" href="#a0c5a596f38e6d7e8c1c78e19b9b56167" title="Performs forward propagation on the computational graph.">forward()</a> for the method that performs <a class="el" href="#a0c5a596f38e6d7e8c1c78e19b9b56167" title="Performs forward propagation on the computational graph.">forward</a> propagation.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00414">414</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>
<div class="dynheader">
Here is the call graph for this function:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1graph_1_1_compute_graph_adfcad402fe85084ddbb4f8b6415adc05_cgraph.png" border="0" usemap="#aclassnz_1_1graph_1_1_compute_graph_adfcad402fe85084ddbb4f8b6415adc05_cgraph" alt=""/></div>
<map name="aclassnz_1_1graph_1_1_compute_graph_adfcad402fe85084ddbb4f8b6415adc05_cgraph" id="aclassnz_1_1graph_1_1_compute_graph_adfcad402fe85084ddbb4f8b6415adc05_cgraph">
<area shape="rect" title="Performs backward propagation on the computational graph." alt="" coords="5,5,177,48"/>
<area shape="rect" href="classnz_1_1graph_1_1_compute_graph.html#aede9d05b6c8e7394ea730b5ffea42164" title="Checks whether the computational graph has been topologically sorted." alt="" coords="225,5,396,48"/>
<area shape="poly" title=" " alt="" coords="177,24,209,24,209,29,177,29"/>
</map>
</div>

</div>
</div>
<a id="a0f7e2b25f6b75998e0e912320607f6e8" name="a0f7e2b25f6b75998e0e912320607f6e8"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a0f7e2b25f6b75998e0e912320607f6e8">&#9670;&#160;</a></span>fill() <span class="overload">[1/2]</span></h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void nz::graph::ComputeGraph::fill </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *</td>          <td class="paramname"><span class="paramname"><em>node</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">Tensor::value_type</td>          <td class="paramname"><span class="paramname"><em>val</em></span>&#160;)</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Fills the output tensor of a specified node with a given value. </p>
<p>This method fills the output tensor of a node, identified by its pointer, with a specified value. It checks if the node is present in the graph by searching the node pointer in the <code>nodes</code> list. If the node is found, it calls the <code>fill</code> method on the node's output tensor to set all its elements to the provided value. If the node is not found in the graph, an exception is thrown.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">node</td><td>A pointer to the node whose output tensor will be filled. </td></tr>
    <tr><td class="paramname">val</td><td>The value to fill the output tensor with.</td></tr>
  </table>
  </dd>
</dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>if the node is not found in the graph.</td></tr>
  </table>
  </dd>
</dl>
<h3><a class="anchor" id="autotoc_md15"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="comment">// Assuming graph has nodes added and a valid node pointer &quot;inputNode&quot;</span></div>
<div class="line">graph.<a class="code hl_function" href="#a0f7e2b25f6b75998e0e912320607f6e8">fill</a>(inputNode, 0.0);  <span class="comment">// Fills the output tensor of &quot;inputNode&quot; with 0.0</span></div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a0f7e2b25f6b75998e0e912320607f6e8"><div class="ttname"><a href="#a0f7e2b25f6b75998e0e912320607f6e8">nz::graph::ComputeGraph::fill</a></div><div class="ttdeci">void fill(const Node *node, Tensor::value_type val)</div><div class="ttdoc">Fills the output tensor of a specified node with a given value.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00475">ComputeGraph.cu:475</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1data_1_1_tensor.html#ad220de56b18c404611f07f2290cd7e9d" title="Fills the tensor&#39;s data with a specified value.">Tensor::fill()</a> for the method that fills the tensor with a specific value.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00475">475</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="ae732145241d1da547219a3e8b95a04ee" name="ae732145241d1da547219a3e8b95a04ee"></a>
<h2 class="memtitle"><span class="permalink"><a href="#ae732145241d1da547219a3e8b95a04ee">&#9670;&#160;</a></span>fill() <span class="overload">[2/2]</span></h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void nz::graph::ComputeGraph::fill </td>
          <td>(</td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>name</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">Tensor::value_type</td>          <td class="paramname"><span class="paramname"><em>val</em></span>&#160;)</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Fills the output tensor of a specified node with a given value. </p>
<p>This method fills the output tensor of a node, identified by its name, with a specified value. It looks up the node by name in the <code>nodeRoster</code>. If the node is found, it calls the <code>fill</code> method on the node's output tensor, setting all its elements to the provided value. If the node is not found, an exception is thrown.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">name</td><td>The name of the node whose output tensor will be filled. </td></tr>
    <tr><td class="paramname">val</td><td>The value to fill the output tensor with.</td></tr>
  </table>
  </dd>
</dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>if the node with the specified name is not found in the graph.</td></tr>
  </table>
  </dd>
</dl>
<h3><a class="anchor" id="autotoc_md14"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="comment">// Assuming graph has nodes added and a node named &quot;input_1&quot;</span></div>
<div class="line">graph.<a class="code hl_function" href="#ae732145241d1da547219a3e8b95a04ee">fill</a>(<span class="stringliteral">&quot;input_1&quot;</span>, 0.0);  <span class="comment">// Fills the output tensor of &quot;input_1&quot; with 0.0</span></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1data_1_1_tensor.html#ad220de56b18c404611f07f2290cd7e9d" title="Fills the tensor&#39;s data with a specified value.">Tensor::fill()</a> for the method that fills the tensor with a specific value.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00466">466</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="adf21c05c4515016d34d5fc7d639b9e40" name="adf21c05c4515016d34d5fc7d639b9e40"></a>
<h2 class="memtitle"><span class="permalink"><a href="#adf21c05c4515016d34d5fc7d639b9e40">&#9670;&#160;</a></span>fillAll()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void nz::graph::ComputeGraph::fillAll </td>
          <td>(</td>
          <td class="paramtype">Tensor::value_type</td>          <td class="paramname"><span class="paramname"><em>val</em></span></td><td>)</td>
          <td> const</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Fills the output tensors of all input nodes with a given value. </p>
<p>This method iterates over all input nodes in the computational graph and fills their output tensors with the specified value. It calls the <code>fill</code> method on each input node's output tensor to set all its elements to the provided value. This operation is performed for every input node in the graph.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">val</td><td>The value to fill the output tensors of all input nodes with.</td></tr>
  </table>
  </dd>
</dl>
<h3><a class="anchor" id="autotoc_md16"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="comment">// Assuming graph has input nodes added</span></div>
<div class="line">graph.<a class="code hl_function" href="#adf21c05c4515016d34d5fc7d639b9e40">fillAll</a>(0.0);  <span class="comment">// Fills the output tensors of all input nodes with 0.0</span></div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_adf21c05c4515016d34d5fc7d639b9e40"><div class="ttname"><a href="#adf21c05c4515016d34d5fc7d639b9e40">nz::graph::ComputeGraph::fillAll</a></div><div class="ttdeci">void fillAll(Tensor::value_type val) const</div><div class="ttdoc">Fills the output tensors of all input nodes with a given value.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00484">ComputeGraph.cu:484</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1data_1_1_tensor.html#ad220de56b18c404611f07f2290cd7e9d" title="Fills the tensor&#39;s data with a specified value.">Tensor::fill()</a> for the method that fills a tensor&#39;s data with a specified value.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00484">484</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a0c5a596f38e6d7e8c1c78e19b9b56167" name="a0c5a596f38e6d7e8c1c78e19b9b56167"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a0c5a596f38e6d7e8c1c78e19b9b56167">&#9670;&#160;</a></span>forward()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void nz::graph::ComputeGraph::forward </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Performs forward propagation on the computational graph. </p>
<p>This method performs forward propagation on all nodes in the computational graph. It first ensures the graph is sorted in topological order (if not already sorted), and then propagates the data through each node in the sorted order. Each node's <code><a class="el" href="#a0c5a596f38e6d7e8c1c78e19b9b56167" title="Performs forward propagation on the computational graph.">forward()</a></code> method is called to compute its output based on its inputs.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">None</td><td></td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>None</dd></dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>This method checks if the graph is sorted by calling the <code><a class="el" href="#aede9d05b6c8e7394ea730b5ffea42164" title="Checks whether the computational graph has been topologically sorted.">isSorted()</a></code> method. If the graph is not sorted, it calls the <code><a class="el" href="#a5960e75d631200994c0a2f78f58674dd" title="Performs topological sorting on the computational graph.">topologicalSort()</a></code> method to sort the nodes in topological order before performing the forward propagation.</li>
<li>The nodes are processed in sorted order, ensuring that each node’s inputs are computed before the node itself.</li>
<li>After calling <code><a class="el" href="#a5960e75d631200994c0a2f78f58674dd" title="Performs topological sorting on the computational graph.">topologicalSort()</a></code>, the <code><a class="el" href="#a0c5a596f38e6d7e8c1c78e19b9b56167" title="Performs forward propagation on the computational graph.">forward()</a></code> method calls each node's <code><a class="el" href="#a0c5a596f38e6d7e8c1c78e19b9b56167" title="Performs forward propagation on the computational graph.">forward()</a></code> method to compute the node’s output and propagate the result through the graph.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md26"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="comment">// Assuming nodes are added to the graph...</span></div>
<div class="line">graph.<a class="code hl_function" href="#a0c5a596f38e6d7e8c1c78e19b9b56167">forward</a>(); <span class="comment">// Performs forward propagation on all nodes in the graph</span></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="#a5960e75d631200994c0a2f78f58674dd" title="Performs topological sorting on the computational graph.">topologicalSort()</a> for the method that sorts the <a class="el" href="namespacenz_1_1nodes.html" title="Contains classes and functionality for nodes in a neural network or computational graph.">nodes</a> in topological order. </dd>
<dd>
<a class="el" href="#aede9d05b6c8e7394ea730b5ffea42164" title="Checks whether the computational graph has been topologically sorted.">isSorted()</a> for the method that checks if the <a class="el" href="namespacenz_1_1graph.html" title="Contains classes and functions for managing and executing computation graphs in deep learning workflo...">graph</a> is sorted. </dd>
<dd>
<a class="el" href="#adfcad402fe85084ddbb4f8b6415adc05" title="Performs backward propagation on the computational graph.">backward()</a> for the method that performs <a class="el" href="#adfcad402fe85084ddbb4f8b6415adc05" title="Performs backward propagation on the computational graph.">backward</a> propagation.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00405">405</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>
<div class="dynheader">
Here is the call graph for this function:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1graph_1_1_compute_graph_a0c5a596f38e6d7e8c1c78e19b9b56167_cgraph.png" border="0" usemap="#aclassnz_1_1graph_1_1_compute_graph_a0c5a596f38e6d7e8c1c78e19b9b56167_cgraph" alt=""/></div>
<map name="aclassnz_1_1graph_1_1_compute_graph_a0c5a596f38e6d7e8c1c78e19b9b56167_cgraph" id="aclassnz_1_1graph_1_1_compute_graph_a0c5a596f38e6d7e8c1c78e19b9b56167_cgraph">
<area shape="rect" title="Performs forward propagation on the computational graph." alt="" coords="5,39,177,81"/>
<area shape="rect" href="classnz_1_1graph_1_1_compute_graph.html#aede9d05b6c8e7394ea730b5ffea42164" title="Checks whether the computational graph has been topologically sorted." alt="" coords="225,5,396,48"/>
<area shape="poly" title=" " alt="" coords="176,44,209,39,209,45,177,50"/>
<area shape="rect" href="classnz_1_1graph_1_1_compute_graph.html#a5960e75d631200994c0a2f78f58674dd" title="Performs topological sorting on the computational graph." alt="" coords="225,72,396,115"/>
<area shape="poly" title=" " alt="" coords="177,70,209,75,209,81,176,76"/>
</map>
</div>

</div>
</div>
<a id="a3a53a15c04e85da4e6aa0a209fbee4c2" name="a3a53a15c04e85da4e6aa0a209fbee4c2"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a3a53a15c04e85da4e6aa0a209fbee4c2">&#9670;&#160;</a></span>getLoss()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">Tensor::value_type nz::graph::ComputeGraph::getLoss </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td> const</td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">nodiscard</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Retrieves the loss value from the first output node in the computational graph. </p>
<p>This method retrieves the loss value computed by the first <code>OutputNode</code> in the computational graph. The method assumes that there is at least one output node in the graph. If no output nodes exist, a <code>std::runtime_error</code> is thrown.</p>
<dl class="section return"><dt>Returns</dt><dd>The loss value computed by the first output node in the graph.</dd></dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>If no output nodes are present in the graph.</td></tr>
  </table>
  </dd>
</dl>
<h3><a class="anchor" id="autotoc_md23"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="keywordflow">try</span> {</div>
<div class="line">    Tensor::value_type loss = graph.<a class="code hl_function" href="#a3a53a15c04e85da4e6aa0a209fbee4c2">getLoss</a>();</div>
<div class="line">    std::cout &lt;&lt; <span class="stringliteral">&quot;Loss: &quot;</span> &lt;&lt; loss &lt;&lt; std::endl;</div>
<div class="line">} <span class="keywordflow">catch</span> (<span class="keyword">const</span> std::runtime_error&amp; e) {</div>
<div class="line">    <span class="comment">// Handle the case when no output node is present</span></div>
<div class="line">    std::cerr &lt;&lt; <span class="stringliteral">&quot;Error: &quot;</span> &lt;&lt; e.what() &lt;&lt; std::endl;</div>
<div class="line">}</div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a3a53a15c04e85da4e6aa0a209fbee4c2"><div class="ttname"><a href="#a3a53a15c04e85da4e6aa0a209fbee4c2">nz::graph::ComputeGraph::getLoss</a></div><div class="ttdeci">Tensor::value_type getLoss() const</div><div class="ttdoc">Retrieves the loss value from the first output node in the computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00534">ComputeGraph.cu:534</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">nodes::io::OutputNode</a> for the output node class. </dd>
<dd>
<a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html#a7ac1292b280afcd86b31853b1275c1c4" title="Retrieves the loss value stored in the OutputNode.">OutputNode::getLoss()</a> for the method in the <code>OutputNode</code> class that computes the loss.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00534">534</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a75f918fb756d5f7155ee49092ce713e1" name="a75f918fb756d5f7155ee49092ce713e1"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a75f918fb756d5f7155ee49092ce713e1">&#9670;&#160;</a></span>getOutput()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">Tensor::value_type * nz::graph::ComputeGraph::getOutput </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td> const</td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">nodiscard</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Retrieves the output data of the first output node in the computational graph. </p>
<p>This method retrieves a pointer to the output data of the first <code>OutputNode</code> in the computational graph. The output data is stored in the output tensor of the node. It is important to note that the returned pointer points to data that resides in GPU memory.</p>
<p>If no output nodes exist in the graph, a <code>std::runtime_error</code> is thrown. The method assumes that there is at least one output node in the graph, and will not return a <code>nullptr</code>.</p>
<dl class="section return"><dt>Returns</dt><dd>A pointer to the output data of the first output node in the graph, which is stored in GPU memory.</dd></dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>If no output nodes are present in the graph.</td></tr>
  </table>
  </dd>
</dl>
<h3><a class="anchor" id="autotoc_md20"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="keywordflow">try</span> {</div>
<div class="line">    Tensor::value_type* outputData = graph.<a class="code hl_function" href="#a75f918fb756d5f7155ee49092ce713e1">getOutput</a>();</div>
<div class="line">    <span class="comment">// Use the outputData pointer here</span></div>
<div class="line">} <span class="keywordflow">catch</span> (<span class="keyword">const</span> std::runtime_error&amp; e) {</div>
<div class="line">    <span class="comment">// Handle the case when no output node is present</span></div>
<div class="line">    std::cerr &lt;&lt; <span class="stringliteral">&quot;Error: &quot;</span> &lt;&lt; e.what() &lt;&lt; std::endl;</div>
<div class="line">}</div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a75f918fb756d5f7155ee49092ce713e1"><div class="ttname"><a href="#a75f918fb756d5f7155ee49092ce713e1">nz::graph::ComputeGraph::getOutput</a></div><div class="ttdeci">Tensor::value_type * getOutput() const</div><div class="ttdoc">Retrieves the output data of the first output node in the computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00508">ComputeGraph.cu:508</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">nodes::io::OutputNode</a> for the output node class. </dd>
<dd>
<a class="el" href="classnz_1_1data_1_1_tensor.html" title="A class for representing and manipulating multidimensional arrays (tensors) in GPU memory.">data::Tensor</a> for the class representing tensors and their associated operations.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00508">508</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="aef02d3290fb92b142be346f2fdb125c5" name="aef02d3290fb92b142be346f2fdb125c5"></a>
<h2 class="memtitle"><span class="permalink"><a href="#aef02d3290fb92b142be346f2fdb125c5">&#9670;&#160;</a></span>getOutputHost()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">Tensor::value_type * nz::graph::ComputeGraph::getOutputHost </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td> const</td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">nodiscard</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Retrieves the output data of the first output node in the computational graph and copies it to host memory. </p>
<p>This method retrieves a pointer to the output data of the first <code>OutputNode</code> in the computational graph. It then copies the data from GPU memory to host memory. The returned pointer points to a memory block in host memory that contains the output data.</p>
<p>If the graph contains no output nodes, a runtime error is thrown. The method assumes that there is at least one output node in the graph; otherwise, it will throw an exception.</p>
<p>The returned pointer points to memory allocated in the host's memory space. The caller is responsible for freeing this memory using <code>free()</code> once it's done using the data.</p>
<dl class="section return"><dt>Returns</dt><dd>A pointer to the output data of the first output node in the graph, stored in host memory. The memory must be freed by the caller after use.</dd></dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>If no output nodes are present in the graph.</td></tr>
  </table>
  </dd>
</dl>
<h3><a class="anchor" id="autotoc_md21"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="keywordflow">try</span> {</div>
<div class="line">    Tensor::value_type* outputDataHost = graph.<a class="code hl_function" href="#aef02d3290fb92b142be346f2fdb125c5">getOutputHost</a>();</div>
<div class="line">    <span class="comment">// Use the outputDataHost pointer here</span></div>
<div class="line">    <span class="comment">// Remember to free the memory when done</span></div>
<div class="line">    free(outputDataHost);</div>
<div class="line">} <span class="keywordflow">catch</span> (<span class="keyword">const</span> std::runtime_error&amp; e) {</div>
<div class="line">    std::cerr &lt;&lt; <span class="stringliteral">&quot;Error: &quot;</span> &lt;&lt; e.what() &lt;&lt; std::endl;</div>
<div class="line">}</div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_aef02d3290fb92b142be346f2fdb125c5"><div class="ttname"><a href="#aef02d3290fb92b142be346f2fdb125c5">nz::graph::ComputeGraph::getOutputHost</a></div><div class="ttdeci">Tensor::value_type * getOutputHost() const</div><div class="ttdoc">Retrieves the output data of the first output node in the computational graph and copies it to host m...</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00515">ComputeGraph.cu:515</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">nodes::io::OutputNode</a> for the output node class. </dd>
<dd>
<a class="el" href="classnz_1_1data_1_1_tensor.html" title="A class for representing and manipulating multidimensional arrays (tensors) in GPU memory.">data::Tensor</a> for the class representing tensors and their associated operations.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00515">515</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>
<div class="dynheader">
Here is the call graph for this function:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1graph_1_1_compute_graph_aef02d3290fb92b142be346f2fdb125c5_cgraph.png" border="0" usemap="#aclassnz_1_1graph_1_1_compute_graph_aef02d3290fb92b142be346f2fdb125c5_cgraph" alt=""/></div>
<map name="aclassnz_1_1graph_1_1_compute_graph_aef02d3290fb92b142be346f2fdb125c5_cgraph" id="aclassnz_1_1graph_1_1_compute_graph_aef02d3290fb92b142be346f2fdb125c5_cgraph">
<area shape="rect" title="Retrieves the output data of the first output node in the computational graph and copies it to host m..." alt="" coords="5,72,177,115"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#ab4b2eb422e0e1ee44bdfdc0eb94457ce" title="Returns a reference to the singleton instance of the StreamManager." alt="" coords="225,5,410,48"/>
<area shape="poly" title=" " alt="" coords="165,69,227,50,228,56,166,74"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#afa38d5c6db0e6b48c8f74ce8ad0df2bc" title="Asynchronously copies data between CUDA device and host memory based on the specified memory copy kin..." alt="" coords="225,72,410,115"/>
<area shape="poly" title=" " alt="" coords="177,91,209,91,209,96,177,96"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#abe439fa00c0bd369c0b2345b095ed5af" title="Synchronizes host thread with completion events for a specific data object." alt="" coords="225,139,410,181"/>
<area shape="poly" title=" " alt="" coords="166,113,228,131,227,136,165,118"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#a1de1cf3aadea137faf90a2f9b4b7abe2" title="Acquires CUDA stream from pool using round&#45;robin scheduling." alt="" coords="458,39,643,81"/>
<area shape="poly" title=" " alt="" coords="410,77,442,73,443,78,411,83"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#adb1078a67c6e38932d7d58c2adb05ec0" title="Synchronizes CUDA stream execution until data writes complete." alt="" coords="458,105,643,148"/>
<area shape="poly" title=" " alt="" coords="411,104,443,109,442,114,410,109"/>
</map>
</div>

</div>
</div>
<a id="a69d293299714e489ce8cc2dba1e5a9a5" name="a69d293299714e489ce8cc2dba1e5a9a5"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a69d293299714e489ce8cc2dba1e5a9a5">&#9670;&#160;</a></span>getOutputNode()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html">OutputNode</a> * nz::graph::ComputeGraph::getOutputNode </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td> const</td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">nodiscard</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Retrieves the first output node in the computational graph. </p>
<p>This method retrieves the first <code>OutputNode</code> in the computational graph. The method assumes that there is at least one output node in the graph. If no output nodes exist, a <code>std::runtime_error</code> is thrown.</p>
<dl class="section return"><dt>Returns</dt><dd>A pointer to the first output node in the graph.</dd></dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>If no output nodes are present in the graph.</td></tr>
  </table>
  </dd>
</dl>
<h3><a class="anchor" id="autotoc_md22"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="keywordflow">try</span> {</div>
<div class="line">    <a class="code hl_class" href="classnz_1_1nodes_1_1io_1_1_output_node.html">OutputNode</a>* outputNode = graph.<a class="code hl_function" href="#a69d293299714e489ce8cc2dba1e5a9a5">getOutputNode</a>();</div>
<div class="line">    <span class="comment">// Use the outputNode pointer here</span></div>
<div class="line">} <span class="keywordflow">catch</span> (<span class="keyword">const</span> std::runtime_error&amp; e) {</div>
<div class="line">    <span class="comment">// Handle the case when no output node is present</span></div>
<div class="line">    std::cerr &lt;&lt; <span class="stringliteral">&quot;Error: &quot;</span> &lt;&lt; e.what() &lt;&lt; std::endl;</div>
<div class="line">}</div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a69d293299714e489ce8cc2dba1e5a9a5"><div class="ttname"><a href="#a69d293299714e489ce8cc2dba1e5a9a5">nz::graph::ComputeGraph::getOutputNode</a></div><div class="ttdeci">OutputNode * getOutputNode() const</div><div class="ttdoc">Retrieves the first output node in the computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00527">ComputeGraph.cu:527</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1nodes_1_1io_1_1_output_node.html" title="Base class for loss function nodes in a computational graph.">nodes::io::OutputNode</a> for the output node class.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00527">527</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="aede9d05b6c8e7394ea730b5ffea42164" name="aede9d05b6c8e7394ea730b5ffea42164"></a>
<h2 class="memtitle"><span class="permalink"><a href="#aede9d05b6c8e7394ea730b5ffea42164">&#9670;&#160;</a></span>isSorted()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">bool nz::graph::ComputeGraph::isSorted </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td> const</td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">nodiscard</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Checks whether the computational graph has been topologically sorted. </p>
<p>This function checks if the <code>sortedNodes</code> vector contains the nodes in a valid topologically sorted order. It returns <code>true</code> if the graph is sorted, meaning that each node appears before any node that depends on it. Otherwise, it returns <code>false</code>, indicating that the graph is not sorted.</p>
<dl class="section return"><dt>Returns</dt><dd><code>true</code> if the graph is sorted, <code>false</code> if not.</dd></dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>This function does not modify the state of the graph.</li>
<li>It is a helper function that can be used to verify whether a graph needs sorting before traversing.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md19"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="comment">// Add nodes to the graph...</span></div>
<div class="line"><span class="keywordflow">if</span> (!graph.<a class="code hl_function" href="#aede9d05b6c8e7394ea730b5ffea42164">isSorted</a>()) {</div>
<div class="line">    graph.<a class="code hl_function" href="#a5960e75d631200994c0a2f78f58674dd">topologicalSort</a>();  <span class="comment">// Sort the graph if it is not sorted</span></div>
<div class="line">}</div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a5960e75d631200994c0a2f78f58674dd"><div class="ttname"><a href="#a5960e75d631200994c0a2f78f58674dd">nz::graph::ComputeGraph::topologicalSort</a></div><div class="ttdeci">void topologicalSort()</div><div class="ttdoc">Performs topological sorting on the computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00258">ComputeGraph.cu:258</a></div></div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_aede9d05b6c8e7394ea730b5ffea42164"><div class="ttname"><a href="#aede9d05b6c8e7394ea730b5ffea42164">nz::graph::ComputeGraph::isSorted</a></div><div class="ttdeci">bool isSorted() const</div><div class="ttdoc">Checks whether the computational graph has been topologically sorted.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00298">ComputeGraph.cu:298</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="#a5960e75d631200994c0a2f78f58674dd" title="Performs topological sorting on the computational graph.">topologicalSort()</a> for the function that performs the sorting. </dd>
<dd>
<a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a> for more details on the <a class="el" href="namespacenz_1_1graph.html" title="Contains classes and functions for managing and executing computation graphs in deep learning workflo...">graph</a> structure and node management.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00298">298</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a9748d859d952864bfa498a79c9fb394a" name="a9748d859d952864bfa498a79c9fb394a"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a9748d859d952864bfa498a79c9fb394a">&#9670;&#160;</a></span>load()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void nz::graph::ComputeGraph::load </td>
          <td>(</td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>path</em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Loads a computational graph from a JSON file. </p>
<p>This method deserializes the computational graph from the provided JSON file and reconstructs the nodes, their types, names, input-output relationships, shapes, data, gradients, and other relevant information. It validates the file structure and populates the graph accordingly.</p>
<h3><a class="anchor" id="autotoc_md33"></a>
Graph Deserialization:</h3>
<ul>
<li><b>Nodes</b>: Each node's type, name, input-output connections, and other details are extracted.</li>
<li><b>Pre and Post nodes</b>: Lists of indices for input (pre) and output (post) nodes are read.</li>
<li><b>Data and Gradients</b>: Node data and gradients (if required) are read and restored into their respective tensors.</li>
</ul>
<h3><a class="anchor" id="autotoc_md34"></a>
Error Handling:</h3>
<ul>
<li>Throws a <code>std::runtime_error</code> if the path is empty, the graph is already loaded, or there is an issue opening the file.</li>
</ul>
<h3><a class="anchor" id="autotoc_md35"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line">graph.<a class="code hl_function" href="#a9748d859d952864bfa498a79c9fb394a">load</a>(<span class="stringliteral">&quot;path_to_load_graph.json&quot;</span>);</div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a9748d859d952864bfa498a79c9fb394a"><div class="ttname"><a href="#a9748d859d952864bfa498a79c9fb394a">nz::graph::ComputeGraph::load</a></div><div class="ttdeci">void load(const std::string &amp;path)</div><div class="ttdoc">Loads a computational graph from a JSON file.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00642">ComputeGraph.cu:642</a></div></div>
</div><!-- fragment --><dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">path</td><td>The file path from which the graph should be loaded.</td></tr>
  </table>
  </dd>
</dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>If the path is empty, the graph is already loaded, or file reading fails.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1nodes_1_1_node.html" title="Base class for nodes in a neural network or computational graph.">nodes::Node</a> for the base class of all <a class="el" href="namespacenz_1_1nodes.html" title="Contains classes and functionality for nodes in a neural network or computational graph.">nodes</a>. </dd>
<dd>
<a class="el" href="classnz_1_1data_1_1_tensor.html" title="A class for representing and manipulating multidimensional arrays (tensors) in GPU memory.">data::Tensor</a> for the class representing tensors and their associated operations.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00642">642</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a1e5d98f57514c6f7a539de2776f48c5d" name="a1e5d98f57514c6f7a539de2776f48c5d"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a1e5d98f57514c6f7a539de2776f48c5d">&#9670;&#160;</a></span>nodesList()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">std::string nz::graph::ComputeGraph::nodesList </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Generates a formatted string representing the list of nodes in the <a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a>. </p>
<dl class="section return"><dt>Returns</dt><dd>A string containing a tabular representation of node names and types.</dd></dl>
<p>This function iterates through the <code>nodeRoster</code> of the <a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a> to determine the maximum width required for displaying node names and types. It then constructs a formatted string using <code>std::ostringstream</code> to present the nodes in a tabular format.</p>
<p>Memory management: The function does not allocate any dynamic memory that needs to be explicitly managed. It uses local variables and the <code>std::ostringstream</code> which handles its own memory internally.</p>
<p>Exception handling: This function does not explicitly catch exceptions. Exceptions such as <code>std::bad_alloc</code> may be thrown if there is insufficient memory during the construction of the output string.</p>
<p>This function only depends on the <code>nodeRoster</code> member of the <code><a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a></code> class, which stores the mapping between node names and pointers.</p>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::bad_alloc</td><td>If there is insufficient memory to construct the output string.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The time complexity of this function is O(n), where n is the number of nodes in the <code>nodeRoster</code>, as it iterates through the map twice.</li>
<li>The output string is formatted in a left-aligned tabular style.</li>
</ul>
</dd></dl>
<h3>Usage Example:</h3>
<div class="fragment">
<div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="comment">// Assume some nodes are added to the graph</span></div>
<div class="line">std::string nodesListStr = graph.<a class="code hl_function" href="#a1e5d98f57514c6f7a539de2776f48c5d">nodesList</a>();</div>
<div class="line">std::cout &lt;&lt; nodesListStr &lt;&lt; std::endl;</div>
<div class="line">```</div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a1e5d98f57514c6f7a539de2776f48c5d"><div class="ttname"><a href="#a1e5d98f57514c6f7a539de2776f48c5d">nz::graph::ComputeGraph::nodesList</a></div><div class="ttdeci">std::string nodesList()</div><div class="ttdoc">Generates a formatted string representing the list of nodes in the ComputeGraph.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00817">ComputeGraph.cu:817</a></div></div>
</div><!-- fragment --> 
<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00817">817</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a5ae0d765ed40c4973b54dc20a5cddf61" name="a5ae0d765ed40c4973b54dc20a5cddf61"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a5ae0d765ed40c4973b54dc20a5cddf61">&#9670;&#160;</a></span>operator[]()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> * nz::graph::ComputeGraph::operator[] </td>
          <td>(</td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>name</em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Retrieves the node associated with the given name in the computational graph. </p>
<p>This method overloads the <code>operator[]</code> to provide access to nodes in the computational graph by their name. It allows for easy retrieval of nodes from the <code>nodeRoster</code> map. The operator returns a pointer to the node associated with the provided name.</p>
<p>If the node with the specified name is not found, the method will cause undefined behavior as it directly accesses the <code>nodeRoster</code> map without checking for the node's existence.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">name</td><td>The name of the node to retrieve.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>A pointer to the node associated with the specified name.</dd></dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>If the node does not exist in <code>nodeRoster</code>, this method will cause undefined behavior because it directly accesses the map. To safely check for the existence of a node, consider using <code>find()</code> instead.</li>
<li>This operator does not throw exceptions; it relies on the <code>nodeRoster</code> map's behavior when accessing an element by key.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md25"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><a class="code hl_class" href="classnz_1_1nodes_1_1_node.html">Node</a>* node = graph[<span class="stringliteral">&quot;node_name&quot;</span>];</div>
<div class="line"><span class="keywordflow">if</span> (node != <span class="keyword">nullptr</span>) {</div>
<div class="line">    <span class="comment">// Use the node here</span></div>
<div class="line">} <span class="keywordflow">else</span> {</div>
<div class="line">    <span class="comment">// Handle the case when the node is not found</span></div>
<div class="line">}</div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd>nodeRoster for the map storing <a class="el" href="namespacenz_1_1nodes.html" title="Contains classes and functionality for nodes in a neural network or computational graph.">nodes</a> by name. </dd>
<dd>
Node for the base class of all <a class="el" href="namespacenz_1_1nodes.html" title="Contains classes and functionality for nodes in a neural network or computational graph.">nodes</a>.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00813">813</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a1d54fe96c53fec967c8a20e1ac79ceed" name="a1d54fe96c53fec967c8a20e1ac79ceed"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a1d54fe96c53fec967c8a20e1ac79ceed">&#9670;&#160;</a></span>print()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">std::ostream &amp; nz::graph::ComputeGraph::print </td>
          <td>(</td>
          <td class="paramtype">std::ostream &amp;</td>          <td class="paramname"><span class="paramname"><em>os</em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Prints the details of the computational graph to the provided output stream. </p>
<p>The <code>print</code> method prints a detailed description of the computational graph, including each node's name, its preceding (input) nodes, following (output) nodes, data, and gradients. If the graph is not sorted, it will automatically perform a topological sort before printing the details. The method assumes that the graph contains at least one output node and prints the loss value of the first output node.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">os</td><td>The output stream where the graph details will be printed (e.g., <code>std::cout</code>). </td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>The same output stream after printing the graph details, enabling method chaining.</dd></dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>if an error occurs during the process.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>If the graph is not sorted, the method will automatically call <code><a class="el" href="#a5960e75d631200994c0a2f78f58674dd" title="Performs topological sorting on the computational graph.">topologicalSort()</a></code> to sort the nodes.</li>
<li>The method prints the loss value of the first output node in the graph, assuming there is at least one output node.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md24"></a>
Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="comment">// Add nodes and build the graph</span></div>
<div class="line">graph.<a class="code hl_function" href="#a1d54fe96c53fec967c8a20e1ac79ceed">print</a>(std::cout);  <span class="comment">// Print the graph details to the console</span></div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a1d54fe96c53fec967c8a20e1ac79ceed"><div class="ttname"><a href="#a1d54fe96c53fec967c8a20e1ac79ceed">nz::graph::ComputeGraph::print</a></div><div class="ttdeci">std::ostream &amp; print(std::ostream &amp;os)</div><div class="ttdoc">Prints the details of the computational graph to the provided output stream.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00008">ComputeGraph.cu:8</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="#a5960e75d631200994c0a2f78f58674dd" title="Performs topological sorting on the computational graph.">topologicalSort()</a> for sorting the <a class="el" href="namespacenz_1_1nodes.html" title="Contains classes and functionality for nodes in a neural network or computational graph.">nodes</a> of the <a class="el" href="namespacenz_1_1graph.html" title="Contains classes and functions for managing and executing computation graphs in deep learning workflo...">graph</a>.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00008">8</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>
<div class="dynheader">
Here is the call graph for this function:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1graph_1_1_compute_graph_a1d54fe96c53fec967c8a20e1ac79ceed_cgraph.png" border="0" usemap="#aclassnz_1_1graph_1_1_compute_graph_a1d54fe96c53fec967c8a20e1ac79ceed_cgraph" alt=""/></div>
<map name="aclassnz_1_1graph_1_1_compute_graph_a1d54fe96c53fec967c8a20e1ac79ceed_cgraph" id="aclassnz_1_1graph_1_1_compute_graph_a1d54fe96c53fec967c8a20e1ac79ceed_cgraph">
<area shape="rect" title="Prints the details of the computational graph to the provided output stream." alt="" coords="5,39,177,81"/>
<area shape="rect" href="classnz_1_1graph_1_1_compute_graph.html#aede9d05b6c8e7394ea730b5ffea42164" title="Checks whether the computational graph has been topologically sorted." alt="" coords="225,5,396,48"/>
<area shape="poly" title=" " alt="" coords="176,44,209,39,209,45,177,50"/>
<area shape="rect" href="classnz_1_1graph_1_1_compute_graph.html#a5960e75d631200994c0a2f78f58674dd" title="Performs topological sorting on the computational graph." alt="" coords="225,72,396,115"/>
<area shape="poly" title=" " alt="" coords="177,70,209,75,209,81,176,76"/>
</map>
</div>

</div>
</div>
<a id="a8d767679a2ff3fcb75a863e7ff808170" name="a8d767679a2ff3fcb75a863e7ff808170"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a8d767679a2ff3fcb75a863e7ff808170">&#9670;&#160;</a></span>randomize() <span class="overload">[1/2]</span></h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void nz::graph::ComputeGraph::randomize </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *</td>          <td class="paramname"><span class="paramname"><em>node</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">unsigned long long</td>          <td class="paramname"><span class="paramname"><em>seed</em></span><span class="paramdefsep"> = </span><span class="paramdefval">0</span>&#160;)</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Randomizes the output tensor of a specified node in the computational graph. </p>
<p>This method sets the values of the specified node's output tensor to random values using the provided random seed. The method first checks if the given node exists in the graph by searching for it in the list of nodes. If the node exists, it calls the <code><a class="el" href="#a856ffb66fa42357ab1a12f9f74d2aba4" title="Randomizes the output tensor of a specified node in the computational graph.">randomize()</a></code> method on the node’s output tensor. If the node is not found in the graph, a runtime error is thrown.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">node</td><td>A pointer to the <code>Node</code> whose output tensor should be randomized. </td></tr>
    <tr><td class="paramname">seed</td><td>The seed value for the random number generator.</td></tr>
  </table>
  </dd>
</dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>If the node is not found in the graph.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The <code><a class="el" href="#a856ffb66fa42357ab1a12f9f74d2aba4" title="Randomizes the output tensor of a specified node in the computational graph.">randomize()</a></code> method is expected to be defined for the node's output tensor to set its values randomly.</li>
<li>The method uses the provided <code>seed</code> value to ensure reproducibility of the randomization process.</li>
<li>The node is searched in the <code>nodes</code> list to ensure it is part of the graph.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md12"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="comment">// Assuming &quot;inputNode&quot; is a valid pointer to a node in the graph</span></div>
<div class="line">graph.<a class="code hl_function" href="#a856ffb66fa42357ab1a12f9f74d2aba4">randomize</a>(inputNode, 42);  <span class="comment">// Randomizes the output of &quot;inputNode&quot; using seed 42</span></div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a856ffb66fa42357ab1a12f9f74d2aba4"><div class="ttname"><a href="#a856ffb66fa42357ab1a12f9f74d2aba4">nz::graph::ComputeGraph::randomize</a></div><div class="ttdeci">void randomize(const std::string &amp;name, unsigned long long seed=0)</div><div class="ttdoc">Randomizes the output tensor of a specified node in the computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00440">ComputeGraph.cu:440</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1data_1_1_tensor.html#a7a9f1d5fae2989181645e5f59f7666d8" title="Randomizes the tensor&#39;s data with a uniform distribution.">Tensor::randomize()</a> for the method that randomizes a specific node’s output tensor.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00449">449</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a856ffb66fa42357ab1a12f9f74d2aba4" name="a856ffb66fa42357ab1a12f9f74d2aba4"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a856ffb66fa42357ab1a12f9f74d2aba4">&#9670;&#160;</a></span>randomize() <span class="overload">[2/2]</span></h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void nz::graph::ComputeGraph::randomize </td>
          <td>(</td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>name</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">unsigned long long</td>          <td class="paramname"><span class="paramname"><em>seed</em></span><span class="paramdefsep"> = </span><span class="paramdefval">0</span>&#160;)</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Randomizes the output tensor of a specified node in the computational graph. </p>
<p>This method sets the values of the specified node's output tensor to random values using the provided random seed. The method first checks if the node with the given name exists in the graph. If the node exists, it calls the <code><a class="el" href="#a856ffb66fa42357ab1a12f9f74d2aba4" title="Randomizes the output tensor of a specified node in the computational graph.">randomize()</a></code> method on the node’s output tensor. If the node is not found in the graph, a runtime error is thrown.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">name</td><td>The name of the node whose output tensor should be randomized. </td></tr>
    <tr><td class="paramname">seed</td><td>The seed value for the random number generator. If not provided, the seed defaults to 0.</td></tr>
  </table>
  </dd>
</dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>If the node with the given name is not found in the graph.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The <code><a class="el" href="#a856ffb66fa42357ab1a12f9f74d2aba4" title="Randomizes the output tensor of a specified node in the computational graph.">randomize()</a></code> method is expected to be defined for the node's output tensor to set its values randomly.</li>
<li>The method uses the provided <code>seed</code> value to ensure reproducibility of the randomization process.</li>
<li>If the node is not found, an error is thrown to inform the user that the node is missing from the graph.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md11"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="comment">// Assuming &quot;input_node&quot; is a valid node name in the graph</span></div>
<div class="line">graph.<a class="code hl_function" href="#a856ffb66fa42357ab1a12f9f74d2aba4">randomize</a>(<span class="stringliteral">&quot;input_node&quot;</span>, 42);  <span class="comment">// Randomizes the output of &quot;input_node&quot; using seed 42</span></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1data_1_1_tensor.html#a7a9f1d5fae2989181645e5f59f7666d8" title="Randomizes the tensor&#39;s data with a uniform distribution.">Tensor::randomize()</a> for the method that randomizes a tensor's <a class="el" href="namespacenz_1_1data.html" title="Contains data structures and utilities for tensor operations in machine learning workflows.">data</a>.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00440">440</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a90e4737107c4ec96a1a95022daa6de2f" name="a90e4737107c4ec96a1a95022daa6de2f"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a90e4737107c4ec96a1a95022daa6de2f">&#9670;&#160;</a></span>randomizeAll()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void nz::graph::ComputeGraph::randomizeAll </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td> const</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Randomizes the output tensors of all input nodes in the computational graph. </p>
<p>This method iterates over all input nodes in the graph and randomizes the output tensor for each of them. It uses the current system time (in nanoseconds) as the seed for the random number generator. Each input node is assigned a unique seed by incrementing the base seed for each randomization.</p>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The randomization process is applied to each input node's output tensor.</li>
<li>The seed for randomization is based on the system's current time, ensuring a unique starting point.</li>
<li>The seed is incremented for each input node to provide a different randomization for each one.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md13"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="comment">// Assuming graph has input nodes added</span></div>
<div class="line">graph.<a class="code hl_function" href="#a90e4737107c4ec96a1a95022daa6de2f">randomizeAll</a>();  <span class="comment">// Randomizes the output of all input nodes</span></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1data_1_1_tensor.html#a7a9f1d5fae2989181645e5f59f7666d8" title="Randomizes the tensor&#39;s data with a uniform distribution.">Tensor::randomize()</a> for the method that randomizes a tensor&#39;s data with a uniform distribution.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00458">458</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a6aa07603223ea714d74fc884218cb50f" name="a6aa07603223ea714d74fc884218cb50f"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a6aa07603223ea714d74fc884218cb50f">&#9670;&#160;</a></span>save()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void nz::graph::ComputeGraph::save </td>
          <td>(</td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>path</em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Saves the current computational graph to a JSON file. </p>
<p>This method serializes the entire computational graph into a JSON file at the specified path. It traverses the nodes in the graph and stores their types, names, input-output relationships, shapes, data, gradients (if required), and other relevant information in JSON format. The serialized graph can later be loaded for further processing or visualization.</p>
<h3><a class="anchor" id="autotoc_md30"></a>
Graph Serialization:</h3>
<ul>
<li><b>Nodes</b>: Each node's type, name, input-output connections, and other details are stored.</li>
<li><b>Pre and Post nodes</b>: Lists of indices for input (pre) and output (post) nodes are saved.</li>
<li><b>Data and Gradients</b>: Node data and gradients are copied from GPU to host and serialized.</li>
</ul>
<h3><a class="anchor" id="autotoc_md31"></a>
Error Handling:</h3>
<ul>
<li>Throws a <code>std::runtime_error</code> if the path is empty, the graph is not sorted, or if there is any failure during file writing.</li>
</ul>
<h3><a class="anchor" id="autotoc_md32"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line">graph.<a class="code hl_function" href="#a6aa07603223ea714d74fc884218cb50f">save</a>(<span class="stringliteral">&quot;path_to_save_graph.json&quot;</span>);</div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a6aa07603223ea714d74fc884218cb50f"><div class="ttname"><a href="#a6aa07603223ea714d74fc884218cb50f">nz::graph::ComputeGraph::save</a></div><div class="ttdeci">void save(const std::string &amp;path)</div><div class="ttdoc">Saves the current computational graph to a JSON file.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00553">ComputeGraph.cu:553</a></div></div>
</div><!-- fragment --><dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">path</td><td>The file path where the graph should be saved.</td></tr>
  </table>
  </dd>
</dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>If the path is empty, the graph is not sorted, or file writing fails.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1nodes_1_1_node.html" title="Base class for nodes in a neural network or computational graph.">nodes::Node</a> for the base class of all <a class="el" href="namespacenz_1_1nodes.html" title="Contains classes and functionality for nodes in a neural network or computational graph.">nodes</a>. </dd>
<dd>
<a class="el" href="classnz_1_1data_1_1_tensor.html" title="A class for representing and manipulating multidimensional arrays (tensors) in GPU memory.">data::Tensor</a> for the class representing tensors and their associated operations.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00553">553</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>
<div class="dynheader">
Here is the call graph for this function:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1graph_1_1_compute_graph_a6aa07603223ea714d74fc884218cb50f_cgraph.png" border="0" usemap="#aclassnz_1_1graph_1_1_compute_graph_a6aa07603223ea714d74fc884218cb50f_cgraph" alt=""/></div>
<map name="aclassnz_1_1graph_1_1_compute_graph_a6aa07603223ea714d74fc884218cb50f_cgraph" id="aclassnz_1_1graph_1_1_compute_graph_a6aa07603223ea714d74fc884218cb50f_cgraph">
<area shape="rect" title="Saves the current computational graph to a JSON file." alt="" coords="5,72,177,115"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#ab4b2eb422e0e1ee44bdfdc0eb94457ce" title="Returns a reference to the singleton instance of the StreamManager." alt="" coords="225,5,410,48"/>
<area shape="poly" title=" " alt="" coords="165,69,227,50,228,56,166,74"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#afa38d5c6db0e6b48c8f74ce8ad0df2bc" title="Asynchronously copies data between CUDA device and host memory based on the specified memory copy kin..." alt="" coords="225,72,410,115"/>
<area shape="poly" title=" " alt="" coords="177,91,209,91,209,96,177,96"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#abe439fa00c0bd369c0b2345b095ed5af" title="Synchronizes host thread with completion events for a specific data object." alt="" coords="225,139,410,181"/>
<area shape="poly" title=" " alt="" coords="166,113,228,131,227,136,165,118"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#a1de1cf3aadea137faf90a2f9b4b7abe2" title="Acquires CUDA stream from pool using round&#45;robin scheduling." alt="" coords="458,39,643,81"/>
<area shape="poly" title=" " alt="" coords="410,77,442,73,443,78,411,83"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#adb1078a67c6e38932d7d58c2adb05ec0" title="Synchronizes CUDA stream execution until data writes complete." alt="" coords="458,105,643,148"/>
<area shape="poly" title=" " alt="" coords="411,104,443,109,442,114,410,109"/>
</map>
</div>

</div>
</div>
<a id="aa32640d8edb36f42e6093cd70037b5f3" name="aa32640d8edb36f42e6093cd70037b5f3"></a>
<h2 class="memtitle"><span class="permalink"><a href="#aa32640d8edb36f42e6093cd70037b5f3">&#9670;&#160;</a></span>setInput() <span class="overload">[1/2]</span></h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void nz::graph::ComputeGraph::setInput </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *</td>          <td class="paramname"><span class="paramname"><em>node</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">Tensor::value_type *</td>          <td class="paramname"><span class="paramname"><em>data</em></span>&#160;)</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Sets the input data for a specified node in the computational graph using a node pointer. </p>
<p>This method sets the input data for a node in the computational graph by copying the provided raw data into the node's output tensor. The input data is assumed to be an array of type <code>Tensor::value_type</code> and will be copied into the output tensor of the specified node. The shape of the output tensor will be used to determine the amount of data to copy.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">node</td><td>A pointer to the <code>Node</code> whose input data is to be set. </td></tr>
    <tr><td class="paramname">data</td><td>A pointer to the raw input data that will be copied into the node's output tensor.</td></tr>
  </table>
  </dd>
</dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>If the node is not found in the graph.</td></tr>
  </table>
  </dd>
</dl>
<h3><a class="anchor" id="autotoc_md18"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line">Tensor::value_type inputData[] = {1.0, 2.0, 3.0};  <span class="comment">// Example input data</span></div>
<div class="line">graph.<a class="code hl_function" href="#a8b3498c50429d631b07b5906e7455614">setInput</a>(inputNode, inputData);  <span class="comment">// Sets the input data for the specified node</span></div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a8b3498c50429d631b07b5906e7455614"><div class="ttname"><a href="#a8b3498c50429d631b07b5906e7455614">nz::graph::ComputeGraph::setInput</a></div><div class="ttdeci">void setInput(const std::string &amp;name, Tensor::value_type *data)</div><div class="ttdoc">Sets the input data for a specified node in the computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00490">ComputeGraph.cu:490</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1data_1_1_tensor.html#adf80894b8e06f260bb2695951e2f539e" title="Injects data or gradient data into the tensor.">Tensor::dataInject()</a> for the method that injects the <a class="el" href="namespacenz_1_1data.html" title="Contains data structures and utilities for tensor operations in machine learning workflows.">data</a> into the tensor. </dd>
<dd>
<a class="el" href="classnz_1_1data_1_1_tensor.html" title="A class for representing and manipulating multidimensional arrays (tensors) in GPU memory.">data::Tensor</a> for the class representing tensors and their associated operations.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00499">499</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a8b3498c50429d631b07b5906e7455614" name="a8b3498c50429d631b07b5906e7455614"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a8b3498c50429d631b07b5906e7455614">&#9670;&#160;</a></span>setInput() <span class="overload">[2/2]</span></h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void nz::graph::ComputeGraph::setInput </td>
          <td>(</td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>name</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">Tensor::value_type *</td>          <td class="paramname"><span class="paramname"><em>data</em></span>&#160;)</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Sets the input data for a specified node in the computational graph. </p>
<p>This method sets the input data for a node in the computational graph by copying the provided raw data into the node's output tensor. The input data is assumed to be an array of type <code>Tensor::value_type</code> and will be copied into the output tensor of the node specified by the <code>name</code>. The shape of the output tensor will be used to determine the amount of data to copy.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">name</td><td>The name of the node whose input data is to be set. </td></tr>
    <tr><td class="paramname">data</td><td>A pointer to the raw input data that will be copied into the node's output tensor.</td></tr>
  </table>
  </dd>
</dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>If the node with the specified <code>name</code> is not found in the graph.</td></tr>
  </table>
  </dd>
</dl>
<h3><a class="anchor" id="autotoc_md17"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line">Tensor::value_type inputData[] = {1.0, 2.0, 3.0};  <span class="comment">// Example input data</span></div>
<div class="line">graph.<a class="code hl_function" href="#a8b3498c50429d631b07b5906e7455614">setInput</a>(<span class="stringliteral">&quot;input_node_name&quot;</span>, inputData);  <span class="comment">// Sets the input data for the specified node</span></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1data_1_1_tensor.html#adf80894b8e06f260bb2695951e2f539e" title="Injects data or gradient data into the tensor.">Tensor::dataInject()</a> for the method that injects the <a class="el" href="namespacenz_1_1data.html" title="Contains data structures and utilities for tensor operations in machine learning workflows.">data</a> into the tensor. </dd>
<dd>
<a class="el" href="classnz_1_1data_1_1_tensor.html" title="A class for representing and manipulating multidimensional arrays (tensors) in GPU memory.">data::Tensor</a> for the class representing tensors and their associated operations.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00490">490</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a5960e75d631200994c0a2f78f58674dd" name="a5960e75d631200994c0a2f78f58674dd"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a5960e75d631200994c0a2f78f58674dd">&#9670;&#160;</a></span>topologicalSort()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void nz::graph::ComputeGraph::topologicalSort </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Performs topological sorting on the computational graph. </p>
<p>This function performs topological sorting on the computational graph to order the nodes such that each node appears before any nodes that depend on it. The sorted nodes are stored in the <code>sortedNodes</code> vector, which allows for a correct computation order during graph traversal. It uses Kahn's algorithm for topological sorting.</p>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>If the graph contains a cycle, indicating that topological sorting is not possible.</td></tr>
  </table>
  </dd>
</dl>
<p>This method modifies the following member variables:</p><ul>
<li><code>sortedNodes</code>: A vector that stores the nodes in topologically sorted order.</li>
<li><code>inDegree</code>: A map that keeps track of the in-degree (number of incoming edges) for each node.</li>
<li><code>adjList</code>: A map that stores the adjacency list for each node, representing which nodes depend on it.</li>
</ul>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The function assumes that the graph is a Directed Acyclic Graph (DAG). If a cycle is detected during the sorting process, an exception will be thrown.</li>
<li>This method is useful in scenarios like forward propagation in neural networks, where nodes need to be processed in a specific order.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md8"></a>
Algorithm Explanation:</h3>
<ol type="1">
<li>Initialize the <code>inDegree</code> of each node to 0.</li>
<li>Build the <code>adjList</code> for each node and increment the <code>inDegree</code> of nodes that have incoming edges.</li>
<li>Initialize a queue with all nodes that have an in-degree of 0 (i.e., no dependencies).</li>
<li>Process each node from the queue, adding it to the <code>sortedNodes</code> list, and decrement the <code>inDegree</code> of its adjacent nodes (i.e., nodes that depend on it). If any adjacent node's in-degree becomes 0, it is added to the queue.</li>
<li>If the number of nodes in <code>sortedNodes</code> does not match the total number of nodes in the graph, a cycle is detected and an exception is thrown.</li>
</ol>
<h3><a class="anchor" id="autotoc_md9"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><span class="comment">// Add nodes and edges to the graph...</span></div>
<div class="line">graph.<a class="code hl_function" href="#a5960e75d631200994c0a2f78f58674dd">topologicalSort</a>();  <span class="comment">// Perform topological sorting</span></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a> for more details on the <a class="el" href="namespacenz_1_1graph.html" title="Contains classes and functions for managing and executing computation graphs in deep learning workflo...">graph</a> structure and node management. </dd>
<dd>
Node for information on individual node types and their operations.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00258">258</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="aee306895a3078a0b09d2a4bdf4843200" name="aee306895a3078a0b09d2a4bdf4843200"></a>
<h2 class="memtitle"><span class="permalink"><a href="#aee306895a3078a0b09d2a4bdf4843200">&#9670;&#160;</a></span>update()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void nz::graph::ComputeGraph::update </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="classnz_1_1opt_1_1_optimizer.html">Optimizer</a> *</td>          <td class="paramname"><span class="paramname"><em>optimizer</em></span></td><td>)</td>
          <td> const</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Updates the parameters of the nodes that require gradients using the provided optimizer. </p>
<p>This method iterates through all the nodes in the computational graph and applies the optimizer's update step to the nodes that have their <code>output</code> tensor marked as requiring gradients. The update is performed by calling the <code>step</code> method of the provided optimizer for each node.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">optimizer</td><td>A pointer to the optimizer that will be used to update the parameters. The optimizer's <code>step</code> method is called for each node that requires gradients.</td></tr>
  </table>
  </dd>
</dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>If the optimizer is a null pointer.</td></tr>
  </table>
  </dd>
</dl>
<h3><a class="anchor" id="autotoc_md29"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line"><a class="code hl_class" href="classnz_1_1opt_1_1_optimizer.html">Optimizer</a>* optimizer = <span class="keyword">new</span> <a class="code hl_class" href="classnz_1_1opt_1_1_s_g_d.html">SGD</a>(learning_rate);  <span class="comment">// assuming an SGD optimizer</span></div>
<div class="line">graph.<a class="code hl_function" href="#aee306895a3078a0b09d2a4bdf4843200">update</a>(optimizer);</div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_aee306895a3078a0b09d2a4bdf4843200"><div class="ttname"><a href="#aee306895a3078a0b09d2a4bdf4843200">nz::graph::ComputeGraph::update</a></div><div class="ttdeci">void update(Optimizer *optimizer) const</div><div class="ttdoc">Updates the parameters of the nodes that require gradients using the provided optimizer.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00541">ComputeGraph.cu:541</a></div></div>
<div class="ttc" id="aclassnz_1_1opt_1_1_optimizer_html"><div class="ttname"><a href="classnz_1_1opt_1_1_optimizer.html">nz::opt::Optimizer</a></div><div class="ttdoc">Base class for optimization algorithms in deep learning.</div><div class="ttdef"><b>Definition</b> <a href="_optimizer_8cuh_source.html#l00125">Optimizer.cuh:125</a></div></div>
<div class="ttc" id="aclassnz_1_1opt_1_1_s_g_d_html"><div class="ttname"><a href="classnz_1_1opt_1_1_s_g_d.html">nz::opt::SGD</a></div><div class="ttdoc">Stochastic Gradient Descent (SGD) optimizer for deep learning models.</div><div class="ttdef"><b>Definition</b> <a href="_optimizer_8cuh_source.html#l00250">Optimizer.cuh:250</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">opt::Optimizer</a> for the interface of the optimizer class. </dd>
<dd>
<a class="el" href="classnz_1_1nodes_1_1_node.html" title="Base class for nodes in a neural network or computational graph.">nodes::Node</a> for the node class that holds the parameters and their gradients. </dd>
<dd>
<a class="el" href="classnz_1_1data_1_1_tensor.html" title="A class for representing and manipulating multidimensional arrays (tensors) in GPU memory.">data::Tensor</a> for the tensor class associated with the node's output.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00541">541</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>
<div class="dynheader">
Here is the call graph for this function:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1graph_1_1_compute_graph_aee306895a3078a0b09d2a4bdf4843200_cgraph.png" border="0" usemap="#aclassnz_1_1graph_1_1_compute_graph_aee306895a3078a0b09d2a4bdf4843200_cgraph" alt=""/></div>
<map name="aclassnz_1_1graph_1_1_compute_graph_aee306895a3078a0b09d2a4bdf4843200_cgraph" id="aclassnz_1_1graph_1_1_compute_graph_aee306895a3078a0b09d2a4bdf4843200_cgraph">
<area shape="rect" title="Updates the parameters of the nodes that require gradients using the provided optimizer." alt="" coords="5,5,177,48"/>
<area shape="rect" href="classnz_1_1opt_1_1_optimizer.html#a826381abaaf29dbebade7cfd38b266e4" title="Pure virtual function for performing a single optimization step." alt="" coords="225,5,350,48"/>
<area shape="poly" title=" " alt="" coords="177,24,209,24,209,29,177,29"/>
</map>
</div>

</div>
</div>
<a id="a3f94019acc205a47726c0b0797b0e631" name="a3f94019acc205a47726c0b0797b0e631"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a3f94019acc205a47726c0b0797b0e631">&#9670;&#160;</a></span>zeroGrad()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void nz::graph::ComputeGraph::zeroGrad </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td> const</td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Resets the gradients of all nodes in the computational graph. </p>
<p>This method iterates over all nodes in the computational graph and calls the <code><a class="el" href="#a3f94019acc205a47726c0b0797b0e631" title="Resets the gradients of all nodes in the computational graph.">zeroGrad()</a></code> method on each node's output tensor to reset its gradient. This is useful to clear the gradients between different backward passes, ensuring that previous gradient values do not accumulate. Typically called at the beginning of each new backward pass to prepare the graph for gradient computation.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">None</td><td></td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>None</dd></dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>This method assumes that each node has an associated output tensor with a <code><a class="el" href="#a3f94019acc205a47726c0b0797b0e631" title="Resets the gradients of all nodes in the computational graph.">zeroGrad()</a></code> method to reset gradients.</li>
<li>It does not perform any checks on whether the graph is sorted or whether backward propagation has been performed previously. It simply clears the gradients of all nodes in the graph.</li>
<li>The method is typically used in training loops to avoid gradient accumulation across iterations.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md10"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line">graph.<a class="code hl_function" href="#a3f94019acc205a47726c0b0797b0e631">zeroGrad</a>();  <span class="comment">// Clears the gradients of all nodes in the graph</span></div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a3f94019acc205a47726c0b0797b0e631"><div class="ttname"><a href="#a3f94019acc205a47726c0b0797b0e631">nz::graph::ComputeGraph::zeroGrad</a></div><div class="ttdeci">void zeroGrad() const</div><div class="ttdoc">Resets the gradients of all nodes in the computational graph.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00434">ComputeGraph.cu:434</a></div></div>
</div><!-- fragment --><dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00434">434</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<h2 class="groupheader">Friends And Related Symbol Documentation</h2>
<a id="a11f567efba2c857e64a88c411c5e6c54" name="a11f567efba2c857e64a88c411c5e6c54"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a11f567efba2c857e64a88c411c5e6c54">&#9670;&#160;</a></span>CreateNode</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">DL_API void CreateNode </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> *</td>          <td class="paramname"><span class="paramname"><em>graph</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>type</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::string &amp;</td>          <td class="paramname"><span class="paramname"><em>name</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; int &gt;</td>          <td class="paramname"><span class="paramname"><em>pre</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="classnz_1_1data_1_1_dimension.html">Tensor::shape_type</a> &amp;</td>          <td class="paramname"><span class="paramname"><em>shape</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">float *</td>          <td class="paramname"><span class="paramname"><em>data</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">bool</td>          <td class="paramname"><span class="paramname"><em>requires_grad</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">float *</td>          <td class="paramname"><span class="paramname"><em>grad</em></span>&#160;)</td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">friend</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Creates and adds a node to the computational graph based on the specified type. </p>
<p>This function is used to create various types of nodes in a computational graph based on the provided node type, and then adds the created node to the <code><a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a></code> object. The node is initialized with the specified shape, data, and gradient information if needed. It also ensures that the nodes are connected to their previous nodes as specified by the <code>pre</code> vector.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">graph</td><td>The <code><a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a></code> object to which the new node will be added. </td></tr>
    <tr><td class="paramname">type</td><td>A string representing the type of node to be created. Supported types include "Input", "Output", "Add", "MatMul", "Sub", "ReLU", "Sigmoid", "Tanh", "LeakyReLU", "Swish", "ELU", "HardSigmoid", "HardSwish", "Softmax", "MeanSquaredError", "BinaryCrossEntropy". </td></tr>
    <tr><td class="paramname">name</td><td>The name of the node to be added to the graph. </td></tr>
    <tr><td class="paramname">pre</td><td>A vector of integers specifying the indices of the previous nodes (input nodes) that this node depends on. The required number of elements in <code>pre</code> varies depending on the node type. </td></tr>
    <tr><td class="paramname">shape</td><td>A vector representing the shape of the node's output tensor. </td></tr>
    <tr><td class="paramname">data</td><td>A pointer to the data to initialize the node's output tensor. </td></tr>
    <tr><td class="paramname">requires_grad</td><td>A boolean flag indicating whether the node requires gradients for backpropagation. </td></tr>
    <tr><td class="paramname">grad</td><td>A pointer to the gradient data for the node's output tensor if <code>requires_grad</code> is true.</td></tr>
  </table>
  </dd>
</dl>
<dl class="exception"><dt>Exceptions</dt><dd>
  <table class="exception">
    <tr><td class="paramname">std::runtime_error</td><td>If an unsupported node type is provided or if there is a mismatch in node dependencies.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The <code>CreateNode</code> function automatically handles the creation of nodes, their connection to previous nodes, and the addition of the new node to the graph.</li>
<li>The <code>pre</code> vector is used to specify which nodes are required as inputs for the current node, and it may differ in size based on the node type.</li>
<li>Some node types, such as "ScalarMul", "ScalarDiv", "ScalarAdd", and "ScalarSub", are not supported and will throw a runtime error.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md131"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> graph;</div>
<div class="line">std::vector&lt;int&gt; pre = {0, 1};  <span class="comment">// Specify the input nodes for the current node</span></div>
<div class="line">std::vector&lt;int&gt; shape = {3, 3};  <span class="comment">// Specify the shape of the output tensor</span></div>
<div class="line"><span class="keywordtype">float</span> data[] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f};  <span class="comment">// Example data</span></div>
<div class="line"><a class="code hl_friend" href="#a11f567efba2c857e64a88c411c5e6c54">CreateNode</a>(&amp;graph, <span class="stringliteral">&quot;Add&quot;</span>, <span class="stringliteral">&quot;add_node&quot;</span>, pre, shape, data, <span class="keyword">true</span>, <span class="keyword">nullptr</span>);  <span class="comment">// Create an &quot;Add&quot; node</span></div>
<div class="ttc" id="aclassnz_1_1graph_1_1_compute_graph_html_a11f567efba2c857e64a88c411c5e6c54"><div class="ttname"><a href="#a11f567efba2c857e64a88c411c5e6c54">nz::graph::ComputeGraph::CreateNode</a></div><div class="ttdeci">friend DL_API void CreateNode(ComputeGraph *graph, const std::string &amp;type, const std::string &amp;name, std::vector&lt; int &gt; pre, const Tensor::shape_type &amp;shape, float *data, bool requires_grad, float *grad)</div><div class="ttdoc">Creates and adds a node to the computational graph based on the specified type.</div><div class="ttdef"><b>Definition</b> <a href="_compute_graph_8cu_source.html#l00108">ComputeGraph.cu:108</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a> for more details on <a class="el" href="namespacenz_1_1graph.html" title="Contains classes and functions for managing and executing computation graphs in deep learning workflo...">graph</a> structure and node management. </dd>
<dd>
<a class="el" href="classnz_1_1nodes_1_1_node.html" title="Base class for nodes in a neural network or computational graph.">nodes::Node</a> for information on individual node types and their operations.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00108">108</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<a id="a25cf602885275406f8fe60a5077308a8" name="a25cf602885275406f8fe60a5077308a8"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a25cf602885275406f8fe60a5077308a8">&#9670;&#160;</a></span>operator&lt;&lt;</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">DL_API std::ostream &amp; operator&lt;&lt; </td>
          <td>(</td>
          <td class="paramtype">std::ostream &amp;</td>          <td class="paramname"><span class="paramname"><em>os</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype"><a class="el" href="classnz_1_1graph_1_1_compute_graph.html">ComputeGraph</a> &amp;</td>          <td class="paramname"><span class="paramname"><em>graph</em></span>&#160;)</td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">friend</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Overloads the stream insertion operator to print the details of the computational graph. </p>
<p>This function overloads the <code>&lt;&lt;</code> operator to provide an easy and intuitive way to print the details of a <code><a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a></code> object. It calls the <code>print</code> method of <code><a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a></code> to output the graph's nodes, their connections, data, gradients, and loss to the provided output stream.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">os</td><td>The output stream to which the graph details will be printed (e.g., <code>std::cout</code>). </td></tr>
    <tr><td class="paramname">graph</td><td>The <code><a class="el" href="classnz_1_1graph_1_1_compute_graph.html" title="Represents a computational graph, which manages nodes and the computation flow.">ComputeGraph</a></code> object whose details will be printed. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>The output stream after printing the graph details, enabling method chaining.</dd></dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="#a1d54fe96c53fec967c8a20e1ac79ceed" title="Prints the details of the computational graph to the provided output stream.">ComputeGraph::print()</a> for more information about the internal printing process.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/09 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_compute_graph_8cu_source.html#l00056">56</a> of file <a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a>.</p>

</div>
</div>
<hr/>The documentation for this class was generated from the following files:<ul>
<li>D:/Users/Mgepahmge/Documents/C Program/NeuZephyr/include/NeuZephyr/<a class="el" href="_compute_graph_8cuh_source.html">ComputeGraph.cuh</a></li>
<li>D:/Users/Mgepahmge/Documents/C Program/NeuZephyr/src/<a class="el" href="_compute_graph_8cu_source.html">ComputeGraph.cu</a></li>
</ul>
</div><!-- contents -->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated by&#160;<a href="https://www.doxygen.org/index.html"><img class="footer" src="doxygen.svg" width="104" height="31" alt="doxygen"/></a> 1.12.0
</small></address>
</div><!-- doc-content -->
</body>
</html>
