<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "https://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en-US">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=11"/>
<meta name="generator" content="Doxygen 1.12.0"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<title>NeuZephyr: nz::opt::SGD Class Reference</title>
<link rel="icon" href="NZ_logo2.png" type="image/png" />
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="resize.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
 <tbody>
 <tr id="projectrow">
  <td id="projectlogo"><img alt="Logo" src="NZ_logo2.png"/></td>
  <td id="projectalign">
   <div id="projectname">NeuZephyr
   </div>
   <div id="projectbrief">Simple DL Framework</div>
  </td>
 </tr>
 </tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.12.0 -->
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&amp;dn=expat.txt MIT */
$(function() { codefold.init(0); });
/* @license-end */
</script>
  <div id="navrow1" class="tabs">
    <ul class="tablist">
      <li><a href="index.html"><span>Main&#160;Page</span></a></li>
      <li><a href="pages.html"><span>Related&#160;Pages</span></a></li>
      <li><a href="namespaces.html"><span>Namespaces</span></a></li>
      <li class="current"><a href="annotated.html"><span>Classes</span></a></li>
      <li><a href="files.html"><span>Files</span></a></li>
    </ul>
  </div>
  <div id="navrow2" class="tabs2">
    <ul class="tablist">
      <li><a href="annotated.html"><span>Class&#160;List</span></a></li>
      <li><a href="classes.html"><span>Class&#160;Index</span></a></li>
      <li><a href="inherits.html"><span>Class&#160;Hierarchy</span></a></li>
      <li><a href="functions.html"><span>Class&#160;Members</span></a></li>
    </ul>
  </div>
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&amp;dn=expat.txt MIT */
$(function(){ initResizable(false); });
/* @license-end */
</script>
<div id="nav-path" class="navpath">
  <ul>
<li class="navelem"><b>nz</b></li><li class="navelem"><a class="el" href="namespacenz_1_1opt.html">opt</a></li><li class="navelem"><a class="el" href="classnz_1_1opt_1_1_s_g_d.html">SGD</a></li>  </ul>
</div>
</div><!-- top -->
<div id="doc-content">
<div class="header">
  <div class="summary">
<a href="#pub-methods">Public Member Functions</a> &#124;
<a href="classnz_1_1opt_1_1_s_g_d-members.html">List of all members</a>  </div>
  <div class="headertitle"><div class="title">nz::opt::SGD Class Reference</div></div>
</div><!--header-->
<div class="contents">

<p>Stochastic Gradient Descent (<a class="el" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models.">SGD</a>) optimizer for deep learning models.  
 <a href="#details">More...</a></p>
<div class="dynheader">
Inheritance diagram for nz::opt::SGD:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1opt_1_1_s_g_d__inherit__graph.png" border="0" usemap="#anz_1_1opt_1_1_s_g_d_inherit__map" alt="Inheritance graph"/></div>
<map name="anz_1_1opt_1_1_s_g_d_inherit__map" id="anz_1_1opt_1_1_s_g_d_inherit__map">
<area shape="rect" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models." alt="" coords="19,80,117,107"/>
<area shape="rect" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning." alt="" coords="5,5,131,32"/>
<area shape="poly" title=" " alt="" coords="71,48,71,80,65,80,65,48"/>
</map>
<center><span class="legend">[<a href="graph_legend.html">legend</a>]</span></center></div>
<div class="dynheader">
Collaboration diagram for nz::opt::SGD:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1opt_1_1_s_g_d__coll__graph.png" border="0" usemap="#anz_1_1opt_1_1_s_g_d_coll__map" alt="Collaboration graph"/></div>
<map name="anz_1_1opt_1_1_s_g_d_coll__map" id="anz_1_1opt_1_1_s_g_d_coll__map">
<area shape="rect" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models." alt="" coords="19,80,117,107"/>
<area shape="rect" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning." alt="" coords="5,5,131,32"/>
<area shape="poly" title=" " alt="" coords="71,48,71,80,65,80,65,48"/>
</map>
<center><span class="legend">[<a href="graph_legend.html">legend</a>]</span></center></div>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a id="pub-methods" name="pub-methods"></a>
Public Member Functions</h2></td></tr>
<tr class="memitem:a2b3d169ace2070e793da8f270bd760c8" id="r_a2b3d169ace2070e793da8f270bd760c8"><td class="memItemLeft" align="right" valign="top">&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a2b3d169ace2070e793da8f270bd760c8">SGD</a> (Tensor::value_type learning_rate)</td></tr>
<tr class="memdesc:a2b3d169ace2070e793da8f270bd760c8"><td class="mdescLeft">&#160;</td><td class="mdescRight">Constructor for the <a class="el" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models.">SGD</a> optimizer.  <br /></td></tr>
<tr class="separator:a2b3d169ace2070e793da8f270bd760c8"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:ac1232979bd4ed03f49b27e5f8391707f" id="r_ac1232979bd4ed03f49b27e5f8391707f"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#ac1232979bd4ed03f49b27e5f8391707f">step</a> (<a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *input) override</td></tr>
<tr class="memdesc:ac1232979bd4ed03f49b27e5f8391707f"><td class="mdescLeft">&#160;</td><td class="mdescRight">Performs a single step of the Stochastic Gradient Descent (<a class="el" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models.">SGD</a>) optimization.  <br /></td></tr>
<tr class="separator:ac1232979bd4ed03f49b27e5f8391707f"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="inherit_header pub_methods_classnz_1_1opt_1_1_optimizer"><td colspan="2" onclick="javascript:dynsection.toggleInherit('pub_methods_classnz_1_1opt_1_1_optimizer')"><img src="closed.png" alt="-"/>&#160;Public Member Functions inherited from <a class="el" href="classnz_1_1opt_1_1_optimizer.html">nz::opt::Optimizer</a></td></tr>
<tr class="memitem:aaf8d92566a815254dbb0ace9af9cb1ae inherit pub_methods_classnz_1_1opt_1_1_optimizer" id="r_aaf8d92566a815254dbb0ace9af9cb1ae"><td class="memItemLeft" align="right" valign="top">&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classnz_1_1opt_1_1_optimizer.html#aaf8d92566a815254dbb0ace9af9cb1ae">Optimizer</a> ()=default</td></tr>
<tr class="memdesc:aaf8d92566a815254dbb0ace9af9cb1ae inherit pub_methods_classnz_1_1opt_1_1_optimizer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Default constructor for the <a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a> class.  <br /></td></tr>
<tr class="separator:aaf8d92566a815254dbb0ace9af9cb1ae inherit pub_methods_classnz_1_1opt_1_1_optimizer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:ab9262983ef3bd11e6f548862b2f58e1d inherit pub_methods_classnz_1_1opt_1_1_optimizer" id="r_ab9262983ef3bd11e6f548862b2f58e1d"><td class="memItemLeft" align="right" valign="top">virtual&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classnz_1_1opt_1_1_optimizer.html#ab9262983ef3bd11e6f548862b2f58e1d">~Optimizer</a> ()=default</td></tr>
<tr class="memdesc:ab9262983ef3bd11e6f548862b2f58e1d inherit pub_methods_classnz_1_1opt_1_1_optimizer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Default destructor for the <a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a> class.  <br /></td></tr>
<tr class="separator:ab9262983ef3bd11e6f548862b2f58e1d inherit pub_methods_classnz_1_1opt_1_1_optimizer"><td class="memSeparator" colspan="2">&#160;</td></tr>
</table>
<a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2>
<div class="textblock"><p>Stochastic Gradient Descent (<a class="el" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models.">SGD</a>) optimizer for deep learning models. </p>
<p>The <code><a class="el" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models.">SGD</a></code> class implements the Stochastic Gradient Descent optimization algorithm, which is one of the most basic and widely-used methods for optimizing deep learning model parameters. The algorithm updates the model's parameters by moving in the direction of the negative gradient scaled by a learning rate.</p>
<p>This class extends the <code><a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a></code> base class and provides a concrete implementation of the <code>step</code> method, which updates the parameters of the model (represented as <code>Node</code> objects) using the <a class="el" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models.">SGD</a> algorithm.</p>
<ul>
<li>The primary function of this optimizer is to adjust model parameters based on the gradients and a fixed learning rate. It performs updates to minimize the loss function during training.</li>
<li>The optimizer uses parallel processing on the GPU through CUDA to accelerate the parameter update process, making it suitable for training large models with many parameters.</li>
<li>While simple, <a class="el" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models.">SGD</a> is effective for many machine learning tasks and serves as a foundation for more advanced optimizers such as <a class="el" href="classnz_1_1opt_1_1_adam.html" title="Adam optimizer for deep learning models.">Adam</a> and <a class="el" href="classnz_1_1opt_1_1_r_m_sprop.html" title="RMSprop optimizer for deep learning models.">RMSprop</a>.</li>
<li>This optimizer works by updating the weights in the direction that reduces the loss, with the magnitude of the update controlled by the learning rate.</li>
</ul>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The optimizer assumes that the model parameters are represented by <code>Node</code> objects, and these nodes must have associated gradients for the optimizer to function correctly.</li>
<li>It is specifically designed to work with deep learning frameworks that leverage GPU acceleration for efficient computation.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md115"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1opt_1_1_s_g_d.html">SGD</a> optimizer(0.01);</div>
<div class="line">graph.update(&amp;optimizer); <span class="comment">// Suppose &quot;graph&quot; is a computation graph waiting for gradient updates</span></div>
<div class="ttc" id="aclassnz_1_1opt_1_1_s_g_d_html"><div class="ttname"><a href="classnz_1_1opt_1_1_s_g_d.html">nz::opt::SGD</a></div><div class="ttdoc">Stochastic Gradient Descent (SGD) optimizer for deep learning models.</div><div class="ttdef"><b>Definition</b> <a href="_optimizer_8cuh_source.html#l00250">Optimizer.cuh:250</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a> for the base class that defines the interface for all optimizers.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/07 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_optimizer_8cuh_source.html#l00250">250</a> of file <a class="el" href="_optimizer_8cuh_source.html">Optimizer.cuh</a>.</p>
</div><h2 class="groupheader">Constructor &amp; Destructor Documentation</h2>
<a id="a2b3d169ace2070e793da8f270bd760c8" name="a2b3d169ace2070e793da8f270bd760c8"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a2b3d169ace2070e793da8f270bd760c8">&#9670;&#160;</a></span>SGD()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">nz::opt::SGD::SGD </td>
          <td>(</td>
          <td class="paramtype">Tensor::value_type</td>          <td class="paramname"><span class="paramname"><em>learning_rate</em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">explicit</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Constructor for the <a class="el" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models.">SGD</a> optimizer. </p>
<p>This constructor initializes the <code><a class="el" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models.">SGD</a></code> optimizer with a specified learning rate. The learning rate is a crucial hyperparameter that determines the step size for each parameter update during training. A smaller learning rate leads to smaller updates, while a larger learning rate results in faster convergence but may risk overshooting the optimal solution.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">learning_rate</td><td>The learning rate to be used in the optimization process. It defines the magnitude of the updates to the model parameters.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The learning rate should be chosen carefully, as it significantly impacts the model's convergence during training. A value that is too large may cause the optimization to diverge, while a value that is too small may lead to slow convergence.</li>
</ul>
</dd></dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models.">SGD</a> for the optimizer class that uses this constructor.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/07 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_optimizer_8cu_source.html#l00010">10</a> of file <a class="el" href="_optimizer_8cu_source.html">Optimizer.cu</a>.</p>

</div>
</div>
<h2 class="groupheader">Member Function Documentation</h2>
<a id="ac1232979bd4ed03f49b27e5f8391707f" name="ac1232979bd4ed03f49b27e5f8391707f"></a>
<h2 class="memtitle"><span class="permalink"><a href="#ac1232979bd4ed03f49b27e5f8391707f">&#9670;&#160;</a></span>step()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">void nz::opt::SGD::step </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *</td>          <td class="paramname"><span class="paramname"><em>input</em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">override</span><span class="mlabel">virtual</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Performs a single step of the Stochastic Gradient Descent (<a class="el" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models.">SGD</a>) optimization. </p>
<p>This method updates the model parameters (represented by <code>Node</code> objects) using the Stochastic Gradient Descent algorithm. The parameters are updated based on the gradients computed during the backward pass, and the updates are scaled by the learning rate. The method uses CUDA to parallelize the parameter updates on the GPU, ensuring high performance for large-scale models.</p>
<p>The update process involves computing the negative gradient and scaling it by the learning rate to adjust the model parameters. This method is intended to be called during the training loop to update the parameters at each iteration.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">input</td><td>The <code>Node</code> object that holds the model parameters and their gradients. This node must have a valid gradient computed during the backward pass.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The method assumes that the <code>input</code> node contains a valid <code>output</code> tensor with computed gradients.</li>
<li>The computation is performed on the GPU using CUDA, so a CUDA-compatible environment is required.</li>
<li>Ensure that the model parameters have been properly initialized and gradients are computed before calling this method.</li>
</ul>
</dd></dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models.">SGD</a> for the class that defines this method. </dd>
<dd>
Nodes::Node for the class representing the model parameters.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/07 </dd></dl>

<p>Implements <a class="el" href="classnz_1_1opt_1_1_optimizer.html#a826381abaaf29dbebade7cfd38b266e4">nz::opt::Optimizer</a>.</p>

<p class="definition">Definition at line <a class="el" href="_optimizer_8cu_source.html#l00014">14</a> of file <a class="el" href="_optimizer_8cu_source.html">Optimizer.cu</a>.</p>
<div class="dynheader">
Here is the call graph for this function:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1opt_1_1_s_g_d_ac1232979bd4ed03f49b27e5f8391707f_cgraph.png" border="0" usemap="#aclassnz_1_1opt_1_1_s_g_d_ac1232979bd4ed03f49b27e5f8391707f_cgraph" alt="Call graph for nz::opt::SGD::step"/></div>
<map name="aclassnz_1_1opt_1_1_s_g_d_ac1232979bd4ed03f49b27e5f8391707f_cgraph" id="aclassnz_1_1opt_1_1_s_g_d_ac1232979bd4ed03f49b27e5f8391707f_cgraph">
<area shape="rect" title="Performs a single step of the Stochastic Gradient Descent (SGD) optimization." alt="" coords="5,47,137,73"/>
<area shape="rect" href="namespacenz_1_1krnl.html#aeec286d5351eee7061e151470adb4eef" title="Kernel function to perform Stochastic Gradient Descent (SGD) optimization." alt="" coords="185,39,368,81"/>
<area shape="poly" title=" " alt="" coords="137,57,169,57,169,63,137,63"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#ab4b2eb422e0e1ee44bdfdc0eb94457ce" title="Returns a reference to the singleton instance of the StreamManager." alt="" coords="416,5,601,48"/>
<area shape="poly" title=" " alt="" coords="368,44,400,40,401,45,369,49"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#a46ce59b45de432842454aadf00b93791" title="Asynchronously submits a CUDA kernel with stream&#45;ordered dependency management." alt="" coords="416,72,601,115"/>
<area shape="poly" title=" " alt="" coords="369,71,401,75,400,80,368,76"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#a1de1cf3aadea137faf90a2f9b4b7abe2" title="Acquires CUDA stream from pool using round&#45;robin scheduling." alt="" coords="649,39,835,81"/>
<area shape="poly" title=" " alt="" coords="601,77,633,73,634,78,602,83"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#adb1078a67c6e38932d7d58c2adb05ec0" title="Synchronizes CUDA stream execution until data writes complete." alt="" coords="649,105,835,148"/>
<area shape="poly" title=" " alt="" coords="602,104,634,109,633,114,601,109"/>
</map>
</div>

</div>
</div>
<hr/>The documentation for this class was generated from the following files:<ul>
<li>D:/Users/Mgepahmge/Documents/C Program/NeuZephyr/include/NeuZephyr/<a class="el" href="_optimizer_8cuh_source.html">Optimizer.cuh</a></li>
<li>D:/Users/Mgepahmge/Documents/C Program/NeuZephyr/src/<a class="el" href="_optimizer_8cu_source.html">Optimizer.cu</a></li>
</ul>
</div><!-- contents -->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated by&#160;<a href="https://www.doxygen.org/index.html"><img class="footer" src="doxygen.svg" width="104" height="31" alt="doxygen"/></a> 1.12.0
</small></address>
</div><!-- doc-content -->
</body>
</html>
