<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "https://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en-US">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=11"/>
<meta name="generator" content="Doxygen 1.12.0"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<title>NeuZephyr: nz::opt::Optimizer Class Reference</title>
<link rel="icon" href="NZ_logo2.png" type="image/png" />
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="resize.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
 <tbody>
 <tr id="projectrow">
  <td id="projectlogo"><img alt="Logo" src="NZ_logo2.png"/></td>
  <td id="projectalign">
   <div id="projectname">NeuZephyr
   </div>
   <div id="projectbrief">Simple DL Framework</div>
  </td>
 </tr>
 </tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.12.0 -->
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&amp;dn=expat.txt MIT */
/* On DOM ready, initialize Doxygen's collapsible-section support
   (codefold is defined in dynsections.js, loaded in <head>).
   NOTE(review): argument 0 presumably selects the default fold level — confirm against dynsections.js. */
$(function() { codefold.init(0); });
/* @license-end */
</script>
  <div id="navrow1" class="tabs">
    <ul class="tablist">
      <li><a href="index.html"><span>Main&#160;Page</span></a></li>
      <li><a href="pages.html"><span>Related&#160;Pages</span></a></li>
      <li><a href="namespaces.html"><span>Namespaces</span></a></li>
      <li class="current"><a href="annotated.html"><span>Classes</span></a></li>
      <li><a href="files.html"><span>Files</span></a></li>
    </ul>
  </div>
  <div id="navrow2" class="tabs2">
    <ul class="tablist">
      <li><a href="annotated.html"><span>Class&#160;List</span></a></li>
      <li><a href="classes.html"><span>Class&#160;Index</span></a></li>
      <li><a href="inherits.html"><span>Class&#160;Hierarchy</span></a></li>
      <li><a href="functions.html"><span>Class&#160;Members</span></a></li>
    </ul>
  </div>
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&amp;dn=expat.txt MIT */
/* On DOM ready, enable the draggable navigation-pane splitter
   (initResizable is defined in resize.js, loaded in <head>).
   NOTE(review): the false argument likely disables the side treeview variant — confirm against resize.js. */
$(function(){ initResizable(false); });
/* @license-end */
</script>
<div id="nav-path" class="navpath">
  <ul>
<li class="navelem"><b>nz</b></li><li class="navelem"><a class="el" href="namespacenz_1_1opt.html">opt</a></li><li class="navelem"><a class="el" href="classnz_1_1opt_1_1_optimizer.html">Optimizer</a></li>  </ul>
</div>
</div><!-- top -->
<div id="doc-content">
<div class="header">
  <div class="summary">
<a href="#pub-methods">Public Member Functions</a> &#124;
<a href="classnz_1_1opt_1_1_optimizer-members.html">List of all members</a>  </div>
  <div class="headertitle"><div class="title">nz::opt::Optimizer Class Reference<span class="mlabels"><span class="mlabel">abstract</span></span></div></div>
</div><!--header-->
<div class="contents">

<p>Base class for optimization algorithms in deep learning.  
 <a href="#details">More...</a></p>
<div class="dynheader">
Inheritance diagram for nz::opt::Optimizer:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1opt_1_1_optimizer__inherit__graph.png" border="0" usemap="#anz_1_1opt_1_1_optimizer_inherit__map" alt="Inheritance graph"/></div>
<map name="anz_1_1opt_1_1_optimizer_inherit__map" id="anz_1_1opt_1_1_optimizer_inherit__map">
<area shape="rect" title="Base class for optimization algorithms in deep learning." alt="" coords="5,157,131,184"/>
<area shape="rect" href="classnz_1_1opt_1_1_ada_delta.html" title="AdaDelta optimizer for deep learning models." alt="" coords="184,5,308,32"/>
<area shape="poly" title=" " alt="" coords="84,143,123,92,149,65,177,42,198,30,201,35,180,46,152,69,127,96,88,146"/>
<area shape="rect" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models." alt="" coords="186,56,306,83"/>
<area shape="poly" title=" " alt="" coords="97,145,134,119,177,92,203,81,205,86,180,97,137,123,100,150"/>
<area shape="rect" href="classnz_1_1opt_1_1_adam.html" title="Adam optimizer for deep learning models." alt="" coords="194,107,298,133"/>
<area shape="poly" title=" " alt="" coords="131,150,196,131,197,136,133,155"/>
<area shape="rect" href="classnz_1_1opt_1_1_momentum.html" title="Momentum optimizer for deep learning models." alt="" coords="179,157,313,184"/>
<area shape="poly" title=" " alt="" coords="146,168,179,168,179,173,146,173"/>
<area shape="rect" href="classnz_1_1opt_1_1_n_adam.html" title="NAdam optimizer for deep learning models." alt="" coords="189,208,302,235"/>
<area shape="poly" title=" " alt="" coords="133,186,197,205,196,210,131,191"/>
<area shape="rect" href="classnz_1_1opt_1_1_r_m_sprop.html" title="RMSprop optimizer for deep learning models." alt="" coords="184,259,308,285"/>
<area shape="poly" title=" " alt="" coords="100,192,137,218,180,244,205,256,203,261,177,249,134,223,97,196"/>
<area shape="rect" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models." alt="" coords="197,309,295,336"/>
<area shape="poly" title=" " alt="" coords="88,195,127,246,152,272,180,295,201,306,198,311,177,300,149,276,123,249,84,198"/>
</map>
<center><span class="legend">[<a href="graph_legend.html">legend</a>]</span></center></div>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a id="pub-methods" name="pub-methods"></a>
Public Member Functions</h2></td></tr>
<tr class="memitem:aaf8d92566a815254dbb0ace9af9cb1ae" id="r_aaf8d92566a815254dbb0ace9af9cb1ae"><td class="memItemLeft" align="right" valign="top">&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#aaf8d92566a815254dbb0ace9af9cb1ae">Optimizer</a> ()=default</td></tr>
<tr class="memdesc:aaf8d92566a815254dbb0ace9af9cb1ae"><td class="mdescLeft">&#160;</td><td class="mdescRight">Default constructor for the <a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a> class.  <br /></td></tr>
<tr class="separator:aaf8d92566a815254dbb0ace9af9cb1ae"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:ab9262983ef3bd11e6f548862b2f58e1d" id="r_ab9262983ef3bd11e6f548862b2f58e1d"><td class="memItemLeft" align="right" valign="top">virtual&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#ab9262983ef3bd11e6f548862b2f58e1d">~Optimizer</a> ()=default</td></tr>
<tr class="memdesc:ab9262983ef3bd11e6f548862b2f58e1d"><td class="mdescLeft">&#160;</td><td class="mdescRight">Default destructor for the <a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a> class.  <br /></td></tr>
<tr class="separator:ab9262983ef3bd11e6f548862b2f58e1d"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a826381abaaf29dbebade7cfd38b266e4" id="r_a826381abaaf29dbebade7cfd38b266e4"><td class="memItemLeft" align="right" valign="top">virtual void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a826381abaaf29dbebade7cfd38b266e4">step</a> (<a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *input)=0</td></tr>
<tr class="memdesc:a826381abaaf29dbebade7cfd38b266e4"><td class="mdescLeft">&#160;</td><td class="mdescRight">Pure virtual function for performing a single optimization step.  <br /></td></tr>
<tr class="separator:a826381abaaf29dbebade7cfd38b266e4"><td class="memSeparator" colspan="2">&#160;</td></tr>
</table>
<a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2>
<div class="textblock"><p>Base class for optimization algorithms in deep learning. </p>
<p>The <code><a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a></code> class serves as the base class for all optimization algorithms used in training deep learning models. It defines the common interface that all optimizer classes must implement, including the <code>step</code> function, which updates the model parameters (or nodes) during training based on the optimizer's specific strategy.</p>
<p>The <code><a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a></code> class contains a protected member:</p><ul>
<li><b>learning_rate</b>: A scalar value representing the learning rate used in parameter updates.</li>
</ul>
<p>This class is intended to be subclassed by various optimization algorithms, such as <a class="el" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models.">SGD</a>, <a class="el" href="classnz_1_1opt_1_1_adam.html" title="Adam optimizer for deep learning models.">Adam</a>, and <a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a>. Each subclass is required to implement the <code>step</code> function, which is responsible for updating the model parameters according to the specific optimization method being used.</p>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The <code>step</code> function should be called after calculating the gradients for a given input.</li>
<li>Subclasses should ensure that they implement parameter-specific update logic within the <code>step</code> function.</li>
<li>The <code>learning_rate</code> is typically set during the initialization of an optimizer and is used to control the size of the updates applied to model parameters.</li>
</ul>
</dd></dl>
<p>This class is part of the <code><a class="el" href="namespacenz_1_1opt.html" title="Contains optimization algorithms for training deep learning models.">nz::opt</a></code> namespace and provides a common structure for implementing various optimizers, facilitating extensibility and code reuse.</p>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/07 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_optimizer_8cuh_source.html#l00125">125</a> of file <a class="el" href="_optimizer_8cuh_source.html">Optimizer.cuh</a>.</p>
</div><h2 class="groupheader">Constructor &amp; Destructor Documentation</h2>
<a id="aaf8d92566a815254dbb0ace9af9cb1ae" name="aaf8d92566a815254dbb0ace9af9cb1ae"></a>
<h2 class="memtitle"><span class="permalink"><a href="#aaf8d92566a815254dbb0ace9af9cb1ae">&#9670;&#160;</a></span>Optimizer()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">nz::opt::Optimizer::Optimizer </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">explicit</span><span class="mlabel">default</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Default constructor for the <a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a> class. </p>
<p>This is the default constructor for the <code><a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a></code> class. It initializes the base class and sets the <code>learning_rate</code> to its default value. This constructor does not perform any specific initialization, as it is intended to be used in subclasses where additional initialization might occur.</p>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>This constructor should typically not be used directly; rather, the derived classes should be used to initialize specific optimizer instances.</li>
<li>The <code>learning_rate</code> is intended to be set by the derived classes during their initialization.</li>
</ul>
</dd></dl>
<dl class="section see"><dt>See also</dt><dd><code><a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a></code> for the base class and other methods. </dd></dl>

</div>
</div>
<a id="ab9262983ef3bd11e6f548862b2f58e1d" name="ab9262983ef3bd11e6f548862b2f58e1d"></a>
<h2 class="memtitle"><span class="permalink"><a href="#ab9262983ef3bd11e6f548862b2f58e1d">&#9670;&#160;</a></span>~Optimizer()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">virtual nz::opt::Optimizer::~Optimizer </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">virtual</span><span class="mlabel">default</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Default destructor for the <a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a> class. </p>
<p>This is the default destructor for the <code><a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a></code> class. It ensures proper cleanup of any resources acquired by the class. Since this is a base class, the destructor is virtual to ensure that the destructors of derived classes are called correctly when an object is deleted through a base class pointer.</p>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>This destructor does not perform any specific cleanup, as the optimizer class does not manage resources directly. However, derived classes that manage dynamic memory or other resources should implement their own destructor to handle cleanup appropriately.</li>
<li>The use of a virtual destructor ensures proper resource deallocation in case of polymorphic object deletion.</li>
</ul>
</dd></dl>
<dl class="section see"><dt>See also</dt><dd><code><a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a></code> for the base class and other methods.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/07 </dd></dl>

</div>
</div>
<h2 class="groupheader">Member Function Documentation</h2>
<a id="a826381abaaf29dbebade7cfd38b266e4" name="a826381abaaf29dbebade7cfd38b266e4"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a826381abaaf29dbebade7cfd38b266e4">&#9670;&#160;</a></span>step()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">virtual void nz::opt::Optimizer::step </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *</td>          <td class="paramname"><span class="paramname"><em>input</em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">pure virtual</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Pure virtual function for performing a single optimization step. </p>
<p>This is a pure virtual function that must be overridden by derived optimizer classes. The <code>step</code> function is responsible for updating the model parameters (or nodes) based on the optimization algorithm's rules. It takes a <code>Node</code> pointer as input, representing the parameters that will be modified in the optimization process.</p>
<p>The implementation of this function varies depending on the specific optimization algorithm (e.g., <a class="el" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models.">SGD</a>, <a class="el" href="classnz_1_1opt_1_1_adam.html" title="Adam optimizer for deep learning models.">Adam</a>, <a class="el" href="classnz_1_1opt_1_1_momentum.html" title="Momentum optimizer for deep learning models.">Momentum</a>, etc.), but the common goal is to update the model parameters in the direction that minimizes the loss function.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">input</td><td>A pointer to a <code>Node</code> object representing the model's parameters that will be updated during the optimization step.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>Since this is a pure virtual function, it must be implemented by all derived classes of <code><a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a></code> for the optimization process to work.</li>
<li>The <code>Node</code> class is expected to represent model parameters and should support necessary operations for optimization, such as gradient updates.</li>
</ul>
</dd></dl>
<dl class="section see"><dt>See also</dt><dd>Derived classes like <code><a class="el" href="classnz_1_1opt_1_1_s_g_d.html" title="Stochastic Gradient Descent (SGD) optimizer for deep learning models.">SGD</a></code>, <code><a class="el" href="classnz_1_1opt_1_1_adam.html" title="Adam optimizer for deep learning models.">Adam</a></code>, <code><a class="el" href="classnz_1_1opt_1_1_momentum.html" title="Momentum optimizer for deep learning models.">Momentum</a></code>, etc., for specific implementations of this method.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/07 </dd></dl>

<p>Implemented in <a class="el" href="classnz_1_1opt_1_1_ada_delta.html#a0d24bd903517823f9607160d2e8207a1">nz::opt::AdaDelta</a>, <a class="el" href="classnz_1_1opt_1_1_ada_grad.html#ac0755dbf299371f78decfe63b0bf8ab6">nz::opt::AdaGrad</a>, <a class="el" href="classnz_1_1opt_1_1_adam.html#aa7fc73a17f092e104d5284d2556a1a98">nz::opt::Adam</a>, <a class="el" href="classnz_1_1opt_1_1_momentum.html#a9b8d15dc85293840cbd19e27a6bb52a6">nz::opt::Momentum</a>, <a class="el" href="classnz_1_1opt_1_1_n_adam.html#add5c94bdc1b012f035b51339f92e7a49">nz::opt::NAdam</a>, <a class="el" href="classnz_1_1opt_1_1_r_m_sprop.html#ad5356d2c2dccd94c5f78ff69c76aa2ee">nz::opt::RMSprop</a>, and <a class="el" href="classnz_1_1opt_1_1_s_g_d.html#ac1232979bd4ed03f49b27e5f8391707f">nz::opt::SGD</a>.</p>

</div>
</div>
<hr/>The documentation for this class was generated from the following file:<ul>
<li>D:/Users/Mgepahmge/Documents/C Program/NeuZephyr/include/NeuZephyr/<a class="el" href="_optimizer_8cuh_source.html">Optimizer.cuh</a></li>
</ul>
</div><!-- contents -->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated by&#160;<a href="https://www.doxygen.org/index.html"><img class="footer" src="doxygen.svg" width="104" height="31" alt="doxygen"/></a> 1.12.0
</small></address>
</div><!-- doc-content -->
</body>
</html>
