<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "https://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en-US">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=11"/>
<meta name="generator" content="Doxygen 1.12.0"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<title>NeuZephyr: nz::opt::AdaGrad Class Reference</title>
<link rel="icon" href="NZ_logo2.png" type="image/png" />
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="resize.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
 <tbody>
 <tr id="projectrow">
  <td id="projectlogo"><img alt="Logo" src="NZ_logo2.png"/></td>
  <td id="projectalign">
   <div id="projectname">NeuZephyr
   </div>
   <div id="projectbrief">Simple DL Framework</div>
  </td>
 </tr>
 </tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.12.0 -->
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&amp;dn=expat.txt MIT */
$(function() { codefold.init(0); });
/* @license-end */
</script>
  <div id="navrow1" class="tabs">
    <ul class="tablist">
      <li><a href="index.html"><span>Main&#160;Page</span></a></li>
      <li><a href="pages.html"><span>Related&#160;Pages</span></a></li>
      <li><a href="namespaces.html"><span>Namespaces</span></a></li>
      <li class="current"><a href="annotated.html"><span>Classes</span></a></li>
      <li><a href="files.html"><span>Files</span></a></li>
    </ul>
  </div>
  <div id="navrow2" class="tabs2">
    <ul class="tablist">
      <li><a href="annotated.html"><span>Class&#160;List</span></a></li>
      <li><a href="classes.html"><span>Class&#160;Index</span></a></li>
      <li><a href="inherits.html"><span>Class&#160;Hierarchy</span></a></li>
      <li><a href="functions.html"><span>Class&#160;Members</span></a></li>
    </ul>
  </div>
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&amp;dn=expat.txt MIT */
$(function(){ initResizable(false); });
/* @license-end */
</script>
<div id="nav-path" class="navpath">
  <ul>
<li class="navelem"><b>nz</b></li><li class="navelem"><a class="el" href="namespacenz_1_1opt.html">opt</a></li><li class="navelem"><a class="el" href="classnz_1_1opt_1_1_ada_grad.html">AdaGrad</a></li>  </ul>
</div>
</div><!-- top -->
<div id="doc-content">
<div class="header">
  <div class="summary">
<a href="#pub-methods">Public Member Functions</a> &#124;
<a href="classnz_1_1opt_1_1_ada_grad-members.html">List of all members</a>  </div>
  <div class="headertitle"><div class="title">nz::opt::AdaGrad Class Reference</div></div>
</div><!--header-->
<div class="contents">

<p><a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> optimizer for deep learning models.  
 <a href="#details">More...</a></p>
<div class="dynheader">
Inheritance diagram for nz::opt::AdaGrad:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1opt_1_1_ada_grad__inherit__graph.png" border="0" usemap="#anz_1_1opt_1_1_ada_grad_inherit__map" alt="Inheritance graph"/></div>
<map name="anz_1_1opt_1_1_ada_grad_inherit__map" id="anz_1_1opt_1_1_ada_grad_inherit__map">
<area shape="rect" title="AdaGrad optimizer for deep learning models." alt="" coords="8,80,128,107"/>
<area shape="rect" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning." alt="" coords="5,5,131,32"/>
<area shape="poly" title=" " alt="" coords="71,48,71,80,65,80,65,48"/>
</map>
<center><span class="legend">[<a href="graph_legend.html">legend</a>]</span></center></div>
<div class="dynheader">
Collaboration diagram for nz::opt::AdaGrad:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1opt_1_1_ada_grad__coll__graph.png" border="0" usemap="#anz_1_1opt_1_1_ada_grad_coll__map" alt="Collaboration graph"/></div>
<map name="anz_1_1opt_1_1_ada_grad_coll__map" id="anz_1_1opt_1_1_ada_grad_coll__map">
<area shape="rect" title="AdaGrad optimizer for deep learning models." alt="" coords="8,80,128,107"/>
<area shape="rect" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning." alt="" coords="5,5,131,32"/>
<area shape="poly" title=" " alt="" coords="71,48,71,80,65,80,65,48"/>
</map>
<center><span class="legend">[<a href="graph_legend.html">legend</a>]</span></center></div>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a id="pub-methods" name="pub-methods"></a>
Public Member Functions</h2></td></tr>
<tr class="memitem:a4bb060af66efe393674e65c837ccdc60" id="r_a4bb060af66efe393674e65c837ccdc60"><td class="memItemLeft" align="right" valign="top">&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a4bb060af66efe393674e65c837ccdc60">AdaGrad</a> (Tensor::value_type learning_rate)</td></tr>
<tr class="memdesc:a4bb060af66efe393674e65c837ccdc60"><td class="mdescLeft">&#160;</td><td class="mdescRight">Constructs an <a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> optimizer with the specified learning rate.  <br /></td></tr>
<tr class="separator:a4bb060af66efe393674e65c837ccdc60"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:ac0755dbf299371f78decfe63b0bf8ab6" id="r_ac0755dbf299371f78decfe63b0bf8ab6"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#ac0755dbf299371f78decfe63b0bf8ab6">step</a> (<a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *input) override</td></tr>
<tr class="memdesc:ac0755dbf299371f78decfe63b0bf8ab6"><td class="mdescLeft">&#160;</td><td class="mdescRight">Performs a single optimization step using the <a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> algorithm.  <br /></td></tr>
<tr class="separator:ac0755dbf299371f78decfe63b0bf8ab6"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="inherit_header pub_methods_classnz_1_1opt_1_1_optimizer"><td colspan="2" onclick="javascript:dynsection.toggleInherit('pub_methods_classnz_1_1opt_1_1_optimizer')"><img src="closed.png" alt="-"/>&#160;Public Member Functions inherited from <a class="el" href="classnz_1_1opt_1_1_optimizer.html">nz::opt::Optimizer</a></td></tr>
<tr class="memitem:aaf8d92566a815254dbb0ace9af9cb1ae inherit pub_methods_classnz_1_1opt_1_1_optimizer" id="r_aaf8d92566a815254dbb0ace9af9cb1ae"><td class="memItemLeft" align="right" valign="top">&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classnz_1_1opt_1_1_optimizer.html#aaf8d92566a815254dbb0ace9af9cb1ae">Optimizer</a> ()=default</td></tr>
<tr class="memdesc:aaf8d92566a815254dbb0ace9af9cb1ae inherit pub_methods_classnz_1_1opt_1_1_optimizer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Default constructor for the <a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a> class.  <br /></td></tr>
<tr class="separator:aaf8d92566a815254dbb0ace9af9cb1ae inherit pub_methods_classnz_1_1opt_1_1_optimizer"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:ab9262983ef3bd11e6f548862b2f58e1d inherit pub_methods_classnz_1_1opt_1_1_optimizer" id="r_ab9262983ef3bd11e6f548862b2f58e1d"><td class="memItemLeft" align="right" valign="top">virtual&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classnz_1_1opt_1_1_optimizer.html#ab9262983ef3bd11e6f548862b2f58e1d">~Optimizer</a> ()=default</td></tr>
<tr class="memdesc:ab9262983ef3bd11e6f548862b2f58e1d inherit pub_methods_classnz_1_1opt_1_1_optimizer"><td class="mdescLeft">&#160;</td><td class="mdescRight">Default destructor for the <a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a> class.  <br /></td></tr>
<tr class="separator:ab9262983ef3bd11e6f548862b2f58e1d inherit pub_methods_classnz_1_1opt_1_1_optimizer"><td class="memSeparator" colspan="2">&#160;</td></tr>
</table>
<a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2>
<div class="textblock"><p><a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> optimizer for deep learning models. </p>
<p>The <code><a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a></code> class implements the Adaptive Gradient algorithm, which is a popular optimization method that adapts the learning rate for each parameter based on the historical gradients. <a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> is known for its ability to handle sparse gradients and adjust learning rates during training.</p>
<p>This class extends the <code><a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a></code> base class and provides a concrete implementation of the <code>step</code> method, which updates the model's parameters using the <a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> algorithm.</p>
<ul>
<li>The main idea of <a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> is to maintain a separate learning rate for each parameter by scaling the gradient based on the sum of squares of past gradients. This helps reduce the learning rate for frequently updated parameters and increases it for rarely updated ones.</li>
<li><a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> can significantly improve training performance for problems with sparse data or parameters that have widely varying scales.</li>
<li>This optimizer is effective for tasks such as natural language processing or training deep learning models with sparse gradients.</li>
<li>The optimizer uses parallel GPU processing with CUDA to speed up parameter updates, especially when dealing with large models.</li>
</ul>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The optimizer assumes that the model parameters are represented by <code>Node</code> objects, and these nodes must have associated gradients for the optimizer to function correctly.</li>
<li>The <code>gss</code> map stores the sum of squared gradients for each parameter, which is used to adjust the learning rate.</li>
<li>The <code>epsilon</code> term ensures numerical stability when dividing by the sum of squared gradients.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md117"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><a class="code hl_class" href="classnz_1_1opt_1_1_ada_grad.html">AdaGrad</a> optimizer(0.01);</div>
<div class="line">graph.update(&amp;optimizer); <span class="comment">// Suppose &quot;graph&quot; is a computation graph waiting for gradient updates</span></div>
<div class="ttc" id="aclassnz_1_1opt_1_1_ada_grad_html"><div class="ttname"><a href="classnz_1_1opt_1_1_ada_grad.html">nz::opt::AdaGrad</a></div><div class="ttdoc">AdaGrad optimizer for deep learning models.</div><div class="ttdef"><b>Definition</b> <a href="_optimizer_8cuh_source.html#l00458">Optimizer.cuh:458</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1opt_1_1_optimizer.html" title="Base class for optimization algorithms in deep learning.">Optimizer</a> for the base class that defines the interface for all optimizers.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/07 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_optimizer_8cuh_source.html#l00458">458</a> of file <a class="el" href="_optimizer_8cuh_source.html">Optimizer.cuh</a>.</p>
</div><h2 class="groupheader">Constructor &amp; Destructor Documentation</h2>
<a id="a4bb060af66efe393674e65c837ccdc60" name="a4bb060af66efe393674e65c837ccdc60"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a4bb060af66efe393674e65c837ccdc60">&#9670;&#160;</a></span>AdaGrad()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">nz::opt::AdaGrad::AdaGrad </td>
          <td>(</td>
          <td class="paramtype">Tensor::value_type</td>          <td class="paramname"><span class="paramname"><em>learning_rate</em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">explicit</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Constructs an <a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> optimizer with the specified learning rate. </p>
<p>This constructor initializes the <code><a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a></code> optimizer with the given learning rate, which is used to control the magnitude of the updates during training. The learning rate determines how much to adjust the model's parameters in response to the computed gradients.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">learning_rate</td><td>The learning rate to be used for parameter updates. It is a scalar value that controls the size of the steps taken during the optimization process. A smaller value makes the updates more conservative, while a larger value can speed up convergence but may cause instability.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The <code>epsilon</code> value used in the <a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> algorithm is set to a default of <code>1e-6</code> for numerical stability during updates and is not modified by this constructor.</li>
<li>The optimizer assumes that the model parameters are represented by <code>Node</code> objects, and the gradients for these nodes will be updated during the <code>step</code> method.</li>
</ul>
</dd></dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> for the full class definition.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/07 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_optimizer_8cu_source.html#l00046">46</a> of file <a class="el" href="_optimizer_8cu_source.html">Optimizer.cu</a>.</p>

</div>
</div>
<h2 class="groupheader">Member Function Documentation</h2>
<a id="ac0755dbf299371f78decfe63b0bf8ab6" name="ac0755dbf299371f78decfe63b0bf8ab6"></a>
<h2 class="memtitle"><span class="permalink"><a href="#ac0755dbf299371f78decfe63b0bf8ab6">&#9670;&#160;</a></span>step()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">void nz::opt::AdaGrad::step </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *</td>          <td class="paramname"><span class="paramname"><em>input</em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">override</span><span class="mlabel">virtual</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Performs a single optimization step using the <a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> algorithm. </p>
<p>The <code>step</code> function updates the model parameters represented by the <code>Node</code> object using the <a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> optimization algorithm. <a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> adapts the learning rate for each parameter by considering the history of gradients, providing faster convergence for sparse gradients.</p>
<p>This method performs the following steps:</p><ul>
<li>Initializes the sum of squared gradients (GSS) for the parameter (<code>Node</code>) if it has not been initialized.</li>
<li>Allocates memory on the GPU for storing intermediate results and computes the <a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> update for the model parameters.</li>
<li>Uses the sum of squared gradients to scale the gradient and update the model parameters.</li>
<li>Frees the temporary memory allocated for computations after the update.</li>
</ul>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">input</td><td>A pointer to the <code>Node</code> object representing the model parameters. This object should have gradients stored in its <code>output</code> attribute, which will be used to update the parameters.</td></tr>
  </table>
  </dd>
</dl>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The <code>Node</code> object is assumed to have a valid <code>output</code> tensor with its gradients already computed.</li>
<li>The <code>gss</code> map stores the sum of squared gradients for each parameter, ensuring that the learning rate adapts to the frequency of gradient updates.</li>
<li>The <code>epsilon</code> term is used to avoid division by zero and ensure numerical stability when updating the parameters.</li>
<li>The method leverages CUDA for parallel computation, which speeds up the update process, especially for large models.</li>
</ul>
</dd></dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="classnz_1_1opt_1_1_ada_grad.html" title="AdaGrad optimizer for deep learning models.">AdaGrad</a> for the class definition and constructor.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/07 </dd></dl>

<p>Implements <a class="el" href="classnz_1_1opt_1_1_optimizer.html#a826381abaaf29dbebade7cfd38b266e4">nz::opt::Optimizer</a>.</p>

<p class="definition">Definition at line <a class="el" href="_optimizer_8cu_source.html#l00050">50</a> of file <a class="el" href="_optimizer_8cu_source.html">Optimizer.cu</a>.</p>
<div class="dynheader">
Here is the call graph for this function:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1opt_1_1_ada_grad_ac0755dbf299371f78decfe63b0bf8ab6_cgraph.png" border="0" usemap="#aclassnz_1_1opt_1_1_ada_grad_ac0755dbf299371f78decfe63b0bf8ab6_cgraph" alt="Call graph"/></div>
<map name="aclassnz_1_1opt_1_1_ada_grad_ac0755dbf299371f78decfe63b0bf8ab6_cgraph" id="aclassnz_1_1opt_1_1_ada_grad_ac0755dbf299371f78decfe63b0bf8ab6_cgraph">
<area shape="rect" title="Performs a single optimization step using the AdaGrad algorithm." alt="" coords="5,39,159,65"/>
<area shape="rect" href="namespacenz_1_1krnl.html#a1e915bd4a354938d8bc2d09be00eae76" title="Kernel function to apply AdaGrad optimization." alt="" coords="391,13,514,40"/>
<area shape="poly" title=" " alt="" coords="159,44,376,29,376,35,159,49"/>
<area shape="rect" href="classnz_1_1data_1_1_tensor.html#ad220de56b18c404611f07f2290cd7e9d" title="Fills the tensor&#39;s data with a specified value." alt="" coords="207,64,343,91"/>
<area shape="poly" title=" " alt="" coords="159,59,192,64,191,69,159,65"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#ab4b2eb422e0e1ee44bdfdc0eb94457ce" title="Returns a reference to the singleton instance of the StreamManager." alt="" coords="562,5,748,48"/>
<area shape="poly" title=" " alt="" coords="515,24,547,24,547,29,515,29"/>
<area shape="rect" href="namespacenz_1_1krnl.html#ad136c8a6560a5305984ce0a31bea71bf" title="Kernel function to fill a data array with a given value." alt="" coords="408,64,497,91"/>
<area shape="poly" title=" " alt="" coords="343,75,393,75,393,80,343,80"/>
<area shape="poly" title=" " alt="" coords="497,64,552,50,553,55,498,69"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#a46ce59b45de432842454aadf00b93791" title="Asynchronously submits a CUDA kernel with stream&#45;ordered dependency management." alt="" coords="562,72,748,115"/>
<area shape="poly" title=" " alt="" coords="498,78,547,82,547,87,498,84"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#a1de1cf3aadea137faf90a2f9b4b7abe2" title="Acquires CUDA stream from pool using round&#45;robin scheduling." alt="" coords="796,39,981,81"/>
<area shape="poly" title=" " alt="" coords="748,77,780,73,780,78,749,83"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#adb1078a67c6e38932d7d58c2adb05ec0" title="Synchronizes CUDA stream execution until data writes complete." alt="" coords="796,105,981,148"/>
<area shape="poly" title=" " alt="" coords="749,104,780,109,780,114,748,109"/>
</map>
</div>

</div>
</div>
<hr/>The documentation for this class was generated from the following files:<ul>
<li>D:/Users/Mgepahmge/Documents/C Program/NeuZephyr/include/NeuZephyr/<a class="el" href="_optimizer_8cuh_source.html">Optimizer.cuh</a></li>
<li>D:/Users/Mgepahmge/Documents/C Program/NeuZephyr/src/<a class="el" href="_optimizer_8cu_source.html">Optimizer.cu</a></li>
</ul>
</div><!-- contents -->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated by&#160;<a href="https://www.doxygen.org/index.html"><img class="footer" src="doxygen.svg" width="104" height="31" alt="doxygen"/></a> 1.12.0
</small></address>
</div><!-- doc-content -->
</body>
</html>
