<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "https://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.9.1"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<title>AIfES 2: aiopti_adam.h File Reference</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="resize.js"></script>
<script type="text/javascript" src="navtreedata.js"></script>
<script type="text/javascript" src="navtree.js"></script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/searchdata.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<script type="text/x-mathjax-config">
  MathJax.Hub.Config({
    extensions: ["tex2jax.js"],
    jax: ["input/TeX","output/HTML-CSS"],
});
</script>
<script type="text/javascript" async="async" src="https://cdn.jsdelivr.net/npm/mathjax@2/MathJax.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
 <tbody>
 <tr style="height: 56px;">
  <td id="projectlogo"><img alt="Logo" src="AIfES_logo_small.png"/></td>
  <td id="projectalign" style="padding-left: 0.5em;">
   <div id="projectname">AIfES 2
   &#160;<span id="projectnumber">2.0.0</span>
   </div>
  </td>
 </tr>
 </tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.9.1 -->
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:cf05388f2679ee054f2beb29a391d25f4e673ac3&amp;dn=gpl-2.0.txt GPL-v2 */
var searchBox = new SearchBox("searchBox", "search",false,'Search','.html');
/* @license-end */
</script>
<script type="text/javascript" src="menudata.js"></script>
<script type="text/javascript" src="menu.js"></script>
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:cf05388f2679ee054f2beb29a391d25f4e673ac3&amp;dn=gpl-2.0.txt GPL-v2 */
$(function() {
  initMenu('',true,false,'search.php','Search');
  $(document).ready(function() { init_search(); });
});
/* @license-end */</script>
<div id="main-nav"></div>
</div><!-- top -->
<div id="side-nav" class="ui-resizable side-nav-resizable">
  <div id="nav-tree">
    <div id="nav-tree-contents">
      <div id="nav-sync" class="sync"></div>
    </div>
  </div>
  <div id="splitbar" style="-moz-user-select:none;" 
       class="ui-resizable-handle">
  </div>
</div>
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:cf05388f2679ee054f2beb29a391d25f4e673ac3&amp;dn=gpl-2.0.txt GPL-v2 */
$(document).ready(function(){initNavTree('aiopti__adam_8h.html',''); initResizable(); });
/* @license-end */
</script>
<div id="doc-content">
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
     onmouseover="return searchBox.OnSearchSelectShow()"
     onmouseout="return searchBox.OnSearchSelectHide()"
     onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>

<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0" 
        name="MSearchResults" id="MSearchResults">
</iframe>
</div>

<div class="header">
  <div class="summary">
<a href="#nested-classes">Data Structures</a> &#124;
<a href="#typedef-members">Typedefs</a> &#124;
<a href="#func-members">Functions</a> &#124;
<a href="#var-members">Variables</a>  </div>
  <div class="headertitle">
<div class="title">aiopti_adam.h File Reference</div>  </div>
</div><!--header-->
<div class="contents">

<p>Base <a class="el" href="structaiopti.html">optimizer </a> implementation of the <a href="https://arxiv.org/pdf/1412.6980.pdf">Adam optimizer</a>  
<a href="#details">More...</a></p>

<p><a href="aiopti__adam_8h_source.html">Go to the source code of this file.</a></p>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="nested-classes"></a>
Data Structures</h2></td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">struct &#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="structaiopti__adam.html">aiopti_adam</a></td></tr>
<tr class="memdesc:"><td class="mdescLeft">&#160;</td><td class="mdescRight">General <a class="el" href="aiopti__adam_8h.html">Adam optimizer </a> struct.  <a href="structaiopti__adam.html#details">More...</a><br /></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:"><td class="memItemLeft" align="right" valign="top">struct &#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="structaiopti__adam__momentums.html">aiopti_adam_momentums</a></td></tr>
<tr class="memdesc:"><td class="mdescLeft">&#160;</td><td class="mdescRight">Struct for the momentum tensors of an Adam optimizer.  <a href="structaiopti__adam__momentums.html#details">More...</a><br /></td></tr>
<tr class="separator:"><td class="memSeparator" colspan="2">&#160;</td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="typedef-members"></a>
Typedefs</h2></td></tr>
<tr class="memitem:af6a2a8ada598e63f1be2ec6ff233c24f"><td class="memItemLeft" align="right" valign="top"><a id="af6a2a8ada598e63f1be2ec6ff233c24f"></a>
typedef struct <a class="el" href="structaiopti__adam.html">aiopti_adam</a>&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="aiopti__adam_8h.html#af6a2a8ada598e63f1be2ec6ff233c24f">aiopti_adam_t</a></td></tr>
<tr class="memdesc:af6a2a8ada598e63f1be2ec6ff233c24f"><td class="mdescLeft">&#160;</td><td class="mdescRight">New data type name for code reduction. <br /></td></tr>
<tr class="separator:af6a2a8ada598e63f1be2ec6ff233c24f"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a40c00ead1bde1b1ac8c46282b0de8804"><td class="memItemLeft" align="right" valign="top"><a id="a40c00ead1bde1b1ac8c46282b0de8804"></a>
typedef struct <a class="el" href="structaiopti__adam__momentums.html">aiopti_adam_momentums</a>&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="aiopti__adam_8h.html#a40c00ead1bde1b1ac8c46282b0de8804">aiopti_adam_momentums_t</a></td></tr>
<tr class="memdesc:a40c00ead1bde1b1ac8c46282b0de8804"><td class="mdescLeft">&#160;</td><td class="mdescRight">New data type name for code reduction. <br /></td></tr>
<tr class="separator:a40c00ead1bde1b1ac8c46282b0de8804"><td class="memSeparator" colspan="2">&#160;</td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="func-members"></a>
Functions</h2></td></tr>
<tr class="memitem:af629e0de34a263c34060368bb8e7d6d0"><td class="memItemLeft" align="right" valign="top"><a class="el" href="structaiopti.html">aiopti_t</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="aiopti__adam_8h.html#af629e0de34a263c34060368bb8e7d6d0">aiopti_adam</a> (<a class="el" href="aiopti__adam_8h.html#af6a2a8ada598e63f1be2ec6ff233c24f">aiopti_adam_t</a> *opti)</td></tr>
<tr class="memdesc:af629e0de34a263c34060368bb8e7d6d0"><td class="mdescLeft">&#160;</td><td class="mdescRight">Initialize the given Adam optimizer.  <a href="aiopti__adam_8h.html#af629e0de34a263c34060368bb8e7d6d0">More...</a><br /></td></tr>
<tr class="separator:af629e0de34a263c34060368bb8e7d6d0"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a79d431f3d4b7d1d0a78d720f297abc30"><td class="memItemLeft" align="right" valign="top">uint32_t&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="aiopti__adam_8h.html#a79d431f3d4b7d1d0a78d720f297abc30">aiopti_adam_sizeof_optimem</a> (<a class="el" href="structaiopti.html">aiopti_t</a> *self, const <a class="el" href="structaitensor.html">aitensor_t</a> *params)</td></tr>
<tr class="memdesc:a79d431f3d4b7d1d0a78d720f297abc30"><td class="mdescLeft">&#160;</td><td class="mdescRight">Calculates the required memory for the optimization step.  <a href="aiopti__adam_8h.html#a79d431f3d4b7d1d0a78d720f297abc30">More...</a><br /></td></tr>
<tr class="separator:a79d431f3d4b7d1d0a78d720f297abc30"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a5fa06ea548cdaa249a98f5e9aaf0fc53"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="aiopti__adam_8h.html#a5fa06ea548cdaa249a98f5e9aaf0fc53">aiopti_adam_init_optimem</a> (<a class="el" href="structaiopti.html">aiopti_t</a> *self, const <a class="el" href="structaitensor.html">aitensor_t</a> *params, const <a class="el" href="structaitensor.html">aitensor_t</a> *gradients, void *optimem)</td></tr>
<tr class="memdesc:a5fa06ea548cdaa249a98f5e9aaf0fc53"><td class="mdescLeft">&#160;</td><td class="mdescRight">Initialization of the optimization memory buffer.  <a href="aiopti__adam_8h.html#a5fa06ea548cdaa249a98f5e9aaf0fc53">More...</a><br /></td></tr>
<tr class="separator:a5fa06ea548cdaa249a98f5e9aaf0fc53"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a735b282e282209a39b101cd8f04cf43d"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="aiopti__adam_8h.html#a735b282e282209a39b101cd8f04cf43d">aiopti_adam_zero_gradients</a> (<a class="el" href="structaiopti.html">aiopti_t</a> *self, <a class="el" href="structaitensor.html">aitensor_t</a> *gradients)</td></tr>
<tr class="memdesc:a735b282e282209a39b101cd8f04cf43d"><td class="mdescLeft">&#160;</td><td class="mdescRight">Set the gradients to zero.  <a href="aiopti__adam_8h.html#a735b282e282209a39b101cd8f04cf43d">More...</a><br /></td></tr>
<tr class="separator:a735b282e282209a39b101cd8f04cf43d"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a4b9e08fa3872a6edcf3dbfb81c53f641"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="aiopti__adam_8h.html#a4b9e08fa3872a6edcf3dbfb81c53f641">aiopti_adam_update_params</a> (<a class="el" href="structaiopti.html">aiopti_t</a> *self, <a class="el" href="structaitensor.html">aitensor_t</a> *params, const <a class="el" href="structaitensor.html">aitensor_t</a> *gradients, void *optimem)</td></tr>
<tr class="memdesc:a4b9e08fa3872a6edcf3dbfb81c53f641"><td class="mdescLeft">&#160;</td><td class="mdescRight">Update the given parameter tensor with respect to the gradients.  <a href="aiopti__adam_8h.html#a4b9e08fa3872a6edcf3dbfb81c53f641">More...</a><br /></td></tr>
<tr class="separator:a4b9e08fa3872a6edcf3dbfb81c53f641"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a916f8f38aaeba5814db961c3ff00a3d4"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="aiopti__adam_8h.html#a916f8f38aaeba5814db961c3ff00a3d4">aiopti_adam_print_specs</a> (const <a class="el" href="structaiopti.html">aiopti_t</a> *self)</td></tr>
<tr class="memdesc:a916f8f38aaeba5814db961c3ff00a3d4"><td class="mdescLeft">&#160;</td><td class="mdescRight">Print the optimizer specification.  <a href="aiopti__adam_8h.html#a916f8f38aaeba5814db961c3ff00a3d4">More...</a><br /></td></tr>
<tr class="separator:a916f8f38aaeba5814db961c3ff00a3d4"><td class="memSeparator" colspan="2">&#160;</td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="var-members"></a>
Variables</h2></td></tr>
<tr class="memitem:af4b770ca8b7c46a865a01dedd0c866af"><td class="memItemLeft" align="right" valign="top">const <a class="el" href="structaicore__optitype.html">aicore_optitype_t</a> *&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="aiopti__adam_8h.html#af4b770ca8b7c46a865a01dedd0c866af">aiopti_adam_type</a></td></tr>
<tr class="memdesc:af4b770ca8b7c46a865a01dedd0c866af"><td class="mdescLeft">&#160;</td><td class="mdescRight">Adam optimizer type.  <a href="aiopti__adam_8h.html#af4b770ca8b7c46a865a01dedd0c866af">More...</a><br /></td></tr>
<tr class="separator:af4b770ca8b7c46a865a01dedd0c866af"><td class="memSeparator" colspan="2">&#160;</td></tr>
</table>
<a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2>
<div class="textblock"><p>Base <a class="el" href="structaiopti.html">optimizer </a> implementation of the <a href="https://arxiv.org/pdf/1412.6980.pdf">Adam optimizer</a> </p>
<dl class="section version"><dt>Version</dt><dd>2.2.0 </dd></dl>
<dl class="section copyright"><dt>Copyright</dt><dd>Copyright (C) 2020-2023 Fraunhofer Institute for Microelectronic Circuits and Systems. All rights reserved.<br  />
<br  />
 AIfES is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.<br  />
<br  />
 This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details.<br  />
<br  />
 You should have received a copy of the GNU Affero General Public License along with this program. If not, see <a href="https://www.gnu.org/licenses/">https://www.gnu.org/licenses/</a>.</dd></dl>
<p>This is an "abstract" data-type independent implementation. To use the optimizer, use one of the provided implementations for a specific hardware and data-type (for example from <a class="el" href="aiopti__adam__default_8h.html" title="Default implementation of the Adam optimizer .">aiopti_adam_default.h</a>) or set the required math functions on your own.</p>
<p>The <a href="https://arxiv.org/pdf/1412.6980.pdf">Adam optimizer</a> is based on SGD and uses first-order and second-order moments for adaptive estimation. It uses the pre-calculated gradients to optimize the given parameters. For every parameter \( p \) of the parameters to optimize (trainable parameters) and the related gradient \( g \) it calculates </p><p class="formulaDsp">
\[ m_t = \beta_1 \cdot m_{t-1} + (1 - \beta_1) \cdot g_t \]
</p>
 <p class="formulaDsp">
\[ v_t = \beta_2 \cdot v_{t-1} + (1 - \beta_2) \cdot g^2_t \]
</p>
 <p class="formulaDsp">
\[ p_t = p_{t-1} - lr_t \cdot \frac{m_t} {\sqrt{v_t} + \hat{\epsilon}} \]
</p>
<p> in every optimization step with \( lr_t = lr \cdot \frac{\sqrt{1 - \beta_2^t}} {1 - \beta_1^t} \).<br  />
 \( lr \) is the learning rate that defines how big the optimization steps should be, and therefore how fast the training will be. \( m \) and \( v \) are the first- and second-order moments related to the parameter and must be stored in the optimization memory for every parameter. </p>
</div><h2 class="groupheader">Function Documentation</h2>
<a id="af629e0de34a263c34060368bb8e7d6d0"></a>
<h2 class="memtitle"><span class="permalink"><a href="#af629e0de34a263c34060368bb8e7d6d0">&#9670;&nbsp;</a></span>aiopti_adam()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="structaiopti.html">aiopti_t</a>* <a class="el" href="structaiopti__adam.html">aiopti_adam</a> </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="aiopti__adam_8h.html#af6a2a8ada598e63f1be2ec6ff233c24f">aiopti_adam_t</a> *&#160;</td>
          <td class="paramname"><em>opti</em></td><td>)</td>
          <td></td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Initialize the given Adam optimizer. </p>
<p>This function represents the "constructor" of the abstract Adam optimizer.<br  />
This function is not intended to call it directly. Instead use one of the data type specific implementations (like for example <a class="el" href="aiopti__adam__default_8h.html#a403603bdc0d77e2d1cbc3fd4cd37880a" title="Initializes an Adam optimizer  with the F32  default implementation.">aiopti_adam_f32_default()</a>).</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">*opti</td><td>The optimizer to initialize. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>Pointer to the (successfully) initialized general optimizer structure (<a class="el" href="structaiopti__adam.html#a76e9fa4cf381a685a5df819a57737e64" title="Inherited field members from general optimizer struct.">aiopti_adam.base</a>) </dd></dl>

</div>
</div>
<a id="a5fa06ea548cdaa249a98f5e9aaf0fc53"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a5fa06ea548cdaa249a98f5e9aaf0fc53">&#9670;&nbsp;</a></span>aiopti_adam_init_optimem()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void aiopti_adam_init_optimem </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="structaiopti.html">aiopti_t</a> *&#160;</td>
          <td class="paramname"><em>self</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="structaitensor.html">aitensor_t</a> *&#160;</td>
          <td class="paramname"><em>params</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="structaitensor.html">aitensor_t</a> *&#160;</td>
          <td class="paramname"><em>gradients</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">void *&#160;</td>
          <td class="paramname"><em>optimem</em>&#160;</td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Initialization of the optimization memory buffer. </p>
<p><em>Implementation of <a class="el" href="structaiopti.html#a39e07b6004587fae3c9b12b1821ef066" title="Initialize the optimization memory for a trainable parameter tensor.">aiopti.init_optimem</a>.</em></p>
<p>Initialize the first and second moment tensors with zeros: </p><p class="formulaDsp">
\[ m_{0,i} \leftarrow 0 \]
</p>
 <p class="formulaDsp">
\[ v_{0,i} \leftarrow 0 \]
</p>
<p>Used math functions:</p><ul>
<li><a class="el" href="structaiopti__adam.html#aa0c5520df9a1bfa236b1ef7eddeb3553" title="Required math function: Sets the elements of a tensor to zero.">aiopti_adam.zero_tensor</a></li>
</ul>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">*self</td><td>The optimizer </td></tr>
    <tr><td class="paramname">*params</td><td>The tensor of trainable parameters </td></tr>
    <tr><td class="paramname">*gradients</td><td>The gradients associated to the parameters </td></tr>
    <tr><td class="paramname">*optimem</td><td>The optimization memory (containing the first and second moment) associated to the parameters </td></tr>
  </table>
  </dd>
</dl>

</div>
</div>
<a id="a916f8f38aaeba5814db961c3ff00a3d4"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a916f8f38aaeba5814db961c3ff00a3d4">&#9670;&nbsp;</a></span>aiopti_adam_print_specs()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void aiopti_adam_print_specs </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="structaiopti.html">aiopti_t</a> *&#160;</td>
          <td class="paramname"><em>self</em></td><td>)</td>
          <td></td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Print the optimizer specification. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">*self</td><td>The optimizer to print the specification for </td></tr>
  </table>
  </dd>
</dl>

</div>
</div>
<a id="a79d431f3d4b7d1d0a78d720f297abc30"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a79d431f3d4b7d1d0a78d720f297abc30">&#9670;&nbsp;</a></span>aiopti_adam_sizeof_optimem()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">uint32_t aiopti_adam_sizeof_optimem </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="structaiopti.html">aiopti_t</a> *&#160;</td>
          <td class="paramname"><em>self</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="structaitensor.html">aitensor_t</a> *&#160;</td>
          <td class="paramname"><em>params</em>&#160;</td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Calculates the required memory for the optimization step. </p>
<p><em>Implementation of <a class="el" href="structaiopti.html#a948b4e86692cb9306c71c1099a6ba67d" title="Calculates the optimization memory size for a trainable parameter tensor.">aiopti.sizeof_optimem</a>.</em></p>
<p>Calculates the size of the memory space that must be reserved. The memory is used for the first and second moment tensors and is calculated by:</p>
<div class="fragment"><div class="line">2 * (<span class="keyword">sizeof</span>(<a class="code" href="structaitensor.html">aitensor</a>) + <span class="keyword">sizeof</span>(params.data)) </div>
<div class="ttc" id="astructaitensor_html"><div class="ttname"><a href="structaitensor.html">aitensor</a></div><div class="ttdoc">A tensor in AIfES.</div><div class="ttdef"><b>Definition:</b> aifes_math.h:89</div></div>
</div><!-- fragment --><dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">*self</td><td>The optimizer </td></tr>
    <tr><td class="paramname">*params</td><td>The tensor of trainable parameters to calculate the memory for </td></tr>
  </table>
  </dd>
</dl>

</div>
</div>
<a id="a4b9e08fa3872a6edcf3dbfb81c53f641"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a4b9e08fa3872a6edcf3dbfb81c53f641">&#9670;&nbsp;</a></span>aiopti_adam_update_params()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void aiopti_adam_update_params </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="structaiopti.html">aiopti_t</a> *&#160;</td>
          <td class="paramname"><em>self</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype"><a class="el" href="structaitensor.html">aitensor_t</a> *&#160;</td>
          <td class="paramname"><em>params</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="structaitensor.html">aitensor_t</a> *&#160;</td>
          <td class="paramname"><em>gradients</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">void *&#160;</td>
          <td class="paramname"><em>optimem</em>&#160;</td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Update the given parameter tensor with respect to the gradients. </p>
<p><em>Implementation of <a class="el" href="structaiopti.html#a833b900d14688a649c1037466adf444b" title="Performs an optimization step on the given tensor.">aiopti.update_params</a>.</em></p>
<p>Calculate and update the values of the trainable parameters (perform one update step): </p><p class="formulaDsp">
\[ m_t \leftarrow \beta_1 \cdot m_{t-1} + (1 - \beta_1) \cdot g_t \]
</p>
 <p class="formulaDsp">
\[ v_t \leftarrow \beta_2 \cdot v_{t-1} + (1 - \beta_2) \cdot g^2_t \]
</p>
 <p class="formulaDsp">
\[ p_t \leftarrow p_{t-1} - lr_t \cdot \frac{m_t} {\sqrt{v_t} + \hat{\epsilon}} \]
</p>
<p>\( m \): First moment estimates<br  />
 \( v \): Second moment estimates<br  />
 \( p \): Tensor of trainable parameters to update (params)<br  />
 \( g \): Gradients<br  />
 \( lr \): Learning rate / Optimization step size<br  />
 \( \beta_1 \): Exponential decay rate for the first moment estimates<br  />
 \( \beta_2 \): Exponential decay rate for the second moment estimates<br  />
 \( \hat{\epsilon} \): Small positive number for numerical stability<br  />
<br  />
 Used math functions:</p><ul>
<li><a class="el" href="structaiopti__adam.html#a2cbbfaac40611a8dd0ec908cc199b388" title="Required math function: Multiplication of a scalar with a tensor.">aiopti_adam.scalar_mul</a></li>
<li><a class="el" href="structaiopti__adam.html#a7674445f16490c6cacdb03157feab8c7" title="Required math function: Element wise tensor addition.">aiopti_adam.tensor_add</a></li>
<li><a class="el" href="structaiopti__adam.html#a50f5e44b0fca9c1e79a5287adb4a59eb" title="Required math function: Element wise tensor multiplication.">aiopti_adam.multiply</a></li>
<li><a class="el" href="structaiopti__adam.html#a5d36deab4e71dd3b8d45ea2fff8f4b55" title="Required math function: Square root.">aiopti_adam.sqrt</a></li>
<li><a class="el" href="structaiopti__adam.html#a5aaf0a1aa2e5c1674e5b4be2f80f907f" title="Required math function: Element wise addition of a scalar to a tensor.">aiopti_adam.scalar_add</a></li>
<li><a class="el" href="structaiopti__adam.html#a5c1478e75fa8c703a1ed25b1091a6bc4" title="Required math function: Element wise tensor division.">aiopti_adam.divide</a></li>
<li><a class="el" href="structaiopti__adam.html#a5f5b55cad6c306233a8e4510d1773244" title="Required math function: Element wise tensor subtraction.">aiopti_adam.tensor_sub</a></li>
</ul>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">*self</td><td>The optimizer </td></tr>
    <tr><td class="paramname">*params</td><td>The tensor of trainable parameters \( p \) to update </td></tr>
    <tr><td class="paramname">*gradients</td><td>The gradients \( g \) associated to the parameters </td></tr>
    <tr><td class="paramname">*optimem</td><td>The buffer to store the first and second momentums \( m \) and \( v \) </td></tr>
  </table>
  </dd>
</dl>

</div>
</div>
<a id="a735b282e282209a39b101cd8f04cf43d"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a735b282e282209a39b101cd8f04cf43d">&#9670;&nbsp;</a></span>aiopti_adam_zero_gradients()</h2>

<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void aiopti_adam_zero_gradients </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="structaiopti.html">aiopti_t</a> *&#160;</td>
          <td class="paramname"><em>self</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype"><a class="el" href="structaitensor.html">aitensor_t</a> *&#160;</td>
          <td class="paramname"><em>gradients</em>&#160;</td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table>
</div><div class="memdoc">

<p>Set the gradients to zero. </p>
<p><em>Implementation of <a class="el" href="structaiopti.html#ae145c9527c45c9c98fe7a0b317245e66" title="Set the gradient tensor to zero.">aiopti.zero_gradients</a>.</em></p>
<p class="formulaDsp">
\[ g_{i} \leftarrow 0 \]
</p>
<p>Used math functions:</p><ul>
<li><a class="el" href="structaiopti__adam.html#aa0c5520df9a1bfa236b1ef7eddeb3553" title="Required math function: Sets the elements of a tensor to zero.">aiopti_adam.zero_tensor</a></li>
</ul>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">*self</td><td>The optimizer </td></tr>
    <tr><td class="paramname">*gradients</td><td>The gradients to set to zero </td></tr>
  </table>
  </dd>
</dl>

</div>
</div>
<h2 class="groupheader">Variable Documentation</h2>
<a id="af4b770ca8b7c46a865a01dedd0c866af"></a>
<h2 class="memtitle"><span class="permalink"><a href="#af4b770ca8b7c46a865a01dedd0c866af">&#9670;&nbsp;</a></span>aiopti_adam_type</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">const <a class="el" href="structaicore__optitype.html">aicore_optitype_t</a>* aiopti_adam_type</td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">extern</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Adam optimizer type. </p>
<p>Defines the type of the optimizer (for example for type checks and debug prints). See <a class="el" href="structaicore__optitype.html" title="Type indicator of the optimizer to check for the optimizer type.">aicore_optitype</a> for more information about the optimizer type. </p>

</div>
</div>
</div><!-- contents -->
</div><!-- doc-content -->
<!-- start footer part -->
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
  <ul>
    <li class="navelem"><a class="el" href="dir_d44c64559bbebec7f509842c48db8b23.html">include</a></li><li class="navelem"><a class="el" href="dir_1e5d3661ed79af157d57e64a38265d09.html">basic</a></li><li class="navelem"><a class="el" href="dir_90008ee2b0f86999412b56217da88d54.html">base</a></li><li class="navelem"><a class="el" href="dir_a6118b80a1160589fd2e088758244a4b.html">aiopti</a></li><li class="navelem"><a class="el" href="aiopti__adam_8h.html">aiopti_adam.h</a></li>
    <li class="footer">Generated by <a href="https://www.doxygen.org/index.html"><img class="footer" src="doxygen.svg" width="104" height="31" alt="doxygen"/></a> 1.9.1 </li>
  </ul>
</div>
</body>
</html>
