<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "https://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US">
<head>
<meta http-equiv="Content-Type" content="application/xhtml+xml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=11"/>
<meta name="generator" content="Doxygen 1.12.0"/>
<meta name="viewport" content="width=device-width, initial-scale=1"/>
<title>NeuZephyr: nz::nodes::calc::HardSigmoidNode Class Reference</title>
<link rel="icon" href="NZ_logo2.png" type="image/png" />
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="resize.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
 <tbody>
 <tr id="projectrow">
  <td id="projectlogo"><img alt="Logo" src="NZ_logo2.png"/></td>
  <td id="projectalign">
   <div id="projectname">NeuZephyr
   </div>
   <div id="projectbrief">Simple DL Framework</div>
  </td>
 </tr>
 </tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.12.0 -->
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&amp;dn=expat.txt MIT */
$(function() { codefold.init(0); });
/* @license-end */
</script>
  <div id="navrow1" class="tabs">
    <ul class="tablist">
      <li><a href="index.html"><span>Main&#160;Page</span></a></li>
      <li><a href="pages.html"><span>Related&#160;Pages</span></a></li>
      <li><a href="namespaces.html"><span>Namespaces</span></a></li>
      <li class="current"><a href="annotated.html"><span>Classes</span></a></li>
      <li><a href="files.html"><span>Files</span></a></li>
    </ul>
  </div>
  <div id="navrow2" class="tabs2">
    <ul class="tablist">
      <li><a href="annotated.html"><span>Class&#160;List</span></a></li>
      <li><a href="classes.html"><span>Class&#160;Index</span></a></li>
      <li><a href="inherits.html"><span>Class&#160;Hierarchy</span></a></li>
      <li><a href="functions.html"><span>Class&#160;Members</span></a></li>
    </ul>
  </div>
<script type="text/javascript">
/* @license magnet:?xt=urn:btih:d3d9a9a6595521f9666a5e94cc830dab83b65699&amp;dn=expat.txt MIT */
$(function(){ initResizable(false); });
/* @license-end */
</script>
<div id="nav-path" class="navpath">
  <ul>
<li class="navelem"><b>nz</b></li><li class="navelem"><a class="el" href="namespacenz_1_1nodes.html">nodes</a></li><li class="navelem"><a class="el" href="namespacenz_1_1nodes_1_1calc.html">calc</a></li><li class="navelem"><a class="el" href="classnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node.html">HardSigmoidNode</a></li>  </ul>
</div>
</div><!-- top -->
<div id="doc-content">
<div class="header">
  <div class="summary">
<a href="#pub-methods">Public Member Functions</a> &#124;
<a href="classnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node-members.html">List of all members</a>  </div>
  <div class="headertitle"><div class="title">nz::nodes::calc::HardSigmoidNode Class Reference</div></div>
</div><!--header-->
<div class="contents">

<p>Represents a Hard Sigmoid activation function node in a computational graph.  
 <a href="#details">More...</a></p>
<div class="dynheader">
Inheritance diagram for nz::nodes::calc::HardSigmoidNode:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node__inherit__graph.png" border="0" usemap="#anz_1_1nodes_1_1calc_1_1_hard_sigmoid_node_inherit__map" alt="Inheritance graph"/></div>
<map name="anz_1_1nodes_1_1calc_1_1_hard_sigmoid_node_inherit__map" id="anz_1_1nodes_1_1calc_1_1_hard_sigmoid_node_inherit__map">
<area shape="rect" title="Represents a Hard Sigmoid activation function node in a computational graph." alt="" coords="5,80,230,107"/>
<area shape="rect" href="classnz_1_1nodes_1_1_node.html" title="Base class for nodes in a neural network or computational graph." alt="" coords="59,5,176,32"/>
<area shape="poly" title=" " alt="" coords="120,48,120,80,115,80,115,48"/>
</map>
<center><span class="legend">[<a href="graph_legend.html">legend</a>]</span></center></div>
<div class="dynheader">
Collaboration diagram for nz::nodes::calc::HardSigmoidNode:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node__coll__graph.png" border="0" usemap="#anz_1_1nodes_1_1calc_1_1_hard_sigmoid_node_coll__map" alt="Collaboration graph"/></div>
<map name="anz_1_1nodes_1_1calc_1_1_hard_sigmoid_node_coll__map" id="anz_1_1nodes_1_1calc_1_1_hard_sigmoid_node_coll__map">
<area shape="rect" title="Represents a Hard Sigmoid activation function node in a computational graph." alt="" coords="5,80,230,107"/>
<area shape="rect" href="classnz_1_1nodes_1_1_node.html" title="Base class for nodes in a neural network or computational graph." alt="" coords="59,5,176,32"/>
<area shape="poly" title=" " alt="" coords="120,48,120,80,115,80,115,48"/>
</map>
<center><span class="legend">[<a href="graph_legend.html">legend</a>]</span></center></div>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a id="pub-methods" name="pub-methods"></a>
Public Member Functions</h2></td></tr>
<tr class="memitem:a9fad60d7a07f6296aa5ce13acd6511d2" id="r_a9fad60d7a07f6296aa5ce13acd6511d2"><td class="memItemLeft" align="right" valign="top">&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a9fad60d7a07f6296aa5ce13acd6511d2">HardSigmoidNode</a> (<a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *input, Tensor::value_type alpha=0.2f, Tensor::value_type beta=0.5f)</td></tr>
<tr class="memdesc:a9fad60d7a07f6296aa5ce13acd6511d2"><td class="mdescLeft">&#160;</td><td class="mdescRight">Constructor to initialize a <code><a class="el" href="classnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node.html" title="Represents a Hard Sigmoid activation function node in a computational graph.">HardSigmoidNode</a></code> for applying the Hard Sigmoid activation function.  <br /></td></tr>
<tr class="separator:a9fad60d7a07f6296aa5ce13acd6511d2"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a97590995aa192807d96a856ee2bcd71f" id="r_a97590995aa192807d96a856ee2bcd71f"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#a97590995aa192807d96a856ee2bcd71f">forward</a> () override</td></tr>
<tr class="memdesc:a97590995aa192807d96a856ee2bcd71f"><td class="mdescLeft">&#160;</td><td class="mdescRight">Forward pass for the <code><a class="el" href="classnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node.html" title="Represents a Hard Sigmoid activation function node in a computational graph.">HardSigmoidNode</a></code> to apply the Hard Sigmoid activation function.  <br /></td></tr>
<tr class="separator:a97590995aa192807d96a856ee2bcd71f"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:ad977c6a8c49252de4038f8ac08beed3c" id="r_ad977c6a8c49252de4038f8ac08beed3c"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="#ad977c6a8c49252de4038f8ac08beed3c">backward</a> () override</td></tr>
<tr class="memdesc:ad977c6a8c49252de4038f8ac08beed3c"><td class="mdescLeft">&#160;</td><td class="mdescRight">Backward pass for the <code><a class="el" href="classnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node.html" title="Represents a Hard Sigmoid activation function node in a computational graph.">HardSigmoidNode</a></code> to compute gradients.  <br /></td></tr>
<tr class="separator:ad977c6a8c49252de4038f8ac08beed3c"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="inherit_header pub_methods_classnz_1_1nodes_1_1_node"><td colspan="2" onclick="javascript:dynsection.toggleInherit('pub_methods_classnz_1_1nodes_1_1_node')"><img src="closed.png" alt="-"/>&#160;Public Member Functions inherited from <a class="el" href="classnz_1_1nodes_1_1_node.html">nz::nodes::Node</a></td></tr>
<tr class="memitem:a687ee9c34eb61f8f28caa201ca42696e inherit pub_methods_classnz_1_1nodes_1_1_node" id="r_a687ee9c34eb61f8f28caa201ca42696e"><td class="memItemLeft" align="right" valign="top">virtual void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classnz_1_1nodes_1_1_node.html#a687ee9c34eb61f8f28caa201ca42696e">print</a> (std::ostream &amp;os) const</td></tr>
<tr class="memdesc:a687ee9c34eb61f8f28caa201ca42696e inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="mdescLeft">&#160;</td><td class="mdescRight">Prints the type, data, and gradient of the node.  <br /></td></tr>
<tr class="separator:a687ee9c34eb61f8f28caa201ca42696e inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a9b85913e12422bb4ac2fff483427bb47 inherit pub_methods_classnz_1_1nodes_1_1_node" id="r_a9b85913e12422bb4ac2fff483427bb47"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classnz_1_1nodes_1_1_node.html#a9b85913e12422bb4ac2fff483427bb47">dataInject</a> (Tensor::value_type *data, bool grad=false) const</td></tr>
<tr class="memdesc:a9b85913e12422bb4ac2fff483427bb47 inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="mdescLeft">&#160;</td><td class="mdescRight">Injects data into a relevant tensor object, optionally setting its gradient requirement.  <br /></td></tr>
<tr class="separator:a9b85913e12422bb4ac2fff483427bb47 inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a609f1730085dd1d31e0ddcbbae48a065 inherit pub_methods_classnz_1_1nodes_1_1_node" id="r_a609f1730085dd1d31e0ddcbbae48a065"><td class="memTemplParams" colspan="2">template&lt;typename Iterator &gt; </td></tr>
<tr class="memitem:a609f1730085dd1d31e0ddcbbae48a065 inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="memTemplItemLeft" align="right" valign="top">void&#160;</td><td class="memTemplItemRight" valign="bottom"><a class="el" href="classnz_1_1nodes_1_1_node.html#a609f1730085dd1d31e0ddcbbae48a065">dataInject</a> (Iterator begin, Iterator end, const bool grad=false) const</td></tr>
<tr class="memdesc:a609f1730085dd1d31e0ddcbbae48a065 inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="mdescLeft">&#160;</td><td class="mdescRight">Injects data from an iterator range into the output tensor of the InputNode, optionally setting its gradient requirement.  <br /></td></tr>
<tr class="separator:a609f1730085dd1d31e0ddcbbae48a065 inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:af8b4bab3271df92ca1f0914f7a97b1e8 inherit pub_methods_classnz_1_1nodes_1_1_node" id="r_af8b4bab3271df92ca1f0914f7a97b1e8"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classnz_1_1nodes_1_1_node.html#af8b4bab3271df92ca1f0914f7a97b1e8">dataInject</a> (const std::initializer_list&lt; Tensor::value_type &gt; &amp;data, bool grad=false) const</td></tr>
<tr class="memdesc:af8b4bab3271df92ca1f0914f7a97b1e8 inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="mdescLeft">&#160;</td><td class="mdescRight">Injects data from a std::initializer_list into the output tensor of the <a class="el" href="classnz_1_1nodes_1_1_node.html" title="Base class for nodes in a neural network or computational graph.">Node</a>, optionally setting its gradient requirement.  <br /></td></tr>
<tr class="separator:af8b4bab3271df92ca1f0914f7a97b1e8 inherit pub_methods_classnz_1_1nodes_1_1_node"><td class="memSeparator" colspan="2">&#160;</td></tr>
</table>
<a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2>
<div class="textblock"><p>Represents a Hard Sigmoid activation function node in a computational graph. </p>
<p>The <code><a class="el" href="classnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node.html" title="Represents a Hard Sigmoid activation function node in a computational graph.">HardSigmoidNode</a></code> class applies the Hard Sigmoid activation function to the input tensor. The Hard Sigmoid function is a computationally efficient approximation of the sigmoid function and is defined as: </p><div class="fragment"><div class="line"><a class="code hl_function" href="namespacenz_1_1krnl.html#a52e449285e560185378234aecaf2f87c">HardSigmoid</a>(x) = max(0, min(1, alpha * x + beta))</div>
<div class="ttc" id="anamespacenz_1_1krnl_html_a52e449285e560185378234aecaf2f87c"><div class="ttname"><a href="namespacenz_1_1krnl.html#a52e449285e560185378234aecaf2f87c">nz::krnl::HardSigmoid</a></div><div class="ttdeci">void HardSigmoid(dim3 gridDim, dim3 blockDim, float *out, float *in, unsigned long long n, float alpha=0.2f, float beta=0.5f)</div><div class="ttdoc">Kernel function to apply the Hard Sigmoid activation function on the GPU.</div><div class="ttdef"><b>Definition</b> <a href="_operation_kernels_8cu_source.html#l00403">OperationKernels.cu:403</a></div></div>
</div><!-- fragment --><p> where <code>alpha</code> and <code>beta</code> control the slope and offset, respectively.</p>
<p>Key features:</p><ul>
<li><b>Forward Pass</b>: Applies the Hard Sigmoid activation function element-wise to the input tensor, mapping values to the range [0, 1] with linear interpolation.</li>
<li><b>Backward Pass</b>: Computes the gradient of the loss with respect to the input tensor. Gradients are propagated only for input values within the linear range (<code>0 &lt;= alpha * x + beta &lt;= 1</code>): <div class="fragment"><div class="line"><a class="code hl_function" href="namespacenz_1_1krnl.html#a52e449285e560185378234aecaf2f87c">HardSigmoid</a><span class="stringliteral">&#39;(x) = alpha, if 0 &lt;= alpha * x + beta &lt;= 1</span></div>
<div class="line"><span class="stringliteral">                  0, otherwise</span></div>
</div><!-- fragment --></li>
<li><b>Shape Preservation</b>: The output tensor has the same shape as the input tensor.</li>
<li><b>Gradient Management</b>: Automatically tracks gradients if required by the input tensor.</li>
</ul>
<p>This class is part of the <code><a class="el" href="namespacenz_1_1nodes.html" title="Contains classes and functionality for nodes in a neural network or computational graph.">nz::nodes</a></code> namespace and is used in models where efficiency is prioritized over precise non-linearity.</p>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The <code>alpha</code> and <code>beta</code> parameters default to <code>0.2</code> and <code>0.5</code>, respectively, but can be customized during construction.</li>
<li>Efficient GPU computations are performed for both forward and backward passes.</li>
</ul>
</dd></dl>
<h3><a class="anchor" id="autotoc_md102"></a>
Usage Example:</h3>
<div class="fragment"><div class="line"><span class="comment">// Example: Using HardSigmoidNode in a computational graph</span></div>
<div class="line">InputNode input({3, 3}, <span class="keyword">true</span>);  <span class="comment">// Create an input node with shape {3, 3}</span></div>
<div class="line"> </div>
<div class="line"><span class="keywordtype">float</span> data[] = {-1.0f, 0.0f, 1.0f, 2.0f, -2.0f, 3.0f, -3.0f, 4.0f, -4.0f};  <span class="comment">// Sample input values</span></div>
<div class="line">input.output-&gt;dataInject(data);  <span class="comment">// Copy data to the input tensor</span></div>
<div class="line"> </div>
<div class="line"><a class="code hl_function" href="#a9fad60d7a07f6296aa5ce13acd6511d2">HardSigmoidNode</a> hard_sigmoid_node(&amp;input, 0.2f, 0.5f);  <span class="comment">// Apply Hard Sigmoid activation</span></div>
<div class="line">hard_sigmoid_node.forward();  <span class="comment">// Perform the forward pass</span></div>
<div class="line">hard_sigmoid_node.backward();  <span class="comment">// Propagate gradients in the backward pass</span></div>
<div class="line"> </div>
<div class="line">std::cout &lt;&lt; <span class="stringliteral">&quot;Output: &quot;</span> &lt;&lt; *hard_sigmoid_node.output &lt;&lt; std::endl;  <span class="comment">// Print the result</span></div>
<div class="ttc" id="aclassnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node_html_a9fad60d7a07f6296aa5ce13acd6511d2"><div class="ttname"><a href="#a9fad60d7a07f6296aa5ce13acd6511d2">nz::nodes::calc::HardSigmoidNode::HardSigmoidNode</a></div><div class="ttdeci">HardSigmoidNode(Node *input, Tensor::value_type alpha=0.2f, Tensor::value_type beta=0.5f)</div><div class="ttdoc">Constructor to initialize a HardSigmoidNode for applying the Hard Sigmoid activation function.</div><div class="ttdef"><b>Definition</b> <a href="_nodes_8cu_source.html#l00476">Nodes.cu:476</a></div></div>
</div><!-- fragment --><dl class="section see"><dt>See also</dt><dd><a class="el" href="#a97590995aa192807d96a856ee2bcd71f" title="Forward pass for the HardSigmoidNode to apply the Hard Sigmoid activation function.">forward()</a> for the Hard <a class="el" href="namespacenz_1_1krnl.html#a21bbbcf6d97bfaccc828ce7736814bd4" title="Kernel function to apply the Sigmoid activation function on the GPU.">Sigmoid</a> computation in the <a class="el" href="#a97590995aa192807d96a856ee2bcd71f" title="Forward pass for the HardSigmoidNode to apply the Hard Sigmoid activation function.">forward</a> pass. </dd>
<dd>
<a class="el" href="#ad977c6a8c49252de4038f8ac08beed3c" title="Backward pass for the HardSigmoidNode to compute gradients.">backward()</a> for gradient computation in the <a class="el" href="#ad977c6a8c49252de4038f8ac08beed3c" title="Backward pass for the HardSigmoidNode to compute gradients.">backward</a> pass.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/05 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_nodes_8cuh_source.html#l02803">2803</a> of file <a class="el" href="_nodes_8cuh_source.html">Nodes.cuh</a>.</p>
</div><h2 class="groupheader">Constructor &amp; Destructor Documentation</h2>
<a id="a9fad60d7a07f6296aa5ce13acd6511d2" name="a9fad60d7a07f6296aa5ce13acd6511d2"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a9fad60d7a07f6296aa5ce13acd6511d2">&#9670;&#160;</a></span>HardSigmoidNode()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">nz::nodes::calc::HardSigmoidNode::HardSigmoidNode </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="classnz_1_1nodes_1_1_node.html">Node</a> *</td>          <td class="paramname"><span class="paramname"><em>input</em></span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">Tensor::value_type</td>          <td class="paramname"><span class="paramname"><em>alpha</em></span><span class="paramdefsep"> = </span><span class="paramdefval">0.2f</span>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">Tensor::value_type</td>          <td class="paramname"><span class="paramname"><em>beta</em></span><span class="paramdefsep"> = </span><span class="paramdefval">0.5f</span>&#160;)</td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">explicit</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Constructor to initialize a <code><a class="el" href="classnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node.html" title="Represents a Hard Sigmoid activation function node in a computational graph.">HardSigmoidNode</a></code> for applying the Hard Sigmoid activation function. </p>
<p>The constructor initializes a <code><a class="el" href="classnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node.html" title="Represents a Hard Sigmoid activation function node in a computational graph.">HardSigmoidNode</a></code>, which applies the Hard Sigmoid activation function to an input tensor. It establishes a connection to the input node, initializes the output tensor, and sets the <code>alpha</code> and <code>beta</code> parameters as well as the node type.</p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">input</td><td>A pointer to the input node. Its <code>output</code> tensor will have the Hard Sigmoid activation applied. </td></tr>
    <tr><td class="paramname">alpha</td><td>The slope parameter for the linear part of the Hard Sigmoid function. Defaults to <code>0.2</code>. </td></tr>
    <tr><td class="paramname">beta</td><td>The offset parameter for the Hard Sigmoid function. Defaults to <code>0.5</code>.</td></tr>
  </table>
  </dd>
</dl>
<ul>
<li>The input node is added to the <code>inputs</code> vector to establish the connection in the computational graph.</li>
<li>The <code>output</code> tensor is initialized with the same shape as the input tensor, and its gradient tracking is determined based on the input tensor's requirements.</li>
<li>The <code>alpha</code> and <code>beta</code> parameters control the slope and offset of the Hard Sigmoid activation function, influencing the gradient flow and the range mapping.</li>
<li>The node's type is set to "HardSigmoid" to reflect its operation.</li>
</ul>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The Hard Sigmoid activation function is defined as: <div class="fragment"><div class="line"><a class="code hl_function" href="namespacenz_1_1krnl.html#a52e449285e560185378234aecaf2f87c">HardSigmoid</a>(x) = max(0, min(1, alpha * x + beta))</div>
</div><!-- fragment --></li>
<li>This node supports automatic gradient tracking if the input tensor requires gradients.</li>
</ul>
</dd></dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="#a97590995aa192807d96a856ee2bcd71f" title="Forward pass for the HardSigmoidNode to apply the Hard Sigmoid activation function.">forward()</a> for the <a class="el" href="#a97590995aa192807d96a856ee2bcd71f" title="Forward pass for the HardSigmoidNode to apply the Hard Sigmoid activation function.">forward</a> pass implementation. </dd>
<dd>
<a class="el" href="#ad977c6a8c49252de4038f8ac08beed3c" title="Backward pass for the HardSigmoidNode to compute gradients.">backward()</a> for gradient computation in the <a class="el" href="#ad977c6a8c49252de4038f8ac08beed3c" title="Backward pass for the HardSigmoidNode to compute gradients.">backward</a> pass.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/05 </dd></dl>

<p class="definition">Definition at line <a class="el" href="_nodes_8cu_source.html#l00476">476</a> of file <a class="el" href="_nodes_8cu_source.html">Nodes.cu</a>.</p>

</div>
</div>
<h2 class="groupheader">Member Function Documentation</h2>
<a id="ad977c6a8c49252de4038f8ac08beed3c" name="ad977c6a8c49252de4038f8ac08beed3c"></a>
<h2 class="memtitle"><span class="permalink"><a href="#ad977c6a8c49252de4038f8ac08beed3c">&#9670;&#160;</a></span>backward()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">void nz::nodes::calc::HardSigmoidNode::backward </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">override</span><span class="mlabel">virtual</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Backward pass for the <code><a class="el" href="classnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node.html" title="Represents a Hard Sigmoid activation function node in a computational graph.">HardSigmoidNode</a></code> to compute gradients. </p>
<p>The <code><a class="el" href="#ad977c6a8c49252de4038f8ac08beed3c" title="Backward pass for the HardSigmoidNode to compute gradients.">backward()</a></code> method computes the gradient of the loss with respect to the input tensor by applying the derivative of the Hard Sigmoid activation function. The gradient computation is defined as: </p><div class="fragment"><div class="line"><a class="code hl_function" href="namespacenz_1_1krnl.html#a52e449285e560185378234aecaf2f87c">HardSigmoid</a><span class="stringliteral">&#39;(x) = alpha, if 0 &lt;= alpha * x + beta &lt;= 1</span></div>
<div class="line"><span class="stringliteral">                  0, otherwise</span></div>
</div><!-- fragment --><p> where <code>alpha</code> and <code>beta</code> control the slope and offset of the Hard Sigmoid function.</p>
<ul>
<li>A CUDA kernel (<code>HardSigmoidBackward</code>) is launched to compute the gradients in parallel on the GPU.</li>
<li>The derivative of the Hard Sigmoid function is applied element-wise to the input tensor's data and combined with the gradient of the <code>output</code> tensor to compute the input gradient.</li>
<li>The computed gradient is stored in the gradient tensor of the input node.</li>
</ul>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>Gradients are only computed and propagated if the input tensor's <code>requiresGrad</code> property is true.</li>
<li>The shape of the gradient tensor matches that of the input tensor.</li>
</ul>
</dd></dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="#a97590995aa192807d96a856ee2bcd71f" title="Forward pass for the HardSigmoidNode to apply the Hard Sigmoid activation function.">forward()</a> for the Hard <a class="el" href="namespacenz_1_1krnl.html#a21bbbcf6d97bfaccc828ce7736814bd4" title="Kernel function to apply the Sigmoid activation function on the GPU.">Sigmoid</a> computation in the <a class="el" href="#a97590995aa192807d96a856ee2bcd71f" title="Forward pass for the HardSigmoidNode to apply the Hard Sigmoid activation function.">forward</a> pass.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/05 </dd></dl>

<p>Implements <a class="el" href="classnz_1_1nodes_1_1_node.html#a0a9ecbaa3d790ba38e8218aca7837fd0">nz::nodes::Node</a>.</p>

<p class="definition">Definition at line <a class="el" href="_nodes_8cu_source.html#l00491">491</a> of file <a class="el" href="_nodes_8cu_source.html">Nodes.cu</a>.</p>
<div class="dynheader">
Here is the call graph for this function:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node_ad977c6a8c49252de4038f8ac08beed3c_cgraph.png" border="0" usemap="#aclassnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node_ad977c6a8c49252de4038f8ac08beed3c_cgraph" alt=""/></div>
<map name="aclassnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node_ad977c6a8c49252de4038f8ac08beed3c_cgraph" id="aclassnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node_ad977c6a8c49252de4038f8ac08beed3c_cgraph">
<area shape="rect" title="Backward pass for the HardSigmoidNode to compute gradients." alt="" coords="5,39,199,81"/>
<area shape="rect" href="namespacenz_1_1krnl.html#a43232f9472ad3b974351e59386208efa" title="Kernel function to compute the gradient of the Hard Sigmoid activation during backpropagation." alt="" coords="247,47,452,73"/>
<area shape="poly" title=" " alt="" coords="199,57,231,57,231,63,199,63"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#ab4b2eb422e0e1ee44bdfdc0eb94457ce" title="Returns a reference to the singleton instance of the StreamManager." alt="" coords="500,5,685,48"/>
<area shape="poly" title=" " alt="" coords="451,43,484,39,485,44,452,49"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#a46ce59b45de432842454aadf00b93791" title="Asynchronously submits a CUDA kernel with stream&#45;ordered dependency management." alt="" coords="500,72,685,115"/>
<area shape="poly" title=" " alt="" coords="452,71,485,76,484,81,451,77"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#a1de1cf3aadea137faf90a2f9b4b7abe2" title="Acquires CUDA stream from pool using round&#45;robin scheduling." alt="" coords="733,39,919,81"/>
<area shape="poly" title=" " alt="" coords="685,77,717,73,718,78,686,83"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#adb1078a67c6e38932d7d58c2adb05ec0" title="Synchronizes CUDA stream execution until data writes complete." alt="" coords="733,105,919,148"/>
<area shape="poly" title=" " alt="" coords="686,104,718,109,717,114,685,109"/>
</map>
</div>

</div>
</div>
<a id="a97590995aa192807d96a856ee2bcd71f" name="a97590995aa192807d96a856ee2bcd71f"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a97590995aa192807d96a856ee2bcd71f">&#9670;&#160;</a></span>forward()</h2>

<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">void nz::nodes::calc::HardSigmoidNode::forward </td>
          <td>(</td>
          <td class="paramname"><span class="paramname"><em></em></span></td><td>)</td>
          <td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">override</span><span class="mlabel">virtual</span></span>  </td>
  </tr>
</table>
</div><div class="memdoc">

<p>Forward pass for the <code><a class="el" href="classnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node.html" title="Represents a Hard Sigmoid activation function node in a computational graph.">HardSigmoidNode</a></code> to apply the Hard Sigmoid activation function. </p>
<p>The <code><a class="el" href="#a97590995aa192807d96a856ee2bcd71f" title="Forward pass for the HardSigmoidNode to apply the Hard Sigmoid activation function.">forward()</a></code> method applies the Hard Sigmoid activation function element-wise to the input tensor. The result is stored in the <code>output</code> tensor. The Hard Sigmoid function is defined as: </p><div class="fragment"><div class="line"><a class="code hl_function" href="namespacenz_1_1krnl.html#a52e449285e560185378234aecaf2f87c">HardSigmoid</a>(x) = max(0, min(1, alpha * x + beta))</div>
</div><!-- fragment --><ul>
<li>A CUDA kernel (<code>HardSigmoid</code>) is launched to compute the activation function in parallel on the GPU.</li>
<li>The grid and block dimensions are dynamically calculated based on the size of the <code>output</code> tensor to ensure efficient GPU utilization.</li>
<li>The <code>alpha</code> and <code>beta</code> parameters, provided during construction, control the slope and offset of the linear part of the activation function.</li>
</ul>
<dl class="section note"><dt>Note</dt><dd><ul>
<li>The shape of the output tensor matches that of the input tensor.</li>
</ul>
</dd></dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="#ad977c6a8c49252de4038f8ac08beed3c" title="Backward pass for the HardSigmoidNode to compute gradients.">backward()</a> for the computation of gradients in the <a class="el" href="#ad977c6a8c49252de4038f8ac08beed3c" title="Backward pass for the HardSigmoidNode to compute gradients.">backward</a> pass.</dd></dl>
<dl class="section author"><dt>Author</dt><dd>Mgepahmge (<a href="https://github.com/Mgepahmge">https://github.com/Mgepahmge</a>)</dd></dl>
<dl class="section date"><dt>Date</dt><dd>2024/12/05 </dd></dl>

<p>Implements <a class="el" href="classnz_1_1nodes_1_1_node.html#a8a828c2e91a4aa2a9ab7b94554e4685b">nz::nodes::Node</a>.</p>

<p class="definition">Definition at line <a class="el" href="_nodes_8cu_source.html#l00485">485</a> of file <a class="el" href="_nodes_8cu_source.html">Nodes.cu</a>.</p>
<div class="dynheader">
Here is the call graph for this function:</div>
<div class="dyncontent">
<div class="center"><img src="classnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node_a97590995aa192807d96a856ee2bcd71f_cgraph.png" border="0" usemap="#aclassnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node_a97590995aa192807d96a856ee2bcd71f_cgraph" alt=""/></div>
<map name="aclassnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node_a97590995aa192807d96a856ee2bcd71f_cgraph" id="aclassnz_1_1nodes_1_1calc_1_1_hard_sigmoid_node_a97590995aa192807d96a856ee2bcd71f_cgraph">
<area shape="rect" title="Forward pass for the HardSigmoidNode to apply the Hard Sigmoid activation function." alt="" coords="5,39,199,81"/>
<area shape="rect" href="namespacenz_1_1krnl.html#a52e449285e560185378234aecaf2f87c" title="Kernel function to apply the Hard Sigmoid activation function on the GPU." alt="" coords="247,47,394,73"/>
<area shape="poly" title=" " alt="" coords="199,57,231,57,231,63,199,63"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#ab4b2eb422e0e1ee44bdfdc0eb94457ce" title="Returns a reference to the singleton instance of the StreamManager." alt="" coords="442,5,627,48"/>
<area shape="poly" title=" " alt="" coords="394,46,426,41,427,46,395,51"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#a46ce59b45de432842454aadf00b93791" title="Asynchronously submits a CUDA kernel with stream&#45;ordered dependency management." alt="" coords="442,72,627,115"/>
<area shape="poly" title=" " alt="" coords="395,69,427,74,426,79,394,74"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#a1de1cf3aadea137faf90a2f9b4b7abe2" title="Acquires CUDA stream from pool using round&#45;robin scheduling." alt="" coords="675,39,861,81"/>
<area shape="poly" title=" " alt="" coords="627,77,659,73,660,78,628,83"/>
<area shape="rect" href="classnz_1_1cu_strm_1_1_stream_manager.html#adb1078a67c6e38932d7d58c2adb05ec0" title="Synchronizes CUDA stream execution until data writes complete." alt="" coords="675,105,861,148"/>
<area shape="poly" title=" " alt="" coords="628,104,660,109,659,114,627,109"/>
</map>
</div>

</div>
</div>
<hr/>The documentation for this class was generated from the following files:<ul>
<li>D:/Users/Mgepahmge/Documents/C Program/NeuZephyr/include/NeuZephyr/<a class="el" href="_nodes_8cuh_source.html">Nodes.cuh</a></li>
<li>D:/Users/Mgepahmge/Documents/C Program/NeuZephyr/src/<a class="el" href="_nodes_8cu_source.html">Nodes.cu</a></li>
</ul>
</div><!-- contents -->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated by&#160;<a href="https://www.doxygen.org/index.html"><img class="footer" src="doxygen.svg" width="104" height="31" alt="doxygen"/></a> 1.12.0
</small></address>
</div><!-- doc-content -->
</body>
</html>
