<!-- HTML header for doxygen 1.8.13-->
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">

<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<!-- XHTML content type; charset must stay utf-8 to match the generated text. -->
<meta content="text/xhtml;charset=utf-8" http-equiv="Content-Type"/>
<!-- Forces Internet Explorer into IE9 document mode. -->
<meta content="IE=9" http-equiv="X-UA-Compatible"/>
<meta content="Doxygen 1.8.13" name="generator"/>
<title>OpenCV: cv::dnn::Net Class Reference</title>
<link href="../../opencv.ico" rel="shortcut icon" type="image/x-icon"/>
<link href="../../tabs.css" rel="stylesheet" type="text/css"/>
<!-- Doxygen runtime scripts: jQuery, collapsible-section handling, OpenCV tutorial helpers. -->
<script src="../../jquery.js" type="text/javascript"></script>
<script src="../../dynsections.js" type="text/javascript"></script>
<script src="../../tutorial-utils.js" type="text/javascript"></script>
<!-- Client-side search index and widget assets. -->
<link href="../../search/search.css" rel="stylesheet" type="text/css"/>
<script src="../../search/searchdata.js" type="text/javascript"></script>
<script src="../../search/search.js" type="text/javascript"></script>
<!-- MathJax 2.x configuration. This script block must appear BEFORE the MathJax.js
     loader tag below so the settings are picked up at startup. -->
<script type="text/x-mathjax-config">
  // Base setup: TeX input (with AMS extensions) rendered via the HTML-CSS output jax.
  MathJax.Hub.Config({
    extensions: ["tex2jax.js", "TeX/AMSmath.js", "TeX/AMSsymbols.js"],
    jax: ["input/TeX","output/HTML-CSS"],
});
//<![CDATA[
// Second Config call merges in custom TeX macros used throughout the OpenCV docs:
// fixed-size matrices/vectors, piecewise "fork" definitions, and the camera-matrix /
// distortion-coefficient notation shared by the calib3d documentation. The CDATA
// wrapper keeps the backslash-heavy macro strings safe under XHTML parsing.
MathJax.Hub.Config(
{
  TeX: {
      Macros: {
          matTT: [ "\\[ \\left|\\begin{array}{ccc} #1 & #2 & #3\\\\ #4 & #5 & #6\\\\ #7 & #8 & #9 \\end{array}\\right| \\]", 9],
          fork: ["\\left\\{ \\begin{array}{l l} #1 & \\mbox{#2}\\\\ #3 & \\mbox{#4}\\\\ \\end{array} \\right.", 4],
          forkthree: ["\\left\\{ \\begin{array}{l l} #1 & \\mbox{#2}\\\\ #3 & \\mbox{#4}\\\\ #5 & \\mbox{#6}\\\\ \\end{array} \\right.", 6],
          forkfour: ["\\left\\{ \\begin{array}{l l} #1 & \\mbox{#2}\\\\ #3 & \\mbox{#4}\\\\ #5 & \\mbox{#6}\\\\ #7 & \\mbox{#8}\\\\ \\end{array} \\right.", 8],
          vecthree: ["\\begin{bmatrix} #1\\\\ #2\\\\ #3 \\end{bmatrix}", 3],
          vecthreethree: ["\\begin{bmatrix} #1 & #2 & #3\\\\ #4 & #5 & #6\\\\ #7 & #8 & #9 \\end{bmatrix}", 9],
          cameramatrix: ["#1 = \\begin{bmatrix} f_x & 0 & c_x\\\\ 0 & f_y & c_y\\\\ 0 & 0 & 1 \\end{bmatrix}", 1],
          distcoeffs: ["(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6 [, s_1, s_2, s_3, s_4[, \\tau_x, \\tau_y]]]]) \\text{ of 4, 5, 8, 12 or 14 elements}"],
          distcoeffsfisheye: ["(k_1, k_2, k_3, k_4)"],
          hdotsfor: ["\\dots", 1],
          mathbbm: ["\\mathbb{#1}", 1],
          bordermatrix: ["\\matrix{#1}", 1]
      }
  }
}
);
//]]>
</script><script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.0/MathJax.js" type="text/javascript"></script>
<link href="../../doxygen.css" rel="stylesheet" type="text/css"/>
<link href="../../stylesheet.css" rel="stylesheet" type="text/css"/>
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<!--#include virtual="/google-search.html"-->
<table cellpadding="0" cellspacing="0">
 <tbody>
 <tr style="height: 56px;">
  <td id="projectlogo"><img alt="Logo" src="../../opencv-logo-small.png"/></td>
  <td style="padding-left: 0.5em;">
   <div id="projectname">OpenCV
    <span id="projectnumber">4.5.2</span>
   </div>
   <div id="projectbrief">Open Source Computer Vision</div>
  </td>
 </tr>
 </tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.13 -->
<script type="text/javascript">
// Instantiate the Doxygen search widget; the relative path must point at the search/ directory.
var searchBox = new SearchBox("searchBox", "../../search",false,'Search');
</script>
<script src="../../menudata.js" type="text/javascript"></script>
<script src="../../menu.js" type="text/javascript"></script>
<script type="text/javascript">
$(function() {
  initMenu('../../',true,false,'search.php','Search');
  // NOTE(review): the nested ready() call is redundant (the DOM is already ready inside
  // this handler) but is standard Doxygen output; left as-is to avoid jQuery-version
  // timing differences in search initialization.
  $(document).ready(function() { init_search(); });
});
</script>
<div id="main-nav"></div>
<!-- window showing the filter options -->
<div id="MSearchSelectWindow" onkeydown="return searchBox.OnSearchSelectKey(event)" onmouseout="return searchBox.OnSearchSelectHide()" onmouseover="return searchBox.OnSearchSelectShow()">
</div>
<!-- iframe showing the search results (closed by default).
     The title attribute names the frame for assistive technology (WCAG); the search
     script addresses it by id/name, so adding it is behavior-neutral. -->
<div id="MSearchResultsWindow">
<iframe frameborder="0" id="MSearchResults" name="MSearchResults" src="javascript:void(0)" title="Search results">
</iframe>
</div>
<div class="navpath" id="nav-path">
  <ul>
<li class="navelem"><a class="el" href="../../d2/d75/namespacecv.html">cv</a></li><li class="navelem"><a class="el" href="../../df/d57/namespacecv_1_1dnn.html">dnn</a></li><li class="navelem"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html">Net</a></li>  </ul>
</div>
</div><!-- top -->
<div class="header">
  <!-- Quick links to the member sections further down this page. -->
  <div class="summary">
<a href="#pub-types">Public Types</a> &#124;
<a href="#pub-methods">Public Member Functions</a> &#124;
<a href="#pub-static-methods">Static Public Member Functions</a> &#124;
<a href="../../d0/d5b/classcv_1_1dnn_1_1Net-members.html">List of all members</a>  </div>
  <div class="headertitle">
<!-- Page title plus the enclosing module ("ingroups") breadcrumb. -->
<div class="title">cv::dnn::Net Class Reference<div class="ingroups"><a class="el" href="../../d6/d0f/group__dnn.html">Deep Neural Network module</a></div></div>  </div>
</div><!--header-->
<div class="contents">
<p>This class allows one to create and manipulate comprehensive artificial neural networks.  
 <a href="../../db/d30/classcv_1_1dnn_1_1Net.html#details">More...</a></p>
<p><code>#include &lt;opencv2/dnn/dnn.hpp&gt;</code></p>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pub-types"></a>
Public Types</h2></td></tr>
<tr class="memitem:a6962dbfe16ce1ae18c67de9f5f2912ef"><td align="right" class="memItemLeft" valign="top">typedef <a class="el" href="../../d4/db3/structcv_1_1dnn_1_1DictValue.html">DictValue</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a6962dbfe16ce1ae18c67de9f5f2912ef">LayerId</a></td></tr>
<tr class="memdesc:a6962dbfe16ce1ae18c67de9f5f2912ef"><td class="mdescLeft"> </td><td class="mdescRight">Container for strings and integers.  <a href="#a6962dbfe16ce1ae18c67de9f5f2912ef">More...</a><br/></td></tr>
<tr class="separator:a6962dbfe16ce1ae18c67de9f5f2912ef"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pub-methods"></a>
Public Member Functions</h2></td></tr>
<tr class="memitem:a82eb4d60b3c396cb85c79d267516cf15"><td align="right" class="memItemLeft" valign="top"> </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a82eb4d60b3c396cb85c79d267516cf15">Net</a> ()</td></tr>
<tr class="memdesc:a82eb4d60b3c396cb85c79d267516cf15"><td class="mdescLeft"> </td><td class="mdescRight">Default constructor.  <a href="#a82eb4d60b3c396cb85c79d267516cf15">More...</a><br/></td></tr>
<tr class="separator:a82eb4d60b3c396cb85c79d267516cf15"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a878ffc8a35b90f78af583eaefbd20d45"><td align="right" class="memItemLeft" valign="top"> </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a878ffc8a35b90f78af583eaefbd20d45">~Net</a> ()</td></tr>
<tr class="memdesc:a878ffc8a35b90f78af583eaefbd20d45"><td class="mdescLeft"> </td><td class="mdescRight">Destructor frees the net only if there aren't references to the net anymore.  <a href="#a878ffc8a35b90f78af583eaefbd20d45">More...</a><br/></td></tr>
<tr class="separator:a878ffc8a35b90f78af583eaefbd20d45"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a1569c03d93c80fd8c6020589bd2ac116"><td align="right" class="memItemLeft" valign="top">int </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a1569c03d93c80fd8c6020589bd2ac116">addLayer</a> (const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp;name, const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp;type, <a class="el" href="../../db/db6/classcv_1_1dnn_1_1LayerParams.html">LayerParams</a> &amp;params)</td></tr>
<tr class="memdesc:a1569c03d93c80fd8c6020589bd2ac116"><td class="mdescLeft"> </td><td class="mdescRight">Adds new layer to the net.  <a href="#a1569c03d93c80fd8c6020589bd2ac116">More...</a><br/></td></tr>
<tr class="separator:a1569c03d93c80fd8c6020589bd2ac116"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a7e78af0e0add3ff8594919f1af548dc4"><td align="right" class="memItemLeft" valign="top">int </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a7e78af0e0add3ff8594919f1af548dc4">addLayerToPrev</a> (const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp;name, const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp;type, <a class="el" href="../../db/db6/classcv_1_1dnn_1_1LayerParams.html">LayerParams</a> &amp;params)</td></tr>
<tr class="memdesc:a7e78af0e0add3ff8594919f1af548dc4"><td class="mdescLeft"> </td><td class="mdescRight">Adds new layer and connects its first input to the first output of previously added layer.  <a href="#a7e78af0e0add3ff8594919f1af548dc4">More...</a><br/></td></tr>
<tr class="separator:a7e78af0e0add3ff8594919f1af548dc4"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a032292c7866fb72db251708b0e7c2bea"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a032292c7866fb72db251708b0e7c2bea">connect</a> (<a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> outPin, <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> inpPin)</td></tr>
<tr class="memdesc:a032292c7866fb72db251708b0e7c2bea"><td class="mdescLeft"> </td><td class="mdescRight">Connects output of the first layer to input of the second layer.  <a href="#a032292c7866fb72db251708b0e7c2bea">More...</a><br/></td></tr>
<tr class="separator:a032292c7866fb72db251708b0e7c2bea"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a26615a67830b0045b68565c7e7dc1307"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a26615a67830b0045b68565c7e7dc1307">connect</a> (int outLayerId, int outNum, int inpLayerId, int inpNum)</td></tr>
<tr class="memdesc:a26615a67830b0045b68565c7e7dc1307"><td class="mdescLeft"> </td><td class="mdescRight">Connects #<code>outNum</code> output of the first layer to #<code>inNum</code> input of the second layer.  <a href="#a26615a67830b0045b68565c7e7dc1307">More...</a><br/></td></tr>
<tr class="separator:a26615a67830b0045b68565c7e7dc1307"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:ac4d2758cc4cbf52564f8715645fa93a8"><td align="right" class="memItemLeft" valign="top"><a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#ac4d2758cc4cbf52564f8715645fa93a8">dump</a> ()</td></tr>
<tr class="memdesc:ac4d2758cc4cbf52564f8715645fa93a8"><td class="mdescLeft"> </td><td class="mdescRight">Dump net to String.  <a href="#ac4d2758cc4cbf52564f8715645fa93a8">More...</a><br/></td></tr>
<tr class="separator:ac4d2758cc4cbf52564f8715645fa93a8"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a4f7d0d1e650571e11d988d07f7ed5a7d"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a4f7d0d1e650571e11d988d07f7ed5a7d">dumpToFile</a> (const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp;path)</td></tr>
<tr class="memdesc:a4f7d0d1e650571e11d988d07f7ed5a7d"><td class="mdescLeft"> </td><td class="mdescRight">Dump net structure, hyperparameters, backend, target and fusion to dot file.  <a href="#a4f7d0d1e650571e11d988d07f7ed5a7d">More...</a><br/></td></tr>
<tr class="separator:a4f7d0d1e650571e11d988d07f7ed5a7d"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a6a5778787d5b8770deab5eda6968e66c"><td align="right" class="memItemLeft" valign="top">bool </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a6a5778787d5b8770deab5eda6968e66c">empty</a> () const</td></tr>
<tr class="separator:a6a5778787d5b8770deab5eda6968e66c"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:af22e2f1a542d80ea797ba38902c726bd"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#af22e2f1a542d80ea797ba38902c726bd">enableFusion</a> (bool fusion)</td></tr>
<tr class="memdesc:af22e2f1a542d80ea797ba38902c726bd"><td class="mdescLeft"> </td><td class="mdescRight">Enables or disables layer fusion in the network.  <a href="#af22e2f1a542d80ea797ba38902c726bd">More...</a><br/></td></tr>
<tr class="separator:af22e2f1a542d80ea797ba38902c726bd"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a98ed94cb6ef7063d3697259566da310b"><td align="right" class="memItemLeft" valign="top"><a class="el" href="../../d3/d63/classcv_1_1Mat.html">Mat</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a98ed94cb6ef7063d3697259566da310b">forward</a> (const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp;outputName=<a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a>())</td></tr>
<tr class="memdesc:a98ed94cb6ef7063d3697259566da310b"><td class="mdescLeft"> </td><td class="mdescRight">Runs forward pass to compute output of layer with name <code>outputName</code>.  <a href="#a98ed94cb6ef7063d3697259566da310b">More...</a><br/></td></tr>
<tr class="separator:a98ed94cb6ef7063d3697259566da310b"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a8d248f50efec70dffa785de18a4b2170"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a8d248f50efec70dffa785de18a4b2170">forward</a> (<a class="el" href="../../dc/d84/group__core__basic.html#ga889a09549b98223016170d9b613715de">OutputArrayOfArrays</a> outputBlobs, const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp;outputName=<a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a>())</td></tr>
<tr class="memdesc:a8d248f50efec70dffa785de18a4b2170"><td class="mdescLeft"> </td><td class="mdescRight">Runs forward pass to compute output of layer with name <code>outputName</code>.  <a href="#a8d248f50efec70dffa785de18a4b2170">More...</a><br/></td></tr>
<tr class="separator:a8d248f50efec70dffa785de18a4b2170"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:adb34d7650e555264c7da3b47d967311b"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#adb34d7650e555264c7da3b47d967311b">forward</a> (<a class="el" href="../../dc/d84/group__core__basic.html#ga889a09549b98223016170d9b613715de">OutputArrayOfArrays</a> outputBlobs, const std::vector&lt; <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &gt; &amp;outBlobNames)</td></tr>
<tr class="memdesc:adb34d7650e555264c7da3b47d967311b"><td class="mdescLeft"> </td><td class="mdescRight">Runs forward pass to compute outputs of layers listed in <code>outBlobNames</code>.  <a href="#adb34d7650e555264c7da3b47d967311b">More...</a><br/></td></tr>
<tr class="separator:adb34d7650e555264c7da3b47d967311b"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a00e707a86b2da4f980f9342b1fc2cc92"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a00e707a86b2da4f980f9342b1fc2cc92">forward</a> (std::vector&lt; std::vector&lt; <a class="el" href="../../d3/d63/classcv_1_1Mat.html">Mat</a> &gt; &gt; &amp;outputBlobs, const std::vector&lt; <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &gt; &amp;outBlobNames)</td></tr>
<tr class="memdesc:a00e707a86b2da4f980f9342b1fc2cc92"><td class="mdescLeft"> </td><td class="mdescRight">Runs forward pass to compute outputs of layers listed in <code>outBlobNames</code>.  <a href="#a00e707a86b2da4f980f9342b1fc2cc92">More...</a><br/></td></tr>
<tr class="separator:a00e707a86b2da4f980f9342b1fc2cc92"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a814890154ea9e10b132fec00b6f6ba30"><td align="right" class="memItemLeft" valign="top"><a class="el" href="../../d4/d20/classcv_1_1AsyncArray.html">AsyncArray</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a814890154ea9e10b132fec00b6f6ba30">forwardAsync</a> (const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp;outputName=<a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a>())</td></tr>
<tr class="memdesc:a814890154ea9e10b132fec00b6f6ba30"><td class="mdescLeft"> </td><td class="mdescRight">Runs forward pass to compute output of layer with name <code>outputName</code>.  <a href="#a814890154ea9e10b132fec00b6f6ba30">More...</a><br/></td></tr>
<tr class="separator:a814890154ea9e10b132fec00b6f6ba30"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a97e29e028f81d339e444ddaba4dcc989"><td align="right" class="memItemLeft" valign="top"><a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga7cde0074dfd288f2d70c0e035dacb28a">int64</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a97e29e028f81d339e444ddaba4dcc989">getFLOPS</a> (const std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp;netInputShapes) const</td></tr>
<tr class="memdesc:a97e29e028f81d339e444ddaba4dcc989"><td class="mdescLeft"> </td><td class="mdescRight">Computes FLOP for whole loaded model with specified input shapes.  <a href="#a97e29e028f81d339e444ddaba4dcc989">More...</a><br/></td></tr>
<tr class="separator:a97e29e028f81d339e444ddaba4dcc989"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a0afb2fdcd21997e293543287a7754de2"><td align="right" class="memItemLeft" valign="top"><a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga7cde0074dfd288f2d70c0e035dacb28a">int64</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a0afb2fdcd21997e293543287a7754de2">getFLOPS</a> (const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp;netInputShape) const</td></tr>
<tr class="separator:a0afb2fdcd21997e293543287a7754de2"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:aa56acbec6853a5dc2f894b8995c717ec"><td align="right" class="memItemLeft" valign="top"><a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga7cde0074dfd288f2d70c0e035dacb28a">int64</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#aa56acbec6853a5dc2f894b8995c717ec">getFLOPS</a> (const int layerId, const std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp;netInputShapes) const</td></tr>
<tr class="separator:aa56acbec6853a5dc2f894b8995c717ec"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:af0cd8ca93901e5ec928b6151b61ad8af"><td align="right" class="memItemLeft" valign="top"><a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga7cde0074dfd288f2d70c0e035dacb28a">int64</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#af0cd8ca93901e5ec928b6151b61ad8af">getFLOPS</a> (const int layerId, const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp;netInputShape) const</td></tr>
<tr class="separator:af0cd8ca93901e5ec928b6151b61ad8af"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a70aec7f768f38c32b1ee25f3a56526df"><td align="right" class="memItemLeft" valign="top"><a class="el" href="../../dc/d84/group__core__basic.html#ga6395ca871a678020c4a31fadf7e8cc63">Ptr</a>&lt; <a class="el" href="../../d3/d6c/classcv_1_1dnn_1_1Layer.html">Layer</a> &gt; </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a70aec7f768f38c32b1ee25f3a56526df">getLayer</a> (<a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a6962dbfe16ce1ae18c67de9f5f2912ef">LayerId</a> layerId)</td></tr>
<tr class="memdesc:a70aec7f768f38c32b1ee25f3a56526df"><td class="mdescLeft"> </td><td class="mdescRight">Returns pointer to layer with specified id or name which the network use.  <a href="#a70aec7f768f38c32b1ee25f3a56526df">More...</a><br/></td></tr>
<tr class="separator:a70aec7f768f38c32b1ee25f3a56526df"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a9cbb544900f1bb9559a1595bf45a29a4"><td align="right" class="memItemLeft" valign="top">int </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a9cbb544900f1bb9559a1595bf45a29a4">getLayerId</a> (const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp;layer)</td></tr>
<tr class="memdesc:a9cbb544900f1bb9559a1595bf45a29a4"><td class="mdescLeft"> </td><td class="mdescRight">Converts string name of the layer to the integer identifier.  <a href="#a9cbb544900f1bb9559a1595bf45a29a4">More...</a><br/></td></tr>
<tr class="separator:a9cbb544900f1bb9559a1595bf45a29a4"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:abf96c5e92de4f6cd3013a6fb900934b4"><td align="right" class="memItemLeft" valign="top">std::vector&lt; <a class="el" href="../../dc/d84/group__core__basic.html#ga6395ca871a678020c4a31fadf7e8cc63">Ptr</a>&lt; <a class="el" href="../../d3/d6c/classcv_1_1dnn_1_1Layer.html">Layer</a> &gt; &gt; </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#abf96c5e92de4f6cd3013a6fb900934b4">getLayerInputs</a> (<a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a6962dbfe16ce1ae18c67de9f5f2912ef">LayerId</a> layerId)</td></tr>
<tr class="memdesc:abf96c5e92de4f6cd3013a6fb900934b4"><td class="mdescLeft"> </td><td class="mdescRight">Returns pointers to input layers of specific layer.  <a href="#abf96c5e92de4f6cd3013a6fb900934b4">More...</a><br/></td></tr>
<tr class="separator:abf96c5e92de4f6cd3013a6fb900934b4"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:ae8be9806024a0d1d41aba687cce99e6b"><td align="right" class="memItemLeft" valign="top">std::vector&lt; <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &gt; </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#ae8be9806024a0d1d41aba687cce99e6b">getLayerNames</a> () const</td></tr>
<tr class="separator:ae8be9806024a0d1d41aba687cce99e6b"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a4e728f803a12f6feff35840209870d06"><td align="right" class="memItemLeft" valign="top">int </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a4e728f803a12f6feff35840209870d06">getLayersCount</a> (const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp;layerType) const</td></tr>
<tr class="memdesc:a4e728f803a12f6feff35840209870d06"><td class="mdescLeft"> </td><td class="mdescRight">Returns count of layers of specified type.  <a href="#a4e728f803a12f6feff35840209870d06">More...</a><br/></td></tr>
<tr class="separator:a4e728f803a12f6feff35840209870d06"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:ad0d10b1ff622c89562d2befaf351af45"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#ad0d10b1ff622c89562d2befaf351af45">getLayerShapes</a> (const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp;netInputShape, const int layerId, std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp;inLayerShapes, std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp;outLayerShapes) const</td></tr>
<tr class="memdesc:ad0d10b1ff622c89562d2befaf351af45"><td class="mdescLeft"> </td><td class="mdescRight">Returns input and output shapes for layer with specified id in loaded model; preliminary inferencing isn't necessary.  <a href="#ad0d10b1ff622c89562d2befaf351af45">More...</a><br/></td></tr>
<tr class="separator:ad0d10b1ff622c89562d2befaf351af45"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a1c42c7d8acf1bb18c7cc186228255460"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a1c42c7d8acf1bb18c7cc186228255460">getLayerShapes</a> (const std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp;netInputShapes, const int layerId, std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp;inLayerShapes, std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp;outLayerShapes) const</td></tr>
<tr class="separator:a1c42c7d8acf1bb18c7cc186228255460"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a5f4572a27c2c3af6192cac8eb0e3fd3c"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a5f4572a27c2c3af6192cac8eb0e3fd3c">getLayersShapes</a> (const std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp;netInputShapes, std::vector&lt; int &gt; &amp;layersIds, std::vector&lt; std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &gt; &amp;inLayersShapes, std::vector&lt; std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &gt; &amp;outLayersShapes) const</td></tr>
<tr class="memdesc:a5f4572a27c2c3af6192cac8eb0e3fd3c"><td class="mdescLeft"> </td><td class="mdescRight">Returns input and output shapes for all layers in loaded model; preliminary inferencing isn't necessary.  <a href="#a5f4572a27c2c3af6192cac8eb0e3fd3c">More...</a><br/></td></tr>
<tr class="separator:a5f4572a27c2c3af6192cac8eb0e3fd3c"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a413beeea373f7ee35742bea3692a198a"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a413beeea373f7ee35742bea3692a198a">getLayersShapes</a> (const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp;netInputShape, std::vector&lt; int &gt; &amp;layersIds, std::vector&lt; std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &gt; &amp;inLayersShapes, std::vector&lt; std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &gt; &amp;outLayersShapes) const</td></tr>
<tr class="separator:a413beeea373f7ee35742bea3692a198a"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a8b0a3e740e4e7b6c7dec83b61e707720"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a8b0a3e740e4e7b6c7dec83b61e707720">getLayerTypes</a> (std::vector&lt; <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &gt; &amp;layersTypes) const</td></tr>
<tr class="memdesc:a8b0a3e740e4e7b6c7dec83b61e707720"><td class="mdescLeft"> </td><td class="mdescRight">Returns list of types for layer used in model.  <a href="#a8b0a3e740e4e7b6c7dec83b61e707720">More...</a><br/></td></tr>
<tr class="separator:a8b0a3e740e4e7b6c7dec83b61e707720"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a200834361815e1f43272b48433402f21"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a200834361815e1f43272b48433402f21">getMemoryConsumption</a> (const std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp;netInputShapes, size_t &amp;weights, size_t &amp;blobs) const</td></tr>
<tr class="memdesc:a200834361815e1f43272b48433402f21"><td class="mdescLeft"> </td><td class="mdescRight">Computes bytes number which are required to store all weights and intermediate blobs for model.  <a href="#a200834361815e1f43272b48433402f21">More...</a><br/></td></tr>
<tr class="separator:a200834361815e1f43272b48433402f21"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:aab9ef8c98ec8424a7c8039cc0b9ff96f"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#aab9ef8c98ec8424a7c8039cc0b9ff96f">getMemoryConsumption</a> (const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp;netInputShape, size_t &amp;weights, size_t &amp;blobs) const</td></tr>
<tr class="separator:aab9ef8c98ec8424a7c8039cc0b9ff96f"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a89fb8bf6d2e4659a66d0ead5a0405061"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a89fb8bf6d2e4659a66d0ead5a0405061">getMemoryConsumption</a> (const int layerId, const std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp;netInputShapes, size_t &amp;weights, size_t &amp;blobs) const</td></tr>
<tr class="separator:a89fb8bf6d2e4659a66d0ead5a0405061"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a41715838baacf49108f97c8b1ae10a8f"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a41715838baacf49108f97c8b1ae10a8f">getMemoryConsumption</a> (const int layerId, const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp;netInputShape, size_t &amp;weights, size_t &amp;blobs) const</td></tr>
<tr class="separator:a41715838baacf49108f97c8b1ae10a8f"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a3303e9154a53299a8ac426d30c6ab321"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a3303e9154a53299a8ac426d30c6ab321">getMemoryConsumption</a> (const std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp;netInputShapes, std::vector&lt; int &gt; &amp;layerIds, std::vector&lt; size_t &gt; &amp;weights, std::vector&lt; size_t &gt; &amp;blobs) const</td></tr>
<tr class="memdesc:a3303e9154a53299a8ac426d30c6ab321"><td class="mdescLeft"> </td><td class="mdescRight">Computes bytes number which are required to store all weights and intermediate blobs for each layer.  <a href="#a3303e9154a53299a8ac426d30c6ab321">More...</a><br/></td></tr>
<tr class="separator:a3303e9154a53299a8ac426d30c6ab321"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:af9d6c117533d859c5e26f08d09934d56"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#af9d6c117533d859c5e26f08d09934d56">getMemoryConsumption</a> (const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp;netInputShape, std::vector&lt; int &gt; &amp;layerIds, std::vector&lt; size_t &gt; &amp;weights, std::vector&lt; size_t &gt; &amp;blobs) const</td></tr>
<tr class="separator:af9d6c117533d859c5e26f08d09934d56"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a767f2a693ba409bedc442adca3b71511"><td align="right" class="memItemLeft" valign="top"><a class="el" href="../../d3/d63/classcv_1_1Mat.html">Mat</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a767f2a693ba409bedc442adca3b71511">getParam</a> (<a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a6962dbfe16ce1ae18c67de9f5f2912ef">LayerId</a> layer, int numParam=0)</td></tr>
<tr class="memdesc:a767f2a693ba409bedc442adca3b71511"><td class="mdescLeft"> </td><td class="mdescRight">Returns parameter blob of the layer.  <a href="#a767f2a693ba409bedc442adca3b71511">More...</a><br/></td></tr>
<tr class="separator:a767f2a693ba409bedc442adca3b71511"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a06ce946f675f75d1c020c5ddbc78aedc"><td align="right" class="memItemLeft" valign="top"><a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga7cde0074dfd288f2d70c0e035dacb28a">int64</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a06ce946f675f75d1c020c5ddbc78aedc">getPerfProfile</a> (std::vector&lt; double &gt; &amp;timings)</td></tr>
<tr class="memdesc:a06ce946f675f75d1c020c5ddbc78aedc"><td class="mdescLeft"> </td><td class="mdescRight">Returns overall time for inference and timings (in ticks) for layers. Indexes in returned vector correspond to layers ids. Some layers can be fused with others, in this case zero ticks count will be return for that skipped layers.  <a href="#a06ce946f675f75d1c020c5ddbc78aedc">More...</a><br/></td></tr>
<tr class="separator:a06ce946f675f75d1c020c5ddbc78aedc"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:ae62a73984f62c49fd3e8e689405b056a"><td align="right" class="memItemLeft" valign="top">std::vector&lt; int &gt; </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#ae62a73984f62c49fd3e8e689405b056a">getUnconnectedOutLayers</a> () const</td></tr>
<tr class="memdesc:ae62a73984f62c49fd3e8e689405b056a"><td class="mdescLeft"> </td><td class="mdescRight">Returns indexes of layers with unconnected outputs.  <a href="#ae62a73984f62c49fd3e8e689405b056a">More...</a><br/></td></tr>
<tr class="separator:ae62a73984f62c49fd3e8e689405b056a"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:ac1840896b8643f91532e98c660627fb9"><td align="right" class="memItemLeft" valign="top">std::vector&lt; <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &gt; </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#ac1840896b8643f91532e98c660627fb9">getUnconnectedOutLayersNames</a> () const</td></tr>
<tr class="memdesc:ac1840896b8643f91532e98c660627fb9"><td class="mdescLeft"> </td><td class="mdescRight">Returns names of layers with unconnected outputs.  <a href="#ac1840896b8643f91532e98c660627fb9">More...</a><br/></td></tr>
<tr class="separator:ac1840896b8643f91532e98c660627fb9"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a56fbff351d1e0a47fb5aabf6915fc279"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a56fbff351d1e0a47fb5aabf6915fc279">setHalideScheduler</a> (const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp;scheduler)</td></tr>
<tr class="memdesc:a56fbff351d1e0a47fb5aabf6915fc279"><td class="mdescLeft"> </td><td class="mdescRight">Compile Halide layers.  <a href="#a56fbff351d1e0a47fb5aabf6915fc279">More...</a><br/></td></tr>
<tr class="separator:a56fbff351d1e0a47fb5aabf6915fc279"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a5e74adacffd6aa53d56046581de7fcbd"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a5e74adacffd6aa53d56046581de7fcbd">setInput</a> (<a class="el" href="../../dc/d84/group__core__basic.html#ga353a9de602fe76c709e12074a6f362ba">InputArray</a> blob, const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp;name="", double scalefactor=1.0, const <a class="el" href="../../dc/d84/group__core__basic.html#ga599fe92e910c027be274233eccad7beb">Scalar</a> &amp;<a class="el" href="../../d2/de8/group__core__array.html#ga191389f8a0e58180bb13a727782cd461">mean</a>=<a class="el" href="../../dc/d84/group__core__basic.html#ga599fe92e910c027be274233eccad7beb">Scalar</a>())</td></tr>
<tr class="memdesc:a5e74adacffd6aa53d56046581de7fcbd"><td class="mdescLeft"> </td><td class="mdescRight">Sets the new input value for the network.  <a href="#a5e74adacffd6aa53d56046581de7fcbd">More...</a><br/></td></tr>
<tr class="separator:a5e74adacffd6aa53d56046581de7fcbd"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a09e9579f40599efe7cc9300aca811497"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a09e9579f40599efe7cc9300aca811497">setInputShape</a> (const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp;inputName, const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp;<a class="el" href="../../df/d57/namespacecv_1_1dnn.html#a270ae5dc357c85f7fec6d94470b52189">shape</a>)</td></tr>
<tr class="memdesc:a09e9579f40599efe7cc9300aca811497"><td class="mdescLeft"> </td><td class="mdescRight">Specify shape of network input.  <a href="#a09e9579f40599efe7cc9300aca811497">More...</a><br/></td></tr>
<tr class="separator:a09e9579f40599efe7cc9300aca811497"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a4331f0fa17fd90db99cb68752c796fed"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a4331f0fa17fd90db99cb68752c796fed">setInputsNames</a> (const std::vector&lt; <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &gt; &amp;inputBlobNames)</td></tr>
<tr class="memdesc:a4331f0fa17fd90db99cb68752c796fed"><td class="mdescLeft"> </td><td class="mdescRight">Sets outputs names of the network input pseudo layer.  <a href="#a4331f0fa17fd90db99cb68752c796fed">More...</a><br/></td></tr>
<tr class="separator:a4331f0fa17fd90db99cb68752c796fed"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:ab9e38d28672d35a8b59f7bffc2435400"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#ab9e38d28672d35a8b59f7bffc2435400">setParam</a> (<a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a6962dbfe16ce1ae18c67de9f5f2912ef">LayerId</a> layer, int numParam, const <a class="el" href="../../d3/d63/classcv_1_1Mat.html">Mat</a> &amp;blob)</td></tr>
<tr class="memdesc:ab9e38d28672d35a8b59f7bffc2435400"><td class="mdescLeft"> </td><td class="mdescRight">Sets the new value for the learned param of the layer.  <a href="#ab9e38d28672d35a8b59f7bffc2435400">More...</a><br/></td></tr>
<tr class="separator:ab9e38d28672d35a8b59f7bffc2435400"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a7f767df11386d39374db49cd8df8f59e"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a7f767df11386d39374db49cd8df8f59e">setPreferableBackend</a> (int backendId)</td></tr>
<tr class="memdesc:a7f767df11386d39374db49cd8df8f59e"><td class="mdescLeft"> </td><td class="mdescRight">Ask network to use specific computation backend where it supported.  <a href="#a7f767df11386d39374db49cd8df8f59e">More...</a><br/></td></tr>
<tr class="separator:a7f767df11386d39374db49cd8df8f59e"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a9dddbefbc7f3defbe3eeb5dc3d3483f4"><td align="right" class="memItemLeft" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a9dddbefbc7f3defbe3eeb5dc3d3483f4">setPreferableTarget</a> (int targetId)</td></tr>
<tr class="memdesc:a9dddbefbc7f3defbe3eeb5dc3d3483f4"><td class="mdescLeft"> </td><td class="mdescRight">Ask network to make computations on specific target device.  <a href="#a9dddbefbc7f3defbe3eeb5dc3d3483f4">More...</a><br/></td></tr>
<tr class="separator:a9dddbefbc7f3defbe3eeb5dc3d3483f4"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pub-static-methods"></a>
Static Public Member Functions</h2></td></tr>
<tr class="memitem:a327720af187378a58e0a48feb0b5ef89"><td align="right" class="memItemLeft" valign="top">static <a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html">Net</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a327720af187378a58e0a48feb0b5ef89">readFromModelOptimizer</a> (const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp;xml, const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp;bin)</td></tr>
<tr class="memdesc:a327720af187378a58e0a48feb0b5ef89"><td class="mdescLeft"> </td><td class="mdescRight">Create a network from Intel's <a class="el" href="../../d3/df0/classcv_1_1dnn_1_1Model.html" title="This class is presented high-level API for neural networks. ">Model</a> Optimizer intermediate representation (IR).  <a href="#a327720af187378a58e0a48feb0b5ef89">More...</a><br/></td></tr>
<tr class="separator:a327720af187378a58e0a48feb0b5ef89"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a7b162c8978d5b1aab599edbf59e5b79f"><td align="right" class="memItemLeft" valign="top">static <a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html">Net</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a7b162c8978d5b1aab599edbf59e5b79f">readFromModelOptimizer</a> (const std::vector&lt; <a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga65f85814a8290f9797005d3b28e7e5fc">uchar</a> &gt; &amp;bufferModelConfig, const std::vector&lt; <a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga65f85814a8290f9797005d3b28e7e5fc">uchar</a> &gt; &amp;bufferWeights)</td></tr>
<tr class="memdesc:a7b162c8978d5b1aab599edbf59e5b79f"><td class="mdescLeft"> </td><td class="mdescRight">Create a network from Intel's <a class="el" href="../../d3/df0/classcv_1_1dnn_1_1Model.html" title="This class is presented high-level API for neural networks. ">Model</a> Optimizer in-memory buffers with intermediate representation (IR).  <a href="#a7b162c8978d5b1aab599edbf59e5b79f">More...</a><br/></td></tr>
<tr class="separator:a7b162c8978d5b1aab599edbf59e5b79f"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:af1fd3b20f8878e202d41d597b3da15d6"><td align="right" class="memItemLeft" valign="top">static <a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html">Net</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#af1fd3b20f8878e202d41d597b3da15d6">readFromModelOptimizer</a> (const <a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga65f85814a8290f9797005d3b28e7e5fc">uchar</a> *bufferModelConfigPtr, size_t bufferModelConfigSize, const <a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga65f85814a8290f9797005d3b28e7e5fc">uchar</a> *bufferWeightsPtr, size_t bufferWeightsSize)</td></tr>
<tr class="memdesc:af1fd3b20f8878e202d41d597b3da15d6"><td class="mdescLeft"> </td><td class="mdescRight">Create a network from Intel's <a class="el" href="../../d3/df0/classcv_1_1dnn_1_1Model.html" title="This class is presented high-level API for neural networks. ">Model</a> Optimizer in-memory buffers with intermediate representation (IR).  <a href="#af1fd3b20f8878e202d41d597b3da15d6">More...</a><br/></td></tr>
<tr class="separator:af1fd3b20f8878e202d41d597b3da15d6"><td class="memSeparator" colspan="2"> </td></tr>
</table>
<a id="details" name="details"></a><h2 class="groupheader">Detailed Description</h2>
<div class="textblock"><p>This class allows to create and manipulate comprehensive artificial neural networks. </p>
<p>Neural network is presented as directed acyclic graph (DAG), where vertices are <a class="el" href="../../d3/d6c/classcv_1_1dnn_1_1Layer.html" title="This interface class allows to build new Layers - are building blocks of networks. ">Layer</a> instances, and edges specify relationships between layers inputs and outputs.</p>
<p>Each network layer has unique integer id and unique string name inside its network. LayerId can store either layer name or layer id.</p>
<p>This class supports reference counting of its instances, i. e. copies point to the same instance. </p>
<dl><dt><b>Examples: </b></dt><dd><a class="el" href="../../d6/d39/samples_2dnn_2colorization_8cpp-example.html#_a6">samples/dnn/colorization.cpp</a>, and <a class="el" href="../../d7/d4f/samples_2dnn_2openpose_8cpp-example.html#_a5">samples/dnn/openpose.cpp</a>.</dd>
</dl></div><h2 class="groupheader">Member Typedef Documentation</h2>
<a id="a6962dbfe16ce1ae18c67de9f5f2912ef"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a6962dbfe16ce1ae18c67de9f5f2912ef">◆ </a></span>LayerId</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">typedef <a class="el" href="../../d4/db3/structcv_1_1dnn_1_1DictValue.html">DictValue</a> <a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a6962dbfe16ce1ae18c67de9f5f2912ef">cv::dnn::Net::LayerId</a></td>
        </tr>
      </table>
</div><div class="memdoc">
<p>Container for strings and integers. </p>
</div>
</div>
<h2 class="groupheader">Constructor &amp; Destructor Documentation</h2>
<a id="a82eb4d60b3c396cb85c79d267516cf15"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a82eb4d60b3c396cb85c79d267516cf15">◆ </a></span>Net()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">cv::dnn::Net::Net </td>
          <td>(</td>
          <td class="paramname"></td><td>)</td>
          <td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>&lt;dnn_Net object&gt;</td><td>=</td><td>cv.dnn_Net(</td><td class="paramname"></td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Default constructor. </p>
</div>
</div>
<a id="a878ffc8a35b90f78af583eaefbd20d45"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a878ffc8a35b90f78af583eaefbd20d45">◆ </a></span>~Net()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">cv::dnn::Net::~Net </td>
          <td>(</td>
          <td class="paramname"></td><td>)</td>
          <td></td>
        </tr>
      </table>
</div><div class="memdoc">
<p>Destructor frees the net only if there aren't references to the net anymore. </p>
</div>
</div>
<h2 class="groupheader">Member Function Documentation</h2>
<a id="a1569c03d93c80fd8c6020589bd2ac116"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a1569c03d93c80fd8c6020589bd2ac116">◆ </a></span>addLayer()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">int cv::dnn::Net::addLayer </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp; </td>
          <td class="paramname"><em>name</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp; </td>
          <td class="paramname"><em>type</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype"><a class="el" href="../../db/db6/classcv_1_1dnn_1_1LayerParams.html">LayerParams</a> &amp; </td>
          <td class="paramname"><em>params</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table>
</div><div class="memdoc">
<p>Adds new layer to the net. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">name</td><td>unique name of the adding layer. </td></tr>
    <tr><td class="paramname">type</td><td>typename of the adding layer (type must be registered in LayerRegister). </td></tr>
    <tr><td class="paramname">params</td><td>parameters which will be used to initialize the creating layer. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>unique identifier of created layer, or -1 if a failure will happen. </dd></dl>
</div>
</div>
<a id="a7e78af0e0add3ff8594919f1af548dc4"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a7e78af0e0add3ff8594919f1af548dc4">◆ </a></span>addLayerToPrev()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">int cv::dnn::Net::addLayerToPrev </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp; </td>
          <td class="paramname"><em>name</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp; </td>
          <td class="paramname"><em>type</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype"><a class="el" href="../../db/db6/classcv_1_1dnn_1_1LayerParams.html">LayerParams</a> &amp; </td>
          <td class="paramname"><em>params</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table>
</div><div class="memdoc">
<p>Adds new layer and connects its first input to the first output of previously added layer. </p>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a1569c03d93c80fd8c6020589bd2ac116" title="Adds new layer to the net. ">addLayer()</a> </dd></dl>
</div>
</div>
<a id="a032292c7866fb72db251708b0e7c2bea"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a032292c7866fb72db251708b0e7c2bea">◆ </a></span>connect() <span class="overload">[1/2]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::connect </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> </td>
          <td class="paramname"><em>outPin</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype"><a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> </td>
          <td class="paramname"><em>inpPin</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>None</td><td>=</td><td>cv.dnn_Net.connect(</td><td class="paramname">outPin, inpPin</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Connects output of the first layer to input of the second layer. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">outPin</td><td>descriptor of the first layer output. </td></tr>
    <tr><td class="paramname">inpPin</td><td>descriptor of the second layer input.</td></tr>
  </table>
  </dd>
</dl>
<p>Descriptors have the following template <code>&lt;layer_name&gt;[.input_number]</code>:</p><ul>
<li>the first part of the template <code>layer_name</code> is string name of the added layer. If this part is empty then the network input pseudo layer will be used;</li>
<li><p class="startli">the second optional part of the template <code>input_number</code> is either number of the layer input, either label one. If this part is omitted then the first layer input will be used.</p>
<dl class="section see"><dt>See also</dt><dd>setNetInputs(), <a class="el" href="../../d3/d6c/classcv_1_1dnn_1_1Layer.html#a93212cf0fe50ace40af0f2b4f42263c4" title="Returns index of input blob into the input array. ">Layer::inputNameToIndex()</a>, <a class="el" href="../../d3/d6c/classcv_1_1dnn_1_1Layer.html#a60ffc8238f3fa26cd3f49daa7ac0884b" title="Returns index of output blob in output array. ">Layer::outputNameToIndex()</a> </dd></dl>
</li>
</ul>
</div>
</div>
<a id="a26615a67830b0045b68565c7e7dc1307"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a26615a67830b0045b68565c7e7dc1307">◆ </a></span>connect() <span class="overload">[2/2]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::connect </td>
          <td>(</td>
          <td class="paramtype">int </td>
          <td class="paramname"><em>outLayerId</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">int </td>
          <td class="paramname"><em>outNum</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">int </td>
          <td class="paramname"><em>inpLayerId</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">int </td>
          <td class="paramname"><em>inpNum</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>None</td><td>=</td><td>cv.dnn_Net.connect(</td><td class="paramname">outPin, inpPin</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Connects #<code>outNum</code> output of the first layer to #<code>inNum</code> input of the second layer. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">outLayerId</td><td>identifier of the first layer </td></tr>
    <tr><td class="paramname">outNum</td><td>number of the first layer output </td></tr>
    <tr><td class="paramname">inpLayerId</td><td>identifier of the second layer </td></tr>
    <tr><td class="paramname">inpNum</td><td>number of the second layer input </td></tr>
  </table>
  </dd>
</dl>
</div>
</div>
<a id="ac4d2758cc4cbf52564f8715645fa93a8"></a>
<h2 class="memtitle"><span class="permalink"><a href="#ac4d2758cc4cbf52564f8715645fa93a8">◆ </a></span>dump()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> cv::dnn::Net::dump </td>
          <td>(</td>
          <td class="paramname"></td><td>)</td>
          <td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.dump(</td><td class="paramname"></td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Dump net to String. </p>
<dl class="section return"><dt>Returns</dt><dd>String with structure, hyperparameters, backend, target and fusion Call method after <a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a5e74adacffd6aa53d56046581de7fcbd" title="Sets the new input value for the network. ">setInput()</a>. To see correct backend, target and fusion run after <a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a98ed94cb6ef7063d3697259566da310b" title="Runs forward pass to compute output of layer with name outputName. ">forward()</a>. </dd></dl>
</div>
</div>
<a id="a4f7d0d1e650571e11d988d07f7ed5a7d"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a4f7d0d1e650571e11d988d07f7ed5a7d">◆ </a></span>dumpToFile()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::dumpToFile </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp; </td>
          <td class="paramname"><em>path</em></td><td>)</td>
          <td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>None</td><td>=</td><td>cv.dnn_Net.dumpToFile(</td><td class="paramname">path</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Dump net structure, hyperparameters, backend, target and fusion to dot file. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">path</td><td>path to output file with .dot extension </td></tr>
  </table>
  </dd>
</dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#ac4d2758cc4cbf52564f8715645fa93a8" title="Dump net to String. ">dump()</a> </dd></dl>
</div>
</div>
<a id="a6a5778787d5b8770deab5eda6968e66c"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a6a5778787d5b8770deab5eda6968e66c">◆ </a></span>empty()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">bool cv::dnn::Net::empty </td>
          <td>(</td>
          <td class="paramname"></td><td>)</td>
          <td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.empty(</td><td class="paramname"></td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Returns true if there are no layers in the network. </p>
</div>
</div>
<a id="af22e2f1a542d80ea797ba38902c726bd"></a>
<h2 class="memtitle"><span class="permalink"><a href="#af22e2f1a542d80ea797ba38902c726bd">◆ </a></span>enableFusion()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::enableFusion </td>
          <td>(</td>
          <td class="paramtype">bool </td>
          <td class="paramname"><em>fusion</em></td><td>)</td>
          <td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>None</td><td>=</td><td>cv.dnn_Net.enableFusion(</td><td class="paramname">fusion</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Enables or disables layer fusion in the network. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">fusion</td><td>true to enable the fusion, false to disable. The fusion is enabled by default. </td></tr>
  </table>
  </dd>
</dl>
</div>
</div>
<a id="a98ed94cb6ef7063d3697259566da310b"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a98ed94cb6ef7063d3697259566da310b">◆ </a></span>forward() <span class="overload">[1/4]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="../../d3/d63/classcv_1_1Mat.html">Mat</a> cv::dnn::Net::forward </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp; </td>
          <td class="paramname"><em>outputName</em> = <code><a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a>()</code></td><td>)</td>
          <td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.forward(</td><td class="paramname">[, outputName]</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>outputBlobs</td><td>=</td><td>cv.dnn_Net.forward(</td><td class="paramname">[, outputBlobs[, outputName]]</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>outputBlobs</td><td>=</td><td>cv.dnn_Net.forward(</td><td class="paramname">outBlobNames[, outputBlobs]</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>outputBlobs</td><td>=</td><td>cv.dnn_Net.forwardAndRetrieve(</td><td class="paramname">outBlobNames</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Runs forward pass to compute output of layer with name <code>outputName</code>. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">outputName</td><td>name for layer which output is needed to get </td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>blob for first output of specified layer.</dd></dl>
<p>By default runs forward pass for the whole network. </p>
<dl><dt><b>Examples: </b></dt><dd><a class="el" href="../../d6/d39/samples_2dnn_2colorization_8cpp-example.html#a21">samples/dnn/colorization.cpp</a>, and <a class="el" href="../../d7/d4f/samples_2dnn_2openpose_8cpp-example.html#a14">samples/dnn/openpose.cpp</a>.</dd>
</dl>
</div>
</div>
<a id="a8d248f50efec70dffa785de18a4b2170"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a8d248f50efec70dffa785de18a4b2170">◆ </a></span>forward() <span class="overload">[2/4]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::forward </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="../../dc/d84/group__core__basic.html#ga889a09549b98223016170d9b613715de">OutputArrayOfArrays</a> </td>
          <td class="paramname"><em>outputBlobs</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp; </td>
          <td class="paramname"><em>outputName</em> = <code><a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a>()</code> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.forward(</td><td class="paramname">[, outputName]</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>outputBlobs</td><td>=</td><td>cv.dnn_Net.forward(</td><td class="paramname">[, outputBlobs[, outputName]]</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>outputBlobs</td><td>=</td><td>cv.dnn_Net.forward(</td><td class="paramname">outBlobNames[, outputBlobs]</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>outputBlobs</td><td>=</td><td>cv.dnn_Net.forwardAndRetrieve(</td><td class="paramname">outBlobNames</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Runs forward pass to compute output of layer with name <code>outputName</code>. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">outputBlobs</td><td>contains all output blobs for specified layer. </td></tr>
    <tr><td class="paramname">outputName</td><td>name of the layer whose output is needed</td></tr>
  </table>
  </dd>
</dl>
<p>If <code>outputName</code> is empty, runs forward pass for the whole network. </p>
</div>
</div>
<a id="adb34d7650e555264c7da3b47d967311b"></a>
<h2 class="memtitle"><span class="permalink"><a href="#adb34d7650e555264c7da3b47d967311b">◆ </a></span>forward() <span class="overload">[3/4]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::forward </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="../../dc/d84/group__core__basic.html#ga889a09549b98223016170d9b613715de">OutputArrayOfArrays</a> </td>
          <td class="paramname"><em>outputBlobs</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::vector&lt; <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &gt; &amp; </td>
          <td class="paramname"><em>outBlobNames</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.forward(</td><td class="paramname">[, outputName]</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>outputBlobs</td><td>=</td><td>cv.dnn_Net.forward(</td><td class="paramname">[, outputBlobs[, outputName]]</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>outputBlobs</td><td>=</td><td>cv.dnn_Net.forward(</td><td class="paramname">outBlobNames[, outputBlobs]</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>outputBlobs</td><td>=</td><td>cv.dnn_Net.forwardAndRetrieve(</td><td class="paramname">outBlobNames</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Runs forward pass to compute outputs of layers listed in <code>outBlobNames</code>. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">outputBlobs</td><td>contains blobs for first outputs of specified layers. </td></tr>
    <tr><td class="paramname">outBlobNames</td><td>names of the layers whose outputs are needed </td></tr>
  </table>
  </dd>
</dl>
</div>
</div>
<a id="a00e707a86b2da4f980f9342b1fc2cc92"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a00e707a86b2da4f980f9342b1fc2cc92">◆ </a></span>forward() <span class="overload">[4/4]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::forward </td>
          <td>(</td>
          <td class="paramtype">std::vector&lt; std::vector&lt; <a class="el" href="../../d3/d63/classcv_1_1Mat.html">Mat</a> &gt; &gt; &amp; </td>
          <td class="paramname"><em>outputBlobs</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::vector&lt; <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &gt; &amp; </td>
          <td class="paramname"><em>outBlobNames</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.forward(</td><td class="paramname">[, outputName]</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>outputBlobs</td><td>=</td><td>cv.dnn_Net.forward(</td><td class="paramname">[, outputBlobs[, outputName]]</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>outputBlobs</td><td>=</td><td>cv.dnn_Net.forward(</td><td class="paramname">outBlobNames[, outputBlobs]</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>outputBlobs</td><td>=</td><td>cv.dnn_Net.forwardAndRetrieve(</td><td class="paramname">outBlobNames</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Runs forward pass to compute outputs of layers listed in <code>outBlobNames</code>. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">outputBlobs</td><td>contains all output blobs for each layer specified in <code>outBlobNames</code>. </td></tr>
    <tr><td class="paramname">outBlobNames</td><td>names of the layers whose outputs are needed </td></tr>
  </table>
  </dd>
</dl>
</div>
</div>
<a id="a814890154ea9e10b132fec00b6f6ba30"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a814890154ea9e10b132fec00b6f6ba30">◆ </a></span>forwardAsync()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="../../d4/d20/classcv_1_1AsyncArray.html">AsyncArray</a> cv::dnn::Net::forwardAsync </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp; </td>
          <td class="paramname"><em>outputName</em> = <code><a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a>()</code></td><td>)</td>
          <td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.forwardAsync(</td><td class="paramname">[, outputName]</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Runs forward pass to compute output of layer with name <code>outputName</code>. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">outputName</td><td>name of the layer whose output is needed</td></tr>
  </table>
  </dd>
</dl>
<p>By default runs forward pass for the whole network.</p>
<p>This is an asynchronous version of <a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a98ed94cb6ef7063d3697259566da310b" title="Runs forward pass to compute output of layer with name outputName. ">forward(const String&amp;)</a>. <a class="el" href="../../d6/d0f/group__dnn.html#gga186f7d9bfacac8b0ff2e26e2eab02625a6d17a7450b1e077ac91faa10a1e85486">dnn::DNN_BACKEND_INFERENCE_ENGINE</a> backend is required. </p>
</div>
</div>
<a id="a97e29e028f81d339e444ddaba4dcc989"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a97e29e028f81d339e444ddaba4dcc989">◆ </a></span>getFLOPS() <span class="overload">[1/4]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga7cde0074dfd288f2d70c0e035dacb28a">int64</a> cv::dnn::Net::getFLOPS </td>
          <td>(</td>
          <td class="paramtype">const std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp; </td>
          <td class="paramname"><em>netInputShapes</em></td><td>)</td>
          <td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">netInputShape</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">layerId, netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">layerId, netInputShape</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Computes FLOP for whole loaded model with specified input shapes. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">netInputShapes</td><td>vector of shapes for all net inputs. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>computed FLOP. </dd></dl>
</div>
</div>
<a id="a0afb2fdcd21997e293543287a7754de2"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a0afb2fdcd21997e293543287a7754de2">◆ </a></span>getFLOPS() <span class="overload">[2/4]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga7cde0074dfd288f2d70c0e035dacb28a">int64</a> cv::dnn::Net::getFLOPS </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp; </td>
          <td class="paramname"><em>netInputShape</em></td><td>)</td>
          <td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">netInputShape</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">layerId, netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">layerId, netInputShape</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>This is an overloaded member function, provided for convenience. It differs from the above function only in what argument(s) it accepts. </p>
</div>
</div>
<a id="aa56acbec6853a5dc2f894b8995c717ec"></a>
<h2 class="memtitle"><span class="permalink"><a href="#aa56acbec6853a5dc2f894b8995c717ec">◆ </a></span>getFLOPS() <span class="overload">[3/4]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga7cde0074dfd288f2d70c0e035dacb28a">int64</a> cv::dnn::Net::getFLOPS </td>
          <td>(</td>
          <td class="paramtype">const int </td>
          <td class="paramname"><em>layerId</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp; </td>
          <td class="paramname"><em>netInputShapes</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">netInputShape</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">layerId, netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">layerId, netInputShape</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>This is an overloaded member function, provided for convenience. It differs from the above function only in what argument(s) it accepts. </p>
</div>
</div>
<a id="af0cd8ca93901e5ec928b6151b61ad8af"></a>
<h2 class="memtitle"><span class="permalink"><a href="#af0cd8ca93901e5ec928b6151b61ad8af">◆ </a></span>getFLOPS() <span class="overload">[4/4]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga7cde0074dfd288f2d70c0e035dacb28a">int64</a> cv::dnn::Net::getFLOPS </td>
          <td>(</td>
          <td class="paramtype">const int </td>
          <td class="paramname"><em>layerId</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp; </td>
          <td class="paramname"><em>netInputShape</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">netInputShape</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">layerId, netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getFLOPS(</td><td class="paramname">layerId, netInputShape</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>This is an overloaded member function, provided for convenience. It differs from the above function only in what argument(s) it accepts. </p>
</div>
</div>
<a id="a70aec7f768f38c32b1ee25f3a56526df"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a70aec7f768f38c32b1ee25f3a56526df">◆ </a></span>getLayer()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="../../dc/d84/group__core__basic.html#ga6395ca871a678020c4a31fadf7e8cc63">Ptr</a>&lt;<a class="el" href="../../d3/d6c/classcv_1_1dnn_1_1Layer.html">Layer</a>&gt; cv::dnn::Net::getLayer </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a6962dbfe16ce1ae18c67de9f5f2912ef">LayerId</a> </td>
          <td class="paramname"><em>layerId</em></td><td>)</td>
          <td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getLayer(</td><td class="paramname">layerId</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Returns a pointer to the layer with the specified id or name which the network uses. </p>
<dl><dt><b>Examples: </b></dt><dd><a class="el" href="../../d6/d39/samples_2dnn_2colorization_8cpp-example.html#a11">samples/dnn/colorization.cpp</a>.</dd>
</dl>
</div>
</div>
<a id="a9cbb544900f1bb9559a1595bf45a29a4"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a9cbb544900f1bb9559a1595bf45a29a4">◆ </a></span>getLayerId()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">int cv::dnn::Net::getLayerId </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp; </td>
          <td class="paramname"><em>layer</em></td><td>)</td>
          <td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getLayerId(</td><td class="paramname">layer</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Converts string name of the layer to the integer identifier. </p>
<dl class="section return"><dt>Returns</dt><dd>id of the layer, or -1 if the layer wasn't found. </dd></dl>
</div>
</div>
<a id="abf96c5e92de4f6cd3013a6fb900934b4"></a>
<h2 class="memtitle"><span class="permalink"><a href="#abf96c5e92de4f6cd3013a6fb900934b4">◆ </a></span>getLayerInputs()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">std::vector&lt;<a class="el" href="../../dc/d84/group__core__basic.html#ga6395ca871a678020c4a31fadf7e8cc63">Ptr</a>&lt;<a class="el" href="../../d3/d6c/classcv_1_1dnn_1_1Layer.html">Layer</a>&gt; &gt; cv::dnn::Net::getLayerInputs </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a6962dbfe16ce1ae18c67de9f5f2912ef">LayerId</a> </td>
          <td class="paramname"><em>layerId</em></td><td>)</td>
          <td></td>
        </tr>
      </table>
</div><div class="memdoc">
<p>Returns pointers to input layers of specific layer. </p>
</div>
</div>
<a id="ae8be9806024a0d1d41aba687cce99e6b"></a>
<h2 class="memtitle"><span class="permalink"><a href="#ae8be9806024a0d1d41aba687cce99e6b">◆ </a></span>getLayerNames()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">std::vector&lt;<a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a>&gt; cv::dnn::Net::getLayerNames </td>
          <td>(</td>
          <td class="paramname"></td><td>)</td>
          <td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getLayerNames(</td><td class="paramname"></td><td>)</td></tr></table>
</div><div class="memdoc">
</div>
</div>
<a id="a4e728f803a12f6feff35840209870d06"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a4e728f803a12f6feff35840209870d06">◆ </a></span>getLayersCount()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">int cv::dnn::Net::getLayersCount </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp; </td>
          <td class="paramname"><em>layerType</em></td><td>)</td>
          <td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getLayersCount(</td><td class="paramname">layerType</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Returns count of layers of specified type. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">layerType</td><td>type of the layers to count. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>count of layers </dd></dl>
</div>
</div>
<a id="ad0d10b1ff622c89562d2befaf351af45"></a>
<h2 class="memtitle"><span class="permalink"><a href="#ad0d10b1ff622c89562d2befaf351af45">◆ </a></span>getLayerShapes() <span class="overload">[1/2]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::getLayerShapes </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp; </td>
          <td class="paramname"><em>netInputShape</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const int </td>
          <td class="paramname"><em>layerId</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp; </td>
          <td class="paramname"><em>inLayerShapes</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp; </td>
          <td class="paramname"><em>outLayerShapes</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td> const</td>
        </tr>
      </table>
</div><div class="memdoc">
<p>Returns input and output shapes for layer with specified id in loaded model; preliminary inferencing isn't necessary. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">netInputShape</td><td>shape of the input blob in the net input layer. </td></tr>
    <tr><td class="paramname">layerId</td><td>id for layer. </td></tr>
    <tr><td class="paramname">inLayerShapes</td><td>output parameter for input shapes of the specified layer. </td></tr>
    <tr><td class="paramname">outLayerShapes</td><td>output parameter for output shapes of the specified layer. </td></tr>
  </table>
  </dd>
</dl>
</div>
</div>
<a id="a1c42c7d8acf1bb18c7cc186228255460"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a1c42c7d8acf1bb18c7cc186228255460">◆ </a></span>getLayerShapes() <span class="overload">[2/2]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::getLayerShapes </td>
          <td>(</td>
          <td class="paramtype">const std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp; </td>
          <td class="paramname"><em>netInputShapes</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const int </td>
          <td class="paramname"><em>layerId</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp; </td>
          <td class="paramname"><em>inLayerShapes</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp; </td>
          <td class="paramname"><em>outLayerShapes</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td> const</td>
        </tr>
      </table>
</div><div class="memdoc">
<p>This is an overloaded member function, provided for convenience. It differs from the above function only in what argument(s) it accepts. </p>
</div>
</div>
<a id="a5f4572a27c2c3af6192cac8eb0e3fd3c"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a5f4572a27c2c3af6192cac8eb0e3fd3c">◆ </a></span>getLayersShapes() <span class="overload">[1/2]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::getLayersShapes </td>
          <td>(</td>
          <td class="paramtype">const std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp; </td>
          <td class="paramname"><em>netInputShapes</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; int &gt; &amp; </td>
          <td class="paramname"><em>layersIds</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &gt; &amp; </td>
          <td class="paramname"><em>inLayersShapes</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &gt; &amp; </td>
          <td class="paramname"><em>outLayersShapes</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>layersIds, inLayersShapes, outLayersShapes</td><td>=</td><td>cv.dnn_Net.getLayersShapes(</td><td class="paramname">netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>layersIds, inLayersShapes, outLayersShapes</td><td>=</td><td>cv.dnn_Net.getLayersShapes(</td><td class="paramname">netInputShape</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Returns input and output shapes for all layers in loaded model; preliminary inferencing isn't necessary. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">netInputShapes</td><td>shapes for all input blobs in net input layer. </td></tr>
    <tr><td class="paramname">layersIds</td><td>output parameter for layer IDs. </td></tr>
    <tr><td class="paramname">inLayersShapes</td><td>output parameter for input layers shapes; order is the same as in layersIds </td></tr>
    <tr><td class="paramname">outLayersShapes</td><td>output parameter for output layers shapes; order is the same as in layersIds </td></tr>
  </table>
  </dd>
</dl>
</div>
</div>
<a id="a413beeea373f7ee35742bea3692a198a"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a413beeea373f7ee35742bea3692a198a">◆ </a></span>getLayersShapes() <span class="overload">[2/2]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::getLayersShapes </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp; </td>
          <td class="paramname"><em>netInputShape</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; int &gt; &amp; </td>
          <td class="paramname"><em>layersIds</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &gt; &amp; </td>
          <td class="paramname"><em>inLayersShapes</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &gt; &amp; </td>
          <td class="paramname"><em>outLayersShapes</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>layersIds, inLayersShapes, outLayersShapes</td><td>=</td><td>cv.dnn_Net.getLayersShapes(</td><td class="paramname">netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>layersIds, inLayersShapes, outLayersShapes</td><td>=</td><td>cv.dnn_Net.getLayersShapes(</td><td class="paramname">netInputShape</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>This is an overloaded member function, provided for convenience. It differs from the above function only in what argument(s) it accepts. </p>
</div>
</div>
<a id="a8b0a3e740e4e7b6c7dec83b61e707720"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a8b0a3e740e4e7b6c7dec83b61e707720">◆ </a></span>getLayerTypes()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::getLayerTypes </td>
          <td>(</td>
          <td class="paramtype">std::vector&lt; <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &gt; &amp; </td>
          <td class="paramname"><em>layersTypes</em></td><td>)</td>
          <td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>layersTypes</td><td>=</td><td>cv.dnn_Net.getLayerTypes(</td><td class="paramname"></td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Returns the list of types for the layers used in the model. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">layersTypes</td><td>output parameter for returning types. </td></tr>
  </table>
  </dd>
</dl>
</div>
</div>
<a id="a200834361815e1f43272b48433402f21"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a200834361815e1f43272b48433402f21">◆ </a></span>getMemoryConsumption() <span class="overload">[1/6]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::getMemoryConsumption </td>
          <td>(</td>
          <td class="paramtype">const std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp; </td>
          <td class="paramname"><em>netInputShapes</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">size_t &amp; </td>
          <td class="paramname"><em>weights</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">size_t &amp; </td>
          <td class="paramname"><em>blobs</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">netInputShape</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">layerId, netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">layerId, netInputShape</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Computes the number of bytes required to store all weights and intermediate blobs for the model. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">netInputShapes</td><td>vector of shapes for all net inputs. </td></tr>
    <tr><td class="paramname">weights</td><td>output parameter to store resulting bytes for weights. </td></tr>
    <tr><td class="paramname">blobs</td><td>output parameter to store resulting bytes for intermediate blobs. </td></tr>
  </table>
  </dd>
</dl>
</div>
</div>
<a id="aab9ef8c98ec8424a7c8039cc0b9ff96f"></a>
<h2 class="memtitle"><span class="permalink"><a href="#aab9ef8c98ec8424a7c8039cc0b9ff96f">◆ </a></span>getMemoryConsumption() <span class="overload">[2/6]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::getMemoryConsumption </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp; </td>
          <td class="paramname"><em>netInputShape</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">size_t &amp; </td>
          <td class="paramname"><em>weights</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">size_t &amp; </td>
          <td class="paramname"><em>blobs</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">netInputShape</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">layerId, netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">layerId, netInputShape</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>This is an overloaded member function, provided for convenience. It differs from the above function only in what argument(s) it accepts. </p>
</div>
</div>
<a id="a89fb8bf6d2e4659a66d0ead5a0405061"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a89fb8bf6d2e4659a66d0ead5a0405061">◆ </a></span>getMemoryConsumption() <span class="overload">[3/6]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::getMemoryConsumption </td>
          <td>(</td>
          <td class="paramtype">const int </td>
          <td class="paramname"><em>layerId</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp; </td>
          <td class="paramname"><em>netInputShapes</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">size_t &amp; </td>
          <td class="paramname"><em>weights</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">size_t &amp; </td>
          <td class="paramname"><em>blobs</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">netInputShape</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">layerId, netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">layerId, netInputShape</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>This is an overloaded member function, provided for convenience. It differs from the above function only in what argument(s) it accepts. </p>
</div>
</div>
<a id="a41715838baacf49108f97c8b1ae10a8f"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a41715838baacf49108f97c8b1ae10a8f">◆ </a></span>getMemoryConsumption() <span class="overload">[4/6]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::getMemoryConsumption </td>
          <td>(</td>
          <td class="paramtype">const int </td>
          <td class="paramname"><em>layerId</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp; </td>
          <td class="paramname"><em>netInputShape</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">size_t &amp; </td>
          <td class="paramname"><em>weights</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">size_t &amp; </td>
          <td class="paramname"><em>blobs</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">netInputShape</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">layerId, netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">layerId, netInputShape</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>This is an overloaded member function, provided for convenience. It differs from the above function only in what argument(s) it accepts. </p>
</div>
</div>
<a id="a3303e9154a53299a8ac426d30c6ab321"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a3303e9154a53299a8ac426d30c6ab321">◆ </a></span>getMemoryConsumption() <span class="overload">[5/6]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::getMemoryConsumption </td>
          <td>(</td>
          <td class="paramtype">const std::vector&lt; <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &gt; &amp; </td>
          <td class="paramname"><em>netInputShapes</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; int &gt; &amp; </td>
          <td class="paramname"><em>layerIds</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; size_t &gt; &amp; </td>
          <td class="paramname"><em>weights</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; size_t &gt; &amp; </td>
          <td class="paramname"><em>blobs</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">netInputShape</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">layerId, netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">layerId, netInputShape</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Computes the number of bytes required to store all weights and intermediate blobs for each layer. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">netInputShapes</td><td>vector of shapes for all net inputs. </td></tr>
    <tr><td class="paramname">layerIds</td><td>output vector to save layer IDs. </td></tr>
    <tr><td class="paramname">weights</td><td>output parameter to store resulting bytes for weights. </td></tr>
    <tr><td class="paramname">blobs</td><td>output parameter to store resulting bytes for intermediate blobs. </td></tr>
  </table>
  </dd>
</dl>
</div>
</div>
<a id="af9d6c117533d859c5e26f08d09934d56"></a>
<h2 class="memtitle"><span class="permalink"><a href="#af9d6c117533d859c5e26f08d09934d56">◆ </a></span>getMemoryConsumption() <span class="overload">[6/6]</span></h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::getMemoryConsumption </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp; </td>
          <td class="paramname"><em>netInputShape</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; int &gt; &amp; </td>
          <td class="paramname"><em>layerIds</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; size_t &gt; &amp; </td>
          <td class="paramname"><em>weights</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">std::vector&lt; size_t &gt; &amp; </td>
          <td class="paramname"><em>blobs</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">netInputShape</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">layerId, netInputShapes</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>weights, blobs</td><td>=</td><td>cv.dnn_Net.getMemoryConsumption(</td><td class="paramname">layerId, netInputShape</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>This is an overloaded member function, provided for convenience. It differs from the above function only in what argument(s) it accepts. </p>
</div>
</div>
<a id="a767f2a693ba409bedc442adca3b71511"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a767f2a693ba409bedc442adca3b71511">◆ </a></span>getParam()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="../../d3/d63/classcv_1_1Mat.html">Mat</a> cv::dnn::Net::getParam </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a6962dbfe16ce1ae18c67de9f5f2912ef">LayerId</a> </td>
          <td class="paramname"><em>layer</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">int </td>
          <td class="paramname"><em>numParam</em> = <code>0</code> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getParam(</td><td class="paramname">layer[, numParam]</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Returns parameter blob of the layer. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">layer</td><td>name or id of the layer. </td></tr>
    <tr><td class="paramname">numParam</td><td>index of the layer parameter in the <a class="el" href="../../d3/d6c/classcv_1_1dnn_1_1Layer.html#a9a5578e0b3a0ec0301fb7320b54aa6ed" title="List of learned parameters must be stored here to allow read them by using Net::getParam(). ">Layer::blobs</a> array. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="../../d3/d6c/classcv_1_1dnn_1_1Layer.html#a9a5578e0b3a0ec0301fb7320b54aa6ed" title="List of learned parameters must be stored here to allow read them by using Net::getParam(). ">Layer::blobs</a> </dd></dl>
</div>
</div>
<a id="a06ce946f675f75d1c020c5ddbc78aedc"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a06ce946f675f75d1c020c5ddbc78aedc">◆ </a></span>getPerfProfile()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname"><a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga7cde0074dfd288f2d70c0e035dacb28a">int64</a> cv::dnn::Net::getPerfProfile </td>
          <td>(</td>
          <td class="paramtype">std::vector&lt; double &gt; &amp; </td>
          <td class="paramname"><em>timings</em></td><td>)</td>
          <td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval, timings</td><td>=</td><td>cv.dnn_Net.getPerfProfile(</td><td class="paramname"></td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Returns the overall time for inference and timings (in ticks) for layers. Indexes in the returned vector correspond to layer ids. Some layers can be fused with others, in which case a zero tick count will be returned for those skipped layers. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">timings</td><td>vector for tick timings for all layers. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd>overall ticks for model inference. </dd></dl>
</div>
</div>
<a id="ae62a73984f62c49fd3e8e689405b056a"></a>
<h2 class="memtitle"><span class="permalink"><a href="#ae62a73984f62c49fd3e8e689405b056a">◆ </a></span>getUnconnectedOutLayers()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">std::vector&lt;int&gt; cv::dnn::Net::getUnconnectedOutLayers </td>
          <td>(</td>
          <td class="paramname"></td><td>)</td>
          <td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getUnconnectedOutLayers(</td><td class="paramname"></td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Returns indexes of layers with unconnected outputs. </p>
</div>
</div>
<a id="ac1840896b8643f91532e98c660627fb9"></a>
<h2 class="memtitle"><span class="permalink"><a href="#ac1840896b8643f91532e98c660627fb9">◆ </a></span>getUnconnectedOutLayersNames()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">std::vector&lt;<a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a>&gt; cv::dnn::Net::getUnconnectedOutLayersNames </td>
          <td>(</td>
          <td class="paramname"></td><td>)</td>
          <td> const</td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn_Net.getUnconnectedOutLayersNames(</td><td class="paramname"></td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Returns names of layers with unconnected outputs. </p>
</div>
</div>
<a id="a327720af187378a58e0a48feb0b5ef89"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a327720af187378a58e0a48feb0b5ef89">◆ </a></span>readFromModelOptimizer() <span class="overload">[1/3]</span></h2>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">static <a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html">Net</a> cv::dnn::Net::readFromModelOptimizer </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp; </td>
          <td class="paramname"><em>xml</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp; </td>
          <td class="paramname"><em>bin</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">static</span></span>  </td>
  </tr>
</table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn.Net_readFromModelOptimizer(</td><td class="paramname">xml, bin</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn.Net_readFromModelOptimizer(</td><td class="paramname">bufferModelConfig, bufferWeights</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Create a network from Intel's <a class="el" href="../../d3/df0/classcv_1_1dnn_1_1Model.html" title="This class is presented high-level API for neural networks. ">Model</a> Optimizer intermediate representation (IR). </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramdir">[in]</td><td class="paramname">xml</td><td>XML configuration file with network's topology. </td></tr>
    <tr><td class="paramdir">[in]</td><td class="paramname">bin</td><td>Binary file with trained weights. Networks imported from Intel's <a class="el" href="../../d3/df0/classcv_1_1dnn_1_1Model.html" title="This class is presented high-level API for neural networks. ">Model</a> Optimizer are launched in Intel's Inference Engine backend. </td></tr>
  </table>
  </dd>
</dl>
</div>
</div>
<a id="a7b162c8978d5b1aab599edbf59e5b79f"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a7b162c8978d5b1aab599edbf59e5b79f">◆ </a></span>readFromModelOptimizer() <span class="overload">[2/3]</span></h2>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">static <a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html">Net</a> cv::dnn::Net::readFromModelOptimizer </td>
          <td>(</td>
          <td class="paramtype">const std::vector&lt; <a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga65f85814a8290f9797005d3b28e7e5fc">uchar</a> &gt; &amp; </td>
          <td class="paramname"><em>bufferModelConfig</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const std::vector&lt; <a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga65f85814a8290f9797005d3b28e7e5fc">uchar</a> &gt; &amp; </td>
          <td class="paramname"><em>bufferWeights</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">static</span></span>  </td>
  </tr>
</table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn.Net_readFromModelOptimizer(</td><td class="paramname">xml, bin</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn.Net_readFromModelOptimizer(</td><td class="paramname">bufferModelConfig, bufferWeights</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Create a network from Intel's <a class="el" href="../../d3/df0/classcv_1_1dnn_1_1Model.html" title="This class is presented high-level API for neural networks. ">Model</a> Optimizer in-memory buffers with intermediate representation (IR). </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramdir">[in]</td><td class="paramname">bufferModelConfig</td><td>buffer with model's configuration. </td></tr>
    <tr><td class="paramdir">[in]</td><td class="paramname">bufferWeights</td><td>buffer with model's trained weights. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html" title="This class allows to create and manipulate comprehensive artificial neural networks. ">Net</a> object. </dd></dl>
</div>
</div>
<a id="af1fd3b20f8878e202d41d597b3da15d6"></a>
<h2 class="memtitle"><span class="permalink"><a href="#af1fd3b20f8878e202d41d597b3da15d6">◆ </a></span>readFromModelOptimizer() <span class="overload">[3/3]</span></h2>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
  <tr>
  <td class="mlabels-left">
      <table class="memname">
        <tr>
          <td class="memname">static <a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html">Net</a> cv::dnn::Net::readFromModelOptimizer </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga65f85814a8290f9797005d3b28e7e5fc">uchar</a> * </td>
          <td class="paramname"><em>bufferModelConfigPtr</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">size_t </td>
          <td class="paramname"><em>bufferModelConfigSize</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="../../d1/d1b/group__core__hal__interface.html#ga65f85814a8290f9797005d3b28e7e5fc">uchar</a> * </td>
          <td class="paramname"><em>bufferWeightsPtr</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">size_t </td>
          <td class="paramname"><em>bufferWeightsSize</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table>
  </td>
  <td class="mlabels-right">
<span class="mlabels"><span class="mlabel">static</span></span>  </td>
  </tr>
</table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn.Net_readFromModelOptimizer(</td><td class="paramname">xml, bin</td><td>)</td></tr><tr><td style="width: 20px;"></td><td>retval</td><td>=</td><td>cv.dnn.Net_readFromModelOptimizer(</td><td class="paramname">bufferModelConfig, bufferWeights</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Create a network from Intel's <a class="el" href="../../d3/df0/classcv_1_1dnn_1_1Model.html" title="This class is presented high-level API for neural networks. ">Model</a> Optimizer in-memory buffers with intermediate representation (IR). </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramdir">[in]</td><td class="paramname">bufferModelConfigPtr</td><td>buffer pointer of model's configuration. </td></tr>
    <tr><td class="paramdir">[in]</td><td class="paramname">bufferModelConfigSize</td><td>buffer size of model's configuration. </td></tr>
    <tr><td class="paramdir">[in]</td><td class="paramname">bufferWeightsPtr</td><td>buffer pointer of model's trained weights. </td></tr>
    <tr><td class="paramdir">[in]</td><td class="paramname">bufferWeightsSize</td><td>buffer size of model's trained weights. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section return"><dt>Returns</dt><dd><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html" title="This class allows to create and manipulate comprehensive artificial neural networks. ">Net</a> object. </dd></dl>
</div>
</div>
<a id="a56fbff351d1e0a47fb5aabf6915fc279"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a56fbff351d1e0a47fb5aabf6915fc279">◆ </a></span>setHalideScheduler()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::setHalideScheduler </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp; </td>
          <td class="paramname"><em>scheduler</em></td><td>)</td>
          <td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>None</td><td>=</td><td>cv.dnn_Net.setHalideScheduler(</td><td class="paramname">scheduler</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Compile Halide layers. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramdir">[in]</td><td class="paramname">scheduler</td><td>Path to YAML file with scheduling directives. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a7f767df11386d39374db49cd8df8f59e" title="Ask network to use specific computation backend where it supported. ">setPreferableBackend</a></dd></dl>
<p>Schedule layers that support the Halide backend, then compile them for the specific target. For layers that are not represented in the scheduling file, or if no manual scheduling is used at all, automatic scheduling will be applied. </p>
</div>
</div>
<a id="a5e74adacffd6aa53d56046581de7fcbd"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a5e74adacffd6aa53d56046581de7fcbd">◆ </a></span>setInput()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::setInput </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="../../dc/d84/group__core__basic.html#ga353a9de602fe76c709e12074a6f362ba">InputArray</a> </td>
          <td class="paramname"><em>blob</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp; </td>
          <td class="paramname"><em>name</em> = <code>""</code>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">double </td>
          <td class="paramname"><em>scalefactor</em> = <code>1.0</code>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga599fe92e910c027be274233eccad7beb">Scalar</a> &amp; </td>
          <td class="paramname"><em>mean</em> = <code><a class="el" href="../../dc/d84/group__core__basic.html#ga599fe92e910c027be274233eccad7beb">Scalar</a>()</code> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>None</td><td>=</td><td>cv.dnn_Net.setInput(</td><td class="paramname">blob[, name[, scalefactor[, mean]]]</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Sets the new input value for the network. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">blob</td><td>A new blob. Should have CV_32F or CV_8U depth. </td></tr>
    <tr><td class="paramname">name</td><td>A name of input layer. </td></tr>
    <tr><td class="paramname">scalefactor</td><td>An optional normalization scale. </td></tr>
    <tr><td class="paramname">mean</td><td>Optional mean subtraction values. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a032292c7866fb72db251708b0e7c2bea" title="Connects output of the first layer to input of the second layer. ">connect(String, String)</a> to know format of the descriptor.</dd></dl>
<p>If scale or mean values are specified, a final input blob is computed as: </p><p class="formulaDsp">
\[input(n,c,h,w) = scalefactor \times (blob(n,c,h,w) - mean_c)\]
</p>
 <dl><dt><b>Examples: </b></dt><dd><a class="el" href="../../d6/d39/samples_2dnn_2colorization_8cpp-example.html#a20">samples/dnn/colorization.cpp</a>, and <a class="el" href="../../d7/d4f/samples_2dnn_2openpose_8cpp-example.html#a13">samples/dnn/openpose.cpp</a>.</dd>
</dl>
</div>
</div>
<a id="a09e9579f40599efe7cc9300aca811497"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a09e9579f40599efe7cc9300aca811497">◆ </a></span>setInputShape()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::setInputShape </td>
          <td>(</td>
          <td class="paramtype">const <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &amp; </td>
          <td class="paramname"><em>inputName</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="../../d6/d0f/group__dnn.html#ga8a9ab61770c140f0fa2880c90aeae832">MatShape</a> &amp; </td>
          <td class="paramname"><em>shape</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>None</td><td>=</td><td>cv.dnn_Net.setInputShape(</td><td class="paramname">inputName, shape</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Specify shape of network input. </p>
</div>
</div>
<a id="a4331f0fa17fd90db99cb68752c796fed"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a4331f0fa17fd90db99cb68752c796fed">◆ </a></span>setInputsNames()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::setInputsNames </td>
          <td>(</td>
          <td class="paramtype">const std::vector&lt; <a class="el" href="../../dc/d84/group__core__basic.html#ga1f6634802eeadfd7245bc75cf3e216c2">String</a> &gt; &amp; </td>
          <td class="paramname"><em>inputBlobNames</em></td><td>)</td>
          <td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>None</td><td>=</td><td>cv.dnn_Net.setInputsNames(</td><td class="paramname">inputBlobNames</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Sets output names of the network input pseudo layer. </p>
<p>Each net always has its own special network input pseudo layer with id=0. This layer stores the user blobs only and doesn't perform any computations. In fact, this layer provides the only way to pass user data into the network. As any other layer, this layer can label its outputs, and this function provides an easy way to do this. </p>
</div>
</div>
<a id="ab9e38d28672d35a8b59f7bffc2435400"></a>
<h2 class="memtitle"><span class="permalink"><a href="#ab9e38d28672d35a8b59f7bffc2435400">◆ </a></span>setParam()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::setParam </td>
          <td>(</td>
          <td class="paramtype"><a class="el" href="../../db/d30/classcv_1_1dnn_1_1Net.html#a6962dbfe16ce1ae18c67de9f5f2912ef">LayerId</a> </td>
          <td class="paramname"><em>layer</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">int </td>
          <td class="paramname"><em>numParam</em>, </td>
        </tr>
        <tr>
          <td class="paramkey"></td>
          <td></td>
          <td class="paramtype">const <a class="el" href="../../d3/d63/classcv_1_1Mat.html">Mat</a> &amp; </td>
          <td class="paramname"><em>blob</em> </td>
        </tr>
        <tr>
          <td></td>
          <td>)</td>
          <td></td><td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>None</td><td>=</td><td>cv.dnn_Net.setParam(</td><td class="paramname">layer, numParam, blob</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Sets the new value for the learned param of the layer. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramname">layer</td><td>name or id of the layer. </td></tr>
    <tr><td class="paramname">numParam</td><td>index of the layer parameter in the <a class="el" href="../../d3/d6c/classcv_1_1dnn_1_1Layer.html#a9a5578e0b3a0ec0301fb7320b54aa6ed" title="List of learned parameters must be stored here to allow read them by using Net::getParam(). ">Layer::blobs</a> array. </td></tr>
    <tr><td class="paramname">blob</td><td>the new value. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="../../d3/d6c/classcv_1_1dnn_1_1Layer.html#a9a5578e0b3a0ec0301fb7320b54aa6ed" title="List of learned parameters must be stored here to allow read them by using Net::getParam(). ">Layer::blobs</a> </dd></dl>
<dl class="section note"><dt>Note</dt><dd>If shape of the new blob differs from the previous shape, then the following forward pass may fail. </dd></dl>
</div>
</div>
<a id="a7f767df11386d39374db49cd8df8f59e"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a7f767df11386d39374db49cd8df8f59e">◆ </a></span>setPreferableBackend()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::setPreferableBackend </td>
          <td>(</td>
          <td class="paramtype">int </td>
          <td class="paramname"><em>backendId</em></td><td>)</td>
          <td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>None</td><td>=</td><td>cv.dnn_Net.setPreferableBackend(</td><td class="paramname">backendId</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Ask network to use a specific computation backend where it is supported. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramdir">[in]</td><td class="paramname">backendId</td><td>backend identifier. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="../../d6/d0f/group__dnn.html#ga186f7d9bfacac8b0ff2e26e2eab02625" title="Enum of computation backends supported by layers. ">Backend</a></dd></dl>
<p>If OpenCV is compiled with Intel's Inference Engine library, DNN_BACKEND_DEFAULT means DNN_BACKEND_INFERENCE_ENGINE. Otherwise it equals to DNN_BACKEND_OPENCV. </p>
</div>
</div>
<a id="a9dddbefbc7f3defbe3eeb5dc3d3483f4"></a>
<h2 class="memtitle"><span class="permalink"><a href="#a9dddbefbc7f3defbe3eeb5dc3d3483f4">◆ </a></span>setPreferableTarget()</h2>
<div class="memitem">
<div class="memproto">
      <table class="memname">
        <tr>
          <td class="memname">void cv::dnn::Net::setPreferableTarget </td>
          <td>(</td>
          <td class="paramtype">int </td>
          <td class="paramname"><em>targetId</em></td><td>)</td>
          <td></td>
        </tr>
      </table><table class="python_language"><tr><th colspan="999" style="text-align:left">Python:</th></tr><tr><td style="width: 20px;"></td><td>None</td><td>=</td><td>cv.dnn_Net.setPreferableTarget(</td><td class="paramname">targetId</td><td>)</td></tr></table>
</div><div class="memdoc">
<p>Ask network to make computations on specific target device. </p>
<dl class="params"><dt>Parameters</dt><dd>
  <table class="params">
    <tr><td class="paramdir">[in]</td><td class="paramname">targetId</td><td>target identifier. </td></tr>
  </table>
  </dd>
</dl>
<dl class="section see"><dt>See also</dt><dd><a class="el" href="../../d6/d0f/group__dnn.html#ga709af7692ba29788182cf573531b0ff5" title="Enum of target devices for computations. ">Target</a></dd></dl>
<p>List of supported combinations backend / target: </p><table class="doxtable">
<tr>
<th></th><th>DNN_BACKEND_OPENCV </th><th>DNN_BACKEND_INFERENCE_ENGINE </th><th>DNN_BACKEND_HALIDE </th><th>DNN_BACKEND_CUDA  </th></tr>
<tr>
<td>DNN_TARGET_CPU </td><td>+ </td><td>+ </td><td>+ </td><td></td></tr>
<tr>
<td>DNN_TARGET_OPENCL </td><td>+ </td><td>+ </td><td>+ </td><td></td></tr>
<tr>
<td>DNN_TARGET_OPENCL_FP16 </td><td>+ </td><td>+ </td><td></td><td></td></tr>
<tr>
<td>DNN_TARGET_MYRIAD </td><td></td><td>+ </td><td></td><td></td></tr>
<tr>
<td>DNN_TARGET_FPGA </td><td></td><td>+ </td><td></td><td></td></tr>
<tr>
<td>DNN_TARGET_CUDA </td><td></td><td></td><td></td><td>+ </td></tr>
<tr>
<td>DNN_TARGET_CUDA_FP16 </td><td></td><td></td><td></td><td>+ </td></tr>
<tr>
<td>DNN_TARGET_HDDL </td><td></td><td>+ </td><td></td><td></td></tr>
</table>
<dl><dt><b>Examples: </b></dt><dd><a class="el" href="../../d6/d39/samples_2dnn_2colorization_8cpp-example.html#a8">samples/dnn/colorization.cpp</a>.</dd>
</dl>
</div>
</div>
<hr/>The documentation for this class was generated from the following file:<ul>
<li>opencv2/dnn/<a class="el" href="../../db/ddc/dnn_2dnn_8hpp.html">dnn.hpp</a></li>
</ul>
</div><!-- contents -->
<!-- HTML footer for doxygen 1.8.6-->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated on Fri Apr 2 2021 11:36:47 for OpenCV by  <a href="http://www.doxygen.org/index.html">
<img alt="doxygen" class="footer" src="../../doxygen.png"/>
</a> 1.8.13
</small></address>
<script type="text/javascript">
//<![CDATA[
addTutorialsButtons();
//]]>
</script>
</body>
</html>
