



<!DOCTYPE html>
<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
<head>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">

  <title>Index &mdash; PyTorch master documentation</title>

  <link rel="canonical" href="https://pytorch.org/docs/stable/genindex.html">

  <link rel="stylesheet" href="_static/css/theme.css">
  <!-- <link rel="stylesheet" href="_static/pygments.css"> -->
  <link rel="stylesheet" href="_static/css/jit.css">
  <!-- FIX: this page previously loaded TWO KaTeX stylesheets (0.10.0-beta and
       0.11.1), downloading both and letting whichever rules came later win.
       Keep a single version (0.11.1, matching the font preloads below). -->
  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/katex.min.css">
  <link rel="stylesheet" href="_static/katex-math.css">
  <link rel="index" title="Index" href="#">
  <link rel="search" title="Search" href="search.html">

  <script src="_static/js/modernizr.min.js"></script>

  <!-- Preload the theme fonts. crossorigin is required on font preloads
       (even for same-origin files) or browsers discard the preloaded copy. -->
  <link rel="preload" href="_static/fonts/FreightSans/freight-sans-book.woff2" as="font" type="font/woff2" crossorigin="anonymous">
  <link rel="preload" href="_static/fonts/FreightSans/freight-sans-medium.woff2" as="font" type="font/woff2" crossorigin="anonymous">
  <link rel="preload" href="_static/fonts/IBMPlexMono/IBMPlexMono-Medium.woff2" as="font" type="font/woff2" crossorigin="anonymous">
  <link rel="preload" href="_static/fonts/FreightSans/freight-sans-bold.woff2" as="font" type="font/woff2" crossorigin="anonymous">
  <link rel="preload" href="_static/fonts/FreightSans/freight-sans-medium-italic.woff2" as="font" type="font/woff2" crossorigin="anonymous">
  <link rel="preload" href="_static/fonts/IBMPlexMono/IBMPlexMono-SemiBold.woff2" as="font" type="font/woff2" crossorigin="anonymous">

  <!-- Preload the KaTeX fonts. FIX: these previously pointed at katex@0.10.0
       while the stylesheet is 0.11.1 — the CSS requests different URLs, so
       every preload was wasted and each font downloaded twice. Versions must
       match for a preload to be reused. -->
  <link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Math-Italic.woff2" as="font" type="font/woff2" crossorigin="anonymous">
  <link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Main-Regular.woff2" as="font" type="font/woff2" crossorigin="anonymous">
  <link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Main-Bold.woff2" as="font" type="font/woff2" crossorigin="anonymous">
  <link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Size1-Regular.woff2" as="font" type="font/woff2" crossorigin="anonymous">
  <link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Size4-Regular.woff2" as="font" type="font/woff2" crossorigin="anonymous">
  <link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Size2-Regular.woff2" as="font" type="font/woff2" crossorigin="anonymous">
  <link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Size3-Regular.woff2" as="font" type="font/woff2" crossorigin="anonymous">
  <link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Caligraphic-Regular.woff2" as="font" type="font/woff2" crossorigin="anonymous">
</head>

<body class="pytorch-body">

<!-- FIX: this site header previously sat between </head> and <body>, i.e.
     outside the body element — invalid markup (browsers re-parent it, but
     the served document should be well-formed). The <body> open tag now
     precedes the header. -->
<div class="container-fluid header-holder tutorials-header" id="header-holder">
  <div class="container">
    <div class="header-container">
      <a class="header-logo" href="https://pytorch.org/" aria-label="PyTorch"></a>

      <div class="main-menu">
        <ul>
          <li>
            <a href="https://pytorch.org/get-started">Get Started</a>
          </li>

          <li>
            <div class="ecosystem-dropdown">
              <!-- NOTE(review): href-less <a> used as a JS-toggled control via
                   data-toggle; a <button type="button"> would be more accessible,
                   but confirm the theme's CSS/JS selectors before swapping. -->
              <a id="dropdownMenuButton" data-toggle="ecosystem-dropdown">
                Ecosystem
              </a>
              <div class="ecosystem-dropdown-menu">
                <!-- FIX: removed the stray duplicate quote that followed the
                     href value; quoted the class attribute values; escaped the
                     bare "&" below as &amp; -->
                <a class="nav-dropdown-item" href="https://pytorch.org/hub">
                  <span class="dropdown-title">Models (Beta)</span>
                  <p>Discover, publish, and reuse pre-trained models</p>
                </a>
                <a class="nav-dropdown-item" href="https://pytorch.org/ecosystem">
                  <span class="dropdown-title">Tools &amp; Libraries</span>
                  <p>Explore the ecosystem of tools and libraries</p>
                </a>
              </div>
            </div>
          </li>

          <li>
            <a href="https://pytorch.org/mobile">Mobile</a>
          </li>

          <li>
            <a href="https://pytorch.org/blog/">Blog</a>
          </li>

          <li>
            <a href="https://pytorch.org/tutorials">Tutorials</a>
          </li>

          <li class="active">
            <a href="https://pytorch.org/docs/stable/index.html">Docs</a>
          </li>

          <li>
            <div class="resources-dropdown">
              <a id="resourcesDropdownButton" data-toggle="resources-dropdown">
                Resources
              </a>
              <div class="resources-dropdown-menu">
                <!-- FIX: removed the stray duplicate quote after the href value
                     and quoted the class attribute values -->
                <a class="nav-dropdown-item" href="https://pytorch.org/resources">
                  <span class="dropdown-title">Developer Resources</span>
                  <p>Find resources and get questions answered</p>
                </a>
                <a class="nav-dropdown-item" href="https://pytorch.org/features">
                  <span class="dropdown-title">About</span>
                  <p>Learn about PyTorch’s features and capabilities</p>
                </a>
              </div>
            </div>
          </li>

          <li>
            <a href="https://github.com/pytorch/pytorch">Github</a>
          </li>
        </ul>
      </div>

      <a class="main-menu-open-button" href="#" data-behavior="open-mobile-menu"></a>
    </div>

  </div>
</div>

   

    

    <div class="table-of-contents-link-wrapper">
      <span>Table of Contents</span>
      <!-- NOTE(review): href="#" anchor toggled by JS via data-behavior; a
           <button> would be preferable — verify theme JS before changing. -->
      <a href="#" class="toggle-table-of-contents" data-behavior="toggle-table-of-contents"></a>
    </div>

    <nav data-toggle="wy-nav-shift" class="pytorch-left-menu" id="pytorch-left-menu">
      <div class="pytorch-side-scroll">
        <div class="pytorch-menu pytorch-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
          <div class="pytorch-left-menu-search">
            <!-- FIX: removed the stray space in "master (1.5.0 )" -->
            <div class="version">
              master (1.5.0)
            </div>

<div role="search">
  <form id="rtd-search-form" class="wy-form" action="search.html" method="get">
    <!-- FIX: a placeholder is not an accessible name; aria-label gives the
         field one without altering the visual design. -->
    <input type="text" name="q" placeholder="Search Docs" aria-label="Search Docs" />
    <input type="hidden" name="check_keywords" value="yes" />
    <input type="hidden" name="area" value="default" />
  </form>
</div>

          </div>

<div>
  <a style="color:#F05732" href="https://pytorch.org/docs/stable/genindex.html">
    You are viewing unstable developer preview docs.
    Click here to view docs for latest stable release.
  </a>
</div>

              <p class="caption"><span class="caption-text">Notes</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="notes/amp_examples.html">Automatic Mixed Precision examples</a></li>
<li class="toctree-l1"><a class="reference internal" href="notes/autograd.html">Autograd mechanics</a></li>
<li class="toctree-l1"><a class="reference internal" href="notes/broadcasting.html">Broadcasting semantics</a></li>
<li class="toctree-l1"><a class="reference internal" href="notes/cpu_threading_torchscript_inference.html">CPU threading and TorchScript inference</a></li>
<li class="toctree-l1"><a class="reference internal" href="notes/cuda.html">CUDA semantics</a></li>
<li class="toctree-l1"><a class="reference internal" href="notes/ddp.html">Distributed Data Parallel</a></li>
<li class="toctree-l1"><a class="reference internal" href="notes/extending.html">Extending PyTorch</a></li>
<li class="toctree-l1"><a class="reference internal" href="notes/faq.html">Frequently Asked Questions</a></li>
<li class="toctree-l1"><a class="reference internal" href="notes/large_scale_deployments.html">Features for large-scale deployments</a></li>
<li class="toctree-l1"><a class="reference internal" href="notes/multiprocessing.html">Multiprocessing best practices</a></li>
<li class="toctree-l1"><a class="reference internal" href="notes/randomness.html">Reproducibility</a></li>
<li class="toctree-l1"><a class="reference internal" href="notes/serialization.html">Serialization semantics</a></li>
<li class="toctree-l1"><a class="reference internal" href="notes/windows.html">Windows FAQ</a></li>
</ul>
<p class="caption"><span class="caption-text">Language Bindings</span></p>
<ul>
<li class="toctree-l1"><a class="reference external" href="https://pytorch.org/cppdocs/">C++ API</a></li>
<li class="toctree-l1"><a class="reference internal" href="packages.html">Javadoc</a></li>
</ul>
<p class="caption"><span class="caption-text">Python API</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="torch.html">torch</a></li>
<li class="toctree-l1"><a class="reference internal" href="nn.html">torch.nn</a></li>
<li class="toctree-l1"><a class="reference internal" href="nn.functional.html">torch.nn.functional</a></li>
<li class="toctree-l1"><a class="reference internal" href="tensors.html">torch.Tensor</a></li>
<li class="toctree-l1"><a class="reference internal" href="tensor_attributes.html">Tensor Attributes</a></li>
<li class="toctree-l1"><a class="reference internal" href="tensor_view.html">Tensor Views</a></li>
<li class="toctree-l1"><a class="reference internal" href="autograd.html">torch.autograd</a></li>
<li class="toctree-l1"><a class="reference internal" href="cuda.html">torch.cuda</a></li>
<li class="toctree-l1"><a class="reference internal" href="amp.html">torch.cuda.amp</a></li>
<li class="toctree-l1"><a class="reference internal" href="distributed.html">torch.distributed</a></li>
<li class="toctree-l1"><a class="reference internal" href="distributions.html">torch.distributions</a></li>
<li class="toctree-l1"><a class="reference internal" href="hub.html">torch.hub</a></li>
<li class="toctree-l1"><a class="reference internal" href="jit.html">torch.jit</a></li>
<li class="toctree-l1"><a class="reference internal" href="nn.init.html">torch.nn.init</a></li>
<li class="toctree-l1"><a class="reference internal" href="onnx.html">torch.onnx</a></li>
<li class="toctree-l1"><a class="reference internal" href="optim.html">torch.optim</a></li>
<li class="toctree-l1"><a class="reference internal" href="quantization.html">Quantization</a></li>
<li class="toctree-l1"><a class="reference internal" href="rpc/index.html">Distributed RPC Framework</a></li>
<li class="toctree-l1"><a class="reference internal" href="random.html">torch.random</a></li>
<li class="toctree-l1"><a class="reference internal" href="sparse.html">torch.sparse</a></li>
<li class="toctree-l1"><a class="reference internal" href="storage.html">torch.Storage</a></li>
<li class="toctree-l1"><a class="reference internal" href="bottleneck.html">torch.utils.bottleneck</a></li>
<li class="toctree-l1"><a class="reference internal" href="checkpoint.html">torch.utils.checkpoint</a></li>
<li class="toctree-l1"><a class="reference internal" href="cpp_extension.html">torch.utils.cpp_extension</a></li>
<li class="toctree-l1"><a class="reference internal" href="data.html">torch.utils.data</a></li>
<li class="toctree-l1"><a class="reference internal" href="dlpack.html">torch.utils.dlpack</a></li>
<li class="toctree-l1"><a class="reference internal" href="model_zoo.html">torch.utils.model_zoo</a></li>
<li class="toctree-l1"><a class="reference internal" href="tensorboard.html">torch.utils.tensorboard</a></li>
<li class="toctree-l1"><a class="reference internal" href="type_info.html">Type Info</a></li>
<li class="toctree-l1"><a class="reference internal" href="named_tensor.html">Named Tensors</a></li>
<li class="toctree-l1"><a class="reference internal" href="name_inference.html">Named Tensors operator coverage</a></li>
<li class="toctree-l1"><a class="reference internal" href="__config__.html">torch.__config__</a></li>
</ul>
<p class="caption"><span class="caption-text">Libraries</span></p>
<ul>
<li class="toctree-l1"><a class="reference external" href="https://pytorch.org/audio">torchaudio</a></li>
<li class="toctree-l1"><a class="reference external" href="https://pytorch.org/text">torchtext</a></li>
<li class="toctree-l1"><a class="reference external" href="https://pytorch.org/elastic/">TorchElastic</a></li>
<li class="toctree-l1"><a class="reference external" href="https://pytorch.org/serve">TorchServe</a></li>
<!-- FIX: was plain http:// — every sibling link here is https -->
<li class="toctree-l1"><a class="reference external" href="https://pytorch.org/xla/">PyTorch on XLA Devices</a></li>
</ul>
<p class="caption"><span class="caption-text">Community</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="community/contribution_guide.html">PyTorch Contribution Guide</a></li>
<li class="toctree-l1"><a class="reference internal" href="community/governance.html">PyTorch Governance</a></li>
<li class="toctree-l1"><a class="reference internal" href="community/persons_of_interest.html">PyTorch Governance | Persons of Interest</a></li>
</ul>

        </div>
      </div>
    </nav>

    <div class="pytorch-container">
      <div class="pytorch-page-level-bar" id="pytorch-page-level-bar">
        <div class="pytorch-breadcrumbs-wrapper">

<!-- Breadcrumb trail for this page: Docs > Index. The blank lines and odd
     indentation are artifacts of the Sphinx template; styling/JS hooks
     (pytorch-breadcrumbs*) depend on these exact class names. -->
<div role="navigation" aria-label="breadcrumbs navigation">

  <ul class="pytorch-breadcrumbs">

      <li>
        <a href="index.html">

            Docs

        </a> &gt;
      </li>


      <li>Index</li>


      <!-- Aside slot is empty on the genindex page (no "view source" link). -->
      <li class="pytorch-breadcrumbs-aside">



      </li>

  </ul>


</div>
        </div>

        <!-- Placeholder populated/handled by theme JS via its id. -->
        <div class="pytorch-shortcuts-wrapper" id="pytorch-shortcuts-wrapper">
          Shortcuts
        </div>
      </div>

      <!-- Main content column. The <section> opened here closes after the
           generated index tables (beyond this excerpt). -->
      <section data-toggle="wy-nav-shift" id="pytorch-content-wrap" class="pytorch-content-wrap">
        <div class="pytorch-content-left">



          <div class="rst-content">

            <div role="main" class="main-content" itemscope="itemscope" itemtype="http://schema.org/Article">
             <article itemprop="articleBody" id="pytorch-article" class="pytorch-article">


<h1 id="index">Index</h1>

<!-- Alphabetical jump links; each targets the id of an <h2> section heading
     further down this page (e.g. href="#A" -> <h2 id="A">). -->
<div class="genindex-jumpbox">
 <a href="#_"><strong>_</strong></a>
 | <a href="#A"><strong>A</strong></a>
 | <a href="#B"><strong>B</strong></a>
 | <a href="#C"><strong>C</strong></a>
 | <a href="#D"><strong>D</strong></a>
 | <a href="#E"><strong>E</strong></a>
 | <a href="#F"><strong>F</strong></a>
 | <a href="#G"><strong>G</strong></a>
 | <a href="#H"><strong>H</strong></a>
 | <a href="#I"><strong>I</strong></a>
 | <a href="#J"><strong>J</strong></a>
 | <a href="#K"><strong>K</strong></a>
 | <a href="#L"><strong>L</strong></a>
 | <a href="#M"><strong>M</strong></a>
 | <a href="#N"><strong>N</strong></a>
 | <a href="#O"><strong>O</strong></a>
 | <a href="#P"><strong>P</strong></a>
 | <a href="#Q"><strong>Q</strong></a>
 | <a href="#R"><strong>R</strong></a>
 | <a href="#S"><strong>S</strong></a>
 | <a href="#T"><strong>T</strong></a>
 | <a href="#U"><strong>U</strong></a>
 | <a href="#V"><strong>V</strong></a>
 | <a href="#W"><strong>W</strong></a>
 | <a href="#X"><strong>X</strong></a>
 | <a href="#Z"><strong>Z</strong></a>

</div>
<!-- Index section for names beginning with "_". Each letter section is one
     single-row table whose cells render two columns of entries; the inline
     width styles come from the Sphinx genindex template and the jump box
     above links here via the h2's id. -->
<h2 id="_">_</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="autograd.html#torch.autograd.function._ContextMethodMixin">_ContextMethodMixin (class in torch.autograd.function)</a>
</li>
      <li><a href="sparse.html#torch.sparse.FloatTensor._indices">_indices() (torch.sparse.FloatTensor method)</a>
</li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="sparse.html#torch.sparse.FloatTensor._nnz">_nnz() (torch.sparse.FloatTensor method)</a>
</li>
      <li><a href="sparse.html#torch.sparse.FloatTensor._values">_values() (torch.sparse.FloatTensor method)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="A">A</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="torch.html#torch.abs">abs() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.abs">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.abs_">abs_() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.transforms.AbsTransform">AbsTransform (class in torch.distributions.transforms)</a>
</li>
      <li><a href="torch.html#torch.acos">acos() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.acos">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.acos_">acos_() (torch.Tensor method)</a>
</li>
      <li><a href="optim.html#torch.optim.Adadelta">Adadelta (class in torch.optim)</a>
</li>
      <li><a href="optim.html#torch.optim.Adagrad">Adagrad (class in torch.optim)</a>
</li>
      <li><a href="optim.html#torch.optim.Adam">Adam (class in torch.optim)</a>
</li>
      <li><a href="optim.html#torch.optim.Adamax">Adamax (class in torch.optim)</a>
</li>
      <li><a href="optim.html#torch.optim.AdamW">AdamW (class in torch.optim)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.adaptive_avg_pool1d">adaptive_avg_pool1d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.adaptive_avg_pool2d">adaptive_avg_pool2d() (in module torch.nn.functional)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.quantized.functional.adaptive_avg_pool2d">(in module torch.nn.quantized.functional)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.adaptive_avg_pool3d">adaptive_avg_pool3d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.adaptive_max_pool1d">adaptive_max_pool1d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.adaptive_max_pool2d">adaptive_max_pool2d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.adaptive_max_pool3d">adaptive_max_pool3d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.AdaptiveAvgPool1d">AdaptiveAvgPool1d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.AdaptiveAvgPool2d">AdaptiveAvgPool2d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.AdaptiveAvgPool3d">AdaptiveAvgPool3d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.AdaptiveLogSoftmaxWithLoss">AdaptiveLogSoftmaxWithLoss (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.AdaptiveMaxPool1d">AdaptiveMaxPool1d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.AdaptiveMaxPool2d">AdaptiveMaxPool2d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.AdaptiveMaxPool3d">AdaptiveMaxPool3d (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.add">add() (in module torch)</a>, <a href="torch.html#torch.add">[1]</a>, <a href="torch.html#torch.add">[2]</a>

      <ul>
        <li><a href="sparse.html#torch.sparse.FloatTensor.add">(torch.sparse.FloatTensor method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.add">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.add_">add_() (torch.sparse.FloatTensor method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.add_">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.Module.add_module">add_module() (torch.nn.Module method)</a>
</li>
      <li><a href="quantization.html#torch.quantization.add_observer_">add_observer_() (in module torch.quantization)</a>
</li>
      <li><a href="optim.html#torch.optim.Optimizer.add_param_group">add_param_group() (torch.optim.Optimizer method)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.PruningContainer.add_pruning_method">add_pruning_method() (torch.nn.utils.prune.PruningContainer method)</a>
</li>
      <li><a href="quantization.html#torch.quantization.add_quant_dequant">add_quant_dequant() (in module torch.quantization)</a>
</li>
      <li><a href="torch.html#torch.addbmm">addbmm() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.addbmm">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.addbmm_">addbmm_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.addcdiv">addcdiv() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.addcdiv">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.addcdiv_">addcdiv_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.addcmul">addcmul() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.addcmul">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.addcmul_">addcmul_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.addmm">addmm() (in module torch)</a>

      <ul>
        <li><a href="sparse.html#torch.sparse.addmm">(in module torch.sparse)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.addmm">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.addmm_">addmm_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.addmv">addmv() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.addmv">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.addmv_">addmv_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.addr">addr() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.addr">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.addr_">addr_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.affine_grid">affine_grid() (in module torch.nn.functional)</a>
</li>
      <li><a href="distributions.html#torch.distributions.transforms.AffineTransform">AffineTransform (class in torch.distributions.transforms)</a>
</li>
      <li><a href="named_tensor.html#torch.Tensor.align_as">align_as() (torch.Tensor method)</a>
</li>
      <li><a href="named_tensor.html#torch.Tensor.align_to">align_to() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.BoolTensor.all">all() (torch.BoolTensor method)</a>
</li>
      <li><a href="distributed.html#torch.distributed.all_gather">all_gather() (in module torch.distributed)</a>
</li>
      <li><a href="distributed.html#torch.distributed.all_gather_multigpu">all_gather_multigpu() (in module torch.distributed)</a>
</li>
      <li><a href="distributed.html#torch.distributed.all_reduce">all_reduce() (in module torch.distributed)</a>
</li>
      <li><a href="distributed.html#torch.distributed.all_reduce_multigpu">all_reduce_multigpu() (in module torch.distributed)</a>
</li>
      <li><a href="torch.html#torch.allclose">allclose() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.allclose">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.allocateByteBuffer(int)">allocateByteBuffer(int) (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.allocateDoubleBuffer(int)">allocateDoubleBuffer(int) (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.allocateFloatBuffer(int)">allocateFloatBuffer(int) (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.allocateIntBuffer(int)">allocateIntBuffer(int) (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.allocateLongBuffer(int)">allocateLongBuffer(int) (Java method)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.alpha_dropout">alpha_dropout() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.AlphaDropout">AlphaDropout (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.angle">angle() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.angle">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.BoolTensor.any">any() (torch.BoolTensor method)</a>
</li>
      <li><a href="nn.html#torch.nn.ModuleList.append">append() (torch.nn.ModuleList method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.ParameterList.append">(torch.nn.ParameterList method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.Module.apply">apply() (torch.nn.Module method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.utils.prune.BasePruningMethod.apply">(torch.nn.utils.prune.BasePruningMethod class method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.CustomFromMask.apply">(torch.nn.utils.prune.CustomFromMask class method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.Identity.apply">(torch.nn.utils.prune.Identity class method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.L1Unstructured.apply">(torch.nn.utils.prune.L1Unstructured class method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.LnStructured.apply">(torch.nn.utils.prune.LnStructured class method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.PruningContainer.apply">(torch.nn.utils.prune.PruningContainer class method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.RandomStructured.apply">(torch.nn.utils.prune.RandomStructured class method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.RandomUnstructured.apply">(torch.nn.utils.prune.RandomUnstructured class method)</a>
</li>
      </ul></li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="tensors.html#torch.Tensor.apply_">apply_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.BasePruningMethod.apply_mask">apply_mask() (torch.nn.utils.prune.BasePruningMethod method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.utils.prune.CustomFromMask.apply_mask">(torch.nn.utils.prune.CustomFromMask method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.Identity.apply_mask">(torch.nn.utils.prune.Identity method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.L1Unstructured.apply_mask">(torch.nn.utils.prune.L1Unstructured method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.LnStructured.apply_mask">(torch.nn.utils.prune.LnStructured method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.PruningContainer.apply_mask">(torch.nn.utils.prune.PruningContainer method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.RandomStructured.apply_mask">(torch.nn.utils.prune.RandomStructured method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.RandomUnstructured.apply_mask">(torch.nn.utils.prune.RandomUnstructured method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.arange">arange() (in module torch)</a>
</li>
      <li><a href="distributions.html#torch.distributions.bernoulli.Bernoulli.arg_constraints">arg_constraints (torch.distributions.bernoulli.Bernoulli attribute)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.beta.Beta.arg_constraints">(torch.distributions.beta.Beta attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.binomial.Binomial.arg_constraints">(torch.distributions.binomial.Binomial attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.categorical.Categorical.arg_constraints">(torch.distributions.categorical.Categorical attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.cauchy.Cauchy.arg_constraints">(torch.distributions.cauchy.Cauchy attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.chi2.Chi2.arg_constraints">(torch.distributions.chi2.Chi2 attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.arg_constraints">(torch.distributions.continuous_bernoulli.ContinuousBernoulli attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.dirichlet.Dirichlet.arg_constraints">(torch.distributions.dirichlet.Dirichlet attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.exponential.Exponential.arg_constraints">(torch.distributions.exponential.Exponential attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.fishersnedecor.FisherSnedecor.arg_constraints">(torch.distributions.fishersnedecor.FisherSnedecor attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gamma.Gamma.arg_constraints">(torch.distributions.gamma.Gamma attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.geometric.Geometric.arg_constraints">(torch.distributions.geometric.Geometric attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gumbel.Gumbel.arg_constraints">(torch.distributions.gumbel.Gumbel attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_cauchy.HalfCauchy.arg_constraints">(torch.distributions.half_cauchy.HalfCauchy attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_normal.HalfNormal.arg_constraints">(torch.distributions.half_normal.HalfNormal attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.independent.Independent.arg_constraints">(torch.distributions.independent.Independent attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.laplace.Laplace.arg_constraints">(torch.distributions.laplace.Laplace attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.log_normal.LogNormal.arg_constraints">(torch.distributions.log_normal.LogNormal attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.arg_constraints">(torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.mixture_same_family.MixtureSameFamily.arg_constraints">(torch.distributions.mixture_same_family.MixtureSameFamily attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multinomial.Multinomial.arg_constraints">(torch.distributions.multinomial.Multinomial attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multivariate_normal.MultivariateNormal.arg_constraints">(torch.distributions.multivariate_normal.MultivariateNormal attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.negative_binomial.NegativeBinomial.arg_constraints">(torch.distributions.negative_binomial.NegativeBinomial attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.normal.Normal.arg_constraints">(torch.distributions.normal.Normal attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.one_hot_categorical.OneHotCategorical.arg_constraints">(torch.distributions.one_hot_categorical.OneHotCategorical attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.pareto.Pareto.arg_constraints">(torch.distributions.pareto.Pareto attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.poisson.Poisson.arg_constraints">(torch.distributions.poisson.Poisson attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.arg_constraints">(torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.RelaxedBernoulli.arg_constraints">(torch.distributions.relaxed_bernoulli.RelaxedBernoulli attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.arg_constraints">(torch.distributions.relaxed_categorical.RelaxedOneHotCategorical attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.studentT.StudentT.arg_constraints">(torch.distributions.studentT.StudentT attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.transformed_distribution.TransformedDistribution.arg_constraints">(torch.distributions.transformed_distribution.TransformedDistribution attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.uniform.Uniform.arg_constraints">(torch.distributions.uniform.Uniform attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.von_mises.VonMises.arg_constraints">(torch.distributions.von_mises.VonMises attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.weibull.Weibull.arg_constraints">(torch.distributions.weibull.Weibull attribute)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.distribution.Distribution.arg_constraints">arg_constraints() (torch.distributions.distribution.Distribution property)</a>
</li>
      <li><a href="torch.html#torch.argmax">argmax() (in module torch)</a>, <a href="torch.html#torch.argmax">[1]</a>, <a href="torch.html#torch.argmax">[2]</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.argmax">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.argmin">argmin() (in module torch)</a>, <a href="torch.html#torch.argmin">[1]</a>, <a href="torch.html#torch.argmin">[2]</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.argmin">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.argsort">argsort() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.argsort">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.as_strided">as_strided() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.as_strided">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.as_tensor">as_tensor() (in module torch)</a>
</li>
      <li><a href="optim.html#torch.optim.ASGD">ASGD (class in torch.optim)</a>
</li>
      <li><a href="torch.html#torch.asin">asin() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.asin">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.asin_">asin_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.atan">atan() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.atan">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.atan2">atan2() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.atan2">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.atan2_">atan2_() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.atan_">atan_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.avg_pool1d">avg_pool1d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.avg_pool2d">avg_pool2d() (in module torch.nn.functional)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.quantized.functional.avg_pool2d">(in module torch.nn.quantized.functional)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.avg_pool3d">avg_pool3d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.AvgPool1d">AvgPool1d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.AvgPool2d">AvgPool2d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.AvgPool3d">AvgPool3d (class in torch.nn)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="B">B</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="distributed.html#torch.distributed.Backend">Backend (class in torch.distributed)</a>
</li>
      <li><a href="autograd.html#torch.autograd.backward">backward() (in module torch.autograd)</a>

      <ul>
        <li><a href="rpc/rpc.html#torch.distributed.autograd.backward">(in module torch.distributed.autograd)</a>
</li>
        <li><a href="autograd.html#torch.autograd.Function.backward">(torch.autograd.Function static method)</a>
</li>
        <li><a href="autograd.html#torch.Tensor.backward">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.baddbmm">baddbmm() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.baddbmm">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.baddbmm_">baddbmm_() (torch.Tensor method)</a>
</li>
      <li><a href="distributed.html#torch.distributed.barrier">barrier() (in module torch.distributed)</a>
</li>
      <li><a href="torch.html#torch.bartlett_window">bartlett_window() (in module torch)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.BasePruningMethod">BasePruningMethod (class in torch.nn.utils.prune)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.batch_norm">batch_norm() (in module torch.nn.functional)</a>
</li>
      <li><a href="distributions.html#torch.distributions.distribution.Distribution.batch_shape">batch_shape() (torch.distributions.distribution.Distribution property)</a>
</li>
      <li><a href="nn.html#torch.nn.BatchNorm1d">BatchNorm1d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.BatchNorm2d">BatchNorm2d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.BatchNorm3d">BatchNorm3d (class in torch.nn)</a>
</li>
      <li><a href="data.html#torch.utils.data.BatchSampler">BatchSampler (class in torch.utils.data)</a>
</li>
      <li><a href="nn.html#torch.nn.BCELoss">BCELoss (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.BCEWithLogitsLoss">BCEWithLogitsLoss (class in torch.nn)</a>
</li>
      <li><a href="distributions.html#torch.distributions.bernoulli.Bernoulli">Bernoulli (class in torch.distributions.bernoulli)</a>
</li>
      <li><a href="torch.html#torch.bernoulli">bernoulli() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.bernoulli">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.bernoulli_">bernoulli_() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.beta.Beta">Beta (class in torch.distributions.beta)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.bfloat16">bfloat16() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.Module.bfloat16">(torch.nn.Module method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.bfloat16">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.Bilinear">Bilinear (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.bilinear">bilinear() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.binary_cross_entropy">binary_cross_entropy() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.binary_cross_entropy_with_logits">binary_cross_entropy_with_logits() (in module torch.nn.functional)</a>
</li>
      <li><a href="torch.html#torch.bincount">bincount() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.bincount">(torch.Tensor method)</a>
</li>
      </ul></li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="distributions.html#torch.distributions.binomial.Binomial">Binomial (class in torch.distributions.binomial)</a>
</li>
      <li><a href="org/pytorch/TensorImageUtils.html#org.pytorch.torchvision.TensorImageUtils.bitmapToFloat32Tensor(Bitmap,%20float[],%20float[])">bitmapToFloat32Tensor(Bitmap, float[], float[]) (Java method)</a>
</li>
      <li><a href="org/pytorch/TensorImageUtils.html#org.pytorch.torchvision.TensorImageUtils.bitmapToFloat32Tensor(Bitmap,%20int,%20int,%20int,%20int,%20float[],%20float[])">bitmapToFloat32Tensor(Bitmap, int, int, int, int, float[], float[]) (Java method)</a>
</li>
      <li><a href="org/pytorch/TensorImageUtils.html#org.pytorch.torchvision.TensorImageUtils.bitmapToFloatBuffer(Bitmap,%20int,%20int,%20int,%20int,%20float[],%20float[],%20FloatBuffer,%20int)">bitmapToFloatBuffer(Bitmap, int, int, int, int, float[], float[], FloatBuffer, int) (Java method)</a>
</li>
      <li><a href="torch.html#torch.bitwise_and">bitwise_and() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.bitwise_and">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.bitwise_and_">bitwise_and_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.bitwise_not">bitwise_not() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.bitwise_not">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.bitwise_not_">bitwise_not_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.bitwise_or">bitwise_or() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.bitwise_or">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.bitwise_or_">bitwise_or_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.bitwise_xor">bitwise_xor() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.bitwise_xor">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.bitwise_xor_">bitwise_xor_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.blackman_window">blackman_window() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.bmm">bmm() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.bmm">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="storage.html#torch.FloatStorage.bool">bool() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.bool">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.BoolTensor">BoolTensor (class in torch)</a>
</li>
      <li><a href="tensors.html#torch.BoolTensor.all">BoolTensor.all() (in module torch)</a>, <a href="tensors.html#torch.BoolTensor.all">[1]</a>
</li>
      <li><a href="tensors.html#torch.BoolTensor.any">BoolTensor.any() (in module torch)</a>, <a href="tensors.html#torch.BoolTensor.any">[1]</a>
</li>
      <li><a href="cuda.html#torch.cuda.comm.broadcast">broadcast() (in module torch.cuda.comm)</a>

      <ul>
        <li><a href="distributed.html#torch.distributed.broadcast">(in module torch.distributed)</a>
</li>
      </ul></li>
      <li><a href="cuda.html#torch.cuda.comm.broadcast_coalesced">broadcast_coalesced() (in module torch.cuda.comm)</a>
</li>
      <li><a href="distributed.html#torch.distributed.broadcast_multigpu">broadcast_multigpu() (in module torch.distributed)</a>
</li>
      <li><a href="torch.html#torch.broadcast_tensors">broadcast_tensors() (in module torch)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.buffers">buffers() (torch.nn.Module method)</a>
</li>
      <li><a href="cpp_extension.html#torch.utils.cpp_extension.BuildExtension">BuildExtension() (in module torch.utils.cpp_extension)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.byte">byte() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.byte">(torch.Tensor method)</a>
</li>
      </ul></li>
  </ul></td>
</tr></table>

<h2 id="C">C</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="nn.init.html#torch.nn.init.calculate_gain">calculate_gain() (in module torch.nn.init)</a>
</li>
      <li><a href="torch.html#torch.can_cast">can_cast() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.cartesian_prod">cartesian_prod() (in module torch)</a>
</li>
      <li><a href="distributions.html#torch.distributions.constraints.cat">cat (in module torch.distributions.constraints)</a>
</li>
      <li><a href="torch.html#torch.cat">cat() (in module torch)</a>
</li>
      <li><a href="distributions.html#torch.distributions.categorical.Categorical">Categorical (class in torch.distributions.categorical)</a>
</li>
      <li><a href="distributions.html#torch.distributions.transforms.CatTransform">CatTransform (class in torch.distributions.transforms)</a>
</li>
      <li><a href="distributions.html#torch.distributions.cauchy.Cauchy">Cauchy (class in torch.distributions.cauchy)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.cauchy_">cauchy_() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.cauchy.Cauchy.cdf">cdf() (torch.distributions.cauchy.Cauchy method)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.cdf">(torch.distributions.continuous_bernoulli.ContinuousBernoulli method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.distribution.Distribution.cdf">(torch.distributions.distribution.Distribution method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.exponential.Exponential.cdf">(torch.distributions.exponential.Exponential method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_cauchy.HalfCauchy.cdf">(torch.distributions.half_cauchy.HalfCauchy method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_normal.HalfNormal.cdf">(torch.distributions.half_normal.HalfNormal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.laplace.Laplace.cdf">(torch.distributions.laplace.Laplace method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.mixture_same_family.MixtureSameFamily.cdf">(torch.distributions.mixture_same_family.MixtureSameFamily method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.normal.Normal.cdf">(torch.distributions.normal.Normal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.transformed_distribution.TransformedDistribution.cdf">(torch.distributions.transformed_distribution.TransformedDistribution method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.uniform.Uniform.cdf">(torch.distributions.uniform.Uniform method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.cdist">cdist() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.ceil">ceil() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.ceil">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.ceil_">ceil_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.html#torch.nn.CELU">CELU (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.celu">celu() (in module torch.nn.functional)</a>
</li>
      <li><a href="torch.html#torch.chain_matmul">chain_matmul() (in module torch)</a>
</li>
      <li><a href="data.html#torch.utils.data.ChainDataset">ChainDataset (class in torch.utils.data)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.char">char() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.char">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.constraints.Constraint.check">check() (torch.distributions.constraints.Constraint method)</a>
</li>
      <li><a href="cpp_extension.html#torch.utils.cpp_extension.check_compiler_abi_compatibility">check_compiler_abi_compatibility() (in module torch.utils.cpp_extension)</a>
</li>
      <li><a href="checkpoint.html#torch.utils.checkpoint.checkpoint">checkpoint() (in module torch.utils.checkpoint)</a>
</li>
      <li><a href="checkpoint.html#torch.utils.checkpoint.checkpoint_sequential">checkpoint_sequential() (in module torch.utils.checkpoint)</a>
</li>
      <li><a href="distributions.html#torch.distributions.chi2.Chi2">Chi2 (class in torch.distributions.chi2)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.children">children() (torch.nn.Module method)</a>
</li>
      <li><a href="torch.html#torch.cholesky">cholesky() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.cholesky">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.cholesky_inverse">cholesky_inverse() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.cholesky_inverse">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.cholesky_solve">cholesky_solve() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.cholesky_solve">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.chunk">chunk() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.chunk">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.clamp">clamp() (in module torch)</a>, <a href="torch.html#torch.clamp">[1]</a>, <a href="torch.html#torch.clamp">[2]</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.clamp">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.clamp_">clamp_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.html#torch.nn.ModuleDict.clear">clear() (torch.nn.ModuleDict method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.ParameterDict.clear">(torch.nn.ParameterDict method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.utils.clip_grad_norm_">clip_grad_norm_() (in module torch.nn.utils)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.clip_grad_value_">clip_grad_value_() (in module torch.nn.utils)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.clone">clone() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="sparse.html#torch.sparse.FloatTensor.clone">(torch.sparse.FloatTensor method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.clone">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.coalesce">coalesce() (torch.sparse.FloatTensor method)</a>
</li>
      <li><a href="jit.html#torch.jit.ScriptModule.code">code() (torch.jit.ScriptModule property)</a>
</li>
      <li><a href="torch.html#torch.combinations">combinations() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.compiled_with_cxx11_abi">compiled_with_cxx11_abi() (in module torch)</a>
</li>
      <li><a href="distributions.html#torch.distributions.mixture_same_family.MixtureSameFamily.component_distribution">component_distribution() (torch.distributions.mixture_same_family.MixtureSameFamily property)</a>
</li>
      <li><a href="distributions.html#torch.distributions.transforms.ComposeTransform">ComposeTransform (class in torch.distributions.transforms)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.BasePruningMethod.compute_mask">compute_mask() (torch.nn.utils.prune.BasePruningMethod method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.utils.prune.LnStructured.compute_mask">(torch.nn.utils.prune.LnStructured method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.PruningContainer.compute_mask">(torch.nn.utils.prune.PruningContainer method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.RandomStructured.compute_mask">(torch.nn.utils.prune.RandomStructured method)</a>
</li>
      </ul></li>
      <li><a href="data.html#torch.utils.data.ConcatDataset">ConcatDataset (class in torch.utils.data)</a>
</li>
      <li><a href="distributions.html#torch.distributions.beta.Beta.concentration0">concentration0() (torch.distributions.beta.Beta property)</a>
</li>
      <li><a href="distributions.html#torch.distributions.beta.Beta.concentration1">concentration1() (torch.distributions.beta.Beta property)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.RRef.confirmed_by_owner">confirmed_by_owner() (torch.distributed.rpc.RRef method)</a>
</li>
      <li><a href="torch.html#torch.conj">conj() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.conj">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.init.html#torch.nn.init.constant_">constant_() (in module torch.nn.init)</a>
</li>
      <li><a href="nn.html#torch.nn.ConstantPad1d">ConstantPad1d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.ConstantPad2d">ConstantPad2d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.ConstantPad3d">ConstantPad3d (class in torch.nn)</a>
</li>
      <li><a href="distributions.html#torch.distributions.constraints.Constraint">Constraint (class in torch.distributions.constraints)</a>
</li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="distributions.html#torch.distributions.constraint_registry.ConstraintRegistry">ConstraintRegistry (class in torch.distributions.constraint_registry)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.autograd.context">context (class in torch.distributed.autograd)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.contiguous">contiguous() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli">ContinuousBernoulli (class in torch.distributions.continuous_bernoulli)</a>
</li>
      <li><a href="nn.html#torch.nn.Conv1d">Conv1d (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.conv1d">conv1d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.Conv2d">Conv2d (class in torch.nn)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.qat.Conv2d">(class in torch.nn.qat)</a>
</li>
        <li><a href="quantization.html#torch.nn.quantized.Conv2d">(class in torch.nn.quantized)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.conv2d">conv2d() (in module torch.nn.functional)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.quantized.functional.conv2d">(in module torch.nn.quantized.functional)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.Conv3d">Conv3d (class in torch.nn)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.quantized.Conv3d">(class in torch.nn.quantized)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.conv3d">conv3d() (in module torch.nn.functional)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.quantized.functional.conv3d">(in module torch.nn.quantized.functional)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.conv_transpose1d">conv_transpose1d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.conv_transpose2d">conv_transpose2d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.conv_transpose3d">conv_transpose3d() (in module torch.nn.functional)</a>
</li>
      <li><a href="quantization.html#torch.nn.intrinsic.ConvBn2d">ConvBn2d (class in torch.nn.intrinsic)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.intrinsic.qat.ConvBn2d">(class in torch.nn.intrinsic.qat)</a>
</li>
      </ul></li>
      <li><a href="quantization.html#torch.nn.intrinsic.ConvBnReLU2d">ConvBnReLU2d (class in torch.nn.intrinsic)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.intrinsic.qat.ConvBnReLU2d">(class in torch.nn.intrinsic.qat)</a>
</li>
      </ul></li>
      <li><a href="quantization.html#torch.quantization.convert">convert() (in module torch.quantization)</a>
</li>
      <li><a href="nn.html#torch.nn.SyncBatchNorm.convert_sync_batchnorm">convert_sync_batchnorm() (torch.nn.SyncBatchNorm class method)</a>
</li>
      <li><a href="quantization.html#torch.nn.intrinsic.ConvReLU2d">ConvReLU2d (class in torch.nn.intrinsic)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.intrinsic.qat.ConvReLU2d">(class in torch.nn.intrinsic.qat)</a>
</li>
        <li><a href="quantization.html#torch.nn.intrinsic.quantized.ConvReLU2d">(class in torch.nn.intrinsic.quantized)</a>
</li>
      </ul></li>
      <li><a href="quantization.html#torch.nn.intrinsic.ConvReLU3d">ConvReLU3d (class in torch.nn.intrinsic)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.intrinsic.quantized.ConvReLU3d">(class in torch.nn.intrinsic.quantized)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.ConvTranspose1d">ConvTranspose1d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.ConvTranspose2d">ConvTranspose2d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.ConvTranspose3d">ConvTranspose3d (class in torch.nn)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.copy_">copy_() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.copy_">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.cos">cos() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.cos">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.cos_">cos_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.cosh">cosh() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.cosh">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.cosh_">cosh_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.cosine_embedding_loss">cosine_embedding_loss() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.cosine_similarity">cosine_similarity() (in module torch.nn.functional)</a>
</li>
      <li><a href="optim.html#torch.optim.lr_scheduler.CosineAnnealingLR">CosineAnnealingLR (class in torch.optim.lr_scheduler)</a>
</li>
      <li><a href="optim.html#torch.optim.lr_scheduler.CosineAnnealingWarmRestarts">CosineAnnealingWarmRestarts (class in torch.optim.lr_scheduler)</a>
</li>
      <li><a href="nn.html#torch.nn.CosineEmbeddingLoss">CosineEmbeddingLoss (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.CosineSimilarity">CosineSimilarity (class in torch.nn)</a>
</li>
      <li><a href="distributions.html#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.covariance_matrix">covariance_matrix (torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal attribute)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.multivariate_normal.MultivariateNormal.covariance_matrix">(torch.distributions.multivariate_normal.MultivariateNormal attribute)</a>
</li>
      </ul></li>
      <li><a href="cpp_extension.html#torch.utils.cpp_extension.CppExtension">CppExtension() (in module torch.utils.cpp_extension)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.cpu">cpu() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.Module.cpu">(torch.nn.Module method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.cpu">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.cross">cross() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.cross">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.cross_entropy">cross_entropy() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.CrossEntropyLoss">CrossEntropyLoss (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.ctc_loss">ctc_loss() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.CTCLoss">CTCLoss (class in torch.nn)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.cuda">cuda() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.Module.cuda">(torch.nn.Module method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.cuda">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="cpp_extension.html#torch.utils.cpp_extension.CUDAExtension">CUDAExtension() (in module torch.utils.cpp_extension)</a>
</li>
      <li><a href="torch.html#torch.cummax">cummax() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.cummax">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.cummin">cummin() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.cummin">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.cumprod">cumprod() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.cumprod">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.cumsum">cumsum() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.cumsum">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="cuda.html#torch.cuda.current_blas_handle">current_blas_handle() (in module torch.cuda)</a>
</li>
      <li><a href="cuda.html#torch.cuda.current_device">current_device() (in module torch.cuda)</a>
</li>
      <li><a href="cuda.html#torch.cuda.current_stream">current_stream() (in module torch.cuda)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.custom_from_mask">custom_from_mask() (in module torch.nn.utils.prune)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.CustomFromMask">CustomFromMask (class in torch.nn.utils.prune)</a>
</li>
      <li><a href="optim.html#torch.optim.lr_scheduler.CyclicLR">CyclicLR (class in torch.optim.lr_scheduler)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="D">D</h2>
<!-- Index section "D". The table below is used purely for layout (two columns
     of <ul> entry lists — no headers, no tabular data), so role="presentation"
     removes the misleading table semantics for assistive technology. -->
<table style="width: 100%" class="indextable genindextable" role="presentation"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="nn.functional.html#torch.nn.parallel.data_parallel">data_parallel() (in module torch.nn.parallel)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.data_ptr">data_ptr() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.data_ptr">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="data.html#torch.utils.data.DataLoader">DataLoader (class in torch.utils.data)</a>
</li>
      <li><a href="nn.html#torch.nn.DataParallel">DataParallel (class in torch.nn)</a>
</li>
      <li><a href="data.html#torch.utils.data.Dataset">Dataset (class in torch.utils.data)</a>
</li>
      <li><a href="quantization.html#torch.quantization.default_eval_fn">default_eval_fn() (in module torch.quantization)</a>
</li>
      <li><a href="torch.html#torch.torch.default_generator">default_generator (torch.torch attribute)</a>
</li>
      <li><a href="cuda.html#torch.cuda.default_stream">default_stream() (in module torch.cuda)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.dense_dim">dense_dim() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.constraints.dependent_property">dependent_property (in module torch.distributions.constraints)</a>
</li>
      <li><a href="quantization.html#torch.nn.quantized.DeQuantize">DeQuantize (class in torch.nn.quantized)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.dequantize">dequantize() (torch.Tensor method)</a>
</li>
      <li><a href="quantization.html#torch.quantization.DeQuantStub">DeQuantStub (class in torch.quantization)</a>
</li>
      <li><a href="org/pytorch/Module.html#org.pytorch.Module.destroy()">destroy() (Java method)</a>
</li>
      <li><a href="torch.html#torch.det">det() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.det">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="autograd.html#torch.Tensor.detach">detach() (torch.Tensor method)</a>
</li>
      <li><a href="autograd.html#torch.Tensor.detach_">detach_() (torch.Tensor method)</a>
</li>
      <li><a href="autograd.html#torch.autograd.detect_anomaly">detect_anomaly (class in torch.autograd)</a>
</li>
      <li><a href="cuda.html#torch.cuda.device">device (class in torch.cuda)</a>

      <ul>
        <li><a href="torch.html#torch._C.Generator.device">(torch._C.Generator attribute)</a>
</li>
        <li><a href="storage.html#torch.FloatStorage.device">(torch.FloatStorage attribute)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.device">(torch.Tensor attribute)</a>
</li>
      </ul></li>
      <li><a href="cuda.html#torch.cuda.device_count">device_count() (in module torch.cuda)</a>
</li>
      <li><a href="cuda.html#torch.cuda.device_of">device_of (class in torch.cuda)</a>
</li>
      <li><a href="distributions.html#torch.distributions.chi2.Chi2.df">df() (torch.distributions.chi2.Chi2 property)</a>
</li>
      <li><a href="torch.html#torch.diag">diag() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.diag">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.diag_embed">diag_embed() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.diag_embed">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.diagflat">diagflat() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.diagflat">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.diagonal">diagonal() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.diagonal">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.dictLongKeyFrom(Map)">dictLongKeyFrom(Map) (Java method)</a>
</li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.dictStringKeyFrom(Map)">dictStringKeyFrom(Map) (Java method)</a>
</li>
      <li><a href="torch.html#torch.digamma">digamma() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.digamma">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.digamma_">digamma_() (torch.Tensor method)</a>
</li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.dim">dim() (torch.sparse.FloatTensor method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.dim">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.init.html#torch.nn.init.dirac_">dirac_() (in module torch.nn.init)</a>
</li>
      <li><a href="distributions.html#torch.distributions.dirichlet.Dirichlet">Dirichlet (class in torch.distributions.dirichlet)</a>
</li>
      <li><a href="torch.html#torch.dist">dist() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.dist">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.parallel.DistributedDataParallel">DistributedDataParallel (class in torch.nn.parallel)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.optim.DistributedOptimizer">DistributedOptimizer (class in torch.distributed.optim)</a>
</li>
      <li><a href="data.html#torch.utils.data.distributed.DistributedSampler">DistributedSampler (class in torch.utils.data.distributed)</a>
</li>
      <li><a href="distributions.html#torch.distributions.distribution.Distribution">Distribution (class in torch.distributions.distribution)</a>
</li>
      <li><a href="torch.html#torch.div">div() (in module torch)</a>, <a href="torch.html#torch.div">[1]</a>, <a href="torch.html#torch.div">[2]</a>

      <ul>
        <li><a href="sparse.html#torch.sparse.FloatTensor.div">(torch.sparse.FloatTensor method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.div">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.div_">div_() (torch.sparse.FloatTensor method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.div_">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.dot">dot() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.dot">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="storage.html#torch.FloatStorage.double">double() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.Module.double">(torch.nn.Module method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.double">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="hub.html#torch.hub.download_url_to_file">download_url_to_file() (in module torch.hub)</a>
</li>
      <li><a href="torch.html#torch.quasirandom.SobolEngine.draw">draw() (torch.quasirandom.SobolEngine method)</a>
</li>
      <li><a href="nn.html#torch.nn.Dropout">Dropout (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.dropout">dropout() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.Dropout2d">Dropout2d (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.dropout2d">dropout2d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.Dropout3d">Dropout3d (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.dropout3d">dropout3d() (in module torch.nn.functional)</a>
</li>
      <li><a href="org/pytorch/DType.html#org.pytorch.DType">DType (Java enum)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.dtype">dtype (torch.FloatStorage attribute)</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_float32.html#org.pytorch.Tensor.Tensor_float32.dtype()">dtype() (Java method)</a>, <a href="org/pytorch/Tensor-Tensor_float64.html#org.pytorch.Tensor.Tensor_float64.dtype()">[1]</a>, <a href="org/pytorch/Tensor-Tensor_int32.html#org.pytorch.Tensor.Tensor_int32.dtype()">[2]</a>, <a href="org/pytorch/Tensor-Tensor_int64.html#org.pytorch.Tensor.Tensor_int64.dtype()">[3]</a>, <a href="org/pytorch/Tensor-Tensor_int8.html#org.pytorch.Tensor.Tensor_int8.dtype()">[4]</a>, <a href="org/pytorch/Tensor-Tensor_uint8.html#org.pytorch.Tensor.Tensor_uint8.dtype()">[5]</a>, <a href="org/pytorch/Tensor.html#org.pytorch.Tensor.dtype()">[6]</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.dtypeJniCode()">dtypeJniCode() (Java method)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.dump_patches">dump_patches (torch.nn.Module attribute)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="E">E</h2>
<!-- Index section "E". The table below is used purely for layout (two columns
     of <ul> entry lists — no headers, no tabular data), so role="presentation"
     removes the misleading table semantics for assistive technology. -->
<table style="width: 100%" class="indextable genindextable" role="presentation"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="torch.html#torch.eig">eig() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.eig">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.einsum">einsum() (in module torch)</a>
</li>
      <li><a href="cuda.html#torch.cuda.Event.elapsed_time">elapsed_time() (torch.cuda.Event method)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.element_size">element_size() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.element_size">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.ELU">ELU (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.elu">elu() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.elu_">elu_() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.Embedding">Embedding (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.embedding">embedding() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.embedding_bag">embedding_bag() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.EmbeddingBag">EmbeddingBag (class in torch.nn)</a>
</li>
      <li><a href="autograd.html#torch.autograd.profiler.emit_nvtx">emit_nvtx (class in torch.autograd.profiler)</a>
</li>
      <li><a href="torch.html#torch.empty">empty() (in module torch)</a>
</li>
      <li><a href="cuda.html#torch.cuda.empty_cache">empty_cache() (in module torch.cuda)</a>
</li>
      <li><a href="torch.html#torch.empty_like">empty_like() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.empty_strided">empty_strided() (in module torch)</a>
</li>
      <li><a href="autograd.html#torch.autograd.enable_grad">enable_grad (class in torch.autograd)</a>
</li>
      <li><a href="torch.html#torch.enable_grad">enable_grad() (in module torch)</a>
</li>
      <li><a href="distributions.html#torch.distributions.bernoulli.Bernoulli.entropy">entropy() (torch.distributions.bernoulli.Bernoulli method)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.beta.Beta.entropy">(torch.distributions.beta.Beta method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.categorical.Categorical.entropy">(torch.distributions.categorical.Categorical method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.cauchy.Cauchy.entropy">(torch.distributions.cauchy.Cauchy method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.entropy">(torch.distributions.continuous_bernoulli.ContinuousBernoulli method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.dirichlet.Dirichlet.entropy">(torch.distributions.dirichlet.Dirichlet method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.distribution.Distribution.entropy">(torch.distributions.distribution.Distribution method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.exp_family.ExponentialFamily.entropy">(torch.distributions.exp_family.ExponentialFamily method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.exponential.Exponential.entropy">(torch.distributions.exponential.Exponential method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gamma.Gamma.entropy">(torch.distributions.gamma.Gamma method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.geometric.Geometric.entropy">(torch.distributions.geometric.Geometric method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gumbel.Gumbel.entropy">(torch.distributions.gumbel.Gumbel method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_cauchy.HalfCauchy.entropy">(torch.distributions.half_cauchy.HalfCauchy method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_normal.HalfNormal.entropy">(torch.distributions.half_normal.HalfNormal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.independent.Independent.entropy">(torch.distributions.independent.Independent method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.laplace.Laplace.entropy">(torch.distributions.laplace.Laplace method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.log_normal.LogNormal.entropy">(torch.distributions.log_normal.LogNormal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.entropy">(torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multivariate_normal.MultivariateNormal.entropy">(torch.distributions.multivariate_normal.MultivariateNormal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.normal.Normal.entropy">(torch.distributions.normal.Normal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.one_hot_categorical.OneHotCategorical.entropy">(torch.distributions.one_hot_categorical.OneHotCategorical method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.pareto.Pareto.entropy">(torch.distributions.pareto.Pareto method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.studentT.StudentT.entropy">(torch.distributions.studentT.StudentT method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.uniform.Uniform.entropy">(torch.distributions.uniform.Uniform method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.weibull.Weibull.entropy">(torch.distributions.weibull.Weibull method)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.bernoulli.Bernoulli.enumerate_support">enumerate_support() (torch.distributions.bernoulli.Bernoulli method)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.binomial.Binomial.enumerate_support">(torch.distributions.binomial.Binomial method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.categorical.Categorical.enumerate_support">(torch.distributions.categorical.Categorical method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.distribution.Distribution.enumerate_support">(torch.distributions.distribution.Distribution method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.independent.Independent.enumerate_support">(torch.distributions.independent.Independent method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.one_hot_categorical.OneHotCategorical.enumerate_support">(torch.distributions.one_hot_categorical.OneHotCategorical method)</a>
</li>
      </ul></li>
      <li>
    environment variable

      <ul>
        <li><a href="jit.html#envvar-PYTORCH_JIT">PYTORCH_JIT</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.eq">eq() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.eq">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.eq_">eq_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.equal">equal() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.equal">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.erf">erf() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.erf">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.erf_">erf_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.erfc">erfc() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.erfc">(torch.Tensor method)</a>
</li>
      </ul></li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="tensors.html#torch.Tensor.erfc_">erfc_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.erfinv">erfinv() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.erfinv">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.erfinv_">erfinv_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.eval">eval() (torch.nn.Module method)</a>
</li>
      <li><a href="cuda.html#torch.cuda.Event">Event (class in torch.cuda)</a>
</li>
      <li><a href="distributions.html#torch.distributions.distribution.Distribution.event_shape">event_shape() (torch.distributions.distribution.Distribution property)</a>
</li>
      <li><a href="torch.html#torch.exp">exp() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.exp">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.exp_">exp_() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.bernoulli.Bernoulli.expand">expand() (torch.distributions.bernoulli.Bernoulli method)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.beta.Beta.expand">(torch.distributions.beta.Beta method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.binomial.Binomial.expand">(torch.distributions.binomial.Binomial method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.categorical.Categorical.expand">(torch.distributions.categorical.Categorical method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.cauchy.Cauchy.expand">(torch.distributions.cauchy.Cauchy method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.chi2.Chi2.expand">(torch.distributions.chi2.Chi2 method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.expand">(torch.distributions.continuous_bernoulli.ContinuousBernoulli method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.dirichlet.Dirichlet.expand">(torch.distributions.dirichlet.Dirichlet method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.distribution.Distribution.expand">(torch.distributions.distribution.Distribution method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.exponential.Exponential.expand">(torch.distributions.exponential.Exponential method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.fishersnedecor.FisherSnedecor.expand">(torch.distributions.fishersnedecor.FisherSnedecor method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gamma.Gamma.expand">(torch.distributions.gamma.Gamma method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.geometric.Geometric.expand">(torch.distributions.geometric.Geometric method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gumbel.Gumbel.expand">(torch.distributions.gumbel.Gumbel method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_cauchy.HalfCauchy.expand">(torch.distributions.half_cauchy.HalfCauchy method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_normal.HalfNormal.expand">(torch.distributions.half_normal.HalfNormal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.independent.Independent.expand">(torch.distributions.independent.Independent method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.laplace.Laplace.expand">(torch.distributions.laplace.Laplace method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.log_normal.LogNormal.expand">(torch.distributions.log_normal.LogNormal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.expand">(torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.mixture_same_family.MixtureSameFamily.expand">(torch.distributions.mixture_same_family.MixtureSameFamily method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multinomial.Multinomial.expand">(torch.distributions.multinomial.Multinomial method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multivariate_normal.MultivariateNormal.expand">(torch.distributions.multivariate_normal.MultivariateNormal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.negative_binomial.NegativeBinomial.expand">(torch.distributions.negative_binomial.NegativeBinomial method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.normal.Normal.expand">(torch.distributions.normal.Normal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.one_hot_categorical.OneHotCategorical.expand">(torch.distributions.one_hot_categorical.OneHotCategorical method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.pareto.Pareto.expand">(torch.distributions.pareto.Pareto method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.poisson.Poisson.expand">(torch.distributions.poisson.Poisson method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.expand">(torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.RelaxedBernoulli.expand">(torch.distributions.relaxed_bernoulli.RelaxedBernoulli method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.expand">(torch.distributions.relaxed_categorical.RelaxedOneHotCategorical method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.studentT.StudentT.expand">(torch.distributions.studentT.StudentT method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.transformed_distribution.TransformedDistribution.expand">(torch.distributions.transformed_distribution.TransformedDistribution method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.uniform.Uniform.expand">(torch.distributions.uniform.Uniform method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.von_mises.VonMises.expand">(torch.distributions.von_mises.VonMises method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.weibull.Weibull.expand">(torch.distributions.weibull.Weibull method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.expand">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.expand_as">expand_as() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.expm1">expm1() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.expm1">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.expm1_">expm1_() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.exponential.Exponential">Exponential (class in torch.distributions.exponential)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.exponential_">exponential_() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.exp_family.ExponentialFamily">ExponentialFamily (class in torch.distributions.exp_family)</a>
</li>
      <li><a href="optim.html#torch.optim.lr_scheduler.ExponentialLR">ExponentialLR (class in torch.optim.lr_scheduler)</a>
</li>
      <li><a href="jit.html#torch.jit.export">export() (in module torch.jit)</a>

      <ul>
        <li><a href="onnx.html#torch.onnx.export">(in module torch.onnx)</a>
</li>
      </ul></li>
      <li><a href="autograd.html#torch.autograd.profiler.profile.export_chrome_trace">export_chrome_trace() (torch.autograd.profiler.profile method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.transforms.ExpTransform">ExpTransform (class in torch.distributions.transforms)</a>
</li>
      <li><a href="nn.html#torch.nn.ModuleList.extend">extend() (torch.nn.ModuleList method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.ParameterList.extend">(torch.nn.ParameterList method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.Module.extra_repr">extra_repr() (torch.nn.Module method)</a>
</li>
      <li><a href="torch.html#torch.eye">eye() (in module torch)</a>
</li>
      <li><a href="nn.init.html#torch.nn.init.eye_">eye_() (in module torch.nn.init)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="F">F</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="quantization.html#torch.quantization.FakeQuantize">FakeQuantize (class in torch.quantization)</a>
</li>
      <li><a href="torch.html#torch.quasirandom.SobolEngine.fast_forward">fast_forward() (torch.quasirandom.SobolEngine method)</a>
</li>
      <li><a href="torch.html#torch.fft">fft() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.fft">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="storage.html#torch.FloatStorage.fill_">fill_() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.fill_">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.fill_diagonal_">fill_diagonal_() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.fishersnedecor.FisherSnedecor">FisherSnedecor (class in torch.distributions.fishersnedecor)</a>
</li>
      <li><a href="nn.html#torch.nn.Flatten">Flatten (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.flatten">flatten() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.flatten">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.RNNBase.flatten_parameters">flatten_parameters() (torch.nn.RNNBase method)</a>
</li>
      <li><a href="torch.html#torch.flip">flip() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.flip">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="storage.html#torch.FloatStorage.float">float() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.Module.float">(torch.nn.Module method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.float">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="org/pytorch/DType.html#org.pytorch.DType.FLOAT32">FLOAT32 (Java field)</a>
</li>
      <li><a href="org/pytorch/DType.html#org.pytorch.DType.FLOAT64">FLOAT64 (Java field)</a>
</li>
      <li><a href="quantization.html#torch.nn.quantized.FloatFunctional">FloatFunctional (class in torch.nn.quantized)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage">FloatStorage (class in torch)</a>
</li>
      <li><a href="sparse.html#torch.sparse.FloatTensor">FloatTensor (class in torch.sparse)</a>
</li>
      <li><a href="torch.html#torch.floor">floor() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.floor">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.floor_">floor_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.floor_divide">floor_divide() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.floor_divide">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.floor_divide_">floor_divide_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.fmod">fmod() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.fmod">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.fmod_">fmod_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.html#torch.nn.Fold">Fold (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.fold">fold() (in module torch.nn.functional)</a>
</li>
      <li><a href="random.html#torch.random.fork_rng">fork_rng() (in module torch.random)</a>, <a href="random.html#torch.random.fork_rng">[1]</a>
</li>
      <li><a href="autograd.html#torch.autograd.Function.forward">forward() (torch.autograd.Function static method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.Module.forward">(torch.nn.Module method)</a>
</li>
        <li><a href="nn.html#torch.nn.MultiheadAttention.forward">(torch.nn.MultiheadAttention method)</a>
</li>
        <li><a href="nn.html#torch.nn.Transformer.forward">(torch.nn.Transformer method)</a>
</li>
        <li><a href="nn.html#torch.nn.TransformerDecoder.forward">(torch.nn.TransformerDecoder method)</a>
</li>
        <li><a href="nn.html#torch.nn.TransformerDecoderLayer.forward">(torch.nn.TransformerDecoderLayer method)</a>
</li>
        <li><a href="nn.html#torch.nn.TransformerEncoder.forward">(torch.nn.TransformerEncoder method)</a>
</li>
        <li><a href="nn.html#torch.nn.TransformerEncoderLayer.forward">(torch.nn.TransformerEncoderLayer method)</a>
</li>
      </ul></li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="org/pytorch/Module.html#org.pytorch.Module.forward(IValue)">forward(IValue) (Java method)</a>
</li>
      <li><a href="torch.html#torch.frac">frac() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.frac">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.frac_">frac_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.html#torch.nn.FractionalMaxPool2d">FractionalMaxPool2d (class in torch.nn)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.from(boolean)">from(boolean) (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.from(double)">from(double) (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.from(long)">from(long) (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.from(String)">from(String) (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.from(Tensor)">from(Tensor) (Java method)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.from_buffer">from_buffer() (torch.FloatStorage static method)</a>
</li>
      <li><a href="dlpack.html#torch.utils.dlpack.from_dlpack">from_dlpack() (in module torch.utils.dlpack)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.from_file">from_file() (torch.FloatStorage static method)</a>
</li>
      <li><a href="quantization.html#torch.nn.qat.Conv2d.from_float">from_float() (torch.nn.qat.Conv2d class method)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.qat.Linear.from_float">(torch.nn.qat.Linear class method)</a>
</li>
        <li><a href="quantization.html#torch.nn.quantized.Conv2d.from_float">(torch.nn.quantized.Conv2d class method)</a>
</li>
        <li><a href="quantization.html#torch.nn.quantized.Conv3d.from_float">(torch.nn.quantized.Conv3d class method)</a>
</li>
        <li><a href="quantization.html#torch.nn.quantized.dynamic.Linear.from_float">(torch.nn.quantized.dynamic.Linear class method)</a>
</li>
        <li><a href="quantization.html#torch.nn.quantized.Linear.from_float">(torch.nn.quantized.Linear class method)</a>
</li>
      </ul></li>
      <li><a href="cuda.html#torch.cuda.Event.from_ipc_handle">from_ipc_handle() (torch.cuda.Event class method)</a>
</li>
      <li><a href="torch.html#torch.from_numpy">from_numpy() (in module torch)</a>
</li>
      <li><a href="nn.html#torch.nn.Embedding.from_pretrained">from_pretrained() (torch.nn.Embedding class method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.EmbeddingBag.from_pretrained">(torch.nn.EmbeddingBag class method)</a>
</li>
      </ul></li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.fromBlob(byte[], long[])">fromBlob(byte[], long[]) (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.fromBlob(ByteBuffer, long[])">fromBlob(ByteBuffer, long[]) (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.fromBlob(DoubleBuffer, long[])">fromBlob(DoubleBuffer, long[]) (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.fromBlob(float[], long[])">fromBlob(float[], long[]) (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.fromBlob(FloatBuffer, long[])">fromBlob(FloatBuffer, long[]) (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.fromBlob(int[], long[])">fromBlob(int[], long[]) (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.fromBlob(IntBuffer, long[])">fromBlob(IntBuffer, long[]) (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.fromBlob(long[], double[])">fromBlob(long[], double[]) (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.fromBlob(long[], long[])">fromBlob(long[], long[]) (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.fromBlob(LongBuffer, long[])">fromBlob(LongBuffer, long[]) (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.fromBlobUnsigned(byte[], long[])">fromBlobUnsigned(byte[], long[]) (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.fromBlobUnsigned(ByteBuffer, long[])">fromBlobUnsigned(ByteBuffer, long[]) (Java method)</a>
</li>
      <li><a href="torch.html#torch.full">full() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.full_like">full_like() (in module torch)</a>
</li>
      <li><a href="autograd.html#torch.autograd.Function">Function (class in torch.autograd)</a>
</li>
      <li><a href="quantization.html#torch.quantization.fuse_modules">fuse_modules() (in module torch.quantization)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="G">G</h2>
<table style="width: 100%" class="indextable genindextable" role="presentation"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="distributions.html#torch.distributions.gamma.Gamma">Gamma (class in torch.distributions.gamma)</a>
</li>
      <li><a href="torch.html#torch.gather">gather() (in module torch)</a>

      <ul>
        <li><a href="cuda.html#torch.cuda.comm.gather">(in module torch.cuda.comm)</a>
</li>
        <li><a href="distributed.html#torch.distributed.gather">(in module torch.distributed)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.gather">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.ge">ge() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.ge">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.ge_">ge_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.html#torch.nn.GELU">GELU (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.gelu">gelu() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.Transformer.generate_square_subsequent_mask">generate_square_subsequent_mask() (torch.nn.Transformer method)</a>
</li>
      <li><a href="torch.html#torch._C.Generator">Generator (class in torch._C)</a>
</li>
      <li><a href="distributions.html#torch.distributions.geometric.Geometric">Geometric (class in torch.distributions.geometric)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.geometric_">geometric_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.geqrf">geqrf() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.geqrf">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.ger">ger() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.ger">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="multiprocessing.html#torch.multiprocessing.get_all_sharing_strategies">get_all_sharing_strategies() (in module torch.multiprocessing)</a>
</li>
      <li><a href="distributed.html#torch.distributed.get_backend">get_backend() (in module torch.distributed)</a>
</li>
      <li><a href="amp.html#torch.cuda.amp.GradScaler.get_backoff_factor">get_backoff_factor() (torch.cuda.amp.GradScaler method)</a>
</li>
      <li><a href="torch.html#torch.get_default_dtype">get_default_dtype() (in module torch)</a>
</li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.get_device">get_device() (torch.sparse.FloatTensor method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.get_device">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="cuda.html#torch.cuda.get_device_capability">get_device_capability() (in module torch.cuda)</a>
</li>
      <li><a href="cuda.html#torch.cuda.get_device_name">get_device_name() (in module torch.cuda)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.autograd.get_gradients">get_gradients() (in module torch.distributed.autograd)</a>
</li>
      <li><a href="amp.html#torch.cuda.amp.GradScaler.get_growth_factor">get_growth_factor() (torch.cuda.amp.GradScaler method)</a>
</li>
      <li><a href="amp.html#torch.cuda.amp.GradScaler.get_growth_interval">get_growth_interval() (torch.cuda.amp.GradScaler method)</a>
</li>
      <li><a href="optim.html#torch.optim.lr_scheduler.CyclicLR.get_lr">get_lr() (torch.optim.lr_scheduler.CyclicLR method)</a>
</li>
      <li><a href="torch.html#torch.get_num_interop_threads">get_num_interop_threads() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.get_num_threads">get_num_threads() (in module torch)</a>
</li>
      <li><a href="quantization.html#torch.quantization.get_observer_dict">get_observer_dict() (in module torch.quantization)</a>
</li>
      <li><a href="distributed.html#torch.distributed.get_rank">get_rank() (in module torch.distributed)</a>
</li>
      <li><a href="torch.html#torch.get_rng_state">get_rng_state() (in module torch)</a>

      <ul>
        <li><a href="cuda.html#torch.cuda.get_rng_state">(in module torch.cuda)</a>
</li>
        <li><a href="random.html#torch.random.get_rng_state">(in module torch.random)</a>, <a href="random.html#torch.random.get_rng_state">[1]</a>
</li>
      </ul></li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="cuda.html#torch.cuda.get_rng_state_all">get_rng_state_all() (in module torch.cuda)</a>
</li>
      <li><a href="amp.html#torch.cuda.amp.GradScaler.get_scale">get_scale() (torch.cuda.amp.GradScaler method)</a>
</li>
      <li><a href="multiprocessing.html#torch.multiprocessing.get_sharing_strategy">get_sharing_strategy() (in module torch.multiprocessing)</a>
</li>
      <li><a href="torch.html#torch._C.Generator.get_state">get_state() (torch._C.Generator method)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.get_worker_info">get_worker_info() (in module torch.distributed.rpc)</a>

      <ul>
        <li><a href="data.html#torch.utils.data.get_worker_info">(in module torch.utils.data)</a>
</li>
      </ul></li>
      <li><a href="distributed.html#torch.distributed.get_world_size">get_world_size() (in module torch.distributed)</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_int8.html#org.pytorch.Tensor.Tensor_int8.getDataAsByteArray()">getDataAsByteArray() (Java method)</a>, <a href="org/pytorch/Tensor.html#org.pytorch.Tensor.getDataAsByteArray()">[1]</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_float64.html#org.pytorch.Tensor.Tensor_float64.getDataAsDoubleArray()">getDataAsDoubleArray() (Java method)</a>, <a href="org/pytorch/Tensor.html#org.pytorch.Tensor.getDataAsDoubleArray()">[1]</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_float32.html#org.pytorch.Tensor.Tensor_float32.getDataAsFloatArray()">getDataAsFloatArray() (Java method)</a>, <a href="org/pytorch/Tensor.html#org.pytorch.Tensor.getDataAsFloatArray()">[1]</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_int32.html#org.pytorch.Tensor.Tensor_int32.getDataAsIntArray()">getDataAsIntArray() (Java method)</a>, <a href="org/pytorch/Tensor.html#org.pytorch.Tensor.getDataAsIntArray()">[1]</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_int64.html#org.pytorch.Tensor.Tensor_int64.getDataAsLongArray()">getDataAsLongArray() (Java method)</a>, <a href="org/pytorch/Tensor.html#org.pytorch.Tensor.getDataAsLongArray()">[1]</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_uint8.html#org.pytorch.Tensor.Tensor_uint8.getDataAsUnsignedByteArray()">getDataAsUnsignedByteArray() (Java method)</a>, <a href="org/pytorch/Tensor.html#org.pytorch.Tensor.getDataAsUnsignedByteArray()">[1]</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_float32.html#org.pytorch.Tensor.Tensor_float32.getRawDataBuffer()">getRawDataBuffer() (Java method)</a>, <a href="org/pytorch/Tensor-Tensor_float64.html#org.pytorch.Tensor.Tensor_float64.getRawDataBuffer()">[1]</a>, <a href="org/pytorch/Tensor-Tensor_int32.html#org.pytorch.Tensor.Tensor_int32.getRawDataBuffer()">[2]</a>, <a href="org/pytorch/Tensor-Tensor_int64.html#org.pytorch.Tensor.Tensor_int64.getRawDataBuffer()">[3]</a>, <a href="org/pytorch/Tensor-Tensor_int8.html#org.pytorch.Tensor.Tensor_int8.getRawDataBuffer()">[4]</a>, <a href="org/pytorch/Tensor-Tensor_uint8.html#org.pytorch.Tensor.Tensor_uint8.getRawDataBuffer()">[5]</a>, <a href="org/pytorch/Tensor.html#org.pytorch.Tensor.getRawDataBuffer()">[6]</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.global_unstructured">global_unstructured() (in module torch.nn.utils.prune)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.glu">glu() (in module torch.nn.functional)</a>
</li>
      <li><a href="autograd.html#torch.Tensor.grad">grad (torch.Tensor attribute)</a>
</li>
      <li><a href="autograd.html#torch.autograd.grad">grad() (in module torch.autograd)</a>
</li>
      <li><a href="autograd.html#torch.autograd.gradcheck">gradcheck() (in module torch.autograd)</a>
</li>
      <li><a href="autograd.html#torch.autograd.gradgradcheck">gradgradcheck() (in module torch.autograd)</a>
</li>
      <li><a href="amp.html#torch.cuda.amp.GradScaler">GradScaler (class in torch.cuda.amp)</a>
</li>
      <li><a href="jit.html#torch.jit.ScriptModule.graph">graph() (torch.jit.ScriptModule property)</a>
</li>
      <li><a href="distributions.html#torch.distributions.constraints.greater_than">greater_than (in module torch.distributions.constraints)</a>
</li>
      <li><a href="distributions.html#torch.distributions.constraints.greater_than_eq">greater_than_eq (in module torch.distributions.constraints)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.grid_sample">grid_sample() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.GroupNorm">GroupNorm (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.GRU">GRU (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.GRUCell">GRUCell (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.gt">gt() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.gt">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.gt_">gt_() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.gumbel.Gumbel">Gumbel (class in torch.distributions.gumbel)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.gumbel_softmax">gumbel_softmax() (in module torch.nn.functional)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="H">H</h2>
<table style="width: 100%" class="indextable genindextable" role="presentation"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="storage.html#torch.FloatStorage.half">half() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.Module.half">(torch.nn.Module method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.half">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.constraints.half_open_interval">half_open_interval (in module torch.distributions.constraints)</a>
</li>
      <li><a href="distributions.html#torch.distributions.half_cauchy.HalfCauchy">HalfCauchy (class in torch.distributions.half_cauchy)</a>
</li>
      <li><a href="distributions.html#torch.distributions.half_normal.HalfNormal">HalfNormal (class in torch.distributions.half_normal)</a>
</li>
      <li><a href="torch.html#torch.hamming_window">hamming_window() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.hann_window">hann_window() (in module torch)</a>
</li>
      <li><a href="nn.html#torch.nn.Hardshrink">Hardshrink (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.hardshrink">hardshrink() (in module torch.nn.functional)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.hardshrink">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.Hardtanh">Hardtanh (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.hardtanh">hardtanh() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.hardtanh_">hardtanh_() (in module torch.nn.functional)</a>
</li>
      <li><a href="distributions.html#torch.distributions.bernoulli.Bernoulli.has_enumerate_support">has_enumerate_support (torch.distributions.bernoulli.Bernoulli attribute)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.binomial.Binomial.has_enumerate_support">(torch.distributions.binomial.Binomial attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.categorical.Categorical.has_enumerate_support">(torch.distributions.categorical.Categorical attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.one_hot_categorical.OneHotCategorical.has_enumerate_support">(torch.distributions.one_hot_categorical.OneHotCategorical attribute)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.independent.Independent.has_enumerate_support">has_enumerate_support() (torch.distributions.independent.Independent property)</a>
</li>
      <li><a href="distributions.html#torch.distributions.beta.Beta.has_rsample">has_rsample (torch.distributions.beta.Beta attribute)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.cauchy.Cauchy.has_rsample">(torch.distributions.cauchy.Cauchy attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.has_rsample">(torch.distributions.continuous_bernoulli.ContinuousBernoulli attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.dirichlet.Dirichlet.has_rsample">(torch.distributions.dirichlet.Dirichlet attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.exponential.Exponential.has_rsample">(torch.distributions.exponential.Exponential attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.fishersnedecor.FisherSnedecor.has_rsample">(torch.distributions.fishersnedecor.FisherSnedecor attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gamma.Gamma.has_rsample">(torch.distributions.gamma.Gamma attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_cauchy.HalfCauchy.has_rsample">(torch.distributions.half_cauchy.HalfCauchy attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_normal.HalfNormal.has_rsample">(torch.distributions.half_normal.HalfNormal attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.laplace.Laplace.has_rsample">(torch.distributions.laplace.Laplace attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.log_normal.LogNormal.has_rsample">(torch.distributions.log_normal.LogNormal attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.has_rsample">(torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.mixture_same_family.MixtureSameFamily.has_rsample">(torch.distributions.mixture_same_family.MixtureSameFamily attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multivariate_normal.MultivariateNormal.has_rsample">(torch.distributions.multivariate_normal.MultivariateNormal attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.normal.Normal.has_rsample">(torch.distributions.normal.Normal attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.RelaxedBernoulli.has_rsample">(torch.distributions.relaxed_bernoulli.RelaxedBernoulli attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.has_rsample">(torch.distributions.relaxed_categorical.RelaxedOneHotCategorical attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.studentT.StudentT.has_rsample">(torch.distributions.studentT.StudentT attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.uniform.Uniform.has_rsample">(torch.distributions.uniform.Uniform attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.von_mises.VonMises.has_rsample">(torch.distributions.von_mises.VonMises attribute)</a>
</li>
      </ul></li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="distributions.html#torch.distributions.independent.Independent.has_rsample">has_rsample() (torch.distributions.independent.Independent property)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.transformed_distribution.TransformedDistribution.has_rsample">(torch.distributions.transformed_distribution.TransformedDistribution property)</a>
</li>
      </ul></li>
      <li><a href="hub.html#torch.hub.help">help() (in module torch.hub)</a>
</li>
      <li><a href="autograd.html#torch.autograd.functional.hessian">hessian() (in module torch.autograd.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.hinge_embedding_loss">hinge_embedding_loss() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.HingeEmbeddingLoss">HingeEmbeddingLoss (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.histc">histc() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.histc">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="quantization.html#torch.quantization.HistogramObserver">HistogramObserver (class in torch.quantization)</a>
</li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.hspmm">hspmm() (torch.sparse.FloatTensor method)</a>
</li>
      <li><a href="autograd.html#torch.autograd.functional.hvp">hvp() (in module torch.autograd.functional)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="I">I</h2>
<table style="width: 100%" class="indextable genindextable" role="presentation"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="distributions.html#torch.distributions.cauchy.Cauchy.icdf">icdf() (torch.distributions.cauchy.Cauchy method)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.icdf">(torch.distributions.continuous_bernoulli.ContinuousBernoulli method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.distribution.Distribution.icdf">(torch.distributions.distribution.Distribution method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.exponential.Exponential.icdf">(torch.distributions.exponential.Exponential method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_cauchy.HalfCauchy.icdf">(torch.distributions.half_cauchy.HalfCauchy method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_normal.HalfNormal.icdf">(torch.distributions.half_normal.HalfNormal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.laplace.Laplace.icdf">(torch.distributions.laplace.Laplace method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.normal.Normal.icdf">(torch.distributions.normal.Normal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.transformed_distribution.TransformedDistribution.icdf">(torch.distributions.transformed_distribution.TransformedDistribution method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.uniform.Uniform.icdf">(torch.distributions.uniform.Uniform method)</a>
</li>
      </ul></li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.WorkerInfo.id">id() (torch.distributed.rpc.WorkerInfo property)</a>
</li>
      <li><a href="nn.html#torch.nn.Identity">Identity (class in torch.nn)</a>

      <ul>
        <li><a href="nn.html#torch.nn.utils.prune.Identity">(class in torch.nn.utils.prune)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.utils.prune.identity">identity() (in module torch.nn.utils.prune)</a>
</li>
      <li><a href="torch.html#torch.ifft">ifft() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.ifft">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="jit.html#torch.jit.ignore">ignore() (in module torch.jit)</a>
</li>
      <li><a href="torch.html#torch.imag">imag() (in module torch)</a>
</li>
      <li><a href="org/pytorch/TensorImageUtils.html#org.pytorch.torchvision.TensorImageUtils.imageYUV420CenterCropToFloat32Tensor(Image, int, int, int, float[], float[])">imageYUV420CenterCropToFloat32Tensor(Image, int, int, int, float[], float[]) (Java method)</a>
</li>
      <li><a href="org/pytorch/TensorImageUtils.html#org.pytorch.torchvision.TensorImageUtils.imageYUV420CenterCropToFloatBuffer(Image, int, int, int, float[], float[], FloatBuffer, int)">imageYUV420CenterCropToFloatBuffer(Image, int, int, int, float[], float[], FloatBuffer, int) (Java method)</a>
</li>
      <li><a href="cpp_extension.html#torch.utils.cpp_extension.include_paths">include_paths() (in module torch.utils.cpp_extension)</a>
</li>
      <li><a href="distributions.html#torch.distributions.independent.Independent">Independent (class in torch.distributions.independent)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.index_add">index_add() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.index_add_">index_add_() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.index_copy">index_copy() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.index_copy_">index_copy_() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.index_fill">index_fill() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.index_fill_">index_fill_() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.index_put">index_put() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.index_put_">index_put_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.index_select">index_select() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.index_select">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.indices">indices() (torch.Tensor method)</a>
</li>
      <li><a href="cuda.html#torch.cuda.init">init() (in module torch.cuda)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.ProcessGroupRpcBackendOptions.init_method">init_method() (torch.distributed.rpc.ProcessGroupRpcBackendOptions property)</a>
</li>
      <li><a href="distributed.html#torch.distributed.init_process_group">init_process_group() (in module torch.distributed)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.init_rpc">init_rpc() (in module torch.distributed.rpc)</a>
</li>
      <li><a href="torch.html#torch.initial_seed">initial_seed() (in module torch)</a>

      <ul>
        <li><a href="cuda.html#torch.cuda.initial_seed">(in module torch.cuda)</a>
</li>
        <li><a href="random.html#torch.random.initial_seed">(in module torch.random)</a>, <a href="random.html#torch.random.initial_seed">[1]</a>
</li>
        <li><a href="torch.html#torch._C.Generator.initial_seed">(torch._C.Generator method)</a>
</li>
      </ul></li>
      <li><a href="jit.html#torch.jit.ScriptModule.inlined_graph">inlined_graph() (torch.jit.ScriptModule property)</a>
</li>
      <li><a href="nn.html#torch.nn.ModuleList.insert">insert() (torch.nn.ModuleList method)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.instance_norm">instance_norm() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.InstanceNorm1d">InstanceNorm1d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.InstanceNorm2d">InstanceNorm2d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.InstanceNorm3d">InstanceNorm3d (class in torch.nn)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.int">int() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.int">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="org/pytorch/DType.html#org.pytorch.DType.INT32">INT32 (Java field)</a>
</li>
      <li><a href="org/pytorch/DType.html#org.pytorch.DType.INT64">INT64 (Java field)</a>
</li>
      <li><a href="org/pytorch/DType.html#org.pytorch.DType.INT8">INT8 (Java field)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.int_repr">int_repr() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.constraints.integer_interval">integer_interval (in module torch.distributions.constraints)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.interpolate">interpolate() (in module torch.nn.functional)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.quantized.functional.interpolate">(in module torch.nn.quantized.functional)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.constraints.interval">interval (in module torch.distributions.constraints)</a>
</li>
      <li><a href="distributions.html#torch.distributions.transforms.Transform.inv">inv() (torch.distributions.transforms.Transform property)</a>
</li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="torch.html#torch.inverse">inverse() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.inverse">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="cuda.html#torch.cuda.ipc_collect">ipc_collect() (in module torch.cuda)</a>
</li>
      <li><a href="cuda.html#torch.cuda.Event.ipc_handle">ipc_handle() (torch.cuda.Event method)</a>
</li>
      <li><a href="distributed.html#torch.distributed.irecv">irecv() (in module torch.distributed)</a>
</li>
      <li><a href="torch.html#torch.irfft">irfft() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.irfft">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="cuda.html#torch.cuda.is_available">is_available() (in module torch.cuda)</a>
</li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.is_coalesced">is_coalesced() (torch.sparse.FloatTensor method)</a>
</li>
      <li><a href="torch.html#torch.is_complex">is_complex() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.is_complex">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.is_contiguous">is_contiguous() (torch.Tensor method)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.is_cuda">is_cuda (torch.FloatStorage attribute)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.is_cuda">(torch.Tensor attribute)</a>
</li>
      </ul></li>
      <li><a href="amp.html#torch.cuda.amp.GradScaler.is_enabled">is_enabled() (torch.cuda.amp.GradScaler method)</a>
</li>
      <li><a href="torch.html#torch.is_floating_point">is_floating_point() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.is_floating_point">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="onnx.html#torch.onnx.is_in_onnx_export">is_in_onnx_export() (in module torch.onnx)</a>
</li>
      <li><a href="cuda.html#torch.cuda.is_initialized">is_initialized() (in module torch.cuda)</a>

      <ul>
        <li><a href="distributed.html#torch.distributed.is_initialized">(in module torch.distributed)</a>
</li>
      </ul></li>
      <li><a href="autograd.html#torch.Tensor.is_leaf">is_leaf (torch.Tensor attribute)</a>
</li>
      <li><a href="distributed.html#torch.distributed.is_mpi_available">is_mpi_available() (in module torch.distributed)</a>
</li>
      <li><a href="distributed.html#torch.distributed.is_nccl_available">is_nccl_available() (in module torch.distributed)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.RRef.is_owner">is_owner() (torch.distributed.rpc.RRef method)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.is_pinned">is_pinned() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.is_pinned">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.utils.prune.is_pruned">is_pruned() (in module torch.nn.utils.prune)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.is_quantized">is_quantized (torch.Tensor attribute)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.is_set_to">is_set_to() (torch.Tensor method)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.is_shared">is_shared() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.is_shared">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.is_signed">is_signed() (torch.Tensor method)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.is_sparse">is_sparse (torch.FloatStorage attribute)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.is_sparse">(torch.Tensor attribute)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.is_storage">is_storage() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.is_tensor">is_tensor() (in module torch)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.isBool()">isBool() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.isBoolList()">isBoolList() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.isDictLongKey()">isDictLongKey() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.isDictStringKey()">isDictStringKey() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.isDouble()">isDouble() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.isDoubleList()">isDoubleList() (Java method)</a>
</li>
      <li><a href="distributed.html#torch.distributed.isend">isend() (in module torch.distributed)</a>
</li>
      <li><a href="torch.html#torch.isfinite">isfinite() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.isinf">isinf() (in module torch)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.isList()">isList() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.isLong()">isLong() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.isLongList()">isLongList() (Java method)</a>
</li>
      <li><a href="torch.html#torch.isnan">isnan() (in module torch)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.isNull()">isNull() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.isString()">isString() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.isTensor()">isTensor() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.isTensorList()">isTensorList() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.isTuple()">isTuple() (Java method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.item">item() (torch.Tensor method)</a>
</li>
      <li><a href="nn.html#torch.nn.ModuleDict.items">items() (torch.nn.ModuleDict method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.ParameterDict.items">(torch.nn.ParameterDict method)</a>
</li>
      </ul></li>
      <li><a href="data.html#torch.utils.data.IterableDataset">IterableDataset (class in torch.utils.data)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue">IValue (Java class)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="J">J</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="autograd.html#torch.autograd.functional.jacobian">jacobian() (in module torch.autograd.functional)</a>
</li>
      <li><a href="org/pytorch/DType.html#org.pytorch.DType.jniCode">jniCode (Java field)</a>
</li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="multiprocessing.html#torch.multiprocessing.SpawnContext.join">join() (torch.multiprocessing.SpawnContext method)</a>
</li>
      <li><a href="autograd.html#torch.autograd.functional.jvp">jvp() (in module torch.autograd.functional)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="K">K</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="nn.init.html#torch.nn.init.kaiming_normal_">kaiming_normal_() (in module torch.nn.init)</a>
</li>
      <li><a href="nn.init.html#torch.nn.init.kaiming_uniform_">kaiming_uniform_() (in module torch.nn.init)</a>
</li>
      <li><a href="autograd.html#torch.autograd.profiler.profile.key_averages">key_averages() (torch.autograd.profiler.profile method)</a>
</li>
      <li><a href="nn.html#torch.nn.ModuleDict.keys">keys() (torch.nn.ModuleDict method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.ParameterDict.keys">(torch.nn.ParameterDict method)</a>
</li>
      </ul></li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="nn.functional.html#torch.nn.functional.kl_div">kl_div() (in module torch.nn.functional)</a>
</li>
      <li><a href="distributions.html#torch.distributions.kl.kl_divergence">kl_divergence() (in module torch.distributions.kl)</a>
</li>
      <li><a href="nn.html#torch.nn.KLDivLoss">KLDivLoss (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.kthvalue">kthvalue() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.kthvalue">(torch.Tensor method)</a>
</li>
      </ul></li>
  </ul></td>
</tr></table>

<h2 id="L">L</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="nn.functional.html#torch.nn.functional.l1_loss">l1_loss() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.l1_unstructured">l1_unstructured() (in module torch.nn.utils.prune)</a>
</li>
      <li><a href="nn.html#torch.nn.L1Loss">L1Loss (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.L1Unstructured">L1Unstructured (class in torch.nn.utils.prune)</a>
</li>
      <li><a href="optim.html#torch.optim.lr_scheduler.LambdaLR">LambdaLR (class in torch.optim.lr_scheduler)</a>
</li>
      <li><a href="distributions.html#torch.distributions.laplace.Laplace">Laplace (class in torch.distributions.laplace)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.layer_norm">layer_norm() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.LayerNorm">LayerNorm (class in torch.nn)</a>
</li>
      <li><a href="optim.html#torch.optim.LBFGS">LBFGS (class in torch.optim)</a>
</li>
      <li><a href="torch.html#torch.le">le() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.le">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.le_">le_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.leaky_relu">leaky_relu() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.leaky_relu_">leaky_relu_() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.LeakyReLU">LeakyReLU (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.lerp">lerp() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.lerp">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.lerp_">lerp_() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.constraints.less_than">less_than (in module torch.distributions.constraints)</a>
</li>
      <li><a href="torch.html#torch.lgamma">lgamma() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.lgamma">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.lgamma_">lgamma_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.html#torch.nn.Linear">Linear (class in torch.nn)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.qat.Linear">(class in torch.nn.qat)</a>
</li>
        <li><a href="quantization.html#torch.nn.quantized.Linear">(class in torch.nn.quantized)</a>
</li>
        <li><a href="quantization.html#torch.nn.quantized.dynamic.Linear">(class in torch.nn.quantized.dynamic)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.linear">linear() (in module torch.nn.functional)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.quantized.functional.linear">(in module torch.nn.quantized.functional)</a>
</li>
      </ul></li>
      <li><a href="quantization.html#torch.nn.intrinsic.LinearReLU">LinearReLU (class in torch.nn.intrinsic)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.intrinsic.qat.LinearReLU">(class in torch.nn.intrinsic.qat)</a>
</li>
        <li><a href="quantization.html#torch.nn.intrinsic.quantized.LinearReLU">(class in torch.nn.intrinsic.quantized)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.linspace">linspace() (in module torch)</a>
</li>
      <li><a href="hub.html#torch.hub.list">list() (in module torch.hub)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.listFrom(boolean)">listFrom(boolean) (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.listFrom(double)">listFrom(double) (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.listFrom(IValue)">listFrom(IValue) (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.listFrom(long)">listFrom(long) (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.listFrom(Tensor)">listFrom(Tensor) (Java method)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.ln_structured">ln_structured() (in module torch.nn.utils.prune)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.LnStructured">LnStructured (class in torch.nn.utils.prune)</a>
</li>
      <li><a href="torch.html#torch.load">load() (in module torch)</a>

      <ul>
        <li><a href="hub.html#torch.hub.load">(in module torch.hub)</a>
</li>
        <li><a href="jit.html#torch.jit.load">(in module torch.jit)</a>
</li>
        <li><a href="cpp_extension.html#torch.utils.cpp_extension.load">(in module torch.utils.cpp_extension)</a>
</li>
      </ul></li>
      <li><a href="org/pytorch/Module.html#org.pytorch.Module.load(String)">load(String) (Java method)</a>
</li>
      <li><a href="cpp_extension.html#torch.utils.cpp_extension.load_inline">load_inline() (in module torch.utils.cpp_extension)</a>
</li>
      <li><a href="autograd.html#torch.autograd.profiler.load_nvprof">load_nvprof() (in module torch.autograd.profiler)</a>
</li>
      <li><a href="amp.html#torch.cuda.amp.GradScaler.load_state_dict">load_state_dict() (torch.cuda.amp.GradScaler method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.Module.load_state_dict">(torch.nn.Module method)</a>
</li>
        <li><a href="optim.html#torch.optim.lr_scheduler.LambdaLR.load_state_dict">(torch.optim.lr_scheduler.LambdaLR method)</a>
</li>
        <li><a href="optim.html#torch.optim.lr_scheduler.MultiplicativeLR.load_state_dict">(torch.optim.lr_scheduler.MultiplicativeLR method)</a>
</li>
        <li><a href="optim.html#torch.optim.Optimizer.load_state_dict">(torch.optim.Optimizer method)</a>
</li>
      </ul></li>
      <li><a href="hub.html#torch.hub.load_state_dict_from_url">load_state_dict_from_url() (in module torch.hub)</a>
</li>
      <li><a href="model_zoo.html#torch.utils.model_zoo.load_url">load_url() (in module torch.utils.model_zoo)</a>
</li>
      <li><a href="torch.html#torch.lobpcg">lobpcg() (in module torch)</a>
</li>
      <li><a href="distributions.html#torch.distributions.log_normal.LogNormal.loc">loc() (torch.distributions.log_normal.LogNormal property)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.local_response_norm">local_response_norm() (in module torch.nn.functional)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.RRef.local_value">local_value() (torch.distributed.rpc.RRef method)</a>
</li>
      <li><a href="nn.html#torch.nn.LocalResponseNorm">LocalResponseNorm (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.log">log() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.log">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.log10">log10() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.log10">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.log10_">log10_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.log1p">log1p() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.log1p">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.log1p_">log1p_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.log2">log2() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.log2">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.log2_">log2_() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.log_">log_() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.transforms.Transform.log_abs_det_jacobian">log_abs_det_jacobian() (torch.distributions.transforms.Transform method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.log_normal_">log_normal_() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.bernoulli.Bernoulli.log_prob">log_prob() (torch.distributions.bernoulli.Bernoulli method)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.beta.Beta.log_prob">(torch.distributions.beta.Beta method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.binomial.Binomial.log_prob">(torch.distributions.binomial.Binomial method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.categorical.Categorical.log_prob">(torch.distributions.categorical.Categorical method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.cauchy.Cauchy.log_prob">(torch.distributions.cauchy.Cauchy method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.log_prob">(torch.distributions.continuous_bernoulli.ContinuousBernoulli method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.dirichlet.Dirichlet.log_prob">(torch.distributions.dirichlet.Dirichlet method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.distribution.Distribution.log_prob">(torch.distributions.distribution.Distribution method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.exponential.Exponential.log_prob">(torch.distributions.exponential.Exponential method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.fishersnedecor.FisherSnedecor.log_prob">(torch.distributions.fishersnedecor.FisherSnedecor method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gamma.Gamma.log_prob">(torch.distributions.gamma.Gamma method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.geometric.Geometric.log_prob">(torch.distributions.geometric.Geometric method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gumbel.Gumbel.log_prob">(torch.distributions.gumbel.Gumbel method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_cauchy.HalfCauchy.log_prob">(torch.distributions.half_cauchy.HalfCauchy method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_normal.HalfNormal.log_prob">(torch.distributions.half_normal.HalfNormal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.independent.Independent.log_prob">(torch.distributions.independent.Independent method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.laplace.Laplace.log_prob">(torch.distributions.laplace.Laplace method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.log_prob">(torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.mixture_same_family.MixtureSameFamily.log_prob">(torch.distributions.mixture_same_family.MixtureSameFamily method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multinomial.Multinomial.log_prob">(torch.distributions.multinomial.Multinomial method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multivariate_normal.MultivariateNormal.log_prob">(torch.distributions.multivariate_normal.MultivariateNormal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.negative_binomial.NegativeBinomial.log_prob">(torch.distributions.negative_binomial.NegativeBinomial method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.normal.Normal.log_prob">(torch.distributions.normal.Normal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.one_hot_categorical.OneHotCategorical.log_prob">(torch.distributions.one_hot_categorical.OneHotCategorical method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.poisson.Poisson.log_prob">(torch.distributions.poisson.Poisson method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.log_prob">(torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.studentT.StudentT.log_prob">(torch.distributions.studentT.StudentT method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.transformed_distribution.TransformedDistribution.log_prob">(torch.distributions.transformed_distribution.TransformedDistribution method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.uniform.Uniform.log_prob">(torch.distributions.uniform.Uniform method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.von_mises.VonMises.log_prob">(torch.distributions.von_mises.VonMises method)</a>
</li>
        <li><a href="nn.html#torch.nn.AdaptiveLogSoftmaxWithLoss.log_prob">(torch.nn.AdaptiveLogSoftmaxWithLoss method)</a>
</li>
      </ul></li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="nn.functional.html#torch.nn.functional.log_softmax">log_softmax() (in module torch.nn.functional)</a>
</li>
      <li><a href="torch.html#torch.logdet">logdet() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.logdet">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.logical_and">logical_and() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.logical_and">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.logical_and_">logical_and_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.logical_not">logical_not() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.logical_not">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.logical_not_">logical_not_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.logical_or">logical_or() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.logical_or">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.logical_or_">logical_or_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.logical_xor">logical_xor() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.logical_xor">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.logical_xor_">logical_xor_() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli">LogitRelaxedBernoulli (class in torch.distributions.relaxed_bernoulli)</a>
</li>
      <li><a href="distributions.html#torch.distributions.bernoulli.Bernoulli.logits">logits (torch.distributions.bernoulli.Bernoulli attribute)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.binomial.Binomial.logits">(torch.distributions.binomial.Binomial attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.categorical.Categorical.logits">(torch.distributions.categorical.Categorical attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.logits">(torch.distributions.continuous_bernoulli.ContinuousBernoulli attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.geometric.Geometric.logits">(torch.distributions.geometric.Geometric attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.negative_binomial.NegativeBinomial.logits">(torch.distributions.negative_binomial.NegativeBinomial attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.logits">(torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli attribute)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.multinomial.Multinomial.logits">logits() (torch.distributions.multinomial.Multinomial property)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.one_hot_categorical.OneHotCategorical.logits">(torch.distributions.one_hot_categorical.OneHotCategorical property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.RelaxedBernoulli.logits">(torch.distributions.relaxed_bernoulli.RelaxedBernoulli property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.logits">(torch.distributions.relaxed_categorical.RelaxedOneHotCategorical property)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.log_normal.LogNormal">LogNormal (class in torch.distributions.log_normal)</a>
</li>
      <li><a href="nn.html#torch.nn.LogSigmoid">LogSigmoid (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.logsigmoid">logsigmoid() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.LogSoftmax">LogSoftmax (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.logspace">logspace() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.logsumexp">logsumexp() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.logsumexp">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="storage.html#torch.FloatStorage.long">long() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.long">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.transforms.LowerCholeskyTransform">LowerCholeskyTransform (class in torch.distributions.transforms)</a>
</li>
      <li><a href="distributions.html#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal">LowRankMultivariateNormal (class in torch.distributions.lowrank_multivariate_normal)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.lp_pool1d">lp_pool1d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.lp_pool2d">lp_pool2d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.LPPool1d">LPPool1d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.LPPool2d">LPPool2d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.LSTM">LSTM (class in torch.nn)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.quantized.dynamic.LSTM">(class in torch.nn.quantized.dynamic)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.LSTMCell">LSTMCell (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.lstsq">lstsq() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.lstsq">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.lt">lt() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.lt">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.lt_">lt_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.lu">lu() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.lu">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.lu_solve">lu_solve() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.lu_solve">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.lu_unpack">lu_unpack() (in module torch)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="M">M</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="torch.html#torch.manual_seed">manual_seed() (in module torch)</a>

      <ul>
        <li><a href="cuda.html#torch.cuda.manual_seed">(in module torch.cuda)</a>
</li>
        <li><a href="random.html#torch.random.manual_seed">(in module torch.random)</a>, <a href="random.html#torch.random.manual_seed">[1]</a>
</li>
        <li><a href="torch.html#torch._C.Generator.manual_seed">(torch._C.Generator method)</a>
</li>
      </ul></li>
      <li><a href="cuda.html#torch.cuda.manual_seed_all">manual_seed_all() (in module torch.cuda)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.map_">map_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.margin_ranking_loss">margin_ranking_loss() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.MarginRankingLoss">MarginRankingLoss (class in torch.nn)</a>
</li>
      <li><a href="cuda.html#torch.cuda.nvtx.mark">mark() (in module torch.cuda.nvtx)</a>
</li>
      <li><a href="autograd.html#torch.autograd.function._ContextMethodMixin.mark_dirty">mark_dirty() (torch.autograd.function._ContextMethodMixin method)</a>
</li>
      <li><a href="autograd.html#torch.autograd.function._ContextMethodMixin.mark_non_differentiable">mark_non_differentiable() (torch.autograd.function._ContextMethodMixin method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.masked_fill">masked_fill() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.masked_fill_">masked_fill_() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.masked_scatter">masked_scatter() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.masked_scatter_">masked_scatter_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.masked_select">masked_select() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.masked_select">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.matmul">matmul() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.matmul">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.matrix_power">matrix_power() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.matrix_power">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.matrix_rank">matrix_rank() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.max">max() (in module torch)</a>, <a href="torch.html#torch.max">[1]</a>, <a href="torch.html#torch.max">[2]</a>, <a href="torch.html#torch.max">[3]</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.max">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="cuda.html#torch.cuda.max_memory_allocated">max_memory_allocated() (in module torch.cuda)</a>
</li>
      <li><a href="cuda.html#torch.cuda.max_memory_cached">max_memory_cached() (in module torch.cuda)</a>
</li>
      <li><a href="cuda.html#torch.cuda.max_memory_reserved">max_memory_reserved() (in module torch.cuda)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.max_pool1d">max_pool1d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.max_pool2d">max_pool2d() (in module torch.nn.functional)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.quantized.functional.max_pool2d">(in module torch.nn.quantized.functional)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.max_pool3d">max_pool3d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.max_unpool1d">max_unpool1d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.max_unpool2d">max_unpool2d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.max_unpool3d">max_unpool3d() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.MaxPool1d">MaxPool1d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.MaxPool2d">MaxPool2d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.MaxPool3d">MaxPool3d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.MaxUnpool1d">MaxUnpool1d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.MaxUnpool2d">MaxUnpool2d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.MaxUnpool3d">MaxUnpool3d (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.mean">mean() (in module torch)</a>, <a href="torch.html#torch.mean">[1]</a>, <a href="torch.html#torch.mean">[2]</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.bernoulli.Bernoulli.mean">(torch.distributions.bernoulli.Bernoulli property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.beta.Beta.mean">(torch.distributions.beta.Beta property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.binomial.Binomial.mean">(torch.distributions.binomial.Binomial property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.categorical.Categorical.mean">(torch.distributions.categorical.Categorical property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.cauchy.Cauchy.mean">(torch.distributions.cauchy.Cauchy property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.mean">(torch.distributions.continuous_bernoulli.ContinuousBernoulli property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.dirichlet.Dirichlet.mean">(torch.distributions.dirichlet.Dirichlet property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.distribution.Distribution.mean">(torch.distributions.distribution.Distribution property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.exponential.Exponential.mean">(torch.distributions.exponential.Exponential property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.fishersnedecor.FisherSnedecor.mean">(torch.distributions.fishersnedecor.FisherSnedecor property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gamma.Gamma.mean">(torch.distributions.gamma.Gamma property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.geometric.Geometric.mean">(torch.distributions.geometric.Geometric property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gumbel.Gumbel.mean">(torch.distributions.gumbel.Gumbel property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_cauchy.HalfCauchy.mean">(torch.distributions.half_cauchy.HalfCauchy property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_normal.HalfNormal.mean">(torch.distributions.half_normal.HalfNormal property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.independent.Independent.mean">(torch.distributions.independent.Independent property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.laplace.Laplace.mean">(torch.distributions.laplace.Laplace property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.log_normal.LogNormal.mean">(torch.distributions.log_normal.LogNormal property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.mean">(torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.mixture_same_family.MixtureSameFamily.mean">(torch.distributions.mixture_same_family.MixtureSameFamily property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multinomial.Multinomial.mean">(torch.distributions.multinomial.Multinomial property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multivariate_normal.MultivariateNormal.mean">(torch.distributions.multivariate_normal.MultivariateNormal property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.negative_binomial.NegativeBinomial.mean">(torch.distributions.negative_binomial.NegativeBinomial property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.normal.Normal.mean">(torch.distributions.normal.Normal property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.one_hot_categorical.OneHotCategorical.mean">(torch.distributions.one_hot_categorical.OneHotCategorical property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.pareto.Pareto.mean">(torch.distributions.pareto.Pareto property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.poisson.Poisson.mean">(torch.distributions.poisson.Poisson property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.studentT.StudentT.mean">(torch.distributions.studentT.StudentT property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.uniform.Uniform.mean">(torch.distributions.uniform.Uniform property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.von_mises.VonMises.mean">(torch.distributions.von_mises.VonMises property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.weibull.Weibull.mean">(torch.distributions.weibull.Weibull property)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.mean">(torch.Tensor method)</a>
</li>
      </ul></li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="torch.html#torch.median">median() (in module torch)</a>, <a href="torch.html#torch.median">[1]</a>, <a href="torch.html#torch.median">[2]</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.median">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="cuda.html#torch.cuda.memory_allocated">memory_allocated() (in module torch.cuda)</a>
</li>
      <li><a href="cuda.html#torch.cuda.memory_cached">memory_cached() (in module torch.cuda)</a>
</li>
      <li><a href="cuda.html#torch.cuda.memory_reserved">memory_reserved() (in module torch.cuda)</a>
</li>
      <li><a href="cuda.html#torch.cuda.memory_snapshot">memory_snapshot() (in module torch.cuda)</a>
</li>
      <li><a href="cuda.html#torch.cuda.memory_stats">memory_stats() (in module torch.cuda)</a>
</li>
      <li><a href="cuda.html#torch.cuda.memory_summary">memory_summary() (in module torch.cuda)</a>
</li>
      <li><a href="torch.html#torch.meshgrid">meshgrid() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.min">min() (in module torch)</a>, <a href="torch.html#torch.min">[1]</a>, <a href="torch.html#torch.min">[2]</a>, <a href="torch.html#torch.min">[3]</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.min">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="quantization.html#torch.quantization.MinMaxObserver">MinMaxObserver (class in torch.quantization)</a>
</li>
      <li><a href="distributions.html#torch.distributions.mixture_same_family.MixtureSameFamily.mixture_distribution">mixture_distribution() (torch.distributions.mixture_same_family.MixtureSameFamily property)</a>
</li>
      <li><a href="distributions.html#torch.distributions.mixture_same_family.MixtureSameFamily">MixtureSameFamily (class in torch.distributions.mixture_same_family)</a>
</li>
      <li><a href="torch.html#torch.mm">mm() (in module torch)</a>

      <ul>
        <li><a href="sparse.html#torch.sparse.mm">(in module torch.sparse)</a>
</li>
        <li><a href="sparse.html#torch.sparse.FloatTensor.mm">(torch.sparse.FloatTensor method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.mm">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.mode">mode() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.mode">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.Module">Module (class in torch.nn)</a>

      <ul>
        <li><a href="org/pytorch/Module.html#org.pytorch.Module">(Java class)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.Module.to">Module.to() (in module torch.nn)</a>, <a href="nn.html#torch.nn.Module.to">[1]</a>, <a href="nn.html#torch.nn.Module.to">[2]</a>, <a href="nn.html#torch.nn.Module.to">[3]</a>
</li>
      <li><a href="nn.html#torch.nn.ModuleDict">ModuleDict (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.ModuleList">ModuleList (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.modules">modules() (torch.nn.Module method)</a>
</li>
      <li><a href="quantization.html#torch.quantization.MovingAverageMinMaxObserver">MovingAverageMinMaxObserver (class in torch.quantization)</a>
</li>
      <li><a href="quantization.html#torch.quantization.MovingAveragePerChannelMinMaxObserver">MovingAveragePerChannelMinMaxObserver (class in torch.quantization)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.mse_loss">mse_loss() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.MSELoss">MSELoss (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.mul">mul() (in module torch)</a>, <a href="torch.html#torch.mul">[1]</a>, <a href="torch.html#torch.mul">[2]</a>

      <ul>
        <li><a href="sparse.html#torch.sparse.FloatTensor.mul">(torch.sparse.FloatTensor method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.mul">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.mul_">mul_() (torch.sparse.FloatTensor method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.mul_">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.multi_margin_loss">multi_margin_loss() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.MultiheadAttention">MultiheadAttention (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.multilabel_margin_loss">multilabel_margin_loss() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.multilabel_soft_margin_loss">multilabel_soft_margin_loss() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.MultiLabelMarginLoss">MultiLabelMarginLoss (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.MultiLabelSoftMarginLoss">MultiLabelSoftMarginLoss (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.MultiMarginLoss">MultiMarginLoss (class in torch.nn)</a>
</li>
      <li><a href="distributions.html#torch.distributions.multinomial.Multinomial">Multinomial (class in torch.distributions.multinomial)</a>
</li>
      <li><a href="torch.html#torch.multinomial">multinomial() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.multinomial">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="optim.html#torch.optim.lr_scheduler.MultiplicativeLR">MultiplicativeLR (class in torch.optim.lr_scheduler)</a>
</li>
      <li><a href="optim.html#torch.optim.lr_scheduler.MultiStepLR">MultiStepLR (class in torch.optim.lr_scheduler)</a>
</li>
      <li><a href="distributions.html#torch.distributions.multivariate_normal.MultivariateNormal">MultivariateNormal (class in torch.distributions.multivariate_normal)</a>
</li>
      <li><a href="torch.html#torch.mv">mv() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.mv">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.mvlgamma">mvlgamma() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.mvlgamma">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.mvlgamma_">mvlgamma_() (torch.Tensor method)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="N">N</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.WorkerInfo.name">name() (torch.distributed.rpc.WorkerInfo property)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.named_buffers">named_buffers() (torch.nn.Module method)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.named_children">named_children() (torch.nn.Module method)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.named_modules">named_modules() (torch.nn.Module method)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.named_parameters">named_parameters() (torch.nn.Module method)</a>
</li>
      <li><a href="named_tensor.html#torch.Tensor.names">names (torch.Tensor attribute)</a>
</li>
      <li><a href="torch.html#torch.narrow">narrow() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.narrow">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.narrow_copy">narrow_copy() (torch.sparse.FloatTensor method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.narrow_copy">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.ndim">ndim (torch.Tensor attribute)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.ndimension">ndimension() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.ne">ne() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.ne">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.ne_">ne_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.neg">neg() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.neg">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.neg_">neg_() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.negative_binomial.NegativeBinomial">NegativeBinomial (class in torch.distributions.negative_binomial)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.nelement">nelement() (torch.Tensor method)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.new">new() (torch.FloatStorage method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.new_empty">new_empty() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.new_full">new_full() (torch.Tensor method)</a>
</li>
      <li><a href="distributed.html#torch.distributed.new_group">new_group() (in module torch.distributed)</a>
</li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="tensors.html#torch.Tensor.new_ones">new_ones() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.new_tensor">new_tensor() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.new_zeros">new_zeros() (torch.Tensor method)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.nll_loss">nll_loss() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.NLLLoss">NLLLoss (class in torch.nn)</a>
</li>
      <li><a href="autograd.html#torch.autograd.no_grad">no_grad (class in torch.autograd)</a>
</li>
      <li><a href="torch.html#torch.no_grad">no_grad() (in module torch)</a>
</li>
      <li><a href="nn.html#torch.nn.parallel.DistributedDataParallel.no_sync">no_sync() (torch.nn.parallel.DistributedDataParallel method)</a>
</li>
      <li><a href="torch.html#torch.nonzero">nonzero() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.nonzero">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="quantization.html#torch.quantization.NoopObserver">NoopObserver (class in torch.quantization)</a>
</li>
      <li><a href="torch.html#torch.norm">norm() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.norm">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.normal.Normal">Normal (class in torch.distributions.normal)</a>
</li>
      <li><a href="torch.html#torch.normal">normal() (in module torch)</a>, <a href="torch.html#torch.normal">[1]</a>, <a href="torch.html#torch.normal">[2]</a>, <a href="torch.html#torch.normal">[3]</a>, <a href="torch.html#torch.normal">[4]</a>
</li>
      <li><a href="nn.init.html#torch.nn.init.normal_">normal_() (in module torch.nn.init)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.normal_">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.normalize">normalize() (in module torch.nn.functional)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.ProcessGroupRpcBackendOptions.num_send_recv_threads">num_send_recv_threads() (torch.distributed.rpc.ProcessGroupRpcBackendOptions property)</a>
</li>
      <li><a href="torch.html#torch.numel">numel() (in module torch)</a>

      <ul>
        <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.numel()">(Java method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.numel">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.numel(long[])">numel(long[]) (Java method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.numpy">numpy() (torch.Tensor method)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="O">O</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="nn.functional.html#torch.nn.functional.one_hot">one_hot() (in module torch.nn.functional)</a>
</li>
      <li><a href="optim.html#torch.optim.lr_scheduler.OneCycleLR">OneCycleLR (class in torch.optim.lr_scheduler)</a>
</li>
      <li><a href="distributions.html#torch.distributions.one_hot_categorical.OneHotCategorical">OneHotCategorical (class in torch.distributions.one_hot_categorical)</a>
</li>
      <li><a href="torch.html#torch.ones">ones() (in module torch)</a>
</li>
      <li><a href="nn.init.html#torch.nn.init.ones_">ones_() (in module torch.nn.init)</a>
</li>
      <li><a href="torch.html#torch.ones_like">ones_like() (in module torch)</a>
</li>
      <li><a href="optim.html#torch.optim.Optimizer">Optimizer (class in torch.optim)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.optionalNull()">optionalNull() (Java method)</a>
</li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="org/pytorch/package-index.html#package-org.pytorch">org.pytorch (package)</a>
</li>
      <li><a href="org/pytorch/torchvision/package-index.html#package-org.pytorch.torchvision">org.pytorch.torchvision (package)</a>
</li>
      <li><a href="torch.html#torch.orgqr">orgqr() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.orgqr">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.ormqr">ormqr() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.ormqr">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.init.html#torch.nn.init.orthogonal_">orthogonal_() (in module torch.nn.init)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.RRef.owner">owner() (torch.distributed.rpc.RRef method)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.RRef.owner_name">owner_name() (torch.distributed.rpc.RRef method)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="P">P</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="nn.html#torch.nn.utils.rnn.pack_padded_sequence">pack_padded_sequence() (in module torch.nn.utils.rnn)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.rnn.pack_sequence">pack_sequence() (in module torch.nn.utils.rnn)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.rnn.PackedSequence">PackedSequence() (in module torch.nn.utils.rnn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.pad">pad() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.rnn.pad_packed_sequence">pad_packed_sequence() (in module torch.nn.utils.rnn)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.rnn.pad_sequence">pad_sequence() (in module torch.nn.utils.rnn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.pairwise_distance">pairwise_distance() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.PairwiseDistance">PairwiseDistance (class in torch.nn)</a>
</li>
      <li><a href="__config__.html#torch.__config__.parallel_info">parallel_info() (in module torch.__config__)</a>
</li>
      <li><a href="distributions.html#torch.distributions.bernoulli.Bernoulli.param_shape">param_shape() (torch.distributions.bernoulli.Bernoulli property)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.binomial.Binomial.param_shape">(torch.distributions.binomial.Binomial property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.categorical.Categorical.param_shape">(torch.distributions.categorical.Categorical property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.param_shape">(torch.distributions.continuous_bernoulli.ContinuousBernoulli property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multinomial.Multinomial.param_shape">(torch.distributions.multinomial.Multinomial property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.negative_binomial.NegativeBinomial.param_shape">(torch.distributions.negative_binomial.NegativeBinomial property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.one_hot_categorical.OneHotCategorical.param_shape">(torch.distributions.one_hot_categorical.OneHotCategorical property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.param_shape">(torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli property)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.Parameter">Parameter (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.ParameterDict">ParameterDict (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.ParameterList">ParameterList (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.parameters">parameters() (torch.nn.Module method)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.parameters_to_vector">parameters_to_vector() (in module torch.nn.utils)</a>
</li>
      <li><a href="distributions.html#torch.distributions.pareto.Pareto">Pareto (class in torch.distributions.pareto)</a>
</li>
      <li><a href="torch.html#torch.pca_lowrank">pca_lowrank() (in module torch)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.pdist">pdist() (in module torch.nn.functional)</a>
</li>
      <li><a href="quantization.html#torch.quantization.PerChannelMinMaxObserver">PerChannelMinMaxObserver (class in torch.quantization)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.permute">permute() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.distribution.Distribution.perplexity">perplexity() (torch.distributions.distribution.Distribution method)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.pin_memory">pin_memory() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.pin_memory">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.pinverse">pinverse() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.pinverse">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.pixel_shuffle">pixel_shuffle() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.PixelShuffle">PixelShuffle (class in torch.nn)</a>
</li>
      <li><a href="distributions.html#torch.distributions.poisson.Poisson">Poisson (class in torch.distributions.poisson)</a>
</li>
      <li><a href="torch.html#torch.poisson">poisson() (in module torch)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.poisson_nll_loss">poisson_nll_loss() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.PoissonNLLLoss">PoissonNLLLoss (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.polygamma">polygamma() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.polygamma">(torch.Tensor method)</a>
</li>
      </ul></li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="tensors.html#torch.Tensor.polygamma_">polygamma_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.html#torch.nn.ModuleDict.pop">pop() (torch.nn.ModuleDict method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.ParameterDict.pop">(torch.nn.ParameterDict method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.pow">pow() (in module torch)</a>, <a href="torch.html#torch.pow">[1]</a>, <a href="torch.html#torch.pow">[2]</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.pow">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.pow_">pow_() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.transforms.PowerTransform">PowerTransform (class in torch.distributions.transforms)</a>
</li>
      <li><a href="distributions.html#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.precision_matrix">precision_matrix (torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal attribute)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.multivariate_normal.MultivariateNormal.precision_matrix">(torch.distributions.multivariate_normal.MultivariateNormal attribute)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.AdaptiveLogSoftmaxWithLoss.predict">predict() (torch.nn.AdaptiveLogSoftmaxWithLoss method)</a>
</li>
      <li><a href="nn.html#torch.nn.PReLU">PReLU (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.prelu">prelu() (in module torch.nn.functional)</a>
</li>
      <li><a href="quantization.html#torch.quantization.prepare">prepare() (in module torch.quantization)</a>
</li>
      <li><a href="quantization.html#torch.quantization.prepare_qat">prepare_qat() (in module torch.quantization)</a>
</li>
      <li><a href="distributions.html#torch.distributions.bernoulli.Bernoulli.probs">probs (torch.distributions.bernoulli.Bernoulli attribute)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.binomial.Binomial.probs">(torch.distributions.binomial.Binomial attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.categorical.Categorical.probs">(torch.distributions.categorical.Categorical attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.probs">(torch.distributions.continuous_bernoulli.ContinuousBernoulli attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.geometric.Geometric.probs">(torch.distributions.geometric.Geometric attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.negative_binomial.NegativeBinomial.probs">(torch.distributions.negative_binomial.NegativeBinomial attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.probs">(torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli attribute)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.multinomial.Multinomial.probs">probs() (torch.distributions.multinomial.Multinomial property)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.one_hot_categorical.OneHotCategorical.probs">(torch.distributions.one_hot_categorical.OneHotCategorical property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.RelaxedBernoulli.probs">(torch.distributions.relaxed_bernoulli.RelaxedBernoulli property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.probs">(torch.distributions.relaxed_categorical.RelaxedOneHotCategorical property)</a>
</li>
      </ul></li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.ProcessGroupRpcBackendOptions">ProcessGroupRpcBackendOptions (class in torch.distributed.rpc)</a>
</li>
      <li><a href="torch.html#torch.prod">prod() (in module torch)</a>, <a href="torch.html#torch.prod">[1]</a>, <a href="torch.html#torch.prod">[2]</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.prod">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="autograd.html#torch.autograd.profiler.profile">profile (class in torch.autograd.profiler)</a>
</li>
      <li><a href="torch.html#torch.promote_types">promote_types() (in module torch)</a>
</li>
      <li><a href="quantization.html#torch.quantization.propagate_qconfig_">propagate_qconfig_() (in module torch.quantization)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.BasePruningMethod.prune">prune() (torch.nn.utils.prune.BasePruningMethod method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.utils.prune.CustomFromMask.prune">(torch.nn.utils.prune.CustomFromMask method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.Identity.prune">(torch.nn.utils.prune.Identity method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.L1Unstructured.prune">(torch.nn.utils.prune.L1Unstructured method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.LnStructured.prune">(torch.nn.utils.prune.LnStructured method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.PruningContainer.prune">(torch.nn.utils.prune.PruningContainer method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.RandomStructured.prune">(torch.nn.utils.prune.RandomStructured method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.RandomUnstructured.prune">(torch.nn.utils.prune.RandomUnstructured method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.utils.prune.PruningContainer">PruningContainer (class in torch.nn.utils.prune)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.put_">put_() (torch.Tensor method)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="Q">Q</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="tensors.html#torch.Tensor.q_per_channel_axis">q_per_channel_axis() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.q_per_channel_scales">q_per_channel_scales() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.q_per_channel_zero_points">q_per_channel_zero_points() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.q_scale">q_scale() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.q_zero_point">q_zero_point() (torch.Tensor method)</a>
</li>
      <li><a href="quantization.html#torch.quantization.QConfig">QConfig (class in torch.quantization)</a>
</li>
      <li><a href="quantization.html#torch.quantization.QConfigDynamic">QConfigDynamic (class in torch.quantization)</a>
</li>
      <li><a href="quantization.html#torch.nn.quantized.QFunctional">QFunctional (class in torch.nn.quantized)</a>
</li>
      <li><a href="torch.html#torch.qr">qr() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.qr">(torch.Tensor method)</a>
</li>
      </ul></li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="tensors.html#torch.Tensor.qscheme">qscheme() (torch.Tensor method)</a>
</li>
      <li><a href="quantization.html#torch.nn.quantized.Quantize">Quantize (class in torch.nn.quantized)</a>
</li>
      <li><a href="quantization.html#torch.quantization.quantize">quantize() (in module torch.quantization)</a>
</li>
      <li><a href="quantization.html#torch.quantization.quantize_dynamic">quantize_dynamic() (in module torch.quantization)</a>
</li>
      <li><a href="torch.html#torch.quantize_per_channel">quantize_per_channel() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.quantize_per_tensor">quantize_per_tensor() (in module torch)</a>
</li>
      <li><a href="quantization.html#torch.quantization.quantize_qat">quantize_qat() (in module torch.quantization)</a>
</li>
      <li><a href="quantization.html#torch.quantization.QuantStub">QuantStub (class in torch.quantization)</a>
</li>
      <li><a href="quantization.html#torch.quantization.QuantWrapper">QuantWrapper (class in torch.quantization)</a>
</li>
      <li><a href="cuda.html#torch.cuda.Event.query">query() (torch.cuda.Event method)</a>

      <ul>
        <li><a href="cuda.html#torch.cuda.Stream.query">(torch.cuda.Stream method)</a>
</li>
      </ul></li>
  </ul></td>
</tr></table>

<h2 id="R">R</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="torch.html#torch.rand">rand() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.rand_like">rand_like() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.randint">randint() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.randint_like">randint_like() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.randn">randn() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.randn_like">randn_like() (in module torch)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.random_">random_() (torch.Tensor method)</a>
</li>
      <li><a href="data.html#torch.utils.data.random_split">random_split() (in module torch.utils.data)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.random_structured">random_structured() (in module torch.nn.utils.prune)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.random_unstructured">random_unstructured() (in module torch.nn.utils.prune)</a>
</li>
      <li><a href="data.html#torch.utils.data.RandomSampler">RandomSampler (class in torch.utils.data)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.RandomStructured">RandomStructured (class in torch.nn.utils.prune)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.RandomUnstructured">RandomUnstructured (class in torch.nn.utils.prune)</a>
</li>
      <li><a href="torch.html#torch.randperm">randperm() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.range">range() (in module torch)</a>
</li>
      <li><a href="cuda.html#torch.cuda.nvtx.range_pop">range_pop() (in module torch.cuda.nvtx)</a>
</li>
      <li><a href="cuda.html#torch.cuda.nvtx.range_push">range_push() (in module torch.cuda.nvtx)</a>
</li>
      <li><a href="torch.html#torch.real">real() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.reciprocal">reciprocal() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.reciprocal">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.reciprocal_">reciprocal_() (torch.Tensor method)</a>
</li>
      <li><a href="cuda.html#torch.cuda.Event.record">record() (torch.cuda.Event method)</a>
</li>
      <li><a href="cuda.html#torch.cuda.Stream.record_event">record_event() (torch.cuda.Stream method)</a>
</li>
      <li><a href="autograd.html#torch.autograd.profiler.record_function">record_function (class in torch.autograd.profiler)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.record_stream">record_stream() (torch.Tensor method)</a>
</li>
      <li><a href="quantization.html#torch.quantization.RecordingObserver">RecordingObserver (class in torch.quantization)</a>
</li>
      <li><a href="distributed.html#torch.distributed.recv">recv() (in module torch.distributed)</a>
</li>
      <li><a href="distributed.html#torch.distributed.reduce">reduce() (in module torch.distributed)</a>
</li>
      <li><a href="cuda.html#torch.cuda.comm.reduce_add">reduce_add() (in module torch.cuda.comm)</a>
</li>
      <li><a href="distributed.html#torch.distributed.reduce_multigpu">reduce_multigpu() (in module torch.distributed)</a>
</li>
      <li><a href="distributed.html#torch.distributed.reduce_op">reduce_op (class in torch.distributed)</a>
</li>
      <li><a href="optim.html#torch.optim.lr_scheduler.ReduceLROnPlateau">ReduceLROnPlateau (class in torch.optim.lr_scheduler)</a>
</li>
      <li><a href="distributed.html#torch.distributed.ReduceOp">ReduceOp (class in torch.distributed)</a>
</li>
      <li><a href="named_tensor.html#torch.Tensor.refine_names">refine_names() (torch.Tensor method)</a>
</li>
      <li><a href="nn.html#torch.nn.ReflectionPad1d">ReflectionPad1d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.ReflectionPad2d">ReflectionPad2d (class in torch.nn)</a>
</li>
      <li><a href="distributions.html#torch.distributions.constraint_registry.ConstraintRegistry.register">register() (torch.distributions.constraint_registry.ConstraintRegistry method)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.register_backward_hook">register_backward_hook() (torch.nn.Module method)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.register_buffer">register_buffer() (torch.nn.Module method)</a>
</li>
      <li><a href="onnx.html#torch.onnx.register_custom_op_symbolic">register_custom_op_symbolic() (in module torch.onnx)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.register_forward_hook">register_forward_hook() (torch.nn.Module method)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.register_forward_pre_hook">register_forward_pre_hook() (torch.nn.Module method)</a>
</li>
      <li><a href="autograd.html#torch.Tensor.register_hook">register_hook() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.kl.register_kl">register_kl() (in module torch.distributions.kl)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.register_parameter">register_parameter() (torch.nn.Module method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.RelaxedBernoulli">RelaxedBernoulli (class in torch.distributions.relaxed_bernoulli)</a>
</li>
      <li><a href="distributions.html#torch.distributions.relaxed_categorical.RelaxedOneHotCategorical">RelaxedOneHotCategorical (class in torch.distributions.relaxed_categorical)</a>
</li>
      <li><a href="nn.html#torch.nn.ReLU">ReLU (class in torch.nn)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.quantized.ReLU">(class in torch.nn.quantized)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.relu">relu() (in module torch.nn.functional)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.quantized.functional.relu">(in module torch.nn.quantized.functional)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.ReLU6">ReLU6 (class in torch.nn)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.quantized.ReLU6">(class in torch.nn.quantized)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.relu6">relu6() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.relu_">relu_() (in module torch.nn.functional)</a>
</li>
      <li><a href="torch.html#torch.remainder">remainder() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.remainder">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.remainder_">remainder_() (torch.Tensor method)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.remote">remote() (in module torch.distributed.rpc)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.prune.remove">remove() (in module torch.nn.utils.prune)</a>

      <ul>
        <li><a href="nn.html#torch.nn.utils.prune.BasePruningMethod.remove">(torch.nn.utils.prune.BasePruningMethod method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.CustomFromMask.remove">(torch.nn.utils.prune.CustomFromMask method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.Identity.remove">(torch.nn.utils.prune.Identity method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.L1Unstructured.remove">(torch.nn.utils.prune.L1Unstructured method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.LnStructured.remove">(torch.nn.utils.prune.LnStructured method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.PruningContainer.remove">(torch.nn.utils.prune.PruningContainer method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.RandomStructured.remove">(torch.nn.utils.prune.RandomStructured method)</a>
</li>
        <li><a href="nn.html#torch.nn.utils.prune.RandomUnstructured.remove">(torch.nn.utils.prune.RandomUnstructured method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.utils.remove_spectral_norm">remove_spectral_norm() (in module torch.nn.utils)</a>
</li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="nn.html#torch.nn.utils.remove_weight_norm">remove_weight_norm() (in module torch.nn.utils)</a>
</li>
      <li><a href="named_tensor.html#torch.Tensor.rename">rename() (torch.Tensor method)</a>
</li>
      <li><a href="named_tensor.html#torch.Tensor.rename_">rename_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.renorm">renorm() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.renorm">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.renorm_">renorm_() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.repeat">repeat() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.repeat_interleave">repeat_interleave() (in module torch)</a>, <a href="torch.html#torch.repeat_interleave">[1]</a>, <a href="torch.html#torch.repeat_interleave">[2]</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.repeat_interleave">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.ReplicationPad1d">ReplicationPad1d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.ReplicationPad2d">ReplicationPad2d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.ReplicationPad3d">ReplicationPad3d (class in torch.nn)</a>
</li>
      <li><a href="autograd.html#torch.Tensor.requires_grad">requires_grad (torch.Tensor attribute)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.requires_grad_">requires_grad_() (torch.nn.Module method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.requires_grad_">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.quasirandom.SobolEngine.reset">reset() (torch.quasirandom.SobolEngine method)</a>
</li>
      <li><a href="cuda.html#torch.cuda.reset_max_memory_allocated">reset_max_memory_allocated() (in module torch.cuda)</a>
</li>
      <li><a href="cuda.html#torch.cuda.reset_max_memory_cached">reset_max_memory_cached() (in module torch.cuda)</a>
</li>
      <li><a href="torch.html#torch.reshape">reshape() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.reshape">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.reshape_as">reshape_as() (torch.Tensor method)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.resize_">resize_() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.resize_">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.resize_as_">resize_as_() (torch.Tensor method)</a>
</li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.resizeAs_">resizeAs_() (torch.sparse.FloatTensor method)</a>
</li>
      <li><a href="torch.html#torch.result_type">result_type() (in module torch)</a>
</li>
      <li><a href="autograd.html#torch.Tensor.retain_grad">retain_grad() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.rfft">rfft() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.rfft">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="optim.html#torch.optim.RMSprop">RMSprop (class in torch.optim)</a>
</li>
      <li><a href="nn.html#torch.nn.RNN">RNN (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.RNNBase">RNNBase (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.RNNCell">RNNCell (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.roll">roll() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.roll">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.rot90">rot90() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.rot90">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.round">round() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.round">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.round_">round_() (torch.Tensor method)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.rpc_async">rpc_async() (in module torch.distributed.rpc)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.rpc_sync">rpc_sync() (in module torch.distributed.rpc)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.ProcessGroupRpcBackendOptions.rpc_timeout">rpc_timeout() (torch.distributed.rpc.ProcessGroupRpcBackendOptions property)</a>
</li>
      <li><a href="optim.html#torch.optim.Rprop">Rprop (class in torch.optim)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.RRef">RRef (class in torch.distributed.rpc)</a>
</li>
      <li><a href="nn.html#torch.nn.RReLU">RReLU (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.rrelu">rrelu() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.rrelu_">rrelu_() (in module torch.nn.functional)</a>
</li>
      <li><a href="distributions.html#torch.distributions.beta.Beta.rsample">rsample() (torch.distributions.beta.Beta method)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.cauchy.Cauchy.rsample">(torch.distributions.cauchy.Cauchy method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.rsample">(torch.distributions.continuous_bernoulli.ContinuousBernoulli method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.dirichlet.Dirichlet.rsample">(torch.distributions.dirichlet.Dirichlet method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.distribution.Distribution.rsample">(torch.distributions.distribution.Distribution method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.exponential.Exponential.rsample">(torch.distributions.exponential.Exponential method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.fishersnedecor.FisherSnedecor.rsample">(torch.distributions.fishersnedecor.FisherSnedecor method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gamma.Gamma.rsample">(torch.distributions.gamma.Gamma method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.independent.Independent.rsample">(torch.distributions.independent.Independent method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.laplace.Laplace.rsample">(torch.distributions.laplace.Laplace method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.rsample">(torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multivariate_normal.MultivariateNormal.rsample">(torch.distributions.multivariate_normal.MultivariateNormal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.normal.Normal.rsample">(torch.distributions.normal.Normal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.rsample">(torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.studentT.StudentT.rsample">(torch.distributions.studentT.StudentT method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.transformed_distribution.TransformedDistribution.rsample">(torch.distributions.transformed_distribution.TransformedDistribution method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.uniform.Uniform.rsample">(torch.distributions.uniform.Uniform method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.rsqrt">rsqrt() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.rsqrt">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.rsqrt_">rsqrt_() (torch.Tensor method)</a>
</li>
      <li><a href="org/pytorch/Module.html#org.pytorch.Module.runMethod(String,%20IValue)">runMethod(String, IValue) (Java method)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="S">S</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="distributions.html#torch.distributions.bernoulli.Bernoulli.sample">sample() (torch.distributions.bernoulli.Bernoulli method)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.binomial.Binomial.sample">(torch.distributions.binomial.Binomial method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.categorical.Categorical.sample">(torch.distributions.categorical.Categorical method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.sample">(torch.distributions.continuous_bernoulli.ContinuousBernoulli method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.distribution.Distribution.sample">(torch.distributions.distribution.Distribution method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.geometric.Geometric.sample">(torch.distributions.geometric.Geometric method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.independent.Independent.sample">(torch.distributions.independent.Independent method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.mixture_same_family.MixtureSameFamily.sample">(torch.distributions.mixture_same_family.MixtureSameFamily method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multinomial.Multinomial.sample">(torch.distributions.multinomial.Multinomial method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.negative_binomial.NegativeBinomial.sample">(torch.distributions.negative_binomial.NegativeBinomial method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.normal.Normal.sample">(torch.distributions.normal.Normal method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.one_hot_categorical.OneHotCategorical.sample">(torch.distributions.one_hot_categorical.OneHotCategorical method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.poisson.Poisson.sample">(torch.distributions.poisson.Poisson method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.transformed_distribution.TransformedDistribution.sample">(torch.distributions.transformed_distribution.TransformedDistribution method)</a>
</li>
        <li><a href="distributions.html#torch.distributions.von_mises.VonMises.sample">(torch.distributions.von_mises.VonMises method)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.distribution.Distribution.sample_n">sample_n() (torch.distributions.distribution.Distribution method)</a>
</li>
      <li><a href="data.html#torch.utils.data.Sampler">Sampler (class in torch.utils.data)</a>
</li>
      <li><a href="torch.html#torch.save">save() (in module torch)</a>

      <ul>
        <li><a href="jit.html#torch.jit.save">(in module torch.jit)</a>
</li>
        <li><a href="jit.html#torch.jit.ScriptModule.save">(torch.jit.ScriptModule method)</a>
</li>
      </ul></li>
      <li><a href="autograd.html#torch.autograd.function._ContextMethodMixin.save_for_backward">save_for_backward() (torch.autograd.function._ContextMethodMixin method)</a>
</li>
      <li><a href="amp.html#torch.cuda.amp.GradScaler.scale">scale() (torch.cuda.amp.GradScaler method)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.half_cauchy.HalfCauchy.scale">(torch.distributions.half_cauchy.HalfCauchy property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_normal.HalfNormal.scale">(torch.distributions.half_normal.HalfNormal property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.log_normal.LogNormal.scale">(torch.distributions.log_normal.LogNormal property)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.scale_tril">scale_tril (torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal attribute)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.multivariate_normal.MultivariateNormal.scale_tril">(torch.distributions.multivariate_normal.MultivariateNormal attribute)</a>
</li>
      </ul></li>
      <li><a href="cuda.html#torch.cuda.comm.scatter">scatter() (in module torch.cuda.comm)</a>

      <ul>
        <li><a href="distributed.html#torch.distributed.scatter">(in module torch.distributed)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.scatter">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.scatter_">scatter_() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.scatter_add">scatter_add() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.scatter_add_">scatter_add_() (torch.Tensor method)</a>
</li>
      <li><a href="jit.html#torch.jit.script">script() (in module torch.jit)</a>
</li>
      <li><a href="jit.html#torch.jit.ScriptFunction">ScriptFunction (class in torch.jit)</a>
</li>
      <li><a href="jit.html#torch.jit.ScriptModule">ScriptModule (class in torch.jit)</a>
</li>
      <li><a href="torch.html#torch.seed">seed() (in module torch)</a>

      <ul>
        <li><a href="cuda.html#torch.cuda.seed">(in module torch.cuda)</a>
</li>
        <li><a href="random.html#torch.random.seed">(in module torch.random)</a>, <a href="random.html#torch.random.seed">[1]</a>
</li>
        <li><a href="torch.html#torch._C.Generator.seed">(torch._C.Generator method)</a>
</li>
      </ul></li>
      <li><a href="cuda.html#torch.cuda.seed_all">seed_all() (in module torch.cuda)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.select">select() (torch.Tensor method)</a>
</li>
      <li><a href="autograd.html#torch.autograd.profiler.profile.self_cpu_time_total">self_cpu_time_total() (torch.autograd.profiler.profile property)</a>
</li>
      <li><a href="nn.html#torch.nn.SELU">SELU (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.selu">selu() (in module torch.nn.functional)</a>
</li>
      <li><a href="distributed.html#torch.distributed.send">send() (in module torch.distributed)</a>
</li>
      <li><a href="nn.html#torch.nn.Sequential">Sequential (class in torch.nn)</a>
</li>
      <li><a href="data.html#torch.utils.data.SequentialSampler">SequentialSampler (class in torch.utils.data)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.set_">set_() (torch.Tensor method)</a>
</li>
      <li><a href="amp.html#torch.cuda.amp.GradScaler.set_backoff_factor">set_backoff_factor() (torch.cuda.amp.GradScaler method)</a>
</li>
      <li><a href="torch.html#torch.set_default_dtype">set_default_dtype() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.set_default_tensor_type">set_default_tensor_type() (in module torch)</a>
</li>
      <li><a href="autograd.html#torch.autograd.set_detect_anomaly">set_detect_anomaly (class in torch.autograd)</a>
</li>
      <li><a href="cuda.html#torch.cuda.set_device">set_device() (in module torch.cuda)</a>
</li>
      <li><a href="hub.html#torch.hub.set_dir">set_dir() (in module torch.hub)</a>
</li>
      <li><a href="torch.html#torch.set_flush_denormal">set_flush_denormal() (in module torch)</a>
</li>
      <li><a href="autograd.html#torch.autograd.set_grad_enabled">set_grad_enabled (class in torch.autograd)</a>
</li>
      <li><a href="torch.html#torch.set_grad_enabled">set_grad_enabled() (in module torch)</a>
</li>
      <li><a href="amp.html#torch.cuda.amp.GradScaler.set_growth_factor">set_growth_factor() (torch.cuda.amp.GradScaler method)</a>
</li>
      <li><a href="amp.html#torch.cuda.amp.GradScaler.set_growth_interval">set_growth_interval() (torch.cuda.amp.GradScaler method)</a>
</li>
      <li><a href="torch.html#torch.set_num_interop_threads">set_num_interop_threads() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.set_num_threads">set_num_threads() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.set_printoptions">set_printoptions() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.set_rng_state">set_rng_state() (in module torch)</a>

      <ul>
        <li><a href="cuda.html#torch.cuda.set_rng_state">(in module torch.cuda)</a>
</li>
        <li><a href="random.html#torch.random.set_rng_state">(in module torch.random)</a>, <a href="random.html#torch.random.set_rng_state">[1]</a>
</li>
      </ul></li>
      <li><a href="cuda.html#torch.cuda.set_rng_state_all">set_rng_state_all() (in module torch.cuda)</a>
</li>
      <li><a href="multiprocessing.html#torch.multiprocessing.set_sharing_strategy">set_sharing_strategy() (in module torch.multiprocessing)</a>
</li>
      <li><a href="torch.html#torch._C.Generator.set_state">set_state() (torch._C.Generator method)</a>
</li>
      <li><a href="onnx.html#torch.onnx.set_training">set_training() (in module torch.onnx)</a>
</li>
      <li><a href="optim.html#torch.optim.SGD">SGD (class in torch.optim)</a>
</li>
      <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor.shape()">shape() (Java method)</a>
</li>
      <li><a href="onnx.html#torch.onnx.operators.shape_as_tensor">shape_as_tensor() (in module torch.onnx.operators)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.share_memory_">share_memory_() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.share_memory_">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="storage.html#torch.FloatStorage.short">short() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.short">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="__config__.html#torch.__config__.show">show() (in module torch.__config__)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.shutdown">shutdown() (in module torch.distributed.rpc)</a>
</li>
      <li><a href="nn.html#torch.nn.Sigmoid">Sigmoid (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.sigmoid">sigmoid() (in module torch)</a>

      <ul>
        <li><a href="nn.functional.html#torch.nn.functional.sigmoid">(in module torch.nn.functional)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.sigmoid">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.sigmoid_">sigmoid_() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.transforms.SigmoidTransform">SigmoidTransform (class in torch.distributions.transforms)</a>
</li>
      <li><a href="torch.html#torch.sign">sign() (in module torch)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.transforms.Transform.sign">(torch.distributions.transforms.Transform property)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.sign">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.sign_">sign_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.sin">sin() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.sin">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.sin_">sin_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.sinh">sinh() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.sinh">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.sinh_">sinh_() (torch.Tensor method)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.size">size() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="sparse.html#torch.sparse.FloatTensor.size">(torch.sparse.FloatTensor method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.size">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.slogdet">slogdet() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.slogdet">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.smooth_l1_loss">smooth_l1_loss() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.SmoothL1Loss">SmoothL1Loss (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.quasirandom.SobolEngine">SobolEngine (class in torch.quasirandom)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.soft_margin_loss">soft_margin_loss() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.SoftMarginLoss">SoftMarginLoss (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.Softmax">Softmax (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.softmax">softmax() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.Softmax2d">Softmax2d (class in torch.nn)</a>
</li>
      <li><a href="distributions.html#torch.distributions.transforms.SoftmaxTransform">SoftmaxTransform (class in torch.distributions.transforms)</a>
</li>
      <li><a href="nn.html#torch.nn.Softmin">Softmin (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.softmin">softmin() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.Softplus">Softplus (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.softplus">softplus() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.Softshrink">Softshrink (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.softshrink">softshrink() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.Softsign">Softsign (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.softsign">softsign() (in module torch.nn.functional)</a>
</li>
      <li><a href="torch.html#torch.solve">solve() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.solve">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.sort">sort() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.sort">(torch.Tensor method)</a>
</li>
      </ul></li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="sparse.html#torch.sparse.FloatTensor.spadd">spadd() (torch.sparse.FloatTensor method)</a>
</li>
      <li><a href="nn.init.html#torch.nn.init.sparse_">sparse_() (in module torch.nn.init)</a>
</li>
      <li><a href="torch.html#torch.sparse_coo_tensor">sparse_coo_tensor() (in module torch)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.sparse_dim">sparse_dim() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.sparse_mask">sparse_mask() (torch.Tensor method)</a>
</li>
      <li><a href="optim.html#torch.optim.SparseAdam">SparseAdam (class in torch.optim)</a>
</li>
      <li><a href="multiprocessing.html#torch.multiprocessing.spawn">spawn() (in module torch.multiprocessing)</a>
</li>
      <li><a href="multiprocessing.html#torch.multiprocessing.SpawnContext">SpawnContext (class in torch.multiprocessing)</a>
</li>
      <li><a href="nn.html#torch.nn.utils.spectral_norm">spectral_norm() (in module torch.nn.utils)</a>
</li>
      <li><a href="torch.html#torch.split">split() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.split">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.spmm">spmm() (torch.sparse.FloatTensor method)</a>
</li>
      <li><a href="torch.html#torch.sqrt">sqrt() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.sqrt">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.sqrt_">sqrt_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.square">square() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.square">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.square_">square_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.squeeze">squeeze() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.squeeze">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.squeeze_">squeeze_() (torch.Tensor method)</a>
</li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.sspaddmm">sspaddmm() (torch.sparse.FloatTensor method)</a>
</li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.sspmm">sspmm() (torch.sparse.FloatTensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.constraints.stack">stack (in module torch.distributions.constraints)</a>
</li>
      <li><a href="torch.html#torch.stack">stack() (in module torch)</a>
</li>
      <li><a href="distributions.html#torch.distributions.transforms.StackTransform">StackTransform (class in torch.distributions.transforms)</a>
</li>
      <li><a href="amp.html#torch.cuda.amp.GradScaler.state_dict">state_dict() (torch.cuda.amp.GradScaler method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.Module.state_dict">(torch.nn.Module method)</a>
</li>
        <li><a href="optim.html#torch.optim.lr_scheduler.LambdaLR.state_dict">(torch.optim.lr_scheduler.LambdaLR method)</a>
</li>
        <li><a href="optim.html#torch.optim.lr_scheduler.MultiplicativeLR.state_dict">(torch.optim.lr_scheduler.MultiplicativeLR method)</a>
</li>
        <li><a href="optim.html#torch.optim.Optimizer.state_dict">(torch.optim.Optimizer method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.std">std() (in module torch)</a>, <a href="torch.html#torch.std">[1]</a>, <a href="torch.html#torch.std">[2]</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.std">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.std_mean">std_mean() (in module torch)</a>, <a href="torch.html#torch.std_mean">[1]</a>, <a href="torch.html#torch.std_mean">[2]</a>
</li>
      <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.stddev">stddev() (torch.distributions.continuous_bernoulli.ContinuousBernoulli property)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.distribution.Distribution.stddev">(torch.distributions.distribution.Distribution property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.exponential.Exponential.stddev">(torch.distributions.exponential.Exponential property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gumbel.Gumbel.stddev">(torch.distributions.gumbel.Gumbel property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.laplace.Laplace.stddev">(torch.distributions.laplace.Laplace property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.normal.Normal.stddev">(torch.distributions.normal.Normal property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.uniform.Uniform.stddev">(torch.distributions.uniform.Uniform property)</a>
</li>
      </ul></li>
      <li><a href="amp.html#torch.cuda.amp.GradScaler.step">step() (torch.cuda.amp.GradScaler method)</a>

      <ul>
        <li><a href="rpc/rpc.html#torch.distributed.optim.DistributedOptimizer.step">(torch.distributed.optim.DistributedOptimizer method)</a>
</li>
        <li><a href="optim.html#torch.optim.Adadelta.step">(torch.optim.Adadelta method)</a>
</li>
        <li><a href="optim.html#torch.optim.Adagrad.step">(torch.optim.Adagrad method)</a>
</li>
        <li><a href="optim.html#torch.optim.Adam.step">(torch.optim.Adam method)</a>
</li>
        <li><a href="optim.html#torch.optim.Adamax.step">(torch.optim.Adamax method)</a>
</li>
        <li><a href="optim.html#torch.optim.AdamW.step">(torch.optim.AdamW method)</a>
</li>
        <li><a href="optim.html#torch.optim.ASGD.step">(torch.optim.ASGD method)</a>
</li>
        <li><a href="optim.html#torch.optim.LBFGS.step">(torch.optim.LBFGS method)</a>
</li>
        <li><a href="optim.html#torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.step">(torch.optim.lr_scheduler.CosineAnnealingWarmRestarts method)</a>
</li>
        <li><a href="optim.html#torch.optim.Optimizer.step">(torch.optim.Optimizer method)</a>
</li>
        <li><a href="optim.html#torch.optim.RMSprop.step">(torch.optim.RMSprop method)</a>
</li>
        <li><a href="optim.html#torch.optim.Rprop.step">(torch.optim.Rprop method)</a>
</li>
        <li><a href="optim.html#torch.optim.SGD.step">(torch.optim.SGD method)</a>
</li>
        <li><a href="optim.html#torch.optim.SparseAdam.step">(torch.optim.SparseAdam method)</a>
</li>
      </ul></li>
      <li><a href="optim.html#torch.optim.lr_scheduler.StepLR">StepLR (class in torch.optim.lr_scheduler)</a>
</li>
      <li><a href="torch.html#torch.stft">stft() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.stft">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.transforms.StickBreakingTransform">StickBreakingTransform (class in torch.distributions.transforms)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.storage">storage() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.storage_offset">storage_offset() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.storage_type">storage_type() (torch.Tensor method)</a>
</li>
      <li><a href="cuda.html#torch.cuda.Stream">Stream (class in torch.cuda)</a>
</li>
      <li><a href="cuda.html#torch.cuda.stream">stream() (in module torch.cuda)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.stride">stride() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.studentT.StudentT">StudentT (class in torch.distributions.studentT)</a>
</li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.sub">sub() (torch.sparse.FloatTensor method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.sub">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.sub_">sub_() (torch.sparse.FloatTensor method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.sub_">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="data.html#torch.utils.data.Subset">Subset (class in torch.utils.data)</a>
</li>
      <li><a href="data.html#torch.utils.data.SubsetRandomSampler">SubsetRandomSampler (class in torch.utils.data)</a>
</li>
      <li><a href="torch.html#torch.sum">sum() (in module torch)</a>, <a href="torch.html#torch.sum">[1]</a>, <a href="torch.html#torch.sum">[2]</a>

      <ul>
        <li><a href="sparse.html#torch.sparse.sum">(in module torch.sparse)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.sum">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.sum_to_size">sum_to_size() (torch.Tensor method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.bernoulli.Bernoulli.support">support (torch.distributions.bernoulli.Bernoulli attribute)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.beta.Beta.support">(torch.distributions.beta.Beta attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.cauchy.Cauchy.support">(torch.distributions.cauchy.Cauchy attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.support">(torch.distributions.continuous_bernoulli.ContinuousBernoulli attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.dirichlet.Dirichlet.support">(torch.distributions.dirichlet.Dirichlet attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.exponential.Exponential.support">(torch.distributions.exponential.Exponential attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.fishersnedecor.FisherSnedecor.support">(torch.distributions.fishersnedecor.FisherSnedecor attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gamma.Gamma.support">(torch.distributions.gamma.Gamma attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.geometric.Geometric.support">(torch.distributions.geometric.Geometric attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gumbel.Gumbel.support">(torch.distributions.gumbel.Gumbel attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_cauchy.HalfCauchy.support">(torch.distributions.half_cauchy.HalfCauchy attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_normal.HalfNormal.support">(torch.distributions.half_normal.HalfNormal attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.laplace.Laplace.support">(torch.distributions.laplace.Laplace attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.log_normal.LogNormal.support">(torch.distributions.log_normal.LogNormal attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.support">(torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multivariate_normal.MultivariateNormal.support">(torch.distributions.multivariate_normal.MultivariateNormal attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.negative_binomial.NegativeBinomial.support">(torch.distributions.negative_binomial.NegativeBinomial attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.normal.Normal.support">(torch.distributions.normal.Normal attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.one_hot_categorical.OneHotCategorical.support">(torch.distributions.one_hot_categorical.OneHotCategorical attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.poisson.Poisson.support">(torch.distributions.poisson.Poisson attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.support">(torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.RelaxedBernoulli.support">(torch.distributions.relaxed_bernoulli.RelaxedBernoulli attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.support">(torch.distributions.relaxed_categorical.RelaxedOneHotCategorical attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.studentT.StudentT.support">(torch.distributions.studentT.StudentT attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.von_mises.VonMises.support">(torch.distributions.von_mises.VonMises attribute)</a>
</li>
        <li><a href="distributions.html#torch.distributions.weibull.Weibull.support">(torch.distributions.weibull.Weibull attribute)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.binomial.Binomial.support">support() (torch.distributions.binomial.Binomial property)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.categorical.Categorical.support">(torch.distributions.categorical.Categorical property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.distribution.Distribution.support">(torch.distributions.distribution.Distribution property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.independent.Independent.support">(torch.distributions.independent.Independent property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.mixture_same_family.MixtureSameFamily.support">(torch.distributions.mixture_same_family.MixtureSameFamily property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multinomial.Multinomial.support">(torch.distributions.multinomial.Multinomial property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.pareto.Pareto.support">(torch.distributions.pareto.Pareto property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.transformed_distribution.TransformedDistribution.support">(torch.distributions.transformed_distribution.TransformedDistribution property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.uniform.Uniform.support">(torch.distributions.uniform.Uniform property)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.svd">svd() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.svd">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.svd_lowrank">svd_lowrank() (in module torch)</a>
</li>
      <li><a href="quantization.html#torch.quantization.swap_module">swap_module() (in module torch.quantization)</a>
</li>
      <li><a href="torch.html#torch.symeig">symeig() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.symeig">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.SyncBatchNorm">SyncBatchNorm (class in torch.nn)</a>
</li>
      <li><a href="cuda.html#torch.cuda.synchronize">synchronize() (in module torch.cuda)</a>

      <ul>
        <li><a href="cuda.html#torch.cuda.Event.synchronize">(torch.cuda.Event method)</a>
</li>
        <li><a href="cuda.html#torch.cuda.Stream.synchronize">(torch.cuda.Stream method)</a>
</li>
      </ul></li>
  </ul></td>
</tr></table>

<h2 id="T">T</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="tensors.html#torch.Tensor.T">T (torch.Tensor attribute)</a>
</li>
      <li><a href="torch.html#torch.t">t() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.t">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.t_">t_() (torch.sparse.FloatTensor method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.t_">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="autograd.html#torch.autograd.profiler.profile.table">table() (torch.autograd.profiler.profile method)</a>
</li>
      <li><a href="torch.html#torch.take">take() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.take">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.tan">tan() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.tan">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.tan_">tan_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.html#torch.nn.Tanh">Tanh (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.tanh">tanh() (in module torch)</a>

      <ul>
        <li><a href="nn.functional.html#torch.nn.functional.tanh">(in module torch.nn.functional)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.tanh">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.tanh_">tanh_() (torch.Tensor method)</a>
</li>
      <li><a href="nn.html#torch.nn.Tanhshrink">Tanhshrink (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.tanhshrink">tanhshrink() (in module torch.nn.functional)</a>
</li>
      <li><a href="distributions.html#torch.distributions.transforms.TanhTransform">TanhTransform (class in torch.distributions.transforms)</a>
</li>
      <li><a href="distributions.html#torch.distributions.relaxed_bernoulli.RelaxedBernoulli.temperature">temperature() (torch.distributions.relaxed_bernoulli.RelaxedBernoulli property)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.temperature">(torch.distributions.relaxed_categorical.RelaxedOneHotCategorical property)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor">Tensor (class in torch)</a>

      <ul>
        <li><a href="org/pytorch/Tensor.html#org.pytorch.Tensor">(Java class)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.tensor">tensor() (in module torch)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.bernoulli_">Tensor.bernoulli_() (in module torch)</a>, <a href="tensors.html#torch.Tensor.bernoulli_">[1]</a>
</li>
      <li><a href="tensors.html#torch.Tensor.to">Tensor.to() (in module torch)</a>, <a href="tensors.html#torch.Tensor.to">[1]</a>, <a href="tensors.html#torch.Tensor.to">[2]</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_float32.html#org.pytorch.Tensor.Tensor_float32">Tensor_float32 (Java class)</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_float32.html#org.pytorch.Tensor.Tensor_float32.Tensor_float32(FloatBuffer, long[])">Tensor_float32(FloatBuffer, long[]) (Java constructor)</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_float64.html#org.pytorch.Tensor.Tensor_float64">Tensor_float64 (Java class)</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_int32.html#org.pytorch.Tensor.Tensor_int32">Tensor_int32 (Java class)</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_int64.html#org.pytorch.Tensor.Tensor_int64">Tensor_int64 (Java class)</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_int8.html#org.pytorch.Tensor.Tensor_int8">Tensor_int8 (Java class)</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_uint8.html#org.pytorch.Tensor.Tensor_uint8">Tensor_uint8 (Java class)</a>
</li>
      <li><a href="data.html#torch.utils.data.TensorDataset">TensorDataset (class in torch.utils.data)</a>
</li>
      <li><a href="torch.html#torch.tensordot">tensordot() (in module torch)</a>
</li>
      <li><a href="org/pytorch/TensorImageUtils.html#org.pytorch.torchvision.TensorImageUtils">TensorImageUtils (Java class)</a>
</li>
      <li><a href="nn.html#torch.nn.Threshold">Threshold (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.threshold">threshold() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.threshold_">threshold_() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.to">to() (torch.nn.Module method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.to">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.to_dense">to_dense() (torch.sparse.FloatTensor method)</a>
</li>
      <li><a href="dlpack.html#torch.utils.dlpack.to_dlpack">to_dlpack() (in module torch.utils.dlpack)</a>
</li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.RRef.to_here">to_here() (torch.distributed.rpc.RRef method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.to_mkldnn">to_mkldnn() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.to_sparse">to_sparse() (torch.Tensor method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.toBool()">toBool() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.toBoolList()">toBoolList() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.toDictLongKey()">toDictLongKey() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.toDictStringKey()">toDictStringKey() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.toDouble()">toDouble() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.toDoubleList()">toDoubleList() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.toList()">toList() (Java method)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.tolist">tolist() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.tolist">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.toLong()">toLong() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.toLongList()">toLongList() (Java method)</a>
</li>
      <li><a href="torch.html#torch.topk">topk() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.topk">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#module-torch">torch (module)</a>
</li>
      <li><a href="__config__.html#module-torch.__config__">torch.__config__ (module)</a>
</li>
      <li><a href="autograd.html#module-torch.autograd">torch.autograd (module)</a>
</li>
      <li><a href="cuda.html#module-torch.cuda">torch.cuda (module)</a>
</li>
      <li><a href="amp.html#module-torch.cuda.amp">torch.cuda.amp (module)</a>
</li>
      <li><a href="tensor_attributes.html#torch.torch.device">torch.device (class in torch)</a>
</li>
      <li><a href="distributed.html#module-torch.distributed">torch.distributed (module)</a>
</li>
      <li><a href="rpc/rpc.html#module-torch.distributed.autograd">torch.distributed.autograd (module)</a>
</li>
      <li><a href="distributed.html#module-torch.distributed.launch">torch.distributed.launch (module)</a>
</li>
      <li><a href="rpc/rpc.html#module-torch.distributed.optim">torch.distributed.optim (module)</a>
</li>
      <li><a href="rpc/rpc.html#module-torch.distributed.rpc">torch.distributed.rpc (module)</a>
</li>
      <li><a href="distributions.html#module-torch.distributions">torch.distributions (module)</a>
</li>
      <li><a href="distributions.html#module-torch.distributions.constraint_registry">torch.distributions.constraint_registry (module)</a>
</li>
      <li><a href="distributions.html#module-torch.distributions.constraints">torch.distributions.constraints (module)</a>
</li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="distributions.html#module-torch.distributions.kl">torch.distributions.kl (module)</a>
</li>
      <li><a href="distributions.html#module-torch.distributions.transforms">torch.distributions.transforms (module)</a>
</li>
      <li><a href="tensor_attributes.html#torch.torch.dtype">torch.dtype (class in torch)</a>
</li>
      <li><a href="type_info.html#torch.torch.finfo">torch.finfo (class in torch)</a>
</li>
      <li><a href="hub.html#module-torch.hub">torch.hub (module)</a>
</li>
      <li><a href="type_info.html#torch.torch.iinfo">torch.iinfo (class in torch)</a>
</li>
      <li><a href="jit.html#module-torch.jit">torch.jit (module)</a>
</li>
      <li><a href="jit_unsupported.html#module-torch.jit.unsupported_tensor_ops">torch.jit.unsupported_tensor_ops (module)</a>
</li>
      <li><a href="tensor_attributes.html#torch.torch.layout">torch.layout (class in torch)</a>
</li>
      <li><a href="tensor_attributes.html#torch.torch.memory_format">torch.memory_format (class in torch)</a>
</li>
      <li><a href="multiprocessing.html#module-torch.multiprocessing">torch.multiprocessing (module)</a>
</li>
      <li><a href="nn.html#module-torch.nn">torch.nn (module)</a>
</li>
      <li><a href="quantization.html#module-torch.nn.intrinsic">torch.nn.intrinsic (module)</a>
</li>
      <li><a href="quantization.html#module-torch.nn.intrinsic.qat">torch.nn.intrinsic.qat (module)</a>
</li>
      <li><a href="quantization.html#module-torch.nn.intrinsic.quantized">torch.nn.intrinsic.quantized (module)</a>
</li>
      <li><a href="quantization.html#module-torch.nn.qat">torch.nn.qat (module)</a>
</li>
      <li><a href="quantization.html#module-torch.nn.quantized">torch.nn.quantized (module)</a>
</li>
      <li><a href="quantization.html#module-torch.nn.quantized.dynamic">torch.nn.quantized.dynamic (module)</a>
</li>
      <li><a href="quantization.html#module-torch.nn.quantized.functional">torch.nn.quantized.functional (module)</a>
</li>
      <li><a href="onnx.html#module-torch.onnx">torch.onnx (module)</a>
</li>
      <li><a href="optim.html#module-torch.optim">torch.optim (module)</a>
</li>
      <li><a href="quantization.html#module-torch.quantization">torch.quantization (module)</a>
</li>
      <li><a href="random.html#module-torch.random">torch.random (module)</a>
</li>
      <li><a href="data.html#module-torch.utils.data">torch.utils.data (module)</a>
</li>
      <li><a href="model_zoo.html#module-torch.utils.model_zoo">torch.utils.model_zoo (module)</a>
</li>
      <li><a href="org/pytorch/TensorImageUtils.html#org.pytorch.torchvision.TensorImageUtils.TORCHVISION_NORM_MEAN_RGB">TORCHVISION_NORM_MEAN_RGB (Java field)</a>
</li>
      <li><a href="org/pytorch/TensorImageUtils.html#org.pytorch.torchvision.TensorImageUtils.TORCHVISION_NORM_STD_RGB">TORCHVISION_NORM_STD_RGB (Java field)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.toStr()">toStr() (Java method)</a>
</li>
      <li><a href="org/pytorch/Tensor-Tensor_float32.html#org.pytorch.Tensor.Tensor_float32.toString()">toString() (Java method)</a>, <a href="org/pytorch/Tensor-Tensor_float64.html#org.pytorch.Tensor.Tensor_float64.toString()">[1]</a>, <a href="org/pytorch/Tensor-Tensor_int32.html#org.pytorch.Tensor.Tensor_int32.toString()">[2]</a>, <a href="org/pytorch/Tensor-Tensor_int64.html#org.pytorch.Tensor.Tensor_int64.toString()">[3]</a>, <a href="org/pytorch/Tensor-Tensor_int8.html#org.pytorch.Tensor.Tensor_int8.toString()">[4]</a>, <a href="org/pytorch/Tensor-Tensor_uint8.html#org.pytorch.Tensor.Tensor_uint8.toString()">[5]</a>
</li>
      <li><a href="autograd.html#torch.autograd.profiler.profile.total_average">total_average() (torch.autograd.profiler.profile method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.toTensor()">toTensor() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.toTensorList()">toTensorList() (Java method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.toTuple()">toTuple() (Java method)</a>
</li>
      <li><a href="torch.html#torch.trace">trace() (in module torch)</a>

      <ul>
        <li><a href="jit.html#torch.jit.trace">(in module torch.jit)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.trace">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="jit.html#torch.jit.trace_module">trace_module() (in module torch.jit)</a>
</li>
      <li><a href="nn.html#torch.nn.Module.train">train() (torch.nn.Module method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.transforms.Transform">Transform (class in torch.distributions.transforms)</a>
</li>
      <li><a href="distributions.html#torch.distributions.transformed_distribution.TransformedDistribution">TransformedDistribution (class in torch.distributions.transformed_distribution)</a>
</li>
      <li><a href="nn.html#torch.nn.Transformer">Transformer (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.TransformerDecoder">TransformerDecoder (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.TransformerDecoderLayer">TransformerDecoderLayer (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.TransformerEncoder">TransformerEncoder (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.TransformerEncoderLayer">TransformerEncoderLayer (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.transpose">transpose() (in module torch)</a>

      <ul>
        <li><a href="sparse.html#torch.sparse.FloatTensor.transpose">(torch.sparse.FloatTensor method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.transpose">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="sparse.html#torch.sparse.FloatTensor.transpose_">transpose_() (torch.sparse.FloatTensor method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.transpose_">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.trapz">trapz() (in module torch)</a>, <a href="torch.html#torch.trapz">[1]</a>, <a href="torch.html#torch.trapz">[2]</a>
</li>
      <li><a href="torch.html#torch.triangular_solve">triangular_solve() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.triangular_solve">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.tril">tril() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.tril">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.tril_">tril_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.tril_indices">tril_indices() (in module torch)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.triplet_margin_loss">triplet_margin_loss() (in module torch.nn.functional)</a>
</li>
      <li><a href="nn.html#torch.nn.TripletMarginLoss">TripletMarginLoss (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.triu">triu() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.triu">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.triu_">triu_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.triu_indices">triu_indices() (in module torch)</a>
</li>
      <li><a href="torch.html#torch.true_divide">true_divide() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.true_divide">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.true_divide_">true_divide_() (torch.Tensor method)</a>
</li>
      <li><a href="torch.html#torch.trunc">trunc() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.trunc">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.trunc_">trunc_() (torch.Tensor method)</a>
</li>
      <li><a href="org/pytorch/IValue.html#org.pytorch.IValue.tupleFrom(IValue)">tupleFrom(IValue) (Java method)</a>
</li>
      <li><a href="storage.html#torch.FloatStorage.type">type() (torch.FloatStorage method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.Module.type">(torch.nn.Module method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.type">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.type_as">type_as() (torch.Tensor method)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="U">U</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="org/pytorch/DType.html#org.pytorch.DType.UINT8">UINT8 (Java field)</a>
</li>
      <li><a href="torch.html#torch.unbind">unbind() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.unbind">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="named_tensor.html#torch.Tensor.unflatten">unflatten() (torch.Tensor method)</a>
</li>
      <li><a href="nn.html#torch.nn.Unfold">Unfold (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.unfold">unfold() (in module torch.nn.functional)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.unfold">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.uniform.Uniform">Uniform (class in torch.distributions.uniform)</a>
</li>
      <li><a href="nn.init.html#torch.nn.init.uniform_">uniform_() (in module torch.nn.init)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.uniform_">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.unique">unique() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.unique">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.unique_consecutive">unique_consecutive() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.unique_consecutive">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="amp.html#torch.cuda.amp.GradScaler.unscale_">unscale_() (torch.cuda.amp.GradScaler method)</a>
</li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="torch.html#torch.unsqueeze">unsqueeze() (in module torch)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.unsqueeze">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="tensors.html#torch.Tensor.unsqueeze_">unsqueeze_() (torch.Tensor method)</a>
</li>
      <li><a href="jit.html#torch.jit.unused">unused() (in module torch.jit)</a>
</li>
      <li><a href="amp.html#torch.cuda.amp.GradScaler.update">update() (torch.cuda.amp.GradScaler method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.ModuleDict.update">(torch.nn.ModuleDict method)</a>
</li>
        <li><a href="nn.html#torch.nn.ParameterDict.update">(torch.nn.ParameterDict method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.Upsample">Upsample (class in torch.nn)</a>
</li>
      <li><a href="nn.functional.html#torch.nn.functional.upsample">upsample() (in module torch.nn.functional)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.quantized.functional.upsample">(in module torch.nn.quantized.functional)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.upsample_bilinear">upsample_bilinear() (in module torch.nn.functional)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.quantized.functional.upsample_bilinear">(in module torch.nn.quantized.functional)</a>
</li>
      </ul></li>
      <li><a href="nn.functional.html#torch.nn.functional.upsample_nearest">upsample_nearest() (in module torch.nn.functional)</a>

      <ul>
        <li><a href="quantization.html#torch.nn.quantized.functional.upsample_nearest">(in module torch.nn.quantized.functional)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.UpsamplingBilinear2d">UpsamplingBilinear2d (class in torch.nn)</a>
</li>
      <li><a href="nn.html#torch.nn.UpsamplingNearest2d">UpsamplingNearest2d (class in torch.nn)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="V">V</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="nn.html#torch.nn.ModuleDict.values">values() (torch.nn.ModuleDict method)</a>

      <ul>
        <li><a href="nn.html#torch.nn.ParameterDict.values">(torch.nn.ParameterDict method)</a>
</li>
        <li><a href="tensors.html#torch.Tensor.values">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.var">var() (in module torch)</a>, <a href="torch.html#torch.var">[1]</a>, <a href="torch.html#torch.var">[2]</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.var">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="torch.html#torch.var_mean">var_mean() (in module torch)</a>, <a href="torch.html#torch.var_mean">[1]</a>, <a href="torch.html#torch.var_mean">[2]</a>
</li>
      <li><a href="distributions.html#torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.variance">variance (torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal attribute)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.von_mises.VonMises.variance">(torch.distributions.von_mises.VonMises attribute)</a>
</li>
      </ul></li>
      <li><a href="distributions.html#torch.distributions.bernoulli.Bernoulli.variance">variance() (torch.distributions.bernoulli.Bernoulli property)</a>

      <ul>
        <li><a href="distributions.html#torch.distributions.beta.Beta.variance">(torch.distributions.beta.Beta property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.binomial.Binomial.variance">(torch.distributions.binomial.Binomial property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.categorical.Categorical.variance">(torch.distributions.categorical.Categorical property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.cauchy.Cauchy.variance">(torch.distributions.cauchy.Cauchy property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.continuous_bernoulli.ContinuousBernoulli.variance">(torch.distributions.continuous_bernoulli.ContinuousBernoulli property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.dirichlet.Dirichlet.variance">(torch.distributions.dirichlet.Dirichlet property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.distribution.Distribution.variance">(torch.distributions.distribution.Distribution property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.exponential.Exponential.variance">(torch.distributions.exponential.Exponential property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.fishersnedecor.FisherSnedecor.variance">(torch.distributions.fishersnedecor.FisherSnedecor property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gamma.Gamma.variance">(torch.distributions.gamma.Gamma property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.geometric.Geometric.variance">(torch.distributions.geometric.Geometric property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.gumbel.Gumbel.variance">(torch.distributions.gumbel.Gumbel property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_cauchy.HalfCauchy.variance">(torch.distributions.half_cauchy.HalfCauchy property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.half_normal.HalfNormal.variance">(torch.distributions.half_normal.HalfNormal property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.independent.Independent.variance">(torch.distributions.independent.Independent property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.laplace.Laplace.variance">(torch.distributions.laplace.Laplace property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.log_normal.LogNormal.variance">(torch.distributions.log_normal.LogNormal property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.mixture_same_family.MixtureSameFamily.variance">(torch.distributions.mixture_same_family.MixtureSameFamily property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multinomial.Multinomial.variance">(torch.distributions.multinomial.Multinomial property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.multivariate_normal.MultivariateNormal.variance">(torch.distributions.multivariate_normal.MultivariateNormal property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.negative_binomial.NegativeBinomial.variance">(torch.distributions.negative_binomial.NegativeBinomial property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.normal.Normal.variance">(torch.distributions.normal.Normal property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.one_hot_categorical.OneHotCategorical.variance">(torch.distributions.one_hot_categorical.OneHotCategorical property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.pareto.Pareto.variance">(torch.distributions.pareto.Pareto property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.poisson.Poisson.variance">(torch.distributions.poisson.Poisson property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.studentT.StudentT.variance">(torch.distributions.studentT.StudentT property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.uniform.Uniform.variance">(torch.distributions.uniform.Uniform property)</a>
</li>
        <li><a href="distributions.html#torch.distributions.weibull.Weibull.variance">(torch.distributions.weibull.Weibull property)</a>
</li>
      </ul></li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="nn.html#torch.nn.utils.vector_to_parameters">vector_to_parameters() (in module torch.nn.utils)</a>
</li>
      <li><a href="cpp_extension.html#torch.utils.cpp_extension.verify_ninja_availability">verify_ninja_availability() (in module torch.utils.cpp_extension)</a>
</li>
      <li><a href="autograd.html#torch.autograd.functional.vhp">vhp() (in module torch.autograd.functional)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.view">view() (torch.Tensor method)</a>
</li>
      <li><a href="tensors.html#torch.Tensor.view_as">view_as() (torch.Tensor method)</a>
</li>
      <li><a href="autograd.html#torch.autograd.functional.vjp">vjp() (in module torch.autograd.functional)</a>
</li>
      <li><a href="distributions.html#torch.distributions.von_mises.VonMises">VonMises (class in torch.distributions.von_mises)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="W">W</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="cuda.html#torch.cuda.Event.wait">wait() (torch.cuda.Event method)</a>
</li>
      <li><a href="cuda.html#torch.cuda.Stream.wait_event">wait_event() (torch.cuda.Stream method)</a>
</li>
      <li><a href="cuda.html#torch.cuda.Stream.wait_stream">wait_stream() (torch.cuda.Stream method)</a>
</li>
      <li><a href="distributions.html#torch.distributions.weibull.Weibull">Weibull (class in torch.distributions.weibull)</a>
</li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="nn.html#torch.nn.utils.weight_norm">weight_norm() (in module torch.nn.utils)</a>
</li>
      <li><a href="data.html#torch.utils.data.WeightedRandomSampler">WeightedRandomSampler (class in torch.utils.data)</a>
</li>
      <li><a href="torch.html#torch.where">where() (in module torch)</a>, <a href="torch.html#torch.where">[1]</a>, <a href="torch.html#torch.where">[2]</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.where">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="rpc/rpc.html#torch.distributed.rpc.WorkerInfo">WorkerInfo (class in torch.distributed.rpc)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="X">X</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="nn.init.html#torch.nn.init.xavier_normal_">xavier_normal_() (in module torch.nn.init)</a>
</li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="nn.init.html#torch.nn.init.xavier_uniform_">xavier_uniform_() (in module torch.nn.init)</a>
</li>
  </ul></td>
</tr></table>

<h2 id="Z">Z</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="sparse.html#torch.sparse.FloatTensor.zero_">zero_() (torch.sparse.FloatTensor method)</a>

      <ul>
        <li><a href="tensors.html#torch.Tensor.zero_">(torch.Tensor method)</a>
</li>
      </ul></li>
      <li><a href="nn.html#torch.nn.Module.zero_grad">zero_grad() (torch.nn.Module method)</a>

      <ul>
        <li><a href="optim.html#torch.optim.Optimizer.zero_grad">(torch.optim.Optimizer method)</a>
</li>
      </ul></li>
  </ul></td>
  <td style="width: 33%; vertical-align: top;"><ul>
      <li><a href="nn.html#torch.nn.ZeroPad2d">ZeroPad2d (class in torch.nn)</a>
</li>
      <li><a href="torch.html#torch.zeros">zeros() (in module torch)</a>
</li>
      <li><a href="nn.init.html#torch.nn.init.zeros_">zeros_() (in module torch.nn.init)</a>
</li>
      <li><a href="torch.html#torch.zeros_like">zeros_like() (in module torch)</a>
</li>
  </ul></td>
</tr></table>



             </article>
             
            </div>
            <footer>
  

  

    <hr>

  

  <div role="contentinfo">
    <p>
        &copy; Copyright 2019, Torch Contributors.

    </p>
  </div>
    
      <div>
        Built with <a href="http://sphinx-doc.org/">Sphinx</a> using a <a href="https://github.com/rtfd/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
      </div>
     

</footer>

          </div>
        </div>

        <div class="pytorch-content-right" id="pytorch-content-right">
          <div class="pytorch-right-menu" id="pytorch-right-menu">
            <div class="pytorch-side-scroll" id="pytorch-side-scroll-right">
              
            </div>
          </div>
        </div>
      </section>
    </div>

  


  

     
       <script type="text/javascript" id="documentation_options" data-url_root="./" src="_static/documentation_options.js"></script>
         <script src="_static/jquery.js"></script>
         <script src="_static/underscore.js"></script>
         <script src="_static/doctools.js"></script>
         <script src="_static/language_data.js"></script>
     

  

  <script type="text/javascript" src="_static/js/vendor/popper.min.js"></script>
  <script type="text/javascript" src="_static/js/vendor/bootstrap.min.js"></script>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/list.js/1.5.0/list.min.js"></script>
  <script type="text/javascript" src="_static/js/theme.js"></script>

  <script type="text/javascript">
      // Initialize the Read the Docs theme navigation behavior once the DOM
      // is ready (SphinxRtdTheme is provided by _static/js/theme.js above).
      jQuery(function () {
          SphinxRtdTheme.Navigation.enable(true);
      });
  </script>
 
<script>
  // Standard Google Analytics (analytics.js) loader snippet, left verbatim:
  // it creates the ga() command queue, records the load timestamp, and
  // injects the analytics.js library asynchronously before the first
  // existing <script> element.
  (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
  (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
  m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
  })(window,document,'script','https://www.google-analytics.com/analytics.js','ga');

  // Register the tracking property and record the initial pageview.
  ga('create', 'UA-90545585-1', 'auto');
  ga('send', 'pageview');

</script>

<script async src="https://www.googletagmanager.com/gtag/js?id=UA-117752657-2"></script>

<script>
  // Google tag (gtag.js) bootstrap: commands are queued on the shared
  // dataLayer array and consumed by the async gtag.js loader included above.
  window.dataLayer = window.dataLayer || [];

  // NOTE: gtag() must push the raw `arguments` object (not a copied array or
  // spread) — gtag.js inspects it directly. Do not "modernize" this function.
  function gtag(){dataLayer.push(arguments);}

  gtag('js', new Date());
  gtag('config', 'UA-117752657-2');
</script>

<img height="1" width="1" style="border-style:none;" alt="" src="https://www.googleadservices.com/pagead/conversion/795629140/?label=txkmCPmdtosBENSssfsC&amp;guid=ON&amp;script=0"/>


  <!-- Begin Footer -->

  <div class="container-fluid docs-tutorials-resources" id="docs-tutorials-resources">
    <div class="container">
      <div class="row">
        <div class="col-md-4 text-center">
          <h2>Docs</h2>
          <p>Access comprehensive developer documentation for PyTorch</p>
          <a class="with-right-arrow" href="https://pytorch.org/docs/stable/index.html">View Docs</a>
        </div>

        <div class="col-md-4 text-center">
          <h2>Tutorials</h2>
          <p>Get in-depth tutorials for beginners and advanced developers</p>
          <a class="with-right-arrow" href="https://pytorch.org/tutorials">View Tutorials</a>
        </div>

        <div class="col-md-4 text-center">
          <h2>Resources</h2>
          <p>Find development resources and get your questions answered</p>
          <a class="with-right-arrow" href="https://pytorch.org/resources">View Resources</a>
        </div>
      </div>
    </div>
  </div>

  <footer class="site-footer">
    <div class="container footer-container">
      <div class="footer-logo-wrapper">
        <a href="https://pytorch.org/" class="footer-logo"></a>
      </div>

      <div class="footer-links-wrapper">
        <div class="footer-links-col">
          <ul>
            <li class="list-title"><a href="https://pytorch.org/">PyTorch</a></li>
            <li><a href="https://pytorch.org/get-started">Get Started</a></li>
            <li><a href="https://pytorch.org/features">Features</a></li>
            <li><a href="https://pytorch.org/ecosystem">Ecosystem</a></li>
            <li><a href="https://pytorch.org/blog/">Blog</a></li>
            <li><a href="https://github.com/pytorch/pytorch/blob/master/CONTRIBUTING.md">Contributing</a></li>
          </ul>
        </div>

        <div class="footer-links-col">
          <ul>
            <li class="list-title"><a href="https://pytorch.org/resources">Resources</a></li>
            <li><a href="https://pytorch.org/tutorials">Tutorials</a></li>
            <li><a href="https://pytorch.org/docs/stable/index.html">Docs</a></li>
            <li><a href="https://discuss.pytorch.org" target="_blank" rel="noopener noreferrer">Discuss</a></li>
            <li><a href="https://github.com/pytorch/pytorch/issues" target="_blank" rel="noopener noreferrer">GitHub Issues</a></li>
            <li><a href="https://pytorch.org/assets/brand-guidelines/PyTorch-Brand-Guidelines.pdf" target="_blank" rel="noopener noreferrer">Brand Guidelines</a></li>
          </ul>
        </div>

        <div class="footer-links-col follow-us-col">
          <ul>
            <li class="list-title">Stay Connected</li>
            <li>
              <div id="mc_embed_signup">
                <form
                  action="https://twitter.us14.list-manage.com/subscribe/post?u=75419c71fe0a935e53dfa4a3f&amp;id=91d0dccd39"
                  method="post"
                  id="mc-embedded-subscribe-form"
                  name="mc-embedded-subscribe-form"
                  class="email-subscribe-form validate"
                  target="_blank"
                  novalidate>
                  <div id="mc_embed_signup_scroll" class="email-subscribe-form-fields-wrapper">
                    <div class="mc-field-group">
                      <label for="mce-EMAIL" style="display:none;">Email Address</label>
                      <input type="email" value="" name="EMAIL" class="required email" id="mce-EMAIL" placeholder="Email Address">
                    </div>

                    <div id="mce-responses" class="clear">
                      <div class="response" id="mce-error-response" style="display:none"></div>
                      <div class="response" id="mce-success-response" style="display:none"></div>
                    </div>    <!-- real people should not fill this in and expect good things - do not remove this or risk form bot signups-->

                    <div style="position: absolute; left: -5000px;" aria-hidden="true"><input type="text" name="b_75419c71fe0a935e53dfa4a3f_91d0dccd39" tabindex="-1" value=""></div>

                    <div class="clear">
                      <input type="submit" value="" name="subscribe" id="mc-embedded-subscribe" class="button email-subscribe-button">
                    </div>
                  </div>
                </form>
              </div>

            </li>
          </ul>

          <div class="footer-social-icons">
            <a href="https://www.facebook.com/pytorch" target="_blank" rel="noopener noreferrer" class="facebook" aria-label="PyTorch on Facebook"></a>
            <a href="https://twitter.com/pytorch" target="_blank" rel="noopener noreferrer" class="twitter" aria-label="PyTorch on Twitter"></a>
            <a href="https://www.youtube.com/pytorch" target="_blank" rel="noopener noreferrer" class="youtube" aria-label="PyTorch on YouTube"></a>
          </div>
        </div>
      </div>
    </div>
  </footer>

  <div class="cookie-banner-wrapper">
  <div class="container">
    <p class="gdpr-notice">To analyze traffic and optimize your experience, we serve cookies on this site. By clicking or navigating, you agree to allow our usage of cookies. As the current maintainers of this site, Facebook’s Cookies Policy applies. Learn more, including about available controls: <a href="https://www.facebook.com/policies/cookies/">Cookies Policy</a>.</p>
    <img class="close-button" src="_static/images/pytorch-x.svg" alt="Close">
  </div>
</div>

  <!-- End Footer -->

  <!-- Begin Mobile Menu -->

  <div class="mobile-main-menu">
    <div class="container-fluid">
      <div class="container">
        <div class="mobile-main-menu-header-container">
          <a class="header-logo" href="https://pytorch.org/" aria-label="PyTorch"></a>
          <a class="main-menu-close-button" href="#" data-behavior="close-mobile-menu"></a>
        </div>
      </div>
    </div>

    <div class="mobile-main-menu-links-container">
      <div class="main-menu">
        <ul>
          <li>
            <a href="https://pytorch.org/get-started">Get Started</a>
          </li>

          <li>
            <a href="https://pytorch.org/features">Features</a>
          </li>

          <li>
            <a href="https://pytorch.org/ecosystem">Ecosystem</a>
          </li>

          <li>
            <a href="https://pytorch.org/mobile">Mobile</a>
          </li>

          <li>
            <a href="https://pytorch.org/hub">PyTorch Hub</a>
          </li>

          <li>
            <a href="https://pytorch.org/blog/">Blog</a>
          </li>

          <li>
            <a href="https://pytorch.org/tutorials">Tutorials</a>
          </li>

          <li class="active">
            <a href="https://pytorch.org/docs/stable/index.html">Docs</a>
          </li>

          <li>
            <a href="https://pytorch.org/resources">Resources</a>
          </li>

          <li>
            <a href="https://github.com/pytorch/pytorch">GitHub</a>
          </li>
        </ul>
      </div>
    </div>
  </div>

  <!-- End Mobile Menu -->

  <script type="text/javascript" src="_static/js/vendor/anchor.min.js"></script>

  <script>
    // Once the DOM is ready, activate the PyTorch theme widgets and tidy up
    // the Sphinx-generated markup. Call order is preserved from the theme's
    // expected initialization sequence.
    $(function () {
      // Theme behaviors (globals loaded earlier on the page).
      mobileMenu.bind();
      mobileTOC.bind();
      pytorchAnchors.bind();
      sideMenus.bind();
      scrollToAnchor.bind();
      highlightNavigation.bind();
      mainMenuDropdown.bind();
      filterTags.bind();

      // Drop the empty placeholder tags that Sphinx emits.
      $("[data-tags='null']").remove();

      // Mark links that wrap inline code, since links cannot be created
      // inside code blocks. .closest() maps each matched <span.pre> to its
      // nearest ancestor <a>, so this is equivalent to the per-element loop.
      $("article.pytorch-article a span.pre").closest("a").addClass("has-code");
    });
  </script>
</body>
</html>