


<!DOCTYPE html>
<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
<head>
  <meta charset="utf-8">

  <meta name="viewport" content="width=device-width, initial-scale=1.0">

  <title>torch.jit &mdash; PyTorch master documentation</title>

  <link rel="canonical" href="https://pytorch.org/docs/stable/_modules/torch/jit.html"/>

  <link rel="stylesheet" href="../../_static/css/theme.css" type="text/css" />
  <!-- <link rel="stylesheet" href="../../_static/pygments.css" type="text/css" /> -->
  <!-- KaTeX stylesheet, pinned to a single version. A second, older
       0.10.0-beta copy of katex.min.css was previously loaded as well; it was
       redundant and could conflict with the 0.11.1 rules, so it was removed. -->
  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/katex.min.css" type="text/css" />
  <link rel="stylesheet" href="../../_static/css/jit.css" type="text/css" />
  <link rel="stylesheet" href="../../_static/katex-math.css" type="text/css" />
  <link rel="index" title="Index" href="../../genindex.html" />
  <link rel="search" title="Search" href="../../search.html" />

  <script src="../../_static/js/modernizr.min.js"></script>

  <!-- Preload the theme fonts -->

<link rel="preload" href="../../_static/fonts/FreightSans/freight-sans-book.woff2" as="font" type="font/woff2" crossorigin="anonymous">
<link rel="preload" href="../../_static/fonts/FreightSans/freight-sans-medium.woff2" as="font" type="font/woff2" crossorigin="anonymous">
<link rel="preload" href="../../_static/fonts/IBMPlexMono/IBMPlexMono-Medium.woff2" as="font" type="font/woff2" crossorigin="anonymous">
<link rel="preload" href="../../_static/fonts/FreightSans/freight-sans-bold.woff2" as="font" type="font/woff2" crossorigin="anonymous">
<link rel="preload" href="../../_static/fonts/FreightSans/freight-sans-medium-italic.woff2" as="font" type="font/woff2" crossorigin="anonymous">
<link rel="preload" href="../../_static/fonts/IBMPlexMono/IBMPlexMono-SemiBold.woff2" as="font" type="font/woff2" crossorigin="anonymous">

<!-- Preload the KaTeX fonts. The version here must match the katex.min.css
     version loaded above (0.11.1): a mismatched preload URL is never reused
     by the stylesheet, so each font would be downloaded twice. -->

<link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Math-Italic.woff2" as="font" type="font/woff2" crossorigin="anonymous">
<link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Main-Regular.woff2" as="font" type="font/woff2" crossorigin="anonymous">
<link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Main-Bold.woff2" as="font" type="font/woff2" crossorigin="anonymous">
<link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Size1-Regular.woff2" as="font" type="font/woff2" crossorigin="anonymous">
<link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Size4-Regular.woff2" as="font" type="font/woff2" crossorigin="anonymous">
<link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Size2-Regular.woff2" as="font" type="font/woff2" crossorigin="anonymous">
<link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Size3-Regular.woff2" as="font" type="font/woff2" crossorigin="anonymous">
<link rel="preload" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/fonts/KaTeX_Caligraphic-Regular.woff2" as="font" type="font/woff2" crossorigin="anonymous">
</head>

<!-- Site-wide top navigation bar.
     NOTE(review): this markup is emitted between </head> and <body>; browsers
     re-parent it into <body>, but the generating template should be fixed to
     place it after the <body> tag. Left in place here so this edit stands alone. -->
<div class="container-fluid header-holder tutorials-header" id="header-holder">
  <div class="container">
    <div class="header-container">
      <a class="header-logo" href="https://pytorch.org/" aria-label="PyTorch"></a>

      <div class="main-menu">
        <ul>
          <li>
            <a href="https://pytorch.org/get-started">Get Started</a>
          </li>

          <li>
            <div class="ecosystem-dropdown">
              <a id="dropdownMenuButton" data-toggle="ecosystem-dropdown">
                Ecosystem
              </a>
              <div class="ecosystem-dropdown-menu">
                <!-- fixed: stray doubled quote after the href value -->
                <a class="nav-dropdown-item" href="https://pytorch.org/hub">
                  <span class="dropdown-title">Models (Beta)</span>
                  <p>Discover, publish, and reuse pre-trained models</p>
                </a>
                <a class="nav-dropdown-item" href="https://pytorch.org/ecosystem">
                  <span class="dropdown-title">Tools &amp; Libraries</span>
                  <p>Explore the ecosystem of tools and libraries</p>
                </a>
              </div>
            </div>
          </li>

          <li>
            <a href="https://pytorch.org/mobile">Mobile</a>
          </li>

          <li>
            <a href="https://pytorch.org/blog/">Blog</a>
          </li>

          <li>
            <a href="https://pytorch.org/tutorials">Tutorials</a>
          </li>

          <li class="active">
            <a href="https://pytorch.org/docs/stable/index.html">Docs</a>
          </li>

          <li>
            <div class="resources-dropdown">
              <a id="resourcesDropdownButton" data-toggle="resources-dropdown">
                Resources
              </a>
              <div class="resources-dropdown-menu">
                <!-- fixed: stray doubled quote after the href value -->
                <a class="nav-dropdown-item" href="https://pytorch.org/resources">
                  <span class="dropdown-title">Developer Resources</span>
                  <p>Find resources and get questions answered</p>
                </a>
                <a class="nav-dropdown-item" href="https://pytorch.org/features">
                  <span class="dropdown-title">About</span>
                  <p>Learn about PyTorch’s features and capabilities</p>
                </a>
              </div>
            </div>
          </li>

          <li>
            <a href="https://github.com/pytorch/pytorch">Github</a>
          </li>
        </ul>
      </div>

      <a class="main-menu-open-button" href="#" data-behavior="open-mobile-menu"></a>
    </div>

  </div>
</div>


<body class="pytorch-body">

   

    

    <!-- Mobile-only toggle that shows/hides the left-hand table of contents.
         The anchor's behavior is bound by the theme JS via the data-behavior
         hook; href="#" is kept because the generated theme's CSS/JS target an
         anchor here — NOTE(review): a <button> would be more correct, verify
         against the theme scripts before changing. -->
    <div class="table-of-contents-link-wrapper">
      <span>Table of Contents</span>
      <a href="#" class="toggle-table-of-contents" data-behavior="toggle-table-of-contents"></a>
    </div>

    <!-- Left sidebar: version banner, search form, and the Sphinx-generated
         table of contents (toctree). -->
    <nav data-toggle="wy-nav-shift" class="pytorch-left-menu" id="pytorch-left-menu">
      <div class="pytorch-side-scroll">
        <div class="pytorch-menu pytorch-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
          <div class="pytorch-left-menu-search">

            <!-- fixed: stray space before the closing parenthesis -->
            <div class="version">
              master (1.5.0)
            </div>

<div role="search">
  <form id="rtd-search-form" class="wy-form" action="../../search.html" method="get">
    <!-- placeholder text is not an accessible name; aria-label gives the
         field one without changing the theme's visual layout -->
    <input type="text" name="q" placeholder="Search Docs" aria-label="Search Docs" />
    <input type="hidden" name="check_keywords" value="yes" />
    <input type="hidden" name="area" value="default" />
  </form>
</div>

          </div>

<div>
  <a style="color:#F05732" href="https://pytorch.org/docs/stable/_modules/torch/jit.html">
    You are viewing unstable developer preview docs.
    Click here to view docs for latest stable release.
  </a>
</div>

              <p class="caption"><span class="caption-text">Notes</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../notes/amp_examples.html">Automatic Mixed Precision examples</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../notes/autograd.html">Autograd mechanics</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../notes/broadcasting.html">Broadcasting semantics</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../notes/cpu_threading_torchscript_inference.html">CPU threading and TorchScript inference</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../notes/cuda.html">CUDA semantics</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../notes/ddp.html">Distributed Data Parallel</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../notes/extending.html">Extending PyTorch</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../notes/faq.html">Frequently Asked Questions</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../notes/large_scale_deployments.html">Features for large-scale deployments</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../notes/multiprocessing.html">Multiprocessing best practices</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../notes/randomness.html">Reproducibility</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../notes/serialization.html">Serialization semantics</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../notes/windows.html">Windows FAQ</a></li>
</ul>
<p class="caption"><span class="caption-text">Language Bindings</span></p>
<ul>
<li class="toctree-l1"><a class="reference external" href="https://pytorch.org/cppdocs/">C++ API</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../packages.html">Javadoc</a></li>
</ul>
<p class="caption"><span class="caption-text">Python API</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../torch.html">torch</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../nn.html">torch.nn</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../nn.functional.html">torch.nn.functional</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../tensors.html">torch.Tensor</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../tensor_attributes.html">Tensor Attributes</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../tensor_view.html">Tensor Views</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../autograd.html">torch.autograd</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../cuda.html">torch.cuda</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../amp.html">torch.cuda.amp</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../distributed.html">torch.distributed</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../distributions.html">torch.distributions</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../hub.html">torch.hub</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../jit.html">torch.jit</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../nn.init.html">torch.nn.init</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../onnx.html">torch.onnx</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../optim.html">torch.optim</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../quantization.html">Quantization</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../rpc/index.html">Distributed RPC Framework</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../random.html">torch.random</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../sparse.html">torch.sparse</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../storage.html">torch.Storage</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../bottleneck.html">torch.utils.bottleneck</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../checkpoint.html">torch.utils.checkpoint</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../cpp_extension.html">torch.utils.cpp_extension</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../data.html">torch.utils.data</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../dlpack.html">torch.utils.dlpack</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../model_zoo.html">torch.utils.model_zoo</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../tensorboard.html">torch.utils.tensorboard</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../type_info.html">Type Info</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../named_tensor.html">Named Tensors</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../name_inference.html">Named Tensors operator coverage</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../__config__.html">torch.__config__</a></li>
</ul>
<p class="caption"><span class="caption-text">Libraries</span></p>
<ul>
<li class="toctree-l1"><a class="reference external" href="https://pytorch.org/audio">torchaudio</a></li>
<li class="toctree-l1"><a class="reference external" href="https://pytorch.org/text">torchtext</a></li>
<li class="toctree-l1"><a class="reference external" href="https://pytorch.org/elastic/">TorchElastic</a></li>
<li class="toctree-l1"><a class="reference external" href="https://pytorch.org/serve">TorchServe</a></li>
<!-- fixed: was a plain http:// URL (mixed-content risk on an https page) -->
<li class="toctree-l1"><a class="reference external" href="https://pytorch.org/xla/">PyTorch on XLA Devices</a></li>
</ul>
<p class="caption"><span class="caption-text">Community</span></p>
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../community/contribution_guide.html">PyTorch Contribution Guide</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../community/governance.html">PyTorch Governance</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../community/persons_of_interest.html">PyTorch Governance | Persons of Interest</a></li>
</ul>

        </div>
      </div>
    </nav>

    <div class="pytorch-container">
      <!-- Page-level bar: breadcrumb trail for the current page plus the
           "Shortcuts" toggle. Identical markup to the generated original,
           re-indented with the empty filler lines removed (inter-element
           whitespace collapses, so rendering is unchanged). -->
      <div class="pytorch-page-level-bar" id="pytorch-page-level-bar">
        <div class="pytorch-breadcrumbs-wrapper">
          <div role="navigation" aria-label="breadcrumbs navigation">
            <ul class="pytorch-breadcrumbs">
              <li>
                <a href="../../index.html">
                  Docs
                </a> &gt;
              </li>
              <li><a href="../index.html">Module code</a> &gt;</li>
              <li><a href="../torch.html">torch</a> &gt;</li>
              <li>torch.jit</li>
              <li class="pytorch-breadcrumbs-aside">
              </li>
            </ul>
          </div>
        </div>

        <div class="pytorch-shortcuts-wrapper" id="pytorch-shortcuts-wrapper">
          Shortcuts
        </div>
      </div>

      <section data-toggle="wy-nav-shift" id="pytorch-content-wrap" class="pytorch-content-wrap">
        <div class="pytorch-content-left">

        
          
          <div class="rst-content">
          
            <div role="main" class="main-content" itemscope="itemscope" itemtype="http://schema.org/Article">
             <article itemprop="articleBody" id="pytorch-article" class="pytorch-article">
              
  <h1>Source code for torch.jit</h1><div class="highlight"><pre>
<span></span><span class="kn">import</span> <span class="nn">torch._C</span>
<span class="kn">import</span> <span class="nn">torch._jit_internal</span> <span class="k">as</span> <span class="nn">_jit_internal</span>
<span class="kn">import</span> <span class="nn">torch.jit.annotations</span>
<span class="kn">import</span> <span class="nn">torch.testing</span>
<span class="kn">import</span> <span class="nn">torch.jit._recursive</span>

<span class="kn">from</span> <span class="nn">torch.jit._recursive</span> <span class="kn">import</span> <span class="n">ScriptMethodStub</span>
<span class="kn">from</span> <span class="nn">torch.jit._builtins</span> <span class="kn">import</span> <span class="n">_find_builtin</span><span class="p">,</span> <span class="n">_get_builtin_table</span><span class="p">,</span> <span class="n">_register_builtin</span>  <span class="c1"># noqa</span>
<span class="kn">from</span> <span class="nn">torch._jit_internal</span> <span class="kn">import</span> <span class="n">_qualified_name</span>
<span class="kn">from</span> <span class="nn">torch.autograd</span> <span class="kn">import</span> <span class="n">Variable</span><span class="p">,</span> <span class="n">function</span>
<span class="kn">from</span> <span class="nn">torch.jit.frontend</span> <span class="kn">import</span> <span class="n">get_jit_class_def</span><span class="p">,</span> <span class="n">get_jit_def</span><span class="p">,</span> <span class="n">get_default_args</span>
<span class="kn">from</span> <span class="nn">torch.nn</span> <span class="kn">import</span> <span class="n">Module</span>
<span class="kn">from</span> <span class="nn">torch.serialization</span> <span class="kn">import</span> <span class="n">validate_cuda_device</span>
<span class="kn">from</span> <span class="nn">torch._six</span> <span class="kn">import</span> <span class="n">PY2</span><span class="p">,</span> <span class="n">PY37</span><span class="p">,</span> <span class="n">with_metaclass</span><span class="p">,</span> <span class="n">string_classes</span><span class="p">,</span> <span class="n">get_function_from_type</span>
<span class="kn">from</span> <span class="nn">torch.utils</span> <span class="kn">import</span> <span class="n">set_module</span>

<span class="kn">import</span> <span class="nn">collections</span>
<span class="kn">import</span> <span class="nn">contextlib</span>
<span class="kn">import</span> <span class="nn">copy</span>
<span class="kn">import</span> <span class="nn">functools</span>
<span class="kn">import</span> <span class="nn">inspect</span>
<span class="kn">import</span> <span class="nn">os</span>
<span class="kn">import</span> <span class="nn">pathlib</span>
<span class="kn">import</span> <span class="nn">pickle</span>
<span class="kn">import</span> <span class="nn">re</span>
<span class="kn">import</span> <span class="nn">sys</span>
<span class="kn">import</span> <span class="nn">textwrap</span>
<span class="kn">import</span> <span class="nn">warnings</span>
<span class="kn">import</span> <span class="nn">weakref</span>


<span class="c1"># These are imported so users can access them from the `torch.jit` module</span>
<span class="kn">from</span> <span class="nn">torch._jit_internal</span> <span class="kn">import</span> <span class="n">Final</span><span class="p">,</span> <span class="n">_overload</span><span class="p">,</span> <span class="n">_overload_method</span>
<span class="kn">from</span> <span class="nn">torch._jit_internal</span> <span class="kn">import</span> <span class="n">ignore</span><span class="p">,</span> <span class="n">export</span><span class="p">,</span> <span class="n">unused</span>

<span class="k">def</span> <span class="nf">_parse_env</span><span class="p">(</span><span class="n">name</span><span class="p">,</span> <span class="n">default</span><span class="p">,</span> <span class="n">true_message</span><span class="p">,</span> <span class="n">false_message</span><span class="p">):</span>
    <span class="n">value</span> <span class="o">=</span> <span class="n">os</span><span class="o">.</span><span class="n">environ</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="n">name</span><span class="p">)</span>
    <span class="k">if</span> <span class="n">value</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
        <span class="k">return</span> <span class="n">default</span>
    <span class="k">if</span> <span class="n">value</span><span class="o">.</span><span class="n">lower</span><span class="p">()</span> <span class="ow">in</span> <span class="p">{</span><span class="s1">&#39;1&#39;</span><span class="p">,</span> <span class="s1">&#39;true&#39;</span><span class="p">,</span> <span class="s1">&#39;yes&#39;</span><span class="p">}:</span>
        <span class="k">return</span> <span class="kc">True</span>
    <span class="k">elif</span> <span class="n">value</span><span class="o">.</span><span class="n">lower</span><span class="p">()</span> <span class="ow">in</span> <span class="p">{</span><span class="s1">&#39;0&#39;</span><span class="p">,</span> <span class="s1">&#39;false&#39;</span><span class="p">,</span> <span class="s1">&#39;no&#39;</span><span class="p">}:</span>
        <span class="k">return</span> <span class="kc">False</span>
    <span class="k">if</span> <span class="n">value</span> <span class="o">==</span> <span class="s1">&#39;1v&#39;</span><span class="p">:</span>
        <span class="nb">print</span><span class="p">(</span><span class="n">true_message</span><span class="p">)</span>
        <span class="k">return</span> <span class="kc">True</span>
    <span class="k">elif</span> <span class="n">value</span> <span class="o">==</span> <span class="s1">&#39;0v&#39;</span><span class="p">:</span>
        <span class="nb">print</span><span class="p">(</span><span class="n">false_message</span><span class="p">)</span>
        <span class="k">return</span> <span class="kc">False</span>
    <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;Unknown setting of </span><span class="si">{}</span><span class="s1">. Try using 0 or 1.&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">name</span><span class="p">))</span>


<span class="n">_enabled</span> <span class="o">=</span> <span class="n">_parse_env</span><span class="p">(</span><span class="s1">&#39;PYTORCH_JIT&#39;</span><span class="p">,</span> <span class="kc">True</span><span class="p">,</span> <span class="s2">&quot;&gt; Using PyTorch JIT&quot;</span><span class="p">,</span> <span class="s2">&quot;&gt; PyTorch JIT DISABLED&quot;</span><span class="p">)</span>
<span class="n">_flatten</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_flatten</span>
<span class="n">_unflatten</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_unflatten</span>
<span class="n">_jit_script_class_compile</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_script_class_compile</span>

<span class="c1"># The Python CompilationUnit. All functions and modules defined in Python will</span>
<span class="c1"># live in here. It&#39;s defined in Python because doing in cpp creates static</span>
<span class="c1"># destruction order issues.</span>
<span class="n">_python_cu</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">CompilationUnit</span><span class="p">()</span>

<span class="n">Future</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">Future</span>
<span class="n">set_module</span><span class="p">(</span><span class="n">Future</span><span class="p">,</span> <span class="s2">&quot;torch.jit&quot;</span><span class="p">)</span>
<span class="n">_fork</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">fork</span>
<span class="n">_wait</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">wait</span>

<span class="k">if</span> <span class="n">_enabled</span><span class="p">:</span>
    <span class="n">Attribute</span> <span class="o">=</span> <span class="n">collections</span><span class="o">.</span><span class="n">namedtuple</span><span class="p">(</span><span class="s1">&#39;Attribute&#39;</span><span class="p">,</span> <span class="p">[</span><span class="s1">&#39;value&#39;</span><span class="p">,</span> <span class="s1">&#39;type&#39;</span><span class="p">])</span>
<span class="k">else</span><span class="p">:</span>
    <span class="k">def</span> <span class="nf">Attribute</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="nb">type</span><span class="p">):</span>
        <span class="k">return</span> <span class="n">value</span>

<span class="nd">@contextlib</span><span class="o">.</span><span class="n">contextmanager</span>
<span class="k">def</span> <span class="nf">optimized_execution</span><span class="p">(</span><span class="n">should_optimize</span><span class="p">):</span>
    <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">    A context manager that controls whether the JIT&#39;s executor will run</span>
<span class="sd">    optimizations before executing a function.</span>
<span class="sd">    &quot;&quot;&quot;</span>
    <span class="n">stored_flag</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_get_graph_executor_optimize</span><span class="p">()</span>
    <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_set_graph_executor_optimize</span><span class="p">(</span><span class="n">should_optimize</span><span class="p">)</span>
    <span class="k">try</span><span class="p">:</span>
        <span class="k">yield</span>
    <span class="k">finally</span><span class="p">:</span>
        <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_set_graph_executor_optimize</span><span class="p">(</span><span class="n">stored_flag</span><span class="p">)</span>


<span class="n">DEFAULT_EXTRA_FILES_MAP</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">ExtraFilesMap</span><span class="p">()</span>


<div class="viewcode-block" id="save"><a class="viewcode-back" href="../../jit.html#torch.jit.save">[docs]</a><span class="k">def</span> <span class="nf">save</span><span class="p">(</span><span class="n">m</span><span class="p">,</span> <span class="n">f</span><span class="p">,</span> <span class="n">_extra_files</span><span class="o">=</span><span class="n">DEFAULT_EXTRA_FILES_MAP</span><span class="p">):</span>
    <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Save an offline version of this module for use in a separate process. The saved</span>
<span class="sd">        module serializes all of the methods, submodules, parameters, and attributes of this</span>
<span class="sd">        module. It can be loaded into the C++ API using ``torch::jit::load(filename)`` or into the Python</span>
<span class="sd">        API with :func:`torch.jit.load &lt;torch.jit.load&gt;`.</span>

<span class="sd">        To be able to save a module, it must not make any calls to native Python functions.</span>
<span class="sd">        This means that all submodules must be subclasses of :class:`ScriptModule` as well.</span>

<span class="sd">        .. DANGER::</span>
<span class="sd">           All modules, no matter their device, are always loaded onto the CPU during loading.</span>
<span class="sd">           This is different from :func:`torch.load`&#39;s semantics and may change in the future.</span>

<span class="sd">        Arguments:</span>
<span class="sd">            m: A :class:`ScriptModule` to save.</span>
<span class="sd">            f: A file-like object (has to implement write and flush) or a string</span>
<span class="sd">               containing a file name.</span>
<span class="sd">            _extra_files: Map from filename to contents which will be stored as part of &#39;f&#39;.</span>

<span class="sd">        .. warning::</span>
<span class="sd">            If you are using Python 2, ``torch.jit.save`` does NOT support :any:`StringIO.StringIO`</span>
<span class="sd">            as a valid file-like object. This is because the write method should return</span>
<span class="sd">            the number of bytes written; ``StringIO.write()`` does not do this.</span>

<span class="sd">            Please use something like ``io.BytesIO`` instead.</span>

<span class="sd">        Example:</span>

<span class="sd">        .. testcode::</span>

<span class="sd">            import torch</span>
<span class="sd">            import io</span>

<span class="sd">            class MyModule(torch.nn.Module):</span>
<span class="sd">                def forward(self, x):</span>
<span class="sd">                    return x + 10</span>

<span class="sd">            m = torch.jit.script(MyModule())</span>

<span class="sd">            # Save to file</span>
<span class="sd">            torch.jit.save(m, &#39;scriptmodule.pt&#39;)</span>
<span class="sd">            # This line is equivalent to the previous</span>
<span class="sd">            m.save(&quot;scriptmodule.pt&quot;)</span>

<span class="sd">            # Save to io.BytesIO buffer</span>
<span class="sd">            buffer = io.BytesIO()</span>
<span class="sd">            torch.jit.save(m, buffer)</span>

<span class="sd">            # Save with extra files</span>
<span class="sd">            extra_files = torch._C.ExtraFilesMap()</span>
<span class="sd">            extra_files[&#39;foo.txt&#39;] = &#39;bar&#39;</span>
<span class="sd">            torch.jit.save(m, &#39;scriptmodule.pt&#39;, _extra_files=extra_files)</span>
<span class="sd">    &quot;&quot;&quot;</span>
    <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">f</span><span class="p">,</span> <span class="nb">str</span><span class="p">)</span> <span class="ow">or</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">f</span><span class="p">,</span> <span class="n">pathlib</span><span class="o">.</span><span class="n">Path</span><span class="p">):</span>
        <span class="n">m</span><span class="o">.</span><span class="n">save</span><span class="p">(</span><span class="n">f</span><span class="p">,</span> <span class="n">_extra_files</span><span class="o">=</span><span class="n">_extra_files</span><span class="p">)</span>
    <span class="k">else</span><span class="p">:</span>
        <span class="n">ret</span> <span class="o">=</span> <span class="n">m</span><span class="o">.</span><span class="n">save_to_buffer</span><span class="p">(</span><span class="n">_extra_files</span><span class="o">=</span><span class="n">_extra_files</span><span class="p">)</span>
        <span class="n">f</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="n">ret</span><span class="p">)</span></div>

<!-- Highlighted listing of torch.jit.load(f, map_location=None, _extra_files=DEFAULT_EXTRA_FILES_MAP):
     validates a filename argument (must exist and not be a directory), normalizes map_location
     (string becomes torch.device; otherwise it must be None or a torch.device, else ValueError;
     'cuda*' targets are checked via validate_cuda_device), then imports the serialized IR either
     from a path (torch._C.import_ir_module) or from a file-like object's read() bytes
     (torch._C.import_ir_module_from_buffer) and wraps the resulting C++ module. --><div class="viewcode-block" id="load"><a class="viewcode-back" href="../../jit.html#torch.jit.load">[docs]</a><span class="k">def</span> <span class="nf">load</span><span class="p">(</span><span class="n">f</span><span class="p">,</span> <span class="n">map_location</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">_extra_files</span><span class="o">=</span><span class="n">DEFAULT_EXTRA_FILES_MAP</span><span class="p">):</span>
    <span class="sa">r</span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Load a :class:`ScriptModule` or :class:`ScriptFunction` previously</span>
<span class="sd">        saved with :func:`torch.jit.save &lt;torch.jit.save&gt;`</span>

<span class="sd">        All previously saved modules, no matter their device, are first loaded onto CPU,</span>
<span class="sd">        and then are moved to the devices they were saved from. If this fails (e.g. because</span>
<span class="sd">        the run time system doesn&#39;t have certain devices), an exception is raised.</span>

<span class="sd">        Arguments:</span>
<span class="sd">            f: a file-like object (has to implement read, readline, tell, and seek),</span>
<span class="sd">                or a string containing a file name</span>
<span class="sd">            map_location (string or torch.device): A simplified version of ``map_location`` in</span>
<span class="sd">                ``torch.save`` used to dynamically remap storages to an alternative set of devices.</span>
<span class="sd">            _extra_files (dictionary of filename to content): The extra</span>
<span class="sd">                filenames given in the map would be loaded and their content</span>
<span class="sd">                would be stored in the provided map.</span>

<span class="sd">        Returns:</span>
<span class="sd">            A :class:`ScriptModule` object.</span>

<span class="sd">        Example:</span>

<span class="sd">        .. testcode::</span>

<span class="sd">            import torch</span>
<span class="sd">            import io</span>

<span class="sd">            torch.jit.load(&#39;scriptmodule.pt&#39;)</span>

<span class="sd">            # Load ScriptModule from io.BytesIO object</span>
<span class="sd">            with open(&#39;scriptmodule.pt&#39;, &#39;rb&#39;) as f:</span>
<span class="sd">                buffer = io.BytesIO(f.read())</span>

<span class="sd">            # Load all tensors to the original device</span>
<span class="sd">            torch.jit.load(buffer)</span>

<span class="sd">            # Load all tensors onto CPU, using a device</span>
<span class="sd">            buffer.seek(0)</span>
<span class="sd">            torch.jit.load(buffer, map_location=torch.device(&#39;cpu&#39;))</span>

<span class="sd">            # Load all tensors onto CPU, using a string</span>
<span class="sd">            buffer.seek(0)</span>
<span class="sd">            torch.jit.load(buffer, map_location=&#39;cpu&#39;)</span>

<span class="sd">            # Load with extra files.</span>
<span class="sd">            extra_files = torch._C.ExtraFilesMap()</span>
<span class="sd">            extra_files[&#39;foo.txt&#39;] = &#39;bar&#39;</span>
<span class="sd">            torch.jit.load(&#39;scriptmodule.pt&#39;, _extra_files=extra_files)</span>
<span class="sd">            print(extra_files[&#39;foo.txt&#39;])</span>

<span class="sd">        .. testoutput::</span>
<span class="sd">            :hide:</span>

<span class="sd">            ...</span>

<span class="sd">        .. testcleanup::</span>

<span class="sd">            import os</span>
<span class="sd">            os.remove(&quot;scriptmodule.pt&quot;)</span>
<span class="sd">    &quot;&quot;&quot;</span>
    <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">f</span><span class="p">,</span> <span class="n">string_classes</span><span class="p">):</span>
        <span class="k">if</span> <span class="ow">not</span> <span class="n">os</span><span class="o">.</span><span class="n">path</span><span class="o">.</span><span class="n">exists</span><span class="p">(</span><span class="n">f</span><span class="p">):</span>
            <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;The provided filename </span><span class="si">{}</span><span class="s2"> does not exist&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">f</span><span class="p">))</span>
        <span class="k">if</span> <span class="n">os</span><span class="o">.</span><span class="n">path</span><span class="o">.</span><span class="n">isdir</span><span class="p">(</span><span class="n">f</span><span class="p">):</span>
            <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;The provided filename </span><span class="si">{}</span><span class="s2"> is a directory&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">f</span><span class="p">))</span>
    <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">map_location</span><span class="p">,</span> <span class="n">string_classes</span><span class="p">):</span>
        <span class="n">map_location</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">device</span><span class="p">(</span><span class="n">map_location</span><span class="p">)</span>
    <span class="k">elif</span> <span class="ow">not</span> <span class="p">(</span><span class="n">map_location</span> <span class="ow">is</span> <span class="kc">None</span> <span class="ow">or</span>
              <span class="nb">isinstance</span><span class="p">(</span><span class="n">map_location</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">device</span><span class="p">)):</span>
        <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;map_location should be either None, string or torch.device, &quot;</span>
                         <span class="s2">&quot;but got type: &quot;</span> <span class="o">+</span> <span class="nb">str</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">map_location</span><span class="p">)))</span>
    <span class="k">if</span> <span class="p">(</span><span class="nb">str</span><span class="p">(</span><span class="n">map_location</span><span class="p">)</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s1">&#39;cuda&#39;</span><span class="p">)):</span>
        <span class="n">validate_cuda_device</span><span class="p">(</span><span class="n">map_location</span><span class="p">)</span>

    <span class="n">cu</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">CompilationUnit</span><span class="p">()</span>
    <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">f</span><span class="p">,</span> <span class="nb">str</span><span class="p">)</span> <span class="ow">or</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">f</span><span class="p">,</span> <span class="n">pathlib</span><span class="o">.</span><span class="n">Path</span><span class="p">):</span>
        <span class="n">cpp_module</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">import_ir_module</span><span class="p">(</span><span class="n">cu</span><span class="p">,</span> <span class="n">f</span><span class="p">,</span> <span class="n">map_location</span><span class="p">,</span> <span class="n">_extra_files</span><span class="p">)</span>
    <span class="k">else</span><span class="p">:</span>
        <span class="n">cpp_module</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">import_ir_module_from_buffer</span><span class="p">(</span><span class="n">cu</span><span class="p">,</span> <span class="n">f</span><span class="o">.</span><span class="n">read</span><span class="p">(),</span> <span class="n">map_location</span><span class="p">,</span> <span class="n">_extra_files</span><span class="p">)</span>

    <span class="c1"># TODO: Pretty sure this approach loses ConstSequential status and such</span>
    <span class="k">return</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">_recursive</span><span class="o">.</span><span class="n">wrap_cpp_module</span><span class="p">(</span><span class="n">cpp_module</span><span class="p">)</span></div>

<!-- Highlighted listing of export_opnames(m): a thin wrapper that returns
     torch._C._export_opnames applied to the script module's underlying C++ module (m._c). --><span class="k">def</span> <span class="nf">export_opnames</span><span class="p">(</span><span class="n">m</span><span class="p">):</span>
    <span class="sa">r</span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Returns a list of operator names of a script module and its submodules</span>
<span class="sd">    &quot;&quot;&quot;</span>
    <span class="k">return</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_export_opnames</span><span class="p">(</span><span class="n">m</span><span class="o">.</span><span class="n">_c</span><span class="p">)</span>

<!-- Highlighted listing of _get_trace_graph: ONNX-exporter-internal helper that defaults
     kwargs to {}, promotes a non-tuple args value to a 1-tuple, and delegates to
     ONNXTracedModule(f, ...)(*args, **kwargs). Rendered-docstring typo fixed below:
     "consisting of the both the" is now "consisting of both the". --><span class="k">def</span> <span class="nf">_get_trace_graph</span><span class="p">(</span><span class="n">f</span><span class="p">,</span> <span class="n">args</span><span class="o">=</span><span class="p">(),</span> <span class="n">kwargs</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">_force_outplace</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
                     <span class="n">return_inputs</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">_return_inputs_states</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
    <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">    .. warning::</span>
<span class="sd">        This function is internal-only and should only be used by the ONNX</span>
<span class="sd">        exporter. If you are trying to get a graph through tracing, please go</span>
<span class="sd">        through the public API instead::</span>

<span class="sd">            trace = torch.jit.trace(nn.LSTMCell(), (input, hidden))</span>
<span class="sd">            trace_graph = trace.graph</span>

<span class="sd">    Trace a function or model, returning a tuple consisting of both the</span>
<span class="sd">    *trace* of an execution, as well as the original return value. If return_inputs,</span>
<span class="sd">    also returns the trace inputs as part of the tuple</span>

<span class="sd">    Tracing is guaranteed not to change the semantics of the function/module</span>
<span class="sd">    that is traced.</span>

<span class="sd">    Arguments:</span>
<span class="sd">        f (torch.nn.Module or function): the function or module</span>
<span class="sd">            to be traced.</span>
<span class="sd">        args (tuple or Tensor): the positional arguments to pass to the</span>
<span class="sd">            function/module to be traced.  A non-tuple is assumed to</span>
<span class="sd">            be a single positional argument to be passed to the model.</span>
<span class="sd">        kwargs (dict): the keyword arguments to pass to the function/module</span>
<span class="sd">            to be traced.</span>

<span class="sd">    Example (trace a cell):</span>

<span class="sd">    .. testcode::</span>

<span class="sd">        trace = torch.jit.trace(nn.LSTMCell(), (input, hidden))</span>
    <span class="sd">&quot;&quot;&quot;</span>
    <span class="k">if</span> <span class="n">kwargs</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
        <span class="n">kwargs</span> <span class="o">=</span> <span class="p">{}</span>
    <span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">args</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">):</span>
        <span class="n">args</span> <span class="o">=</span> <span class="p">(</span><span class="n">args</span><span class="p">,)</span>
    <span class="n">outs</span> <span class="o">=</span> <span class="n">ONNXTracedModule</span><span class="p">(</span><span class="n">f</span><span class="p">,</span> <span class="n">_force_outplace</span><span class="p">,</span> <span class="n">return_inputs</span><span class="p">,</span> <span class="n">_return_inputs_states</span><span class="p">)(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
    <span class="k">return</span> <span class="n">outs</span>


<!-- Highlighted listing of _unique_state_dict(module, keep_vars=False): always fetches
     state_dict(keep_vars=True) so duplicated parameters/buffers can be deduplicated by
     object identity (id of the original Parameter/Buffer, not of a fresh detach()),
     then detaches each surviving value unless keep_vars is requested. --><span class="k">def</span> <span class="nf">_unique_state_dict</span><span class="p">(</span><span class="n">module</span><span class="p">,</span> <span class="n">keep_vars</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
    <span class="c1"># since Parameter.detach() always creates a new torch.Tensor instance,</span>
    <span class="c1"># id(v) doesn&#39;t work with it. So we always get the Parameter or Buffer</span>
    <span class="c1"># as values, and deduplicate the params using Parameters and Buffers</span>
    <span class="n">state_dict</span> <span class="o">=</span> <span class="n">module</span><span class="o">.</span><span class="n">state_dict</span><span class="p">(</span><span class="n">keep_vars</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span>
    <span class="n">filtered_dict</span> <span class="o">=</span> <span class="nb">type</span><span class="p">(</span><span class="n">state_dict</span><span class="p">)()</span>
    <span class="n">seen_ids</span> <span class="o">=</span> <span class="nb">set</span><span class="p">()</span>
    <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">state_dict</span><span class="o">.</span><span class="n">items</span><span class="p">():</span>
        <span class="k">if</span> <span class="nb">id</span><span class="p">(</span><span class="n">v</span><span class="p">)</span> <span class="ow">in</span> <span class="n">seen_ids</span><span class="p">:</span>
            <span class="k">continue</span>
        <span class="n">seen_ids</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="nb">id</span><span class="p">(</span><span class="n">v</span><span class="p">))</span>
        <span class="k">if</span> <span class="n">keep_vars</span><span class="p">:</span>
            <span class="n">filtered_dict</span><span class="p">[</span><span class="n">k</span><span class="p">]</span> <span class="o">=</span> <span class="n">v</span>
        <span class="k">else</span><span class="p">:</span>
            <span class="n">filtered_dict</span><span class="p">[</span><span class="n">k</span><span class="p">]</span> <span class="o">=</span> <span class="n">v</span><span class="o">.</span><span class="n">detach</span><span class="p">()</span>
    <span class="k">return</span> <span class="n">filtered_dict</span>


<!-- Highlighted listing of _create_interpreter_name_lookup_fn(frames_up=1): returns a
     closure that walks frames_up+1 frames up the call stack, then searches that frame's
     locals and globals for a tensor identical (is) to var, returning its variable name.
     Names equal to 'self' and unmatched tensors both yield the empty string. --><span class="k">def</span> <span class="nf">_create_interpreter_name_lookup_fn</span><span class="p">(</span><span class="n">frames_up</span><span class="o">=</span><span class="mi">1</span><span class="p">):</span>
    <span class="k">def</span> <span class="nf">_get_interpreter_name_for_var</span><span class="p">(</span><span class="n">var</span><span class="p">):</span>
        <span class="n">frame</span> <span class="o">=</span> <span class="n">inspect</span><span class="o">.</span><span class="n">currentframe</span><span class="p">()</span>
        <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span>
        <span class="k">while</span> <span class="n">i</span> <span class="o">&lt;</span> <span class="n">frames_up</span> <span class="o">+</span> <span class="mi">1</span><span class="p">:</span>
            <span class="n">frame</span> <span class="o">=</span> <span class="n">frame</span><span class="o">.</span><span class="n">f_back</span>
            <span class="n">i</span> <span class="o">+=</span> <span class="mi">1</span>

        <span class="n">f_locals</span> <span class="o">=</span> <span class="n">frame</span><span class="o">.</span><span class="n">f_locals</span>
        <span class="n">f_globals</span> <span class="o">=</span> <span class="n">frame</span><span class="o">.</span><span class="n">f_globals</span>

        <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">f_locals</span><span class="o">.</span><span class="n">items</span><span class="p">():</span>
            <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">v</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">Tensor</span><span class="p">)</span> <span class="ow">and</span> <span class="n">var</span> <span class="ow">is</span> <span class="n">v</span><span class="p">:</span>
                <span class="k">return</span> <span class="n">k</span> <span class="k">if</span> <span class="n">k</span> <span class="o">!=</span> <span class="s1">&#39;self&#39;</span> <span class="k">else</span> <span class="s1">&#39;&#39;</span>
        <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">f_globals</span><span class="o">.</span><span class="n">items</span><span class="p">():</span>
            <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">v</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">Tensor</span><span class="p">)</span> <span class="ow">and</span> <span class="n">var</span> <span class="ow">is</span> <span class="n">v</span><span class="p">:</span>
                <span class="k">return</span> <span class="n">k</span> <span class="k">if</span> <span class="n">k</span> <span class="o">!=</span> <span class="s1">&#39;self&#39;</span> <span class="k">else</span> <span class="s1">&#39;&#39;</span>
        <span class="k">return</span> <span class="s1">&#39;&#39;</span>
    <span class="k">return</span> <span class="n">_get_interpreter_name_for_var</span>


<!-- Highlighted listing of ONNXTracedModule: wraps a module/callable so that
     torch._C._create_graph_by_tracing can trace it; the inner wrapper() records cloned
     inputs, optional (inputs, states) pairs, and outputs into closure lists so forward()
     can return (graph, out[, extra]) per the _return_inputs / _return_inputs_states
     flags. Rendered-comment typo fixed below: "avoid a special casing functions" is now
     "avoid special-casing functions". --><span class="k">class</span> <span class="nc">ONNXTracedModule</span><span class="p">(</span><span class="n">Module</span><span class="p">):</span>
    <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">inner</span><span class="p">,</span> <span class="n">force_outplace</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">return_inputs</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">return_inputs_states</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
        <span class="nb">super</span><span class="p">(</span><span class="n">ONNXTracedModule</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">()</span>
        <span class="c1"># inner may be a Module, or it may be an arbitrary callable</span>
        <span class="c1"># If it&#39;s a Module, we get its parameters automatically, which lets</span>
        <span class="c1"># us avoid special-casing functions versus modules.</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">inner</span> <span class="o">=</span> <span class="n">inner</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_force_outplace</span> <span class="o">=</span> <span class="n">force_outplace</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_return_inputs</span> <span class="o">=</span> <span class="n">return_inputs</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_return_inputs_states</span> <span class="o">=</span> <span class="n">return_inputs_states</span>

    <span class="k">def</span> <span class="nf">forward</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">):</span>
        <span class="n">in_vars</span><span class="p">,</span> <span class="n">in_desc</span> <span class="o">=</span> <span class="n">_flatten</span><span class="p">(</span><span class="n">args</span><span class="p">)</span>
        <span class="c1"># NOTE: use full state, because we need it for BatchNorm export</span>
        <span class="c1"># This differs from the compiler path, which doesn&#39;t support it at the moment.</span>
        <span class="n">module_state</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">_unique_state_dict</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">keep_vars</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span><span class="o">.</span><span class="n">values</span><span class="p">())</span>

        <span class="n">ret_inputs</span> <span class="o">=</span> <span class="p">[]</span>
        <span class="n">inputs_states</span> <span class="o">=</span> <span class="p">[]</span>
        <span class="n">outs</span> <span class="o">=</span> <span class="p">[]</span>

        <span class="k">def</span> <span class="nf">wrapper</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">):</span>
            <span class="n">trace_inputs</span> <span class="o">=</span> <span class="n">_unflatten</span><span class="p">(</span><span class="n">args</span><span class="p">[:</span><span class="nb">len</span><span class="p">(</span><span class="n">in_vars</span><span class="p">)],</span> <span class="n">in_desc</span><span class="p">)</span>

            <span class="n">ret_inputs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="nb">tuple</span><span class="p">(</span><span class="n">x</span><span class="o">.</span><span class="n">clone</span><span class="p">(</span><span class="n">memory_format</span><span class="o">=</span><span class="n">torch</span><span class="o">.</span><span class="n">preserve_format</span><span class="p">)</span> <span class="k">for</span> <span class="n">x</span> <span class="ow">in</span> <span class="n">args</span><span class="p">))</span>
            <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_return_inputs_states</span><span class="p">:</span>
                <span class="n">inputs_states</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">_unflatten</span><span class="p">(</span><span class="n">args</span><span class="p">[:</span><span class="nb">len</span><span class="p">(</span><span class="n">in_vars</span><span class="p">)],</span> <span class="n">in_desc</span><span class="p">))</span>
            <span class="n">outs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">inner</span><span class="p">(</span><span class="o">*</span><span class="n">trace_inputs</span><span class="p">))</span>
            <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_return_inputs_states</span><span class="p">:</span>
                <span class="n">inputs_states</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">=</span> <span class="p">(</span><span class="n">inputs_states</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="n">trace_inputs</span><span class="p">)</span>
            <span class="n">out_vars</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">_flatten</span><span class="p">(</span><span class="n">outs</span><span class="p">)</span>
            <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">out_vars</span><span class="p">)</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
                <span class="k">return</span> <span class="n">out_vars</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
            <span class="k">else</span><span class="p">:</span>
                <span class="k">return</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">out_vars</span><span class="p">)</span>

        <span class="n">graph</span><span class="p">,</span> <span class="n">out</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_create_graph_by_tracing</span><span class="p">(</span>
            <span class="n">wrapper</span><span class="p">,</span>
            <span class="n">in_vars</span> <span class="o">+</span> <span class="n">module_state</span><span class="p">,</span>
            <span class="n">_create_interpreter_name_lookup_fn</span><span class="p">(),</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">_force_outplace</span><span class="p">,</span>
        <span class="p">)</span>

        <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_return_inputs</span><span class="p">:</span>
            <span class="k">return</span> <span class="n">graph</span><span class="p">,</span> <span class="n">outs</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="n">ret_inputs</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
        <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_return_inputs_states</span><span class="p">:</span>
            <span class="k">return</span> <span class="n">graph</span><span class="p">,</span> <span class="n">outs</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="n">inputs_states</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
        <span class="k">else</span><span class="p">:</span>
            <span class="k">return</span> <span class="n">graph</span><span class="p">,</span> <span class="n">outs</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>


<!-- Highlighted listing of _clone_inputs(args): maps clone_input over every tensor in a
     nested structure via function._nested_map; tensors are detach()+clone()d with their
     requires_grad flag restored, None passes through, and non-tensor leaves fall through
     to a plain .clone() call. --><span class="k">def</span> <span class="nf">_clone_inputs</span><span class="p">(</span><span class="n">args</span><span class="p">):</span>
    <span class="k">def</span> <span class="nf">clone_input</span><span class="p">(</span><span class="n">a</span><span class="p">):</span>
        <span class="k">if</span> <span class="n">a</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
            <span class="k">return</span> <span class="kc">None</span>
        <span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">a</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">Tensor</span><span class="p">):</span>
            <span class="c1"># TODO: figure out one liner to .clone() and set requires_grad</span>
            <span class="n">v</span> <span class="o">=</span> <span class="n">a</span><span class="o">.</span><span class="n">detach</span><span class="p">()</span><span class="o">.</span><span class="n">clone</span><span class="p">(</span><span class="n">memory_format</span><span class="o">=</span><span class="n">torch</span><span class="o">.</span><span class="n">preserve_format</span><span class="p">)</span><span class="o">.</span><span class="n">requires_grad_</span><span class="p">(</span><span class="n">a</span><span class="o">.</span><span class="n">requires_grad</span><span class="p">)</span>
            <span class="k">if</span> <span class="n">a</span><span class="o">.</span><span class="n">grad</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
                <span class="n">v</span><span class="o">.</span><span class="n">grad</span> <span class="o">=</span> <span class="n">clone_input</span><span class="p">(</span><span class="n">v</span><span class="o">.</span><span class="n">grad</span><span class="p">)</span><!-- NOTE(review): the underlying source passes v.grad (the fresh clone's grad) to clone_input even though the guard checks a.grad; looks suspicious, verify upstream. -->
            <span class="k">return</span> <span class="n">v</span>
        <span class="k">else</span><span class="p">:</span>
            <span class="k">return</span> <span class="n">a</span><span class="o">.</span><span class="n">clone</span><span class="p">(</span><span class="n">memory_format</span><span class="o">=</span><span class="n">torch</span><span class="o">.</span><span class="n">preserve_format</span><span class="p">)</span>
    <span class="k">return</span> <span class="n">function</span><span class="o">.</span><span class="n">_nested_map</span><span class="p">(</span><span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">Tensor</span><span class="p">),</span>
                                <span class="n">clone_input</span><span class="p">,</span> <span class="n">condition_msg</span><span class="o">=</span><span class="s2">&quot;tensors&quot;</span><span class="p">)(</span><span class="n">args</span><span class="p">)</span>


<!-- Developer-only debug switches read from the environment (PYTORCH_JIT_TIME /
     PYTORCH_JIT_DISABLE / PYTORCH_JIT_STATS); os.environ.get returns the raw string
     when set, so any non-empty value is truthy. --><span class="c1"># This is purely for developer debugging.  We are not going to advertise it.</span>
<span class="n">_JIT_TIME</span> <span class="o">=</span> <span class="n">os</span><span class="o">.</span><span class="n">environ</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">&#39;PYTORCH_JIT_TIME&#39;</span><span class="p">,</span> <span class="kc">False</span><span class="p">)</span>  <span class="c1"># CUDA-only timing</span>
<span class="n">_JIT_DISABLE</span> <span class="o">=</span> <span class="n">os</span><span class="o">.</span><span class="n">environ</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">&#39;PYTORCH_JIT_DISABLE&#39;</span><span class="p">,</span> <span class="kc">False</span><span class="p">)</span>
<span class="n">_JIT_STATS</span> <span class="o">=</span> <span class="n">os</span><span class="o">.</span><span class="n">environ</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">&#39;PYTORCH_JIT_STATS&#39;</span><span class="p">,</span> <span class="kc">False</span><span class="p">)</span>


<!-- Highlighted listing of the _time context manager: brackets the body with CUDA events
     on the current stream and prints elapsed milliseconds in a finally block; it yields
     without timing when CUDA is unavailable, or when both _JIT_TIME and the time flag
     are falsy. --><span class="nd">@contextlib</span><span class="o">.</span><span class="n">contextmanager</span>
<span class="k">def</span> <span class="nf">_time</span><span class="p">(</span><span class="n">trace_name</span><span class="p">,</span> <span class="n">name</span><span class="p">,</span> <span class="n">time</span><span class="o">=</span><span class="kc">True</span><span class="p">):</span>
    <span class="k">if</span> <span class="p">(</span><span class="ow">not</span> <span class="n">_JIT_TIME</span> <span class="ow">and</span> <span class="ow">not</span> <span class="n">time</span><span class="p">)</span> <span class="ow">or</span> <span class="ow">not</span> <span class="n">torch</span><span class="o">.</span><span class="n">cuda</span><span class="o">.</span><span class="n">is_available</span><span class="p">():</span>
        <span class="k">yield</span>
        <span class="k">return</span>
    <span class="n">stream</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">cuda</span><span class="o">.</span><span class="n">current_stream</span><span class="p">()</span>
    <span class="n">start</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">cuda</span><span class="o">.</span><span class="n">Event</span><span class="p">(</span><span class="n">enable_timing</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span>
    <span class="n">end</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">cuda</span><span class="o">.</span><span class="n">Event</span><span class="p">(</span><span class="n">enable_timing</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span>
    <span class="n">stream</span><span class="o">.</span><span class="n">record_event</span><span class="p">(</span><span class="n">start</span><span class="p">)</span>
    <span class="k">try</span><span class="p">:</span>
        <span class="k">yield</span>
    <span class="k">finally</span><span class="p">:</span>
        <span class="n">stream</span><span class="o">.</span><span class="n">record_event</span><span class="p">(</span><span class="n">end</span><span class="p">)</span>
        <span class="n">end</span><span class="o">.</span><span class="n">synchronize</span><span class="p">()</span>
        <span class="nb">print</span><span class="p">(</span><span class="s2">&quot;</span><span class="si">{}</span><span class="s2"> </span><span class="si">{}</span><span class="s2"> time: </span><span class="si">{}</span><span class="s2"> ms&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">trace_name</span><span class="p">,</span> <span class="n">name</span><span class="p">,</span> <span class="n">start</span><span class="o">.</span><span class="n">elapsed_time</span><span class="p">(</span><span class="n">end</span><span class="p">)))</span>


<span class="k">def</span> <span class="nf">verify</span><span class="p">(</span><span class="n">model</span><span class="p">,</span> <span class="n">args</span><span class="p">,</span> <span class="n">loss_fn</span><span class="o">=</span><span class="n">torch</span><span class="o">.</span><span class="n">sum</span><span class="p">,</span> <span class="n">devices</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
    <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">    Verify that a JIT compiled model has the same behavior as its uncompiled</span>
<span class="sd">    version along with its backwards pass.  If your model returns multiple</span>
<span class="sd">    outputs, you must also specify a `loss_fn` to produce a loss for which</span>
<span class="sd">    the backwards will be computed.</span>

<span class="sd">    This function has side-effects (e.g., it executes your model / saves and loads</span>
<span class="sd">    parameters), so don&#39;t expect the model to come out exactly the same as what</span>
<span class="sd">    you passed in.</span>

<span class="sd">    Arguments:</span>
<span class="sd">        model (compiled torch.nn.Module or function): the module/function to be</span>
<span class="sd">            verified.  The module/function definition MUST have been decorated with</span>
<span class="sd">            `@torch.jit.compile`.</span>
<span class="sd">        args (tuple or Tensor): the positional arguments to pass to the</span>
<span class="sd">            compiled function/module to be verified.  A non-tuple is assumed to</span>
<span class="sd">            be a single positional argument to be passed to the model.</span>
<span class="sd">        loss_fn (function, optional): the loss function to be applied to</span>
<span class="sd">            the output of the model, before backwards is invoked.  By default,</span>
<span class="sd">            we assume that a model returns a single result, and we :func:`torch.sum`</span>
<span class="sd">            before calling backwards; if this is inappropriate, you can pass your</span>
<span class="sd">            own loss function.  Note that if a model returns a tuple of results,</span>
<span class="sd">            these are passed as separate positional arguments to `loss_fn`.</span>
<span class="sd">        devices (iterable of device IDs, optional): the GPU devices which the</span>
<span class="sd">            compiled module will be run on.  This determines the RNG state we</span>
<span class="sd">            must save when running both compiled and uncompiled versions of the model.</span>
<span class="sd">    &quot;&quot;&quot;</span>
    <span class="c1"># TODO: In principle, we track device information in our trace, so it</span>
    <span class="c1"># should be possible to check if our execution actually obeyed the &#39;devices&#39;</span>
    <span class="c1"># the user provided.</span>

    <span class="c1"># TODO: Consider adding a utility function to torch.jit to test</span>
    <span class="c1"># for this case</span>
    <span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">model</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">CompiledFunction</span><span class="p">):</span>
        <span class="k">raise</span> <span class="ne">TypeError</span><span class="p">(</span><span class="s2">&quot;Cannot verify an uncompiled module.  Add @torch.jit.compile to compile it&quot;</span><span class="p">)</span>
    <span class="n">is_module</span> <span class="o">=</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">model</span><span class="p">,</span> <span class="n">Module</span><span class="p">)</span>

    <span class="c1"># A bare (non-tuple) argument is treated as a single positional argument.</span>
    <span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">args</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">):</span>
        <span class="n">args</span> <span class="o">=</span> <span class="p">(</span><span class="n">args</span><span class="p">,)</span>

    <span class="c1"># Snapshot inputs (and, for modules, the state dict) so the second run can</span>
    <span class="c1"># start from identical state after the first run mutates things.</span>
    <span class="n">saved_args</span> <span class="o">=</span> <span class="n">_clone_inputs</span><span class="p">(</span><span class="n">args</span><span class="p">)</span>
    <span class="k">if</span> <span class="n">is_module</span><span class="p">:</span>
        <span class="n">saved_state</span> <span class="o">=</span> <span class="n">copy</span><span class="o">.</span><span class="n">deepcopy</span><span class="p">(</span><span class="n">model</span><span class="o">.</span><span class="n">state_dict</span><span class="p">())</span>

    <span class="c1"># Run one forward + backward pass and return detached clones of the outputs</span>
    <span class="c1"># and the gradients w.r.t. the flattened inputs (and module parameters).</span>
    <span class="c1"># force_trace clears the trace cache so the model is re-traced from scratch;</span>
    <span class="c1"># assert_compiled raises if the cached compiled trace was not actually hit.</span>
    <span class="k">def</span> <span class="nf">run_fwd_bwd</span><span class="p">(</span><span class="n">args</span><span class="p">,</span> <span class="n">force_trace</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">assert_compiled</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
        <span class="n">params</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">model</span><span class="o">.</span><span class="n">parameters</span><span class="p">())</span> <span class="k">if</span> <span class="n">is_module</span> <span class="k">else</span> <span class="p">[]</span>
        <span class="n">in_vars</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">_flatten</span><span class="p">((</span><span class="n">args</span><span class="p">,</span> <span class="n">params</span><span class="p">))</span>
        <span class="c1"># We use a special API to reset the trace and compile it from scratch.</span>
        <span class="n">compiled_fn</span> <span class="o">=</span> <span class="n">model</span>
        <span class="k">if</span> <span class="n">force_trace</span><span class="p">:</span>
            <span class="n">compiled_fn</span><span class="o">.</span><span class="n">clear_cache</span><span class="p">()</span>
        <span class="k">if</span> <span class="n">assert_compiled</span><span class="p">:</span>
            <span class="n">hits</span> <span class="o">=</span> <span class="n">compiled_fn</span><span class="o">.</span><span class="n">hits</span>
        <span class="n">out</span> <span class="o">=</span> <span class="n">model</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">)</span>
        <span class="k">if</span> <span class="n">assert_compiled</span> <span class="ow">and</span> <span class="n">compiled_fn</span><span class="o">.</span><span class="n">hits</span> <span class="o">==</span> <span class="n">hits</span><span class="p">:</span>
            <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s2">&quot;failed to use the compiled function&quot;</span><span class="p">)</span>
        <span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">out</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">):</span>
            <span class="n">out</span> <span class="o">=</span> <span class="p">(</span><span class="n">out</span><span class="p">,</span> <span class="p">)</span>
        <span class="k">if</span> <span class="n">loss_fn</span> <span class="o">==</span> <span class="n">torch</span><span class="o">.</span><span class="n">sum</span> <span class="ow">and</span> <span class="nb">len</span><span class="p">(</span><span class="n">out</span><span class="p">)</span> <span class="o">!=</span> <span class="mi">1</span><span class="p">:</span>
            <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">((</span><span class="s2">&quot;Model returns </span><span class="si">{}</span><span class="s2"> outputs, but default loss function &quot;</span>
                              <span class="s2">&quot;(torch.sum) can only handle a single output&quot;</span><span class="p">)</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">out</span><span class="p">)))</span>
        <span class="n">out_vars</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">_flatten</span><span class="p">(</span><span class="n">out</span><span class="p">)</span>
        <span class="c1"># Detach and clone so a later run cannot alias or mutate these results.</span>
        <span class="n">saved_outs</span> <span class="o">=</span> <span class="p">[</span><span class="n">v</span><span class="o">.</span><span class="n">detach</span><span class="p">()</span><span class="o">.</span><span class="n">clone</span><span class="p">(</span><span class="n">memory_format</span><span class="o">=</span><span class="n">torch</span><span class="o">.</span><span class="n">preserve_format</span><span class="p">)</span> <span class="k">for</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">out_vars</span><span class="p">]</span>
        <span class="n">loss</span> <span class="o">=</span> <span class="n">loss_fn</span><span class="p">(</span><span class="o">*</span><span class="n">out</span><span class="p">)</span>
        <span class="n">grads</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">autograd</span><span class="o">.</span><span class="n">grad</span><span class="p">([</span><span class="n">loss</span><span class="p">],</span> <span class="n">in_vars</span><span class="p">)</span>
        <span class="c1"># TODO: I&#39;m not sure if the clone here is necessary but it is safer</span>
        <span class="n">saved_grads</span> <span class="o">=</span> <span class="p">[</span><span class="n">v</span><span class="o">.</span><span class="n">detach</span><span class="p">()</span><span class="o">.</span><span class="n">clone</span><span class="p">(</span><span class="n">memory_format</span><span class="o">=</span><span class="n">torch</span><span class="o">.</span><span class="n">preserve_format</span><span class="p">)</span> <span class="k">for</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">grads</span><span class="p">]</span>
        <span class="k">return</span> <span class="p">(</span><span class="n">saved_outs</span><span class="p">,</span> <span class="n">saved_grads</span><span class="p">)</span>

    <span class="c1"># Fork the RNG state so the freshly traced and compiled runs consume</span>
    <span class="c1"># identical random streams on the given devices.</span>
    <span class="k">with</span> <span class="n">torch</span><span class="o">.</span><span class="n">random</span><span class="o">.</span><span class="n">fork_rng</span><span class="p">(</span><span class="n">devices</span><span class="p">,</span> <span class="n">_caller</span><span class="o">=</span><span class="s2">&quot;torch.jit.verify&quot;</span><span class="p">):</span>
        <span class="n">uncompiled_outs</span><span class="p">,</span> <span class="n">uncompiled_grads</span> <span class="o">=</span> <span class="n">run_fwd_bwd</span><span class="p">(</span><span class="n">args</span><span class="p">,</span> <span class="n">force_trace</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span>
        <span class="k">assert</span> <span class="n">model</span><span class="o">.</span><span class="n">has_trace_for</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">)</span>

    <span class="c1"># Restore the module state saved earlier so the compiled run starts from</span>
    <span class="c1"># the same parameters as the first run did.</span>
    <span class="k">if</span> <span class="n">is_module</span><span class="p">:</span>
        <span class="n">model</span><span class="o">.</span><span class="n">load_state_dict</span><span class="p">(</span><span class="n">saved_state</span><span class="p">)</span>
    <span class="n">compiled_outs</span><span class="p">,</span> <span class="n">compiled_grads</span> <span class="o">=</span> <span class="n">run_fwd_bwd</span><span class="p">(</span><span class="n">args</span><span class="p">,</span> <span class="n">assert_compiled</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span>

    <span class="c1"># Both outputs and input gradients must agree between the two runs.</span>
    <span class="n">_verify_equal</span><span class="p">(</span><span class="n">uncompiled_outs</span><span class="p">,</span> <span class="n">compiled_outs</span><span class="p">)</span>
    <span class="n">_verify_equal</span><span class="p">(</span><span class="n">uncompiled_grads</span><span class="p">,</span> <span class="n">compiled_grads</span><span class="p">)</span>


<span class="k">def</span> <span class="nf">_verify_equal</span><span class="p">(</span><span class="n">xs</span><span class="p">,</span> <span class="n">ys</span><span class="p">):</span>
    <span class="c1"># Elementwise max-abs-difference check with a fixed 1e-6 tolerance.</span>
    <span class="c1"># NOTE(review): zip truncates to the shorter sequence, so a length</span>
    <span class="c1"># mismatch between xs and ys passes silently here.</span>
    <span class="k">for</span> <span class="n">x</span><span class="p">,</span> <span class="n">y</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">xs</span><span class="p">,</span> <span class="n">ys</span><span class="p">):</span>
        <span class="k">if</span> <span class="n">x</span><span class="o">.</span><span class="n">sub</span><span class="p">(</span><span class="n">y</span><span class="p">)</span><span class="o">.</span><span class="n">abs</span><span class="p">()</span><span class="o">.</span><span class="n">max</span><span class="p">()</span> <span class="o">&gt;</span> <span class="mf">1e-6</span><span class="p">:</span>
            <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s2">&quot;JIT and real computation mismatch&quot;</span><span class="p">)</span>


<span class="k">def</span> <span class="nf">indent</span><span class="p">(</span><span class="n">s</span><span class="p">):</span>
    <span class="c1"># Prefix every line of s with a tab, used to nest multi-line details</span>
    <span class="c1"># inside larger error messages.  An empty string stays empty.</span>
    <span class="k">return</span> <span class="s1">&#39;</span><span class="se">\n</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">join</span><span class="p">([</span><span class="s1">&#39;</span><span class="se">\t</span><span class="s1">&#39;</span> <span class="o">+</span> <span class="n">line</span> <span class="k">for</span> <span class="n">line</span> <span class="ow">in</span> <span class="n">s</span><span class="o">.</span><span class="n">splitlines</span><span class="p">()])</span>


<span class="c1"># Raised when a re-traced graph disagrees with the original trace.  The</span>
<span class="c1"># message aggregates an optional extra note, a graph diff report, and a</span>
<span class="c1"># report on Tensor-valued Constant nodes that changed between invocations;</span>
<span class="c1"># any of the three sections may be omitted by passing None.</span>
<span class="k">class</span> <span class="nc">TracingCheckError</span><span class="p">(</span><span class="ne">Exception</span><span class="p">):</span>
    <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">graph_diff_error</span><span class="p">,</span> <span class="n">tensor_compare_error</span><span class="p">,</span> <span class="n">extra_msg</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
        <span class="c1"># Build the message incrementally; each section is indented one tab.</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">message</span> <span class="o">=</span> <span class="s1">&#39;Tracing failed sanity checks!</span><span class="se">\n</span><span class="s1">&#39;</span>
        <span class="k">if</span> <span class="n">extra_msg</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">message</span> <span class="o">+=</span> <span class="n">extra_msg</span> <span class="o">+</span> <span class="s1">&#39;</span><span class="se">\n</span><span class="s1">&#39;</span>
        <span class="k">if</span> <span class="n">graph_diff_error</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">message</span> <span class="o">+=</span> <span class="s1">&#39;ERROR: Graphs differed across invocations!</span><span class="se">\n</span><span class="s1">&#39;</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">message</span> <span class="o">+=</span> <span class="n">indent</span><span class="p">(</span><span class="n">graph_diff_error</span><span class="p">)</span> <span class="o">+</span> <span class="s1">&#39;</span><span class="se">\n</span><span class="s1">&#39;</span>
        <span class="k">if</span> <span class="n">tensor_compare_error</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">message</span> <span class="o">+=</span> <span class="s1">&#39;ERROR: Tensor-valued Constant nodes differed in value &#39;</span> \
                            <span class="s1">&#39;across invocations. This often indicates that the tracer has&#39;</span> \
                            <span class="s1">&#39; encountered untraceable code.</span><span class="se">\n</span><span class="s1">&#39;</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">message</span> <span class="o">+=</span> <span class="n">indent</span><span class="p">(</span><span class="n">tensor_compare_error</span><span class="p">)</span> <span class="o">+</span> <span class="s1">&#39;</span><span class="se">\n</span><span class="s1">&#39;</span>
        <span class="nb">super</span><span class="p">(</span><span class="n">TracingCheckError</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">message</span><span class="p">)</span>


<span class="c1"># Check the traced module against a set of user-provided validation inputs</span>
<span class="nd">@torch</span><span class="o">.</span><span class="n">no_grad</span><span class="p">()</span>
<span class="k">def</span> <span class="nf">_check_trace</span><span class="p">(</span><span class="n">check_inputs</span><span class="p">,</span> <span class="n">func</span><span class="p">,</span> <span class="n">traced_func</span><span class="p">,</span> <span class="n">check_tolerance</span><span class="p">,</span>
                 <span class="n">force_outplace</span><span class="p">,</span> <span class="n">is_trace_module</span><span class="p">,</span> <span class="n">_module_class</span><span class="p">):</span>
    <span class="c1"># Note: tracing is independent of optimizations, which consume the trace</span>
    <span class="k">for</span> <span class="n">inputs</span> <span class="ow">in</span> <span class="n">check_inputs</span><span class="p">:</span>

        <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">inputs</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">Tensor</span><span class="p">):</span>
            <span class="n">inputs</span> <span class="o">=</span> <span class="p">(</span><span class="n">inputs</span><span class="p">,)</span>

        <span class="k">if</span> <span class="n">is_trace_module</span><span class="p">:</span>
            <span class="n">copied_dict</span> <span class="o">=</span> <span class="p">{}</span>
            <span class="k">for</span> <span class="n">name</span><span class="p">,</span> <span class="n">data</span> <span class="ow">in</span> <span class="n">inputs</span><span class="o">.</span><span class="n">items</span><span class="p">():</span>
                <span class="n">copied_dict</span><span class="p">[</span><span class="n">name</span><span class="p">]</span> <span class="o">=</span> <span class="n">_clone_inputs</span><span class="p">(</span><span class="n">data</span><span class="p">)</span>
            <span class="n">check_mod</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">trace_module</span><span class="p">(</span>
                <span class="n">func</span><span class="o">.</span><span class="vm">__self__</span> <span class="k">if</span> <span class="nb">hasattr</span><span class="p">(</span><span class="n">func</span><span class="p">,</span> <span class="s1">&#39;__self__&#39;</span><span class="p">)</span> <span class="k">else</span> <span class="n">func</span><span class="p">,</span>
                <span class="n">copied_dict</span><span class="p">,</span>
                <span class="n">check_trace</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
                <span class="n">_force_outplace</span><span class="o">=</span><span class="n">force_outplace</span><span class="p">,</span>
                <span class="n">_module_class</span><span class="o">=</span><span class="n">_module_class</span><span class="p">,</span>
                <span class="n">_compilation_unit</span><span class="o">=</span><span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">CompilationUnit</span><span class="p">(),</span>
            <span class="p">)</span>
            <span class="n">check_mod_func</span> <span class="o">=</span> <span class="n">check_mod</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">_get_method</span><span class="p">(</span><span class="n">traced_func</span><span class="o">.</span><span class="n">name</span><span class="p">)</span>
            <span class="n">inputs</span> <span class="o">=</span> <span class="n">inputs</span><span class="p">[</span><span class="n">traced_func</span><span class="o">.</span><span class="n">name</span><span class="p">]</span>
            <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">inputs</span><span class="p">,</span> <span class="p">(</span><span class="n">torch</span><span class="o">.</span><span class="n">Tensor</span><span class="p">,</span> <span class="nb">dict</span><span class="p">)):</span>
                <span class="n">inputs</span> <span class="o">=</span> <span class="p">(</span><span class="n">inputs</span><span class="p">,)</span>
        <span class="k">else</span><span class="p">:</span>
            <span class="n">check_mod</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">trace</span><span class="p">(</span>
                <span class="n">func</span><span class="p">,</span>
                <span class="n">_clone_inputs</span><span class="p">(</span><span class="n">inputs</span><span class="p">),</span>
                <span class="n">check_trace</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
                <span class="n">_force_outplace</span><span class="o">=</span><span class="n">force_outplace</span><span class="p">,</span>
                <span class="n">_module_class</span><span class="o">=</span><span class="n">_module_class</span><span class="p">,</span>
            <span class="p">)</span>
            <span class="n">check_mod_func</span> <span class="o">=</span> <span class="n">check_mod</span>

        <span class="k">def</span> <span class="nf">graph_diagnostic_info</span><span class="p">():</span>
            <span class="n">mod_canonicalized</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_pass_canonicalize</span><span class="p">(</span><span class="n">traced_func</span><span class="o">.</span><span class="n">graph</span><span class="p">)</span>
            <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_pass_inline</span><span class="p">(</span><span class="n">mod_canonicalized</span><span class="p">)</span>
            <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_pass_erase_shape_information</span><span class="p">(</span><span class="n">mod_canonicalized</span><span class="p">)</span>
            <span class="n">mod_str</span> <span class="o">=</span> <span class="nb">str</span><span class="p">(</span><span class="n">mod_canonicalized</span><span class="p">)</span>
            <span class="n">mod_str</span> <span class="o">=</span> <span class="n">re</span><span class="o">.</span><span class="n">sub</span><span class="p">(</span><span class="sa">r</span><span class="s1">&#39;___torch_mangle_[0-9]+\.&#39;</span><span class="p">,</span> <span class="s1">&#39;&#39;</span><span class="p">,</span> <span class="n">mod_str</span><span class="p">)</span>
            <span class="n">check_canonicalized</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_pass_canonicalize</span><span class="p">(</span><span class="n">check_mod_func</span><span class="o">.</span><span class="n">graph</span><span class="p">)</span>
            <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_pass_inline</span><span class="p">(</span><span class="n">check_canonicalized</span><span class="p">)</span>
            <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_pass_erase_shape_information</span><span class="p">(</span><span class="n">check_canonicalized</span><span class="p">)</span>
            <span class="n">check_str</span> <span class="o">=</span> <span class="nb">str</span><span class="p">(</span><span class="n">check_canonicalized</span><span class="p">)</span>
            <span class="n">check_str</span> <span class="o">=</span> <span class="n">re</span><span class="o">.</span><span class="n">sub</span><span class="p">(</span><span class="sa">r</span><span class="s1">&#39;___torch_mangle_[0-9]+\.&#39;</span><span class="p">,</span> <span class="s1">&#39;&#39;</span><span class="p">,</span> <span class="n">check_str</span><span class="p">)</span>

            <span class="n">graph_diff_errors</span> <span class="o">=</span> <span class="kc">None</span>
            <span class="k">if</span> <span class="n">mod_str</span> <span class="o">!=</span> <span class="n">check_str</span><span class="p">:</span>
                <span class="kn">import</span> <span class="nn">difflib</span>
                <span class="n">graph_diff</span> <span class="o">=</span> <span class="n">difflib</span><span class="o">.</span><span class="n">ndiff</span><span class="p">(</span><span class="n">mod_str</span><span class="o">.</span><span class="n">splitlines</span><span class="p">(</span><span class="kc">True</span><span class="p">),</span>
                                           <span class="n">check_str</span><span class="o">.</span><span class="n">splitlines</span><span class="p">(</span><span class="kc">True</span><span class="p">))</span>
                <span class="n">graph_diff_errors</span> <span class="o">=</span> <span class="s1">&#39;Graph diff:</span><span class="se">\n</span><span class="s1">&#39;</span> <span class="o">+</span> <span class="n">indent</span><span class="p">(</span><span class="s1">&#39;&#39;</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">graph_diff</span><span class="p">))</span> <span class="o">+</span> <span class="s1">&#39;</span><span class="se">\n</span><span class="s1">&#39;</span>

                <span class="k">for</span> <span class="n">n_mod</span><span class="p">,</span> <span class="n">n_check</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">mod_canonicalized</span><span class="o">.</span><span class="n">nodes</span><span class="p">(),</span> <span class="n">check_canonicalized</span><span class="o">.</span><span class="n">nodes</span><span class="p">()):</span>
                    <span class="k">if</span> <span class="nb">str</span><span class="p">(</span><span class="n">n_mod</span><span class="p">)</span> <span class="o">!=</span> <span class="nb">str</span><span class="p">(</span><span class="n">n_check</span><span class="p">):</span>
                        <span class="n">graph_diff_errors</span> <span class="o">+=</span> <span class="s1">&#39;First diverging operator:</span><span class="se">\n</span><span class="s1">&#39;</span>
                        <span class="n">node_diff</span> <span class="o">=</span> <span class="n">difflib</span><span class="o">.</span><span class="n">ndiff</span><span class="p">(</span><span class="nb">str</span><span class="p">(</span><span class="n">n_mod</span><span class="p">)</span><span class="o">.</span><span class="n">splitlines</span><span class="p">(</span><span class="kc">True</span><span class="p">),</span>
                                                  <span class="nb">str</span><span class="p">(</span><span class="n">n_check</span><span class="p">)</span><span class="o">.</span><span class="n">splitlines</span><span class="p">(</span><span class="kc">True</span><span class="p">))</span>
                        <span class="n">source_printout</span> <span class="o">=</span> <span class="s1">&#39;Node diff:</span><span class="se">\n</span><span class="s1">&#39;</span> <span class="o">+</span> <span class="n">indent</span><span class="p">(</span><span class="s1">&#39;&#39;</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">node_diff</span><span class="p">))</span> <span class="o">+</span> <span class="s1">&#39;</span><span class="se">\n</span><span class="s1">&#39;</span>
                        <span class="n">mod_stack</span> <span class="o">=</span> <span class="n">n_mod</span><span class="o">.</span><span class="n">sourceRange</span><span class="p">()</span>
                        <span class="k">if</span> <span class="n">mod_stack</span><span class="p">:</span>
                            <span class="n">source_printout</span> <span class="o">+=</span> <span class="s1">&#39;Trace source location:</span><span class="se">\n</span><span class="s1">&#39;</span> <span class="o">+</span> <span class="n">indent</span><span class="p">(</span><span class="n">mod_stack</span><span class="p">)</span> <span class="o">+</span> <span class="s1">&#39;</span><span class="se">\n</span><span class="s1">&#39;</span>
                        <span class="n">check_stack</span> <span class="o">=</span> <span class="n">n_check</span><span class="o">.</span><span class="n">sourceRange</span><span class="p">()</span>
                        <span class="k">if</span> <span class="n">check_stack</span><span class="p">:</span>
                            <span class="n">source_printout</span> <span class="o">+=</span> <span class="s1">&#39;Check source location:</span><span class="se">\n</span><span class="s1">&#39;</span> <span class="o">+</span> <span class="n">indent</span><span class="p">(</span><span class="n">check_stack</span><span class="p">)</span> <span class="o">+</span> <span class="s1">&#39;</span><span class="se">\n</span><span class="s1">&#39;</span>
                        <span class="n">graph_diff_errors</span> <span class="o">+=</span> <span class="n">source_printout</span>

                        <span class="k">break</span>  <span class="c1"># For now, only print out the first pair of nodes that diverges</span>

            <span class="n">tensor_compare_errors</span> <span class="o">=</span> <span class="kc">None</span>
            <span class="c1"># Check Tensor-valued constant nodes</span>
            <span class="k">for</span> <span class="n">n_mod</span><span class="p">,</span> <span class="n">n_check</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">mod_canonicalized</span><span class="o">.</span><span class="n">nodes</span><span class="p">(),</span> <span class="n">check_canonicalized</span><span class="o">.</span><span class="n">nodes</span><span class="p">()):</span>
                <span class="k">if</span> <span class="n">n_mod</span><span class="o">.</span><span class="n">kind</span><span class="p">()</span> <span class="o">!=</span> <span class="n">n_check</span><span class="o">.</span><span class="n">kind</span><span class="p">():</span>
                    <span class="k">break</span>  <span class="c1"># Graphs have already diverged</span>

                <span class="k">if</span> <span class="n">n_mod</span><span class="o">.</span><span class="n">kind</span><span class="p">()</span> <span class="o">==</span> <span class="s1">&#39;prim::Constant&#39;</span> <span class="ow">and</span> <span class="ow">not</span> <span class="p">(</span><span class="n">n_mod</span><span class="o">.</span><span class="n">mustBeNone</span><span class="p">()</span> <span class="ow">or</span> <span class="n">n_check</span><span class="o">.</span><span class="n">mustBeNone</span><span class="p">()):</span>
                    <span class="k">if</span> <span class="ow">not</span> <span class="n">n_mod</span><span class="o">.</span><span class="n">hasAttribute</span><span class="p">(</span><span class="s1">&#39;value&#39;</span><span class="p">):</span>
                        <span class="k">continue</span>
                    <span class="k">if</span> <span class="n">n_mod</span><span class="o">.</span><span class="n">kindOf</span><span class="p">(</span><span class="s1">&#39;value&#39;</span><span class="p">)</span> <span class="o">!=</span> <span class="s1">&#39;t&#39;</span> <span class="ow">or</span> <span class="n">n_check</span><span class="o">.</span><span class="n">kindOf</span><span class="p">(</span><span class="s1">&#39;value&#39;</span><span class="p">)</span> <span class="o">!=</span> <span class="s1">&#39;t&#39;</span><span class="p">:</span>
                        <span class="k">continue</span>

                    <span class="n">mod_tensor_val</span> <span class="o">=</span> <span class="n">n_mod</span><span class="o">.</span><span class="n">t</span><span class="p">(</span><span class="s1">&#39;value&#39;</span><span class="p">)</span>
                    <span class="n">check_tensor_val</span> <span class="o">=</span> <span class="n">n_check</span><span class="o">.</span><span class="n">t</span><span class="p">(</span><span class="s1">&#39;value&#39;</span><span class="p">)</span>

                    <span class="k">try</span><span class="p">:</span>
                        <span class="n">torch</span><span class="o">.</span><span class="n">testing</span><span class="o">.</span><span class="n">assert_allclose</span><span class="p">(</span><span class="n">mod_tensor_val</span><span class="p">,</span> <span class="n">check_tensor_val</span><span class="p">)</span>
                    <span class="k">except</span> <span class="p">(</span><span class="ne">RuntimeError</span><span class="p">,</span> <span class="ne">AssertionError</span><span class="p">)</span> <span class="k">as</span> <span class="n">e</span><span class="p">:</span>
                        <span class="k">if</span> <span class="n">tensor_compare_errors</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
                            <span class="n">tensor_compare_errors</span> <span class="o">=</span> <span class="s1">&#39;&#39;</span>
                        <span class="n">tensor_compare_errors</span> <span class="o">+=</span> <span class="s1">&#39;Node:</span><span class="se">\n</span><span class="s1">&#39;</span> <span class="o">+</span> <span class="n">indent</span><span class="p">(</span><span class="nb">str</span><span class="p">(</span><span class="n">n_mod</span><span class="p">))</span> <span class="o">+</span> <span class="s1">&#39;</span><span class="se">\n</span><span class="s1">&#39;</span>
                        <span class="n">compare_stack</span> <span class="o">=</span> <span class="n">n_mod</span><span class="o">.</span><span class="n">sourceRange</span><span class="p">()</span>
                        <span class="k">if</span> <span class="n">compare_stack</span><span class="p">:</span>
                            <span class="n">tensor_compare_errors</span> <span class="o">+=</span> <span class="s1">&#39;Source Location:</span><span class="se">\n</span><span class="s1">&#39;</span> <span class="o">+</span> <span class="n">indent</span><span class="p">(</span><span class="n">compare_stack</span><span class="p">)</span> <span class="o">+</span> <span class="s1">&#39;</span><span class="se">\n</span><span class="s1">&#39;</span>
                        <span class="n">tensor_compare_errors</span> <span class="o">+=</span> <span class="s1">&#39;Comparison exception: &#39;</span> <span class="o">+</span> <span class="n">indent</span><span class="p">(</span><span class="nb">str</span><span class="p">(</span><span class="n">e</span><span class="p">))</span>

                        <span class="k">break</span>  <span class="c1"># For now, only print the first diverging pair</span>

            <span class="k">return</span> <span class="n">graph_diff_errors</span><span class="p">,</span> <span class="n">tensor_compare_errors</span>

        <span class="k">def</span> <span class="nf">wrap_retval</span><span class="p">(</span><span class="n">x</span><span class="p">):</span>
            <span class="k">return</span> <span class="n">x</span> <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">)</span> <span class="k">else</span> <span class="p">(</span><span class="n">x</span><span class="p">,)</span>

        <span class="k">def</span> <span class="nf">run_mod_and_filter_tensor_outputs</span><span class="p">(</span><span class="n">mod</span><span class="p">,</span> <span class="n">inputs</span><span class="p">,</span> <span class="n">running_what</span><span class="p">):</span>
            <span class="k">try</span><span class="p">:</span>
                <span class="n">outs</span> <span class="o">=</span> <span class="n">wrap_retval</span><span class="p">(</span><span class="n">mod</span><span class="p">(</span><span class="o">*</span><span class="n">_clone_inputs</span><span class="p">(</span><span class="n">inputs</span><span class="p">)))</span>
                <span class="n">outs</span> <span class="o">=</span> <span class="p">[</span><span class="n">out</span> <span class="k">for</span> <span class="n">out</span> <span class="ow">in</span> <span class="n">outs</span> <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">out</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">Tensor</span><span class="p">)]</span>
                <span class="k">return</span> <span class="n">outs</span>
            <span class="k">except</span> <span class="ne">Exception</span> <span class="k">as</span> <span class="n">e</span><span class="p">:</span>
                <span class="k">raise</span> <span class="n">TracingCheckError</span><span class="p">(</span><span class="o">*</span><span class="n">graph_diagnostic_info</span><span class="p">(),</span>
                                        <span class="n">extra_msg</span><span class="o">=</span><span class="s1">&#39;Encountered an exception while running the &#39;</span> <span class="o">+</span> <span class="n">running_what</span> <span class="o">+</span>
                                                  <span class="s1">&#39; with test inputs.</span><span class="se">\n</span><span class="s1">Exception:</span><span class="se">\n</span><span class="s1">&#39;</span> <span class="o">+</span> <span class="n">indent</span><span class="p">(</span><span class="nb">str</span><span class="p">(</span><span class="n">e</span><span class="p">)))</span>

        <span class="n">has_warned</span> <span class="o">=</span> <span class="p">[</span><span class="kc">False</span><span class="p">]</span>

        <span class="k">def</span> <span class="nf">maybe_warn_nondeterministic</span><span class="p">():</span>
            <span class="k">if</span> <span class="n">has_warned</span><span class="p">[</span><span class="mi">0</span><span class="p">]:</span>
                <span class="k">return</span>
            <span class="n">has_warned</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">=</span> <span class="kc">True</span>
            <span class="n">nondeterm_ops</span> <span class="o">=</span> <span class="p">[</span><span class="n">op</span> <span class="k">for</span> <span class="n">op</span> <span class="ow">in</span> <span class="n">traced_func</span><span class="o">.</span><span class="n">graph</span><span class="o">.</span><span class="n">nodes</span><span class="p">()</span> <span class="k">if</span> <span class="n">op</span><span class="o">.</span><span class="n">isNondeterministic</span><span class="p">()]</span>
            <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">nondeterm_ops</span><span class="p">)</span> <span class="o">&gt;</span> <span class="mi">0</span><span class="p">:</span>
                <span class="n">nondeterministic_ops_warning</span> <span class="o">=</span> <span class="s2">&quot;Trace had nondeterministic nodes. &quot;</span>
                <span class="n">nondeterministic_ops_warning</span> <span class="o">+=</span> <span class="s2">&quot;Did you forget call .eval() on your model? Nodes:</span><span class="se">\n</span><span class="s2">&quot;</span>
                <span class="n">nondeterministic_ops_warning</span> <span class="o">+=</span> <span class="s2">&quot;</span><span class="se">\n</span><span class="s2">&quot;</span><span class="o">.</span><span class="n">join</span><span class="p">([</span><span class="n">indent</span><span class="p">(</span><span class="nb">str</span><span class="p">(</span><span class="n">op</span><span class="p">))</span> <span class="k">for</span> <span class="n">op</span> <span class="ow">in</span> <span class="n">nondeterm_ops</span><span class="p">][:</span><span class="mi">20</span><span class="p">])</span>
                <span class="n">nondeterministic_ops_warning</span> <span class="o">+=</span> <span class="s2">&quot;</span><span class="se">\n</span><span class="s2">This may cause errors in trace checking. To disable trace checking,&quot;</span>\
                                                <span class="s2">&quot; pass check_trace=False to torch.jit.trace()&quot;</span>
                <span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="n">nondeterministic_ops_warning</span><span class="p">,</span> <span class="n">category</span><span class="o">=</span><span class="n">TracerWarning</span><span class="p">,</span> <span class="n">stacklevel</span><span class="o">=</span><span class="mi">5</span><span class="p">)</span>

        <span class="k">def</span> <span class="nf">compare_outputs</span><span class="p">(</span><span class="n">original</span><span class="p">,</span> <span class="n">reference</span><span class="p">,</span> <span class="n">match_what</span><span class="p">):</span>
            <span class="n">all_ok</span> <span class="o">=</span> <span class="kc">True</span>
            <span class="k">for</span> <span class="n">i</span><span class="p">,</span> <span class="p">(</span><span class="n">orig</span><span class="p">,</span> <span class="n">ref</span><span class="p">)</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="nb">zip</span><span class="p">(</span><span class="n">original</span><span class="p">,</span> <span class="n">reference</span><span class="p">)):</span>
                <span class="k">try</span><span class="p">:</span>
                    <span class="k">if</span> <span class="n">orig</span><span class="o">.</span><span class="n">is_quantized</span><span class="p">:</span>
                        <span class="n">orig</span> <span class="o">=</span> <span class="n">orig</span><span class="o">.</span><span class="n">dequantize</span><span class="p">()</span>
                    <span class="k">if</span> <span class="n">ref</span><span class="o">.</span><span class="n">is_quantized</span><span class="p">:</span>
                        <span class="n">ref</span> <span class="o">=</span> <span class="n">ref</span><span class="o">.</span><span class="n">dequantize</span><span class="p">()</span>
                    <span class="n">torch</span><span class="o">.</span><span class="n">testing</span><span class="o">.</span><span class="n">assert_allclose</span><span class="p">(</span><span class="n">orig</span><span class="o">.</span><span class="n">double</span><span class="p">(),</span> <span class="n">ref</span><span class="o">.</span><span class="n">double</span><span class="p">(),</span> <span class="n">rtol</span><span class="o">=</span><span class="n">check_tolerance</span><span class="p">,</span>
                                                  <span class="n">atol</span><span class="o">=</span><span class="n">torch</span><span class="o">.</span><span class="n">testing</span><span class="o">.</span><span class="n">_get_default_tolerance</span><span class="p">(</span><span class="n">orig</span><span class="p">,</span> <span class="n">ref</span><span class="p">)[</span><span class="mi">1</span><span class="p">])</span>
                <span class="k">except</span> <span class="ne">AssertionError</span> <span class="k">as</span> <span class="n">e</span><span class="p">:</span>
                    <span class="n">maybe_warn_nondeterministic</span><span class="p">()</span>
                    <span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s1">&#39;Output nr &#39;</span> <span class="o">+</span> <span class="nb">str</span><span class="p">(</span><span class="n">i</span> <span class="o">+</span> <span class="mi">1</span><span class="p">)</span> <span class="o">+</span> <span class="s1">&#39;. of the traced function does not match &#39;</span>
                                  <span class="s1">&#39;the corresponding output of the &#39;</span> <span class="o">+</span> <span class="n">match_what</span> <span class="o">+</span> <span class="s1">&#39;. Detailed error:</span><span class="se">\n</span><span class="s1">&#39;</span> <span class="o">+</span> <span class="nb">str</span><span class="p">(</span><span class="n">e</span><span class="p">),</span>
                                  <span class="n">category</span><span class="o">=</span><span class="n">TracerWarning</span><span class="p">,</span> <span class="n">stacklevel</span><span class="o">=</span><span class="mi">4</span><span class="p">)</span>
                    <span class="n">all_ok</span> <span class="o">=</span> <span class="kc">False</span>

            <span class="k">return</span> <span class="n">all_ok</span>

        <span class="n">traced_outs</span> <span class="o">=</span> <span class="n">run_mod_and_filter_tensor_outputs</span><span class="p">(</span><span class="n">traced_func</span><span class="p">,</span> <span class="n">inputs</span><span class="p">,</span> <span class="s1">&#39;trace&#39;</span><span class="p">)</span>
        <span class="n">fn_outs</span> <span class="o">=</span> <span class="n">run_mod_and_filter_tensor_outputs</span><span class="p">(</span><span class="n">func</span><span class="p">,</span> <span class="n">inputs</span><span class="p">,</span> <span class="s1">&#39;Python function&#39;</span><span class="p">)</span>
        <span class="k">if</span> <span class="n">compare_outputs</span><span class="p">(</span><span class="n">traced_outs</span><span class="p">,</span> <span class="n">fn_outs</span><span class="p">,</span> <span class="s1">&#39;Python function&#39;</span><span class="p">):</span>
            <span class="n">check_outs</span> <span class="o">=</span> <span class="n">run_mod_and_filter_tensor_outputs</span><span class="p">(</span><span class="n">check_mod_func</span><span class="p">,</span> <span class="n">inputs</span><span class="p">,</span> <span class="s1">&#39;repeated trace&#39;</span><span class="p">)</span>
            <span class="n">compare_outputs</span><span class="p">(</span><span class="n">traced_outs</span><span class="p">,</span> <span class="n">check_outs</span><span class="p">,</span> <span class="s1">&#39;repeated trace&#39;</span><span class="p">)</span>

        <span class="n">diag_info</span> <span class="o">=</span> <span class="n">graph_diagnostic_info</span><span class="p">()</span>
        <span class="k">if</span> <span class="nb">any</span><span class="p">(</span><span class="n">info</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span> <span class="k">for</span> <span class="n">info</span> <span class="ow">in</span> <span class="n">diag_info</span><span class="p">):</span>
            <span class="k">raise</span> <span class="n">TracingCheckError</span><span class="p">(</span><span class="o">*</span><span class="n">diag_info</span><span class="p">)</span>


<span class="k">class</span> <span class="nc">TracerWarning</span><span class="p">(</span><span class="ne">Warning</span><span class="p">):</span>
    <span class="nd">@staticmethod</span>
    <span class="k">def</span> <span class="nf">ignore_lib_warnings</span><span class="p">():</span>
        <span class="c1"># We ignore warnings from all submodules excluding the JIT, because we need them e.g. for _check_trace</span>
        <span class="n">warnings</span><span class="o">.</span><span class="n">filterwarnings</span><span class="p">(</span><span class="s1">&#39;ignore&#39;</span><span class="p">,</span> <span class="n">category</span><span class="o">=</span><span class="n">TracerWarning</span><span class="p">,</span> <span class="n">module</span><span class="o">=</span><span class="s1">&#39;torch.(?!jit)&#39;</span><span class="p">)</span>


<span class="c1"># We ignore the tracer warnings coming form inside the library, because all our shape</span>
<span class="c1"># checks in nn will trigger them.</span>
<span class="n">TracerWarning</span><span class="o">.</span><span class="n">ignore_lib_warnings</span><span class="p">()</span>
<span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_tracer_warn_use_python</span><span class="p">()</span>


<span class="k">def</span> <span class="nf">make_tuple</span><span class="p">(</span><span class="n">example_inputs</span><span class="p">):</span>
    <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">example_inputs</span><span class="p">,</span> <span class="p">(</span><span class="n">torch</span><span class="o">.</span><span class="n">Tensor</span><span class="p">,</span> <span class="nb">dict</span><span class="p">)):</span>
        <span class="k">return</span> <span class="p">(</span><span class="n">example_inputs</span><span class="p">,)</span>
    <span class="c1"># done primarily so that weird iterables fail here and not pybind11 code</span>
    <span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">example_inputs</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">):</span>
        <span class="k">return</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">example_inputs</span><span class="p">)</span>
    <span class="k">return</span> <span class="n">example_inputs</span>


<span class="k">def</span> <span class="nf">make_module</span><span class="p">(</span><span class="n">mod</span><span class="p">,</span> <span class="n">_module_class</span><span class="p">,</span> <span class="n">_compilation_unit</span><span class="p">):</span>
    <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">mod</span><span class="p">,</span> <span class="n">ScriptModule</span><span class="p">):</span>
        <span class="k">return</span> <span class="n">mod</span>
    <span class="k">elif</span> <span class="n">torch</span><span class="o">.</span><span class="n">_jit_internal</span><span class="o">.</span><span class="n">module_has_exports</span><span class="p">(</span><span class="n">mod</span><span class="p">):</span>
        <span class="k">def</span> <span class="nf">make_stubs_from_exported_methods</span><span class="p">(</span><span class="n">mod</span><span class="p">):</span>
            <span class="n">exported</span> <span class="o">=</span> <span class="p">[]</span>
            <span class="k">for</span> <span class="n">name</span> <span class="ow">in</span> <span class="nb">dir</span><span class="p">(</span><span class="n">mod</span><span class="p">):</span>
                <span class="n">item</span> <span class="o">=</span> <span class="nb">getattr</span><span class="p">(</span><span class="n">mod</span><span class="p">,</span> <span class="n">name</span><span class="p">,</span> <span class="kc">None</span><span class="p">)</span>
                <span class="k">if</span> <span class="n">torch</span><span class="o">.</span><span class="n">_jit_internal</span><span class="o">.</span><span class="n">get_torchscript_modifier</span><span class="p">(</span><span class="n">item</span><span class="p">)</span> <span class="ow">is</span> <span class="n">_jit_internal</span><span class="o">.</span><span class="n">FunctionModifiers</span><span class="o">.</span><span class="n">EXPORT</span><span class="p">:</span>
                    <span class="n">exported</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">name</span><span class="p">)</span>

            <span class="n">stubs</span> <span class="o">=</span> <span class="p">[]</span>
            <span class="k">for</span> <span class="n">method</span> <span class="ow">in</span> <span class="n">exported</span><span class="p">:</span>
                <span class="n">stubs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">_recursive</span><span class="o">.</span><span class="n">make_stub_from_method</span><span class="p">(</span><span class="n">mod</span><span class="p">,</span> <span class="n">method</span><span class="p">))</span>
            <span class="k">return</span> <span class="n">stubs</span>

        <span class="k">return</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">_recursive</span><span class="o">.</span><span class="n">create_script_module</span><span class="p">(</span><span class="n">mod</span><span class="p">,</span> <span class="n">make_stubs_from_exported_methods</span><span class="p">,</span> <span class="n">share_types</span><span class="o">=</span><span class="kc">False</span><span class="p">)</span>
    <span class="k">else</span><span class="p">:</span>
        <span class="k">if</span> <span class="n">_module_class</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
            <span class="n">_module_class</span> <span class="o">=</span> <span class="n">TopLevelTracedModule</span>
        <span class="k">return</span> <span class="n">_module_class</span><span class="p">(</span><span class="n">mod</span><span class="p">,</span> <span class="n">_compilation_unit</span><span class="o">=</span><span class="n">_compilation_unit</span><span class="p">)</span>

<span class="k">def</span> <span class="nf">wrap_check_inputs</span><span class="p">(</span><span class="n">check_inputs</span><span class="p">):</span>
    <span class="k">if</span> <span class="n">check_inputs</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
        <span class="k">return</span> <span class="kc">None</span>

    <span class="k">return</span> <span class="p">[{</span><span class="s1">&#39;forward&#39;</span> <span class="p">:</span> <span class="n">c</span><span class="p">}</span> <span class="k">for</span> <span class="n">c</span> <span class="ow">in</span> <span class="n">check_inputs</span><span class="p">]</span>

<div class="viewcode-block" id="trace"><a class="viewcode-back" href="../../jit.html#torch.jit.trace">[docs]</a><span class="k">def</span> <span class="nf">trace</span><span class="p">(</span><span class="n">func</span><span class="p">,</span>
          <span class="n">example_inputs</span><span class="p">,</span>
          <span class="n">optimize</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
          <span class="n">check_trace</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span>
          <span class="n">check_inputs</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
          <span class="n">check_tolerance</span><span class="o">=</span><span class="mf">1e-5</span><span class="p">,</span>
          <span class="n">_force_outplace</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
          <span class="n">_module_class</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
          <span class="n">_compilation_unit</span><span class="o">=</span><span class="n">_python_cu</span><span class="p">):</span>
    <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">    Trace a function and return an executable or :class:`ScriptFunction`</span>
<span class="sd">    that will be optimized using just-in-time compilation. Tracing is ideal for</span>
<span class="sd">    code that operates only on ``Tensor``\\s and lists, dictionaries, and tuples of ``Tensor``\\s.</span>

<span class="sd">    Using ``torch.jit.trace`` and :func:`torch.jit.trace_module&lt;torch.jit.trace_module&gt;`, you can turn an existing module or Python</span>
<span class="sd">    function into a TorchScript :class:`ScriptFunction` or :class:`ScriptModule`. You must provide example inputs,</span>
<span class="sd">    and we run the function, recording the operations performed on all the tensors.</span>

<span class="sd">    * The resulting recording of a standalone function produces :class:`ScriptFunction`.</span>
<span class="sd">    * The resulting recording of the ``forward`` function of an ``nn.Module`` (or of the ``nn.Module`` itself) produces :class:`ScriptModule`.</span>

<span class="sd">    This module also contains any parameters that the original</span>
<span class="sd">    module had as well.</span>

<span class="sd">    .. warning::</span>
<span class="sd">        Tracing only correctly records functions and modules which are not data</span>
<span class="sd">        dependent (e.g., do not have conditionals on data in tensors) and do not have</span>
<span class="sd">        any untracked external dependencies (e.g., perform input/output or</span>
<span class="sd">        access global variables). Tracing only records operations done when the given</span>
<span class="sd">        function is run on the given</span>
<span class="sd">        tensors. Therefore, the returned :class:`ScriptModule` will always run the same traced</span>
<span class="sd">        graph on any input. This has some important implications when your module is</span>
<span class="sd">        expected to run different sets of operations, depending on the input and/or the</span>
<span class="sd">        module state. For example,</span>

<span class="sd">        * Tracing will not record any control-flow like if-statements or loops.</span>
<span class="sd">          When this control-flow is constant across your module, this is fine and it often</span>
<span class="sd">          inlines the control-flow decisions. But sometimes the control-flow is actually part</span>
<span class="sd">          of the model itself. For instance, a recurrent network is a loop over</span>
<span class="sd">          the (possibly dynamic) length of an input sequence.</span>
<span class="sd">        * In the returned :class:`ScriptModule`, operations that have different</span>
<span class="sd">          behaviors in ``training`` and ``eval`` modes will always behave as if it</span>
<span class="sd">          is in the mode it was in during tracing, no matter which mode the</span>
<span class="sd">          :class:`ScriptModule` is in.</span>

<span class="sd">        In cases like these, tracing would not be appropriate and :func:`scripting &lt;torch.jit.script&gt;` is a better</span>
<span class="sd">        choice. If you trace such models, you may silently get</span>
<span class="sd">        incorrect results on subsequent invocations of the model. The tracer</span>
<span class="sd">        will try to emit warnings when doing something that may cause an</span>
<span class="sd">        incorrect trace to be produced.</span>

<span class="sd">    Arguments:</span>
<span class="sd">        func (callable or torch.nn.Module):  A Python function or ``torch.nn.Module``</span>
<span class="sd">                                             that will be run with ``example_inputs``.</span>
<span class="sd">                                             Arguments and return values of ``func`` must be tensors</span>
<span class="sd">                                             or (possibly nested) tuples that</span>
<span class="sd">                                             contain tensors. When a module is passed to</span>
<span class="sd">                                             :func:`torch.jit.trace &lt;torch.jit.trace&gt;`, only the</span>
<span class="sd">                                             ``forward`` method is run and traced</span>
<span class="sd">                                             (see :func:`torch.jit.trace &lt;torch.jit.trace_module&gt;` for details).</span>
<span class="sd">        example_inputs (tuple):  A tuple of example inputs that will be passed to the function</span>
<span class="sd">                                 while tracing. The resulting trace can be run with</span>
<span class="sd">                                 inputs of different types and shapes assuming the traced operations</span>
<span class="sd">                                 support those types and shapes. ``example_inputs`` may also be a single</span>
<span class="sd">                                 Tensor in which case it is automatically wrapped in a tuple.</span>

<span class="sd">    Keyword arguments:</span>
<span class="sd">        check_trace (bool, optional): Check if the same inputs run through</span>
<span class="sd">                                      traced code produce the same outputs. Default: ``True``. You might want</span>
<span class="sd">                                      to disable this if, for example, your network contains non-</span>
<span class="sd">                                      deterministic ops or if you are sure that the network is correct despite</span>
<span class="sd">                                      a checker failure.</span>

<span class="sd">        check_inputs (list of tuples, optional): A list of tuples of input arguments that should be used</span>
<span class="sd">                                                 to check the trace against what is expected. Each tuple</span>
<span class="sd">                                                 is equivalent to a set of input arguments that would</span>
<span class="sd">                                                 be specified in ``example_inputs``. For best results, pass in a</span>
<span class="sd">                                                 set of checking inputs representative of the space of</span>
<span class="sd">                                                 shapes and types of inputs you expect the network to see.</span>
<span class="sd">                                                 If not specified, the original ``example_inputs`` are used for checking</span>
<span class="sd">        check_tolerance (float, optional): Floating-point comparison tolerance to use in the checker procedure.</span>
<span class="sd">                                           This can be used to relax the checker strictness in the event that</span>
<span class="sd">                                           results diverge numerically for a known reason, such as operator fusion.</span>

<span class="sd">    Returns:</span>
<span class="sd">        If ``callable`` is ``nn.Module`` or ``forward`` of ``nn.Module``, ``trace`` returns</span>
<span class="sd">        a :class:`ScriptModule` object with a single ``forward`` method containing the traced code.</span>
<span class="sd">        The returned :class:`ScriptModule` will have the same set of sub-modules and parameters as the</span>
<span class="sd">        original ``nn.Module``.</span>
<span class="sd">        If ``callable`` is a standalone function, ``trace`` returns :class:`ScriptFunction`</span>

<span class="sd">    Example (tracing a function):</span>

<span class="sd">    .. testcode::</span>

<span class="sd">        import torch</span>

<span class="sd">        def foo(x, y):</span>
<span class="sd">            return 2 * x + y</span>

<span class="sd">        # Run `foo` with the provided inputs and record the tensor operations</span>
<span class="sd">        traced_foo = torch.jit.trace(foo, (torch.rand(3), torch.rand(3)))</span>

<span class="sd">        # `traced_foo` can now be run with the TorchScript interpreter or saved</span>
<span class="sd">        # and loaded in a Python-free environment</span>

<span class="sd">    Example (tracing an existing module)::</span>

<span class="sd">        import torch</span>
<span class="sd">        import torch.nn as nn</span>

<span class="sd">        class Net(nn.Module):</span>
<span class="sd">            def __init__(self):</span>
<span class="sd">                super(Net, self).__init__()</span>
<span class="sd">                self.conv = nn.Conv2d(1, 1, 3)</span>

<span class="sd">            def forward(self, x):</span>
<span class="sd">                return self.conv(x)</span>

<span class="sd">        n = Net()</span>
<span class="sd">        example_weight = torch.rand(1, 1, 3, 3)</span>
<span class="sd">        example_forward_input = torch.rand(1, 1, 3, 3)</span>

<span class="sd">        # Trace a specific method and construct `ScriptModule` with</span>
<span class="sd">        # a single `forward` method</span>
<span class="sd">        module = torch.jit.trace(n.forward, example_forward_input)</span>

<span class="sd">        # Trace a module (implicitly traces `forward`) and construct a</span>
<span class="sd">        # `ScriptModule` with a single `forward` method</span>
<span class="sd">        module = torch.jit.trace(n, example_forward_input)</span>

<span class="sd">    &quot;&quot;&quot;</span>
    <span class="k">if</span> <span class="ow">not</span> <span class="n">_enabled</span><span class="p">:</span>
        <span class="k">return</span> <span class="n">func</span>
    <span class="k">if</span> <span class="n">optimize</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
        <span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s2">&quot;`optimize` is deprecated and has no effect. Use `with torch.jit.optimized_execution() instead&quot;</span><span class="p">)</span>

    <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">func</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">ScriptModule</span><span class="p">):</span>
        <span class="c1"># it is hard to trace it because the forward method on ScriptModule is already defined, so it</span>
        <span class="c1"># would result in an error.</span>
        <span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s1">&#39;The input to trace is already a ScriptModule, tracing it is a no-op. Returning the object as is.&#39;</span><span class="p">)</span>
        <span class="k">return</span> <span class="n">func</span>


    <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">func</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">):</span>
        <span class="k">return</span> <span class="n">trace_module</span><span class="p">(</span><span class="n">func</span><span class="p">,</span> <span class="p">{</span><span class="s1">&#39;forward&#39;</span><span class="p">:</span> <span class="n">example_inputs</span><span class="p">},</span> <span class="kc">None</span><span class="p">,</span>
                            <span class="n">check_trace</span><span class="p">,</span> <span class="n">wrap_check_inputs</span><span class="p">(</span><span class="n">check_inputs</span><span class="p">),</span>
                            <span class="n">check_tolerance</span><span class="p">,</span> <span class="n">_force_outplace</span><span class="p">,</span> <span class="n">_module_class</span><span class="p">)</span>

    <span class="k">if</span> <span class="p">(</span><span class="nb">hasattr</span><span class="p">(</span><span class="n">func</span><span class="p">,</span> <span class="s1">&#39;__self__&#39;</span><span class="p">)</span> <span class="ow">and</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">func</span><span class="o">.</span><span class="vm">__self__</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">)</span> <span class="ow">and</span>
            <span class="n">func</span><span class="o">.</span><span class="vm">__name__</span> <span class="o">==</span> <span class="s1">&#39;forward&#39;</span><span class="p">):</span>
        <span class="k">return</span> <span class="n">trace_module</span><span class="p">(</span><span class="n">func</span><span class="o">.</span><span class="vm">__self__</span><span class="p">,</span> <span class="p">{</span><span class="s1">&#39;forward&#39;</span><span class="p">:</span> <span class="n">example_inputs</span><span class="p">},</span> <span class="kc">None</span><span class="p">,</span>
                            <span class="n">check_trace</span><span class="p">,</span> <span class="n">wrap_check_inputs</span><span class="p">(</span><span class="n">check_inputs</span><span class="p">),</span>
                            <span class="n">check_tolerance</span><span class="p">,</span> <span class="n">_force_outplace</span><span class="p">,</span> <span class="n">_module_class</span><span class="p">)</span>

    <span class="c1"># Special case for common case of passing a single Tensor</span>
    <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">example_inputs</span><span class="p">,</span> <span class="p">(</span><span class="n">torch</span><span class="o">.</span><span class="n">Tensor</span><span class="p">,</span> <span class="nb">dict</span><span class="p">)):</span>
        <span class="n">example_inputs</span> <span class="o">=</span> <span class="p">(</span><span class="n">example_inputs</span><span class="p">,)</span>
    <span class="c1"># done primarily so that weird iterables fail here and not pybind11 code</span>
    <span class="k">elif</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">example_inputs</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">):</span>
        <span class="n">example_inputs</span> <span class="o">=</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">example_inputs</span><span class="p">)</span>

    <span class="n">var_lookup_fn</span> <span class="o">=</span> <span class="n">_create_interpreter_name_lookup_fn</span><span class="p">(</span><span class="mi">0</span><span class="p">)</span>

    <span class="k">if</span> <span class="p">(</span><span class="nb">hasattr</span><span class="p">(</span><span class="n">func</span><span class="p">,</span> <span class="s1">&#39;__self__&#39;</span><span class="p">)</span> <span class="ow">and</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">func</span><span class="o">.</span><span class="vm">__self__</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">)):</span>
        <span class="k">raise</span> <span class="ne">AttributeError</span><span class="p">(</span><span class="s2">&quot;trace doesn&#39;t support compiling individual module&#39;s functions.</span><span class="se">\n</span><span class="s2">&quot;</span>
                             <span class="s2">&quot;Please use trace_module&quot;</span><span class="p">)</span>

    <span class="n">name</span> <span class="o">=</span> <span class="n">_qualified_name</span><span class="p">(</span><span class="n">func</span><span class="p">)</span>
    <span class="n">traced</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_create_function_from_trace</span><span class="p">(</span><span class="n">name</span><span class="p">,</span> <span class="n">func</span><span class="p">,</span> <span class="n">example_inputs</span><span class="p">,</span>
                                                  <span class="n">var_lookup_fn</span><span class="p">,</span>
                                                  <span class="n">_force_outplace</span><span class="p">)</span>

    <span class="c1"># Check the trace against new traces created from user-specified inputs</span>
    <span class="k">if</span> <span class="n">check_trace</span><span class="p">:</span>
        <span class="k">if</span> <span class="n">check_inputs</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
            <span class="n">_check_trace</span><span class="p">(</span><span class="n">check_inputs</span><span class="p">,</span> <span class="n">func</span><span class="p">,</span> <span class="n">traced</span><span class="p">,</span> <span class="n">check_tolerance</span><span class="p">,</span> <span class="n">_force_outplace</span><span class="p">,</span> <span class="kc">False</span><span class="p">,</span> <span class="n">_module_class</span><span class="p">)</span>
        <span class="k">else</span><span class="p">:</span>
            <span class="n">_check_trace</span><span class="p">([</span><span class="n">example_inputs</span><span class="p">],</span> <span class="n">func</span><span class="p">,</span> <span class="n">traced</span><span class="p">,</span> <span class="n">check_tolerance</span><span class="p">,</span> <span class="n">_force_outplace</span><span class="p">,</span> <span class="kc">False</span><span class="p">,</span> <span class="n">_module_class</span><span class="p">)</span>

    <span class="k">return</span> <span class="n">traced</span></div>

<span class="n">_trace_module_map</span> <span class="o">=</span> <span class="kc">None</span>

<div class="viewcode-block" id="trace_module"><a class="viewcode-back" href="../../jit.html#torch.jit.trace_module">[docs]</a><span class="k">def</span> <span class="nf">trace_module</span><span class="p">(</span><span class="n">mod</span><span class="p">,</span>
                 <span class="n">inputs</span><span class="p">,</span>
                 <span class="n">optimize</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
                 <span class="n">check_trace</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span>
                 <span class="n">check_inputs</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
                 <span class="n">check_tolerance</span><span class="o">=</span><span class="mf">1e-5</span><span class="p">,</span>
                 <span class="n">_force_outplace</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
                 <span class="n">_module_class</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
                 <span class="n">_compilation_unit</span><span class="o">=</span><span class="n">_python_cu</span><span class="p">):</span>
    <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">    Trace a module and return an executable :class:`ScriptModule` that will be optimized</span>
<span class="sd">    using just-in-time compilation. When a module is passed to :func:`torch.jit.trace &lt;torch.jit.trace&gt;`, only</span>
<span class="sd">    the ``forward`` method is run and traced. With ``trace_module``, you can specify a dictionary of</span>
<span class="sd">    method names to example inputs to trace (see the ``example_inputs``) argument below.</span>

<span class="sd">    See :func:`torch.jit.trace &lt;torch.jit.trace&gt;` for more information on tracing.</span>

<span class="sd">    Arguments:</span>
<span class="sd">        mod (torch.nn.Module):  A ``torch.nn.Module`` containing methods whose names are</span>
<span class="sd">                                specified in ``example_inputs``. The given methods will be compiled</span>
<span class="sd">                                as a part of a single `ScriptModule`.</span>
<span class="sd">        example_inputs (dict):  A dict containing sample inputs indexed by method names in ``mod``.</span>
<span class="sd">                                The inputs will be passed to methods whose names correspond to inputs&#39;</span>
<span class="sd">                                keys while tracing.</span>
<span class="sd">                                ``{ &#39;forward&#39; : example_forward_input, &#39;method2&#39;: example_method2_input}``</span>
<span class="sd">    Keyword arguments:</span>
<span class="sd">        check_trace (``bool``, optional): Check if the same inputs run through</span>
<span class="sd">                                      traced code produce the same outputs. Default: ``True``. You might want</span>
<span class="sd">                                      to disable this if, for example, your network contains non-</span>
<span class="sd">                                      deterministic ops or if you are sure that the network is correct despite</span>
<span class="sd">                                      a checker failure.</span>

<span class="sd">        check_inputs (list of dicts, optional): A list of dicts of input arguments that should be used</span>
<span class="sd">                                                 to check the trace against what is expected. Each tuple</span>
<span class="sd">                                                 is equivalent to a set of input arguments that would</span>
<span class="sd">                                                 be specified in ``example_inputs``. For best results, pass in a</span>
<span class="sd">                                                 set of checking inputs representative of the space of</span>
<span class="sd">                                                 shapes and types of inputs you expect the network to see.</span>
<span class="sd">                                                 If not specified, the original ``example_inputs`` are used for checking</span>
<span class="sd">        check_tolerance (float, optional): Floating-point comparison tolerance to use in the checker procedure.</span>
<span class="sd">                                           This can be used to relax the checker strictness in the event that</span>
<span class="sd">                                           results diverge numerically for a known reason, such as operator fusion.</span>

<span class="sd">    Returns:</span>
<span class="sd">        A :class:`ScriptModule` object with a single ``forward`` method containing the traced code.</span>
<span class="sd">        When ``func`` is a ``torch.nn.Module``, the returned :class:`ScriptModule` will have the same set of</span>
<span class="sd">        sub-modules and parameters as ``func``.</span>

<span class="sd">    Example (tracing a module with multiple methods)::</span>

<span class="sd">        import torch</span>
<span class="sd">        import torch.nn as nn</span>

<span class="sd">        class Net(nn.Module):</span>
<span class="sd">            def __init__(self):</span>
<span class="sd">                super(Net, self).__init__()</span>
<span class="sd">                self.conv = nn.Conv2d(1, 1, 3)</span>

<span class="sd">            def forward(self, x):</span>
<span class="sd">                return self.conv(x)</span>

<span class="sd">            def weighted_kernel_sum(self, weight):</span>
<span class="sd">                return weight * self.conv.weight</span>


<span class="sd">        n = Net()</span>
<span class="sd">        example_weight = torch.rand(1, 1, 3, 3)</span>
<span class="sd">        example_forward_input = torch.rand(1, 1, 3, 3)</span>

<span class="sd">        # Trace a specific method and construct `ScriptModule` with</span>
<span class="sd">        # a single `forward` method</span>
<span class="sd">        module = torch.jit.trace(n.forward, example_forward_input)</span>

<span class="sd">        # Trace a module (implicitly traces `forward`) and construct a</span>
<span class="sd">        # `ScriptModule` with a single `forward` method</span>
<span class="sd">        module = torch.jit.trace(n, example_forward_input)</span>

<span class="sd">        # Trace specific methods on a module (specified in `inputs`), constructs</span>
<span class="sd">        # a `ScriptModule` with `forward` and `weighted_kernel_sum` methods</span>
<span class="sd">        inputs = {&#39;forward&#39; : example_forward_input, &#39;weighted_kernel_sum&#39; : example_weight}</span>
<span class="sd">        module = torch.jit.trace_module(n, inputs)</span>

<span class="sd">    &quot;&quot;&quot;</span>
    <span class="k">if</span> <span class="ow">not</span> <span class="n">_enabled</span><span class="p">:</span>
        <span class="k">return</span> <span class="n">mod</span>
    <span class="k">if</span> <span class="n">optimize</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
        <span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s2">&quot;`optimize` is deprecated and has no effect. Use `with torch.jit.optimized_execution() instead&quot;</span><span class="p">)</span>

    <span class="n">var_lookup_fn</span> <span class="o">=</span> <span class="n">_create_interpreter_name_lookup_fn</span><span class="p">(</span><span class="mi">0</span><span class="p">)</span>

    <span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">mod</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">):</span>
        <span class="k">raise</span> <span class="ne">AttributeError</span><span class="p">(</span><span class="s2">&quot;expected torch.nn.Module as the first argument&quot;</span><span class="p">)</span>

    <span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">inputs</span><span class="p">,</span> <span class="nb">dict</span><span class="p">):</span>
        <span class="k">raise</span> <span class="ne">AttributeError</span><span class="p">(</span><span class="s2">&quot;expected a dictionary of (method_name, input) pairs&quot;</span><span class="p">)</span>

    <span class="n">old_module_map</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">_trace_module_map</span>
    <span class="k">try</span><span class="p">:</span>
        <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">_trace_module_map</span> <span class="o">=</span> <span class="p">{}</span>

        <span class="k">def</span> <span class="nf">register_submods</span><span class="p">(</span><span class="n">mod</span><span class="p">,</span> <span class="n">prefix</span><span class="p">):</span>
            <span class="k">for</span> <span class="n">name</span><span class="p">,</span> <span class="n">child</span> <span class="ow">in</span> <span class="n">mod</span><span class="o">.</span><span class="n">named_children</span><span class="p">():</span>
                <span class="n">submod_qualname</span> <span class="o">=</span> <span class="n">prefix</span> <span class="o">+</span> <span class="s1">&#39;.&#39;</span> <span class="o">+</span> <span class="n">name</span>
                <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">_trace_module_map</span><span class="p">[</span><span class="n">child</span><span class="p">]</span> <span class="o">=</span> <span class="n">submod_qualname</span>
                <span class="n">register_submods</span><span class="p">(</span><span class="n">child</span><span class="p">,</span> <span class="n">submod_qualname</span><span class="p">)</span>

        <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">_trace_module_map</span><span class="p">[</span><span class="s1">&#39;__module&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">mod</span>
        <span class="n">register_submods</span><span class="p">(</span><span class="n">mod</span><span class="p">,</span> <span class="s1">&#39;__module&#39;</span><span class="p">)</span>

        <span class="n">module</span> <span class="o">=</span> <span class="n">make_module</span><span class="p">(</span><span class="n">mod</span><span class="p">,</span> <span class="n">_module_class</span><span class="p">,</span> <span class="n">_compilation_unit</span><span class="p">)</span>

        <span class="k">for</span> <span class="n">method_name</span><span class="p">,</span> <span class="n">example_inputs</span> <span class="ow">in</span> <span class="n">inputs</span><span class="o">.</span><span class="n">items</span><span class="p">():</span>
            <span class="c1"># this is needed since Module.__call__ sets up some extra tracing</span>
            <span class="n">func</span> <span class="o">=</span> <span class="n">mod</span> <span class="k">if</span> <span class="n">method_name</span> <span class="o">==</span> <span class="s2">&quot;forward&quot;</span> <span class="k">else</span> <span class="nb">getattr</span><span class="p">(</span><span class="n">mod</span><span class="p">,</span> <span class="n">method_name</span><span class="p">)</span>
            <span class="n">example_inputs</span> <span class="o">=</span> <span class="n">make_tuple</span><span class="p">(</span><span class="n">example_inputs</span><span class="p">)</span>
            <span class="n">module</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">_create_method_from_trace</span><span class="p">(</span><span class="n">method_name</span><span class="p">,</span> <span class="n">func</span><span class="p">,</span> <span class="n">example_inputs</span><span class="p">,</span> <span class="n">var_lookup_fn</span><span class="p">,</span> <span class="n">_force_outplace</span><span class="p">)</span>
            <span class="n">check_trace_method</span> <span class="o">=</span> <span class="n">module</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">_get_method</span><span class="p">(</span><span class="n">method_name</span><span class="p">)</span>

            <span class="c1"># Check the trace against new traces created from user-specified inputs</span>
            <span class="k">if</span> <span class="n">check_trace</span><span class="p">:</span>
                <span class="k">if</span> <span class="n">check_inputs</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
                    <span class="n">_check_trace</span><span class="p">(</span><span class="n">check_inputs</span><span class="p">,</span> <span class="n">func</span><span class="p">,</span> <span class="n">check_trace_method</span><span class="p">,</span>
                                 <span class="n">check_tolerance</span><span class="p">,</span> <span class="n">_force_outplace</span><span class="p">,</span> <span class="kc">True</span><span class="p">,</span> <span class="n">_module_class</span><span class="p">)</span>
                <span class="k">else</span><span class="p">:</span>
                    <span class="n">_check_trace</span><span class="p">([</span><span class="n">inputs</span><span class="p">],</span> <span class="n">func</span><span class="p">,</span> <span class="n">check_trace_method</span><span class="p">,</span>
                                 <span class="n">check_tolerance</span><span class="p">,</span> <span class="n">_force_outplace</span><span class="p">,</span> <span class="kc">True</span><span class="p">,</span> <span class="n">_module_class</span><span class="p">)</span>
    <span class="k">finally</span><span class="p">:</span>
        <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">_trace_module_map</span> <span class="o">=</span> <span class="n">old_module_map</span>

    <span class="k">return</span> <span class="n">module</span></div>


<span class="k">class</span> <span class="nc">CompilationUnit</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
    <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">lang</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">_frames_up</span><span class="o">=</span><span class="mi">0</span><span class="p">):</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_c</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">CompilationUnit</span><span class="p">()</span>
        <span class="k">if</span> <span class="n">lang</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">define</span><span class="p">(</span><span class="n">lang</span><span class="p">,</span> <span class="n">_frames_up</span><span class="o">=</span><span class="n">_frames_up</span> <span class="o">+</span> <span class="mi">1</span><span class="p">)</span>

    <span class="k">def</span> <span class="nf">define</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">lang</span><span class="p">,</span> <span class="n">rcb</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">_frames_up</span><span class="o">=</span><span class="mi">0</span><span class="p">):</span>
        <span class="k">if</span> <span class="ow">not</span> <span class="n">rcb</span><span class="p">:</span>
            <span class="n">rcb</span> <span class="o">=</span> <span class="n">_jit_internal</span><span class="o">.</span><span class="n">createResolutionCallbackFromFrame</span><span class="p">(</span><span class="n">_frames_up</span> <span class="o">+</span> <span class="mi">1</span><span class="p">)</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">define</span><span class="p">(</span><span class="n">lang</span><span class="p">,</span> <span class="n">rcb</span><span class="p">)</span>

    <span class="k">def</span> <span class="fm">__getattr__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">attr</span><span class="p">):</span>
        <span class="n">r</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">find_function</span><span class="p">(</span><span class="n">attr</span><span class="p">)</span>
        <span class="k">if</span> <span class="n">r</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
            <span class="k">raise</span> <span class="ne">AttributeError</span><span class="p">(</span><span class="s2">&quot;&#39;CompilationUnit&#39; has no attribute &#39;</span><span class="si">{}</span><span class="s2">&#39;&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">attr</span><span class="p">))</span>
        <span class="k">return</span> <span class="n">r</span>


<span class="k">def</span> <span class="nf">_try_get_dispatched_fn</span><span class="p">(</span><span class="n">fn</span><span class="p">):</span>
    <span class="k">if</span> <span class="ow">not</span> <span class="n">callable</span><span class="p">(</span><span class="n">fn</span><span class="p">):</span>
        <span class="k">return</span> <span class="kc">None</span>
    <span class="k">return</span> <span class="n">_jit_internal</span><span class="o">.</span><span class="n">boolean_dispatched</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="n">fn</span><span class="p">)</span>


<span class="k">def</span> <span class="nf">_try_get_overloaded_fn</span><span class="p">(</span><span class="n">mod</span><span class="p">,</span> <span class="n">field</span><span class="p">):</span>
    <span class="k">return</span> <span class="n">mod</span><span class="o">.</span><span class="n">_overloads</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="n">field</span><span class="p">,</span> <span class="kc">None</span><span class="p">)</span> <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">mod</span><span class="p">,</span> <span class="n">ScriptModule</span><span class="p">)</span> <span class="k">else</span> <span class="kc">None</span>


<span class="k">class</span> <span class="nc">ScriptWarning</span><span class="p">(</span><span class="ne">Warning</span><span class="p">):</span>
    <span class="k">pass</span>


<span class="nd">@contextlib</span><span class="o">.</span><span class="n">contextmanager</span>
<span class="k">def</span> <span class="nf">_disable_emit_hooks</span><span class="p">():</span>
    <span class="n">hooks</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_get_emit_hooks</span><span class="p">()</span>
    <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_set_emit_hooks</span><span class="p">(</span><span class="kc">None</span><span class="p">,</span> <span class="kc">None</span><span class="p">)</span>
    <span class="k">yield</span>
    <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_set_emit_hooks</span><span class="p">(</span><span class="n">hooks</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="n">hooks</span><span class="p">[</span><span class="mi">1</span><span class="p">])</span>


<span class="c1"># ScriptClasses must be new-style classes because we construct them using their</span>
<span class="c1"># __new__ method.</span>
<span class="k">def</span> <span class="nf">_is_new_style_class</span><span class="p">(</span><span class="bp">cls</span><span class="p">):</span>
    <span class="k">if</span> <span class="nb">hasattr</span><span class="p">(</span><span class="bp">cls</span><span class="p">,</span> <span class="s1">&#39;__class__&#39;</span><span class="p">):</span>
        <span class="k">return</span> <span class="p">(</span><span class="s1">&#39;__dict__&#39;</span> <span class="ow">in</span> <span class="nb">dir</span><span class="p">(</span><span class="bp">cls</span><span class="p">)</span> <span class="ow">or</span> <span class="nb">hasattr</span><span class="p">(</span><span class="bp">cls</span><span class="p">,</span> <span class="s1">&#39;__slots__&#39;</span><span class="p">))</span>


<span class="k">def</span> <span class="nf">whichmodule</span><span class="p">(</span><span class="n">obj</span><span class="p">):</span>
    <span class="sd">&quot;&quot;&quot;Find the module an object belongs to.&quot;&quot;&quot;</span>
    <span class="n">module_name</span> <span class="o">=</span> <span class="nb">getattr</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="s1">&#39;__module__&#39;</span><span class="p">,</span> <span class="kc">None</span><span class="p">)</span>
    <span class="c1"># Protect the iteration by using a list copy of sys.modules against dynamic</span>
    <span class="c1"># modules that trigger imports of other modules upon calls to getattr.</span>
    <span class="k">for</span> <span class="n">name</span><span class="p">,</span> <span class="n">module</span> <span class="ow">in</span> <span class="nb">list</span><span class="p">(</span><span class="n">sys</span><span class="o">.</span><span class="n">modules</span><span class="o">.</span><span class="n">items</span><span class="p">()):</span>
        <span class="k">if</span> <span class="n">name</span> <span class="o">==</span> <span class="s1">&#39;__main__&#39;</span> <span class="ow">or</span> <span class="n">module</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
            <span class="k">continue</span>
        <span class="k">try</span><span class="p">:</span>
            <span class="k">if</span> <span class="n">_getattribute</span><span class="p">(</span><span class="n">module</span><span class="p">,</span> <span class="n">name</span><span class="p">)[</span><span class="mi">0</span><span class="p">]</span> <span class="ow">is</span> <span class="n">obj</span><span class="p">:</span>
                <span class="k">return</span> <span class="n">module_name</span>
        <span class="k">except</span> <span class="ne">AttributeError</span><span class="p">:</span>
            <span class="k">pass</span>
    <span class="k">return</span> <span class="s1">&#39;__main__&#39;</span>

<span class="k">def</span> <span class="nf">_compile_and_register_class</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="n">rcb</span><span class="p">,</span> <span class="n">qualified_name</span><span class="p">):</span>
    <span class="n">ast</span> <span class="o">=</span> <span class="n">get_jit_class_def</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="n">obj</span><span class="o">.</span><span class="vm">__name__</span><span class="p">)</span>
    <span class="n">_jit_script_class_compile</span><span class="p">(</span><span class="n">qualified_name</span><span class="p">,</span> <span class="n">ast</span><span class="p">,</span> <span class="n">rcb</span><span class="p">)</span>
    <span class="n">_add_script_class</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="n">qualified_name</span><span class="p">)</span>

<div class="viewcode-block" id="script"><a class="viewcode-back" href="../../jit.html#torch.jit.script">[docs]</a><span class="k">def</span> <span class="nf">script</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="n">optimize</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">_frames_up</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">_rcb</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
    <span class="sa">r</span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd">    Scripting a function or ``nn.Module`` will inspect the source code, compile</span>
<span class="sd">    it as TorchScript code using the TorchScript compiler, and return a :class:`ScriptModule` or</span>
<span class="sd">    :class:`ScriptFunction`. TorchScript itself is a subset of the Python language, so not all</span>
<span class="sd">    features in Python work, but we provide enough functionality to compute on</span>
<span class="sd">    tensors and do control-dependent operations. For a complete guide, see the</span>
<span class="sd">    :ref:`language-reference`.</span>

<span class="sd">    ``torch.jit.script`` can be used as a function for modules and functions, and as a decorator</span>
<span class="sd">    ``@torch.jit.script`` for :ref:`torchscript-classes` and functions.</span>

<span class="sd">    Arguments:</span>
<span class="sd">        obj (callable, class, or ``nn.Module``):  The ``nn.Module``, function, or class type to</span>
<span class="sd">                                                  compile.</span>

<span class="sd">    Returns:</span>
<span class="sd">        If ``obj`` is ``nn.Module``, ``script`` returns</span>
<span class="sd">        a :class:`ScriptModule` object. The returned :class:`ScriptModule` will</span>
<span class="sd">        have the same set of sub-modules and parameters as the</span>
<span class="sd">        original ``nn.Module``. If ``obj`` is a standalone function,</span>
<span class="sd">        a :class:`ScriptFunction` will be returned.</span>

<span class="sd">    **Scripting a function**</span>
<span class="sd">        The ``@torch.jit.script`` decorator will construct a :class:`ScriptFunction`</span>
<span class="sd">        by compiling the body of the function.</span>

<span class="sd">        Example (scripting a function):</span>

<span class="sd">        .. testcode::</span>

<span class="sd">            import torch</span>

<span class="sd">            @torch.jit.script</span>
<span class="sd">            def foo(x, y):</span>
<span class="sd">                if x.max() &gt; y.max():</span>
<span class="sd">                    r = x</span>
<span class="sd">                else:</span>
<span class="sd">                    r = y</span>
<span class="sd">                return r</span>

<span class="sd">            print(type(foo))  # torch.jit.ScriptFunction</span>

<span class="sd">            # See the compiled graph as Python code</span>
<span class="sd">            print(foo.code)</span>

<span class="sd">            # Call the function using the TorchScript interpreter</span>
<span class="sd">            foo(torch.ones(2, 2), torch.ones(2, 2))</span>

<span class="sd">        .. testoutput::</span>
<span class="sd">            :hide:</span>

<span class="sd">            ...</span>

<span class="sd">    **Scripting an nn.Module**</span>
<span class="sd">        Scripting an ``nn.Module`` by default will compile the ``forward`` method and recursively</span>
<span class="sd">        compile any methods, submodules, and functions called by ``forward``. If a ``nn.Module`` only uses</span>
<span class="sd">        features supported in TorchScript, no changes to the original module code should be necessary. ``script``</span>
<span class="sd">        will construct :class:`ScriptModule` that has copies of the attributes, parameters, and methods of</span>
<span class="sd">        the original module.</span>

<span class="sd">        Example (scripting a simple module with a Parameter):</span>

<span class="sd">        .. testcode::</span>

<span class="sd">            import torch</span>

<span class="sd">            class MyModule(torch.nn.Module):</span>
<span class="sd">                def __init__(self, N, M):</span>
<span class="sd">                    super(MyModule, self).__init__()</span>
<span class="sd">                    # This parameter will be copied to the new ScriptModule</span>
<span class="sd">                    self.weight = torch.nn.Parameter(torch.rand(N, M))</span>

<span class="sd">                    # When this submodule is used, it will be compiled</span>
<span class="sd">                    self.linear = torch.nn.Linear(N, M)</span>

<span class="sd">                def forward(self, input):</span>
<span class="sd">                    output = self.weight.mv(input)</span>

<span class="sd">                    # This calls the `forward` method of the `nn.Linear` module, which will</span>
<span class="sd">                    # cause the `self.linear` submodule to be compiled to a `ScriptModule` here</span>
<span class="sd">                    output = self.linear(output)</span>
<span class="sd">                    return output</span>

<span class="sd">            scripted_module = torch.jit.script(MyModule(2, 3))</span>

<span class="sd">        Example (scripting a module with traced submodules):</span>

<span class="sd">        .. testcode::</span>

<span class="sd">            import torch</span>
<span class="sd">            import torch.nn as nn</span>
<span class="sd">            import torch.nn.functional as F</span>

<span class="sd">            class MyModule(nn.Module):</span>
<span class="sd">                def __init__(self):</span>
<span class="sd">                    super(MyModule, self).__init__()</span>
<span class="sd">                    # torch.jit.trace produces a ScriptModule&#39;s conv1 and conv2</span>
<span class="sd">                    self.conv1 = torch.jit.trace(nn.Conv2d(1, 20, 5), torch.rand(1, 1, 16, 16))</span>
<span class="sd">                    self.conv2 = torch.jit.trace(nn.Conv2d(20, 20, 5), torch.rand(1, 20, 16, 16))</span>

<span class="sd">                def forward(self, input):</span>
<span class="sd">                  input = F.relu(self.conv1(input))</span>
<span class="sd">                  input = F.relu(self.conv2(input))</span>
<span class="sd">                  return input</span>

<span class="sd">            scripted_module = torch.jit.script(MyModule())</span>

<span class="sd">        To compile a method other than ``forward`` (and recursively compile anything it calls), add</span>
<span class="sd">        the :func:`@torch.jit.export &lt;torch.jit.export&gt;` decorator to the method. To opt out of compilation</span>
<span class="sd">        use :func:`@torch.jit.ignore &lt;torch.jit.ignore&gt;` or :func:`@torch.jit.unused &lt;torch.jit.unused&gt;`.</span>

<span class="sd">        Example (an exported and ignored method in a module)::</span>

<span class="sd">            import torch</span>
<span class="sd">            import torch.nn as nn</span>

<span class="sd">            class MyModule(nn.Module):</span>
<span class="sd">                def __init__(self):</span>
<span class="sd">                    super(MyModule, self).__init__()</span>

<span class="sd">                @torch.jit.export</span>
<span class="sd">                def some_entry_point(self, input):</span>
<span class="sd">                    return input + 10</span>

<span class="sd">                @torch.jit.ignore</span>
<span class="sd">                def python_only_fn(self, input):</span>
<span class="sd">                    # This function won&#39;t be compiled, so any</span>
<span class="sd">                    # Python APIs can be used</span>
<span class="sd">                    import pdb</span>
<span class="sd">                    pdb.set_trace()</span>

<span class="sd">                def forward(self, input):</span>
<span class="sd">                    if self.training:</span>
<span class="sd">                        self.python_only_fn(input)</span>
<span class="sd">                    return input * 99</span>

<span class="sd">            scripted_module = torch.jit.script(MyModule())</span>
<span class="sd">            print(scripted_module.some_entry_point(torch.randn(2, 2)))</span>
<span class="sd">            print(scripted_module(torch.randn(2, 2)))</span>
<span class="sd">    &quot;&quot;&quot;</span>
    <span class="k">if</span> <span class="ow">not</span> <span class="n">_enabled</span><span class="p">:</span>
        <span class="k">return</span> <span class="n">obj</span>

    <span class="k">if</span> <span class="n">optimize</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
        <span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s2">&quot;`optimize` is deprecated and has no effect. Use `with torch.jit.optimized_execution()` instead&quot;</span><span class="p">)</span>
    <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="n">ScriptModule</span><span class="p">):</span>
        <span class="k">return</span> <span class="n">obj</span>

    <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">):</span>
        <span class="k">return</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">_recursive</span><span class="o">.</span><span class="n">create_script_module</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">_recursive</span><span class="o">.</span><span class="n">infer_methods_to_compile</span><span class="p">)</span>

    <span class="n">qualified_name</span> <span class="o">=</span> <span class="n">_qualified_name</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span>
    <span class="k">if</span> <span class="n">inspect</span><span class="o">.</span><span class="n">isclass</span><span class="p">(</span><span class="n">obj</span><span class="p">):</span>
        <span class="c1"># If this type is a `nn.Module` subclass, they probably meant to pass</span>
        <span class="c1"># an instance instead of a Module</span>
        <span class="k">if</span> <span class="nb">issubclass</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">):</span>
            <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s2">&quot;Type &#39;</span><span class="si">{}</span><span class="s2">&#39; cannot be compiled since it inherits&quot;</span>
                               <span class="s2">&quot; from nn.Module,&quot;</span>
                               <span class="s2">&quot; pass an instance instead&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">obj</span><span class="p">))</span>

        <span class="k">if</span> <span class="ow">not</span> <span class="n">_is_new_style_class</span><span class="p">(</span><span class="n">obj</span><span class="p">):</span>
            <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s2">&quot;TorchScript classes must be new-style classes. &quot;</span>
                               <span class="s2">&quot;Please inherit from &#39;object&#39;.&quot;</span><span class="p">)</span>
        <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">obj</span><span class="o">.</span><span class="n">mro</span><span class="p">())</span> <span class="o">&gt;</span> <span class="mi">2</span><span class="p">:</span>
            <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s2">&quot;TorchScript classes does not support inheritance yet. &quot;</span>
                               <span class="s2">&quot;Please directly inherit from &#39;object&#39;.&quot;</span><span class="p">)</span>
        <span class="k">if</span> <span class="n">_rcb</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
            <span class="n">_rcb</span> <span class="o">=</span> <span class="n">_jit_internal</span><span class="o">.</span><span class="n">createResolutionCallbackFromFrame</span><span class="p">(</span><span class="n">_frames_up</span> <span class="o">+</span> <span class="mi">1</span><span class="p">)</span>
        <span class="n">_compile_and_register_class</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="n">_rcb</span><span class="p">,</span> <span class="n">qualified_name</span><span class="p">)</span>
        <span class="k">return</span> <span class="n">obj</span>
    <span class="k">else</span><span class="p">:</span>
        <span class="n">_check_directly_compile_overloaded</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span>
        <span class="n">maybe_already_compiled_fn</span> <span class="o">=</span> <span class="n">_try_get_jit_cached_function</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span>
        <span class="k">if</span> <span class="n">maybe_already_compiled_fn</span><span class="p">:</span>
            <span class="k">return</span> <span class="n">maybe_already_compiled_fn</span>
        <span class="n">ast</span> <span class="o">=</span> <span class="n">get_jit_def</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span>
        <span class="k">if</span> <span class="n">_rcb</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
            <span class="n">_rcb</span> <span class="o">=</span> <span class="n">_jit_internal</span><span class="o">.</span><span class="n">createResolutionCallbackFromClosure</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span>
        <span class="n">fn</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_script_compile</span><span class="p">(</span><span class="n">qualified_name</span><span class="p">,</span> <span class="n">ast</span><span class="p">,</span> <span class="n">_rcb</span><span class="p">,</span> <span class="n">get_default_args</span><span class="p">(</span><span class="n">obj</span><span class="p">))</span>
        <span class="c1"># Forward docstrings</span>
        <span class="n">fn</span><span class="o">.</span><span class="vm">__doc__</span> <span class="o">=</span> <span class="n">obj</span><span class="o">.</span><span class="vm">__doc__</span>
        <span class="n">_set_jit_function_cache</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="n">fn</span><span class="p">)</span>
        <span class="k">return</span> <span class="n">fn</span></div>

<span class="k">def</span> <span class="nf">interface</span><span class="p">(</span><span class="n">obj</span><span class="p">):</span>
    <span class="k">if</span> <span class="ow">not</span> <span class="n">inspect</span><span class="o">.</span><span class="n">isclass</span><span class="p">(</span><span class="n">obj</span><span class="p">):</span>
        <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s2">&quot;interface must be applied to a class&quot;</span><span class="p">)</span>
    <span class="k">if</span> <span class="ow">not</span> <span class="n">_is_new_style_class</span><span class="p">(</span><span class="n">obj</span><span class="p">):</span>
        <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s2">&quot;TorchScript interfaces must inherit from &#39;object&#39;&quot;</span><span class="p">)</span>

    <span class="n">is_module_interface</span> <span class="o">=</span> <span class="nb">issubclass</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">)</span> <span class="ow">and</span> <span class="nb">len</span><span class="p">(</span><span class="n">obj</span><span class="o">.</span><span class="n">mro</span><span class="p">())</span> <span class="o">==</span> <span class="mi">3</span>

    <span class="k">if</span> <span class="ow">not</span> <span class="n">is_module_interface</span> <span class="ow">and</span> <span class="nb">len</span><span class="p">(</span><span class="n">obj</span><span class="o">.</span><span class="n">mro</span><span class="p">())</span> <span class="o">&gt;</span> <span class="mi">2</span><span class="p">:</span>
        <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s2">&quot;TorchScript interface does not support inheritance yet. &quot;</span>
                           <span class="s2">&quot;Please directly inherit from &#39;object&#39; or &#39;nn.Module&#39;.&quot;</span><span class="p">)</span>

    <span class="n">qualified_name</span> <span class="o">=</span> <span class="n">_qualified_name</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span>
    <span class="n">rcb</span> <span class="o">=</span> <span class="n">_jit_internal</span><span class="o">.</span><span class="n">createResolutionCallbackFromFrame</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
    <span class="c1"># if this type is a `nn.Module` subclass, generate a module interface type</span>
    <span class="c1"># instead of a class interface type; a module interface type only compiles</span>
    <span class="c1"># the user-provided methods as part of the interface</span>
    <span class="n">ast</span> <span class="o">=</span> <span class="n">get_jit_class_def</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="n">obj</span><span class="o">.</span><span class="vm">__name__</span><span class="p">)</span>
    <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_script_interface_compile</span><span class="p">(</span><span class="n">qualified_name</span><span class="p">,</span> <span class="n">ast</span><span class="p">,</span> <span class="n">rcb</span><span class="p">,</span> <span class="n">is_module_interface</span><span class="p">)</span>
    <span class="n">obj</span><span class="o">.</span><span class="n">__torch_script_interface__</span> <span class="o">=</span> <span class="kc">True</span>
    <span class="k">return</span> <span class="n">obj</span>



<span class="k">def</span> <span class="nf">script_method</span><span class="p">(</span><span class="n">fn</span><span class="p">):</span>
    <span class="k">if</span> <span class="ow">not</span> <span class="n">_enabled</span><span class="p">:</span>
        <span class="k">return</span> <span class="n">fn</span>
    <span class="c1"># NOTE: we need to traverse two frames here because the meta-class frame</span>
    <span class="c1"># for ScriptModule will be present, as opposed to invoking @script on a</span>
    <span class="c1"># function or invoking define() on a CompilationUnit.</span>
    <span class="c1"># The stack will look like:</span>
    <span class="c1">#</span>
    <span class="c1"># 0. createResolutionCallback()</span>
    <span class="c1"># 1. script_method()</span>
    <span class="c1"># 2. ScriptModule metaclass frame</span>
    <span class="c1"># 3. Surrounding scope</span>
    <span class="c1">#</span>
    <span class="c1"># createResolutionCallback internally adds 1 to get us to the scope of this</span>
    <span class="c1"># function (the calling function). Adding 2 gets us to the proper surrounding scope.</span>
    <span class="n">_rcb</span> <span class="o">=</span> <span class="n">_jit_internal</span><span class="o">.</span><span class="n">createResolutionCallbackFromFrame</span><span class="p">(</span><span class="n">frames_up</span><span class="o">=</span><span class="mi">2</span><span class="p">)</span>
    <span class="n">ast</span> <span class="o">=</span> <span class="n">get_jit_def</span><span class="p">(</span><span class="n">fn</span><span class="p">,</span> <span class="n">self_name</span><span class="o">=</span><span class="s2">&quot;ScriptModule&quot;</span><span class="p">)</span>
    <span class="k">return</span> <span class="n">ScriptMethodStub</span><span class="p">(</span><span class="n">_rcb</span><span class="p">,</span> <span class="n">ast</span><span class="p">,</span> <span class="n">fn</span><span class="p">)</span>



<span class="c1"># These OrderedDictWrapper classes replace the actual OrderedDicts in</span>
<span class="c1"># module with versions that get/set properties inside of Module.</span>
<span class="c1"># This allows us to reuse most of nn.Module while still storing the</span>
<span class="c1"># data in C++.</span>
<span class="c1"># Each OrderedDict needs to support:</span>
<span class="c1">#  x not in view</span>
<span class="c1">#  x in view</span>
<span class="c1">#  view[name] = ...</span>
<span class="c1">#  view.values()</span>
<span class="c1">#  del view[name]</span>
<span class="c1">#  view.items()</span>
<span class="c1">#  view.keys()</span>
<span class="c1">#  len(view)</span>

<span class="k">class</span> <span class="nc">OrderedDictWrapper</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
    <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">_c</span><span class="p">):</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_c</span> <span class="o">=</span> <span class="n">_c</span>

    <span class="k">def</span> <span class="nf">keys</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
        <span class="k">return</span> <span class="p">[</span><span class="n">k</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">items</span><span class="p">()]</span>

    <span class="k">def</span> <span class="nf">values</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
        <span class="k">return</span> <span class="p">[</span><span class="n">v</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">items</span><span class="p">()]</span>

    <span class="k">def</span> <span class="fm">__len__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
        <span class="k">return</span> <span class="nb">len</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">values</span><span class="p">())</span>

    <span class="k">def</span> <span class="fm">__delitem__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">k</span><span class="p">):</span>
        <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s2">&quot;cannot delete methods or parameters of a script module&quot;</span><span class="p">)</span>

    <span class="k">def</span> <span class="nf">items</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
        <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">items</span><span class="p">()</span>

    <span class="k">def</span> <span class="fm">__setitem__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">):</span>
        <span class="k">if</span> <span class="n">k</span> <span class="ow">not</span> <span class="ow">in</span> <span class="bp">self</span><span class="p">:</span>
            <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s2">&quot;Can&#39;t add a new parameter after ScriptModule construction.&quot;</span>
                               <span class="s2">&quot; Tried to add &#39;</span><span class="si">{}</span><span class="s2">&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">k</span><span class="p">))</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">setattr</span><span class="p">(</span><span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">)</span>

    <span class="k">def</span> <span class="fm">__contains__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">k</span><span class="p">):</span>
        <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">contains</span><span class="p">(</span><span class="n">k</span><span class="p">)</span>

    <span class="k">def</span> <span class="fm">__getitem__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">k</span><span class="p">):</span>
        <span class="k">if</span> <span class="n">k</span> <span class="ow">not</span> <span class="ow">in</span> <span class="bp">self</span><span class="p">:</span>
            <span class="k">raise</span> <span class="ne">KeyError</span><span class="p">(</span><span class="n">k</span><span class="p">)</span>
        <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">getattr</span><span class="p">(</span><span class="n">k</span><span class="p">)</span>


<span class="k">class</span> <span class="nc">OrderedModuleDict</span><span class="p">(</span><span class="n">OrderedDictWrapper</span><span class="p">):</span>
    <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">module</span><span class="p">,</span> <span class="n">python_dict</span><span class="p">):</span>
        <span class="nb">super</span><span class="p">(</span><span class="n">OrderedModuleDict</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">ModuleDict</span><span class="p">(</span><span class="n">module</span><span class="p">))</span>
        <span class="c1"># contains _both_ script modules and non-script python-only modules</span>

        <span class="c1"># because script modules are subclassed in python and the</span>
        <span class="c1"># C++ Module class will not hold references to them,</span>
        <span class="c1"># to ensure that you always get the same python value here</span>
        <span class="c1"># we store it in the python dict as well</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_python_modules</span> <span class="o">=</span> <span class="n">python_dict</span>

    <span class="k">def</span> <span class="nf">items</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
        <span class="n">r</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_python_modules</span><span class="o">.</span><span class="n">items</span><span class="p">()</span>
        <span class="k">return</span> <span class="n">r</span>

    <span class="k">def</span> <span class="fm">__contains__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">k</span><span class="p">):</span>
        <span class="k">return</span> <span class="n">k</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">_python_modules</span>

    <span class="k">def</span> <span class="fm">__setitem__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">):</span>
        <span class="c1"># Cases where sub-module can be re-assigned after ScriptModule construction</span>
        <span class="c1"># 1. If the attr is a module interface type, it&#39;s guaranteed that the module is</span>
        <span class="c1">#    not inlined in the graph, so it&#39;s safe to swap a new ScriptModule in.</span>
        <span class="c1"># 2. If the new value is a ScriptModule with the same JIT type, IR won&#39;t change</span>
        <span class="c1">#    and it&#39;s legit to swap a new module in.</span>
        <span class="c1"># In these two cases we allow swapping a new scripted module and update the</span>
        <span class="c1"># corresponding python module dict to keep sync.</span>
        <span class="c1"># Note: the value to be swapped in has to be ScriptModule instead of nn.Module,</span>
        <span class="c1"># otherwise it&#39;s illegal and we throw error.</span>
        <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">v</span><span class="p">,</span> <span class="n">ScriptModule</span><span class="p">):</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">setattr</span><span class="p">(</span><span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">)</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">_python_modules</span><span class="p">[</span><span class="n">k</span><span class="p">]</span> <span class="o">=</span> <span class="n">v</span>
        <span class="k">else</span><span class="p">:</span>
            <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s2">&quot;Cannot re-assign modules in a ScriptModule with non-scripted &quot;</span>
                               <span class="s2">&quot;module, tried to replace existing module &#39;</span><span class="si">{}</span><span class="s2">&#39;: </span><span class="si">{}</span><span class="s2">&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">))</span>


    <span class="k">def</span> <span class="fm">__getitem__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">k</span><span class="p">):</span>
        <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_python_modules</span><span class="p">[</span><span class="n">k</span><span class="p">]</span>

<span class="c1"># For each user-defined class that subclasses ScriptModule, this meta-class:</span>
<span class="c1"># (1) finds all the methods annotated with @script_method in a ScriptModule and</span>
<span class="c1">#     removes them from the class attributes</span>
<span class="c1"># (2) puts a wrapper around the class&#39;s __init__ method to recursively compile</span>
<span class="c1">#     all of the script_methods with the module after the original __init__ has</span>
<span class="c1">#     run. This has to occur after the user-defined __init__ so that submodules and</span>
<span class="c1">#     parameters are initialized _before_ the script compiler resolves references to</span>
<span class="c1">#     `self.param` or `self.module`.</span>
<span class="k">class</span> <span class="nc">ScriptMeta</span><span class="p">(</span><span class="nb">type</span><span class="p">):</span>
    <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">cls</span><span class="p">,</span> <span class="n">name</span><span class="p">,</span> <span class="n">bases</span><span class="p">,</span> <span class="n">attrs</span><span class="p">):</span>
        <span class="c1"># Aggregate all the ScriptMethods and constants from superclasses</span>
        <span class="bp">cls</span><span class="o">.</span><span class="n">_methods</span> <span class="o">=</span> <span class="p">{}</span>
        <span class="bp">cls</span><span class="o">.</span><span class="n">_constants_set</span> <span class="o">=</span> <span class="nb">set</span><span class="p">(</span><span class="nb">getattr</span><span class="p">(</span><span class="bp">cls</span><span class="p">,</span> <span class="s1">&#39;__constants__&#39;</span><span class="p">,</span> <span class="p">()))</span>
        <span class="k">for</span> <span class="n">base</span> <span class="ow">in</span> <span class="nb">reversed</span><span class="p">(</span><span class="n">bases</span><span class="p">):</span>
            <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="nb">getattr</span><span class="p">(</span><span class="n">base</span><span class="p">,</span> <span class="s1">&#39;_methods&#39;</span><span class="p">,</span> <span class="p">{})</span><span class="o">.</span><span class="n">items</span><span class="p">():</span>
                <span class="bp">cls</span><span class="o">.</span><span class="n">_methods</span><span class="p">[</span><span class="n">k</span><span class="p">]</span> <span class="o">=</span> <span class="n">v</span>
            <span class="n">base_constants</span> <span class="o">=</span> <span class="nb">getattr</span><span class="p">(</span><span class="n">base</span><span class="p">,</span> <span class="s1">&#39;_constants_set&#39;</span><span class="p">,</span> <span class="nb">set</span><span class="p">())</span>
            <span class="bp">cls</span><span class="o">.</span><span class="n">_constants_set</span> <span class="o">=</span> <span class="bp">cls</span><span class="o">.</span><span class="n">_constants_set</span><span class="o">.</span><span class="n">union</span><span class="p">(</span><span class="n">base_constants</span><span class="p">)</span>

        <span class="c1"># find all the script methods of the current class</span>
        <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="nb">sorted</span><span class="p">(</span><span class="n">attrs</span><span class="o">.</span><span class="n">items</span><span class="p">()):</span>
            <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">v</span><span class="p">,</span> <span class="n">ScriptMethodStub</span><span class="p">):</span>
                <span class="nb">delattr</span><span class="p">(</span><span class="bp">cls</span><span class="p">,</span> <span class="n">k</span><span class="p">)</span>
                <span class="bp">cls</span><span class="o">.</span><span class="n">_methods</span><span class="p">[</span><span class="n">v</span><span class="o">.</span><span class="n">original_method</span><span class="o">.</span><span class="vm">__name__</span><span class="p">]</span> <span class="o">=</span> <span class="n">v</span>

        <span class="k">if</span> <span class="nb">getattr</span><span class="p">(</span><span class="bp">cls</span><span class="p">,</span> <span class="s1">&#39;_disable_script_meta&#39;</span><span class="p">,</span> <span class="kc">False</span><span class="p">):</span>
            <span class="c1"># We leave built-in ScriptModule types alone, since this metaclass</span>
            <span class="c1"># is only for compiling user classes that inherit from</span>
            <span class="c1"># ScriptModule.</span>
            <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">ScriptMeta</span><span class="p">,</span> <span class="bp">cls</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="n">name</span><span class="p">,</span> <span class="n">bases</span><span class="p">,</span> <span class="n">attrs</span><span class="p">)</span>

        <span class="n">original_init</span> <span class="o">=</span> <span class="nb">getattr</span><span class="p">(</span><span class="bp">cls</span><span class="p">,</span> <span class="s1">&#39;__init__&#39;</span><span class="p">,</span> <span class="k">lambda</span> <span class="bp">self</span><span class="p">:</span> <span class="kc">None</span><span class="p">)</span>

        <span class="nd">@functools</span><span class="o">.</span><span class="n">wraps</span><span class="p">(</span><span class="n">original_init</span><span class="p">)</span>
        <span class="k">def</span> <span class="nf">init_then_script</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
            <span class="n">original_init</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
            <span class="k">if</span> <span class="nb">type</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span> <span class="o">==</span> <span class="bp">cls</span><span class="p">:</span>
                <span class="k">def</span> <span class="nf">make_stubs</span><span class="p">(</span><span class="n">module</span><span class="p">):</span>
                    <span class="bp">cls</span> <span class="o">=</span> <span class="nb">type</span><span class="p">(</span><span class="n">module</span><span class="p">)</span>
                    <span class="k">return</span> <span class="p">[</span><span class="n">v</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="nb">sorted</span><span class="p">(</span><span class="bp">cls</span><span class="o">.</span><span class="n">_methods</span><span class="o">.</span><span class="n">items</span><span class="p">())]</span>

                <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">[</span><span class="s2">&quot;_actual_script_module&quot;</span><span class="p">]</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">_recursive</span><span class="o">.</span><span class="n">create_script_module</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">make_stubs</span><span class="p">)</span>

                <span class="c1"># Delete the Python attributes that now shadow the ScriptModule</span>
                <span class="c1"># ones, so that __getattr__ and __setattr__ will properly find</span>
                <span class="c1"># the scripted versions.</span>
                <span class="n">concrete_type</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_actual_script_module</span><span class="o">.</span><span class="n">_concrete_type</span>
                <span class="k">for</span> <span class="n">name</span> <span class="ow">in</span> <span class="n">concrete_type</span><span class="o">.</span><span class="n">get_attributes</span><span class="p">():</span>
                    <span class="nb">delattr</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="p">)</span>
                <span class="k">for</span> <span class="n">name</span><span class="p">,</span> <span class="n">_</span> <span class="ow">in</span> <span class="n">concrete_type</span><span class="o">.</span><span class="n">get_modules</span><span class="p">():</span>
                    <span class="nb">delattr</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="p">)</span>
                <span class="k">for</span> <span class="n">name</span> <span class="ow">in</span> <span class="p">(</span><span class="s2">&quot;_parameters&quot;</span><span class="p">,</span> <span class="s2">&quot;_buffers&quot;</span><span class="p">,</span> <span class="s2">&quot;_modules&quot;</span><span class="p">):</span>
                    <span class="nb">delattr</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="p">)</span>

        <span class="bp">cls</span><span class="o">.</span><span class="fm">__init__</span> <span class="o">=</span> <span class="n">init_then_script</span>
        <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">ScriptMeta</span><span class="p">,</span> <span class="bp">cls</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="n">name</span><span class="p">,</span> <span class="n">bases</span><span class="p">,</span> <span class="n">attrs</span><span class="p">)</span>


<span class="k">if</span> <span class="n">_enabled</span><span class="p">:</span>

    <span class="c1"># this is a Python &#39;non-data descriptor&#39; that causes the first access</span>
    <span class="c1"># to ScriptModule&#39;s forward to lookup the forward method and stash</span>
    <span class="c1"># it in the objects dict. Due to the standard rules for attribute lookup</span>
    <span class="c1"># subsequent lookups will just directly return the previously looked up method.</span>
    <span class="c1"># This is necessary because nn.Module defines forward as a method. If we</span>
    <span class="c1"># did nothing __getattr__ would not be called. Instead we&#39;d get nn.Module.forward</span>
    <span class="c1"># which always throws an exception.</span>
    <span class="k">class</span> <span class="nc">_CachedForward</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
        <span class="k">def</span> <span class="fm">__get__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">obj</span><span class="p">,</span> <span class="bp">cls</span><span class="p">):</span>
            <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="fm">__getattr__</span><span class="p">(</span><span class="s1">&#39;forward&#39;</span><span class="p">)</span>

    <span class="k">class</span> <span class="nc">ScriptModule</span><span class="p">(</span><span class="n">with_metaclass</span><span class="p">(</span><span class="n">ScriptMeta</span><span class="p">,</span> <span class="n">Module</span><span class="p">)):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        ``ScriptModule``s wrap a C++ ``torch::jit::Module``. ``ScriptModule``s</span>
<span class="sd">        contain methods, attributes, parameters, and</span>
<span class="sd">        constants. These can be accessed the same as on a normal ``nn.Module``.</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
            <span class="nb">super</span><span class="p">(</span><span class="n">ScriptModule</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">()</span>

        <span class="n">forward</span> <span class="o">=</span> <span class="n">_CachedForward</span><span class="p">()</span>

        <span class="k">def</span> <span class="fm">__getattr__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">attr</span><span class="p">):</span>
            <span class="k">if</span> <span class="s2">&quot;_actual_script_module&quot;</span> <span class="ow">not</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">:</span>
                <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">ScriptModule</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__getattr__</span><span class="p">(</span><span class="n">attr</span><span class="p">)</span>
            <span class="k">return</span> <span class="nb">getattr</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_actual_script_module</span><span class="p">,</span> <span class="n">attr</span><span class="p">)</span>

        <span class="k">def</span> <span class="fm">__setattr__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">attr</span><span class="p">,</span> <span class="n">value</span><span class="p">):</span>
            <span class="k">if</span> <span class="s2">&quot;_actual_script_module&quot;</span> <span class="ow">not</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">:</span>
                <span class="c1"># Unwrap torch.jit.Attribute into a regular setattr + recording</span>
                <span class="c1"># the provided type in __annotations__.</span>
                <span class="c1">#</span>
                <span class="c1"># This ensures that if we use the attr again in `__init__`, it</span>
                <span class="c1"># will look like the actual value, not an instance of Attribute.</span>
                <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="n">Attribute</span><span class="p">):</span>
                    <span class="k">if</span> <span class="ow">not</span> <span class="nb">hasattr</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="s2">&quot;__annotations__&quot;</span><span class="p">):</span>
                        <span class="bp">self</span><span class="o">.</span><span class="vm">__annotations__</span> <span class="o">=</span> <span class="p">{}</span>
                    <span class="bp">self</span><span class="o">.</span><span class="vm">__annotations__</span><span class="p">[</span><span class="n">attr</span><span class="p">]</span> <span class="o">=</span> <span class="n">value</span><span class="o">.</span><span class="n">type</span>
                    <span class="n">value</span> <span class="o">=</span> <span class="n">value</span><span class="o">.</span><span class="n">value</span>
                <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">ScriptModule</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__setattr__</span><span class="p">(</span><span class="n">attr</span><span class="p">,</span> <span class="n">value</span><span class="p">)</span>

            <span class="nb">setattr</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_actual_script_module</span><span class="p">,</span> <span class="n">attr</span><span class="p">,</span> <span class="n">value</span><span class="p">)</span>

        <span class="k">def</span> <span class="nf">define</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">src</span><span class="p">):</span>
            <span class="k">if</span> <span class="s2">&quot;_actual_script_module&quot;</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">:</span>
                <span class="c1"># If we have completed initialization, just defer to the</span>
                <span class="c1"># backing RecursiveScriptModule to eagerly compile the provided</span>
                <span class="c1"># source.</span>
                <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_actual_script_module</span><span class="o">.</span><span class="n">define</span><span class="p">(</span><span class="n">src</span><span class="p">)</span>

            <span class="c1"># Otherwise, we are still in the object&#39;s __init__.</span>
            <span class="c1"># In that case, add `src` as a stub to be compiled.</span>
            <span class="c1">#</span>
            <span class="c1"># We use frames_up=1 to get to the proper surrounding scope. The stack</span>
            <span class="c1"># will look like:</span>
            <span class="c1"># 0. createResolutionCallback</span>
            <span class="c1"># 1. define()</span>
            <span class="c1"># 2. surrounding scope.</span>
            <span class="c1">#</span>
            <span class="c1"># createResolutionCallback internally adds 1 to get us to our frame, then</span>
            <span class="c1"># we add 1 to get to the proper surrounding scope.</span>
            <span class="n">rcb</span> <span class="o">=</span> <span class="n">_jit_internal</span><span class="o">.</span><span class="n">createResolutionCallbackFromFrame</span><span class="p">(</span><span class="n">frames_up</span><span class="o">=</span><span class="mi">1</span><span class="p">)</span>
            <span class="n">ast</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_parse_source_def</span><span class="p">(</span><span class="n">src</span><span class="p">)</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">_methods</span><span class="p">[</span><span class="n">ast</span><span class="o">.</span><span class="n">name</span><span class="p">()</span><span class="o">.</span><span class="n">name</span><span class="p">]</span> <span class="o">=</span> <span class="n">ScriptMethodStub</span><span class="p">(</span><span class="n">rcb</span><span class="p">,</span> <span class="n">ast</span><span class="p">,</span> <span class="kc">None</span><span class="p">)</span>


    <span class="k">class</span> <span class="nc">RecursiveScriptModule</span><span class="p">(</span><span class="n">ScriptModule</span><span class="p">):</span>
        <span class="c1"># XXX: RecursiveScriptModule inherits from ScriptModule for the sole</span>
        <span class="c1"># reason that it retains the existing isinstance(ScriptModule)</span>
        <span class="c1"># behavior.</span>
        <span class="sa">r</span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        The core data structure in TorchScript is the ``ScriptModule``. It is an</span>
<span class="sd">        analogue of torch&#39;s ``nn.Module`` and represents an entire model as a tree of</span>
<span class="sd">        submodules. Like normal modules, each individual module in a ``ScriptModule`` can</span>
<span class="sd">        have submodules, parameters, and methods. In ``nn.Module``\s methods are implemented</span>
<span class="sd">        as Python functions, but in ``ScriptModule``\s methods are implemented as</span>
<span class="sd">        TorchScript functions,  a statically-typed subset of Python that contains all</span>
<span class="sd">        of PyTorch&#39;s built-in Tensor operations. This difference allows your</span>
<span class="sd">        ``ScriptModule``\s code to run without the need for a Python interpreter.</span>

<span class="sd">        ``ScriptModule``\s should not be created manually, instead use</span>
<span class="sd">        either :func:`tracing &lt;torch.jit.trace&gt;` or :func:`scripting &lt;torch.jit.script&gt;`.</span>
<span class="sd">        Tracing and scripting can be applied incrementally and :ref:`composed as necessary &lt;Types&gt;`.</span>

<span class="sd">        * Tracing records the tensor operations as executed with a set of example inputs and uses these</span>
<span class="sd">          operations to construct a computation graph. You can use the full dynamic behavior of Python with tracing,</span>
<span class="sd">          but values other than Tensors and control flow aren&#39;t captured in the graph.</span>

<span class="sd">        * Scripting inspects the Python code of the model</span>
<span class="sd">          and compiles it to TorchScript. Scripting allows the use of many `types`_ of values and supports dynamic control flow.</span>
<span class="sd">          Many, but not all features of Python are supported by the compiler, so changes to the source code may be necessary.</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="n">_disable_script_meta</span> <span class="o">=</span> <span class="kc">True</span>

        <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">cpp_module</span><span class="p">):</span>
            <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">[</span><span class="s1">&#39;_initializing&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="kc">True</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">_c</span> <span class="o">=</span> <span class="n">cpp_module</span>
            <span class="nb">super</span><span class="p">(</span><span class="n">RecursiveScriptModule</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">()</span>
            <span class="c1"># Delete the &#39;training&#39; attribute set up by `Module.__init__`. It</span>
            <span class="c1"># will get set on the underlying cpp module, so we delete it here</span>
            <span class="c1"># to avoid this version shadowing the cpp module version.</span>
            <span class="nb">delattr</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="s1">&#39;training&#39;</span><span class="p">)</span>

        <span class="nd">@staticmethod</span>
        <span class="k">def</span> <span class="nf">_construct</span><span class="p">(</span><span class="n">cpp_module</span><span class="p">,</span> <span class="n">init_fn</span><span class="p">):</span>
            <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">            Construct a RecursiveScriptModule that&#39;s ready for use. PyTorch</span>
<span class="sd">            code should use this to construct a RecursiveScriptModule instead</span>
<span class="sd">            of calling `__init__` directly, as it makes sure the</span>
<span class="sd">            object is properly finalized (and in the future we may take</span>
<span class="sd">            control of how the RecursiveScriptModule instance is created).</span>

<span class="sd">            Arguments:</span>
<span class="sd">                cpp_module:  The C++ Module that will hold the actual state of</span>
<span class="sd">                             this RecursiveScriptModule instance.</span>
<span class="sd">                init_fn:  Lambda that initializes the RecursiveScriptModule passed to it.</span>
<span class="sd">            &quot;&quot;&quot;</span>
            <span class="n">script_module</span> <span class="o">=</span> <span class="n">RecursiveScriptModule</span><span class="p">(</span><span class="n">cpp_module</span><span class="p">)</span>
            <span class="n">init_fn</span><span class="p">(</span><span class="n">script_module</span><span class="p">)</span>

            <span class="c1"># Finalize the ScriptModule: replace the nn.Module state with our</span>
            <span class="c1"># custom implementations and flip the _initializing bit.</span>
            <span class="n">script_module</span><span class="o">.</span><span class="n">_parameters</span> <span class="o">=</span> <span class="n">OrderedDictWrapper</span><span class="p">(</span><span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">ParameterDict</span><span class="p">(</span><span class="n">script_module</span><span class="o">.</span><span class="n">_c</span><span class="p">))</span>
            <span class="n">script_module</span><span class="o">.</span><span class="n">_buffers</span> <span class="o">=</span> <span class="n">OrderedDictWrapper</span><span class="p">(</span><span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">BufferDict</span><span class="p">(</span><span class="n">script_module</span><span class="o">.</span><span class="n">_c</span><span class="p">))</span>
            <span class="n">script_module</span><span class="o">.</span><span class="n">_modules</span> <span class="o">=</span> <span class="n">OrderedModuleDict</span><span class="p">(</span><span class="n">script_module</span><span class="o">.</span><span class="n">_c</span><span class="p">,</span> <span class="n">script_module</span><span class="o">.</span><span class="n">_modules</span><span class="p">)</span>
            <span class="n">script_module</span><span class="o">.</span><span class="n">_initializing</span> <span class="o">=</span> <span class="kc">False</span>
            <span class="k">return</span> <span class="n">script_module</span>

        <span class="nd">@property</span>
        <span class="k">def</span> <span class="nf">graph</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
            <span class="sa">r</span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd">            Returns a string representation of the internal graph for the</span>
<span class="sd">            ``forward`` method. See `Interpreting Graphs`_ for details.</span>
<span class="sd">            &quot;&quot;&quot;</span>
            <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">forward</span><span class="o">.</span><span class="n">graph</span>

        <span class="nd">@property</span>
        <span class="k">def</span> <span class="nf">inlined_graph</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
            <span class="sa">r</span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd">            Returns a string representation of the internal graph for the</span>
<span class="sd">            ``forward`` method. This graph will be preprocessed to inline all function and method calls.</span>
<span class="sd">            See `Interpreting Graphs`_ for details.</span>
<span class="sd">            &quot;&quot;&quot;</span>
            <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">forward</span><span class="o">.</span><span class="n">inlined_graph</span>

        <span class="nd">@property</span>
        <span class="k">def</span> <span class="nf">code</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
            <span class="sa">r</span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd">            Returns a pretty-printed representation (as valid Python syntax) of</span>
<span class="sd">            the internal graph for the ``forward`` method. See `Inspecting Code`_</span>
<span class="sd">            for details.</span>
<span class="sd">            &quot;&quot;&quot;</span>
            <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">forward</span><span class="o">.</span><span class="n">code</span>

        <span class="k">def</span> <span class="nf">save</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
            <span class="sa">r</span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd">            save(f, _extra_files=ExtraFilesMap{})</span>

<span class="sd">            See :func:`torch.jit.save &lt;torch.jit.save&gt;` for details.</span>
<span class="sd">            &quot;&quot;&quot;</span>
            <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">save</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>

        <span class="k">def</span> <span class="nf">_save_for_lite_interpreter</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
            <span class="sa">r</span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd">            _save_for_lite_interpreter(f)</span>

<span class="sd">            Add (or update) the bytecode session to the script model. The updated model is used</span>
<span class="sd">            in lite interpreter for mobile applications.</span>

<span class="sd">            Arguments:</span>
<span class="sd">                f: a string containing a file name.</span>
<span class="sd">                _extra_files: Map from filename to contents which will be stored as part of &#39;f&#39;.</span>

<span class="sd">            &quot;&quot;&quot;</span>
            <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">_save_for_mobile</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>

        <span class="k">def</span> <span class="nf">save_to_buffer</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
            <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">save_to_buffer</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>

        <span class="k">def</span> <span class="nf">get_debug_state</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
            <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">get_debug_state</span><span class="p">()</span>

        <span class="k">def</span> <span class="nf">extra_repr</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
            <span class="k">return</span> <span class="s1">&#39;original_name=</span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">original_name</span><span class="p">)</span>

        <span class="k">def</span> <span class="nf">graph_for</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
            <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">forward</span><span class="o">.</span><span class="n">graph_for</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>

        <span class="nd">@property</span>
        <span class="k">def</span> <span class="nf">original_name</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
            <span class="k">if</span> <span class="nb">type</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span> <span class="o">==</span> <span class="nb">str</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">_type</span><span class="p">()</span><span class="o">.</span><span class="n">name</span><span class="p">()):</span>
                <span class="k">return</span> <span class="s1">&#39;&#39;</span>
            <span class="k">return</span> <span class="nb">str</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">_type</span><span class="p">()</span><span class="o">.</span><span class="n">name</span><span class="p">())</span>

        <span class="k">def</span> <span class="nf">define</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">src</span><span class="p">):</span>
            <span class="c1"># We use frames_up=1 to get to the proper surrounding scope. The stack</span>
            <span class="c1"># will look like:</span>
            <span class="c1"># 0. createResolutionCallback</span>
            <span class="c1"># 1. define()</span>
            <span class="c1"># 2. surrounding scope.</span>
            <span class="c1">#</span>
            <span class="c1"># createResolutionCallback internally adds 1 to get us to our frame, then</span>
            <span class="c1"># we add 1 to get to the proper surrounding scope.</span>
            <span class="n">rcb</span> <span class="o">=</span> <span class="n">_jit_internal</span><span class="o">.</span><span class="n">createResolutionCallbackFromFrame</span><span class="p">(</span><span class="n">frames_up</span><span class="o">=</span><span class="mi">1</span><span class="p">)</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">_define</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_concrete_type</span><span class="p">,</span> <span class="n">src</span><span class="p">,</span> <span class="n">rcb</span><span class="p">)</span>

        <span class="k">def</span> <span class="fm">__getattr__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">attr</span><span class="p">):</span>
            <span class="k">if</span> <span class="s1">&#39;_initializing&#39;</span> <span class="ow">not</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">:</span>
                <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s2">&quot;ScriptModule has not been initialized, did you forget to call super&#39;s init?&quot;</span><span class="p">)</span>

            <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_initializing</span><span class="p">:</span>
                <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">RecursiveScriptModule</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__getattr__</span><span class="p">(</span><span class="n">attr</span><span class="p">)</span>

            <span class="c1"># _modules check is before hasattr since modules are included as attributes in _c,</span>
            <span class="c1"># but we want to get the python wrapper from _modules instead of the raw _c object.</span>
            <span class="k">if</span> <span class="n">attr</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">_modules</span><span class="p">:</span>
                <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_modules</span><span class="p">[</span><span class="n">attr</span><span class="p">]</span>
            <span class="k">elif</span> <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">hasattr</span><span class="p">(</span><span class="n">attr</span><span class="p">):</span>
                <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">getattr</span><span class="p">(</span><span class="n">attr</span><span class="p">)</span>
            <span class="k">elif</span> <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">_has_method</span><span class="p">(</span><span class="n">attr</span><span class="p">):</span>
                <span class="n">script_method</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">_get_method</span><span class="p">(</span><span class="n">attr</span><span class="p">)</span>
                <span class="c1"># cache method so future calls do not go through __getattr__</span>
                <span class="c1"># to improve invocation performance</span>
                <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">[</span><span class="n">attr</span><span class="p">]</span> <span class="o">=</span> <span class="n">script_method</span>
                <span class="k">return</span> <span class="n">script_method</span>

            <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">RecursiveScriptModule</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__getattr__</span><span class="p">(</span><span class="n">attr</span><span class="p">)</span>

        <span class="k">def</span> <span class="fm">__setattr__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">attr</span><span class="p">,</span> <span class="n">value</span><span class="p">):</span>
            <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_initializing</span><span class="p">:</span>
                <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">RecursiveScriptModule</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__setattr__</span><span class="p">(</span><span class="n">attr</span><span class="p">,</span> <span class="n">value</span><span class="p">)</span>

            <span class="k">if</span> <span class="n">attr</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">_modules</span><span class="p">:</span>
                <span class="bp">self</span><span class="o">.</span><span class="n">_modules</span><span class="p">[</span><span class="n">attr</span><span class="p">]</span> <span class="o">=</span> <span class="n">value</span>
            <span class="k">elif</span> <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">hasattr</span><span class="p">(</span><span class="n">attr</span><span class="p">):</span>
                <span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">setattr</span><span class="p">(</span><span class="n">attr</span><span class="p">,</span> <span class="n">value</span><span class="p">)</span>
            <span class="k">elif</span> <span class="nb">hasattr</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="s2">&quot;_concrete_type&quot;</span><span class="p">)</span> <span class="ow">and</span> <span class="n">attr</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">_concrete_type</span><span class="o">.</span><span class="n">get_constants</span><span class="p">()</span><span class="o">.</span><span class="n">keys</span><span class="p">():</span>
                <span class="c1"># TODO: we don&#39;t have _concrete_type set after load(), and in general we lose constant information.</span>
                <span class="c1"># We should encode constants as class type attributes (or something) so it persists across save/load.</span>
                <span class="k">raise</span> <span class="ne">AttributeError</span><span class="p">(</span><span class="s2">&quot;Cannot mutate TorchScript constant value: &#39;</span><span class="si">{}</span><span class="s2">&#39;. Value: &#39;</span><span class="si">{}</span><span class="s2">&#39;&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">attr</span><span class="p">,</span> <span class="n">value</span><span class="p">))</span>
            <span class="k">else</span><span class="p">:</span>
                <span class="c1"># We allow setting Python attributes on the ScriptModule, for</span>
                <span class="c1"># when people want to stash some convenience info on it.</span>
                <span class="c1"># TODO: it&#39;s possible that the following is confusing:</span>
                <span class="c1">#   s = torch.jit.script(...)</span>
                <span class="c1">#   s.python_attr = ...</span>
                <span class="c1">#   s.save()   &lt;--- this doesn&#39;t have `python_attr`</span>
                <span class="c1"># It&#39;s fairly trivial to save enough info to warn in this case.</span>
                <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">RecursiveScriptModule</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__setattr__</span><span class="p">(</span><span class="n">attr</span><span class="p">,</span> <span class="n">value</span><span class="p">)</span>

        <span class="k">def</span> <span class="nf">copy</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
            <span class="k">return</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">_recursive</span><span class="o">.</span><span class="n">wrap_cpp_module</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">_clone</span><span class="p">())</span>

        <span class="k">def</span> <span class="nf">copy_instance</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
            <span class="k">return</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">_recursive</span><span class="o">.</span><span class="n">wrap_cpp_module</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_c</span><span class="o">.</span><span class="n">_clone_instance</span><span class="p">())</span>

        <span class="k">def</span> <span class="nf">__getstate__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
            <span class="k">raise</span> <span class="n">pickle</span><span class="o">.</span><span class="n">PickleError</span><span class="p">(</span>
                <span class="s2">&quot;ScriptModules cannot be deepcopied using copy.deepcopy or saved using torch.save. &quot;</span> <span class="o">+</span>
                <span class="s2">&quot;Mixed serialization of script and non-script modules is not supported. &quot;</span> <span class="o">+</span>
                <span class="s2">&quot;For purely script modules use my_script_module.save(&lt;filename&gt;) instead.&quot;</span><span class="p">)</span>

        <span class="c1"># Python magic methods do method lookups on an object&#39;s class type, instead of looking up</span>
        <span class="c1"># the methods defined on the class instance. In order to continue to expose the magic methods</span>
        <span class="c1"># of builtin-containers (ModuleList, Sequential, ModuleDict) to python we</span>
        <span class="c1"># define magic methods here as a shim to the correct attribute.</span>
        <span class="k">def</span> <span class="nf">forward_magic_method</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">method_name</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
            <span class="n">self_method</span> <span class="o">=</span> <span class="nb">getattr</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">method_name</span><span class="p">)</span>
            <span class="k">if</span> <span class="nb">getattr</span><span class="p">(</span><span class="n">self_method</span><span class="p">,</span> <span class="s2">&quot;__func__&quot;</span><span class="p">,</span> <span class="kc">None</span><span class="p">)</span> <span class="o">==</span> <span class="nb">getattr</span><span class="p">(</span><span class="n">RecursiveScriptModule</span><span class="p">,</span> <span class="n">method_name</span><span class="p">):</span>
                <span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span>
            <span class="k">return</span> <span class="n">self_method</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>

        <span class="k">def</span> <span class="fm">__iter__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
            <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">forward_magic_method</span><span class="p">(</span><span class="s2">&quot;__iter__&quot;</span><span class="p">)</span>

        <span class="k">def</span> <span class="fm">__getitem__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">idx</span><span class="p">):</span>
            <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">forward_magic_method</span><span class="p">(</span><span class="s2">&quot;__getitem__&quot;</span><span class="p">,</span> <span class="n">idx</span><span class="p">)</span>

        <span class="k">def</span> <span class="fm">__len__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
            <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">forward_magic_method</span><span class="p">(</span><span class="s2">&quot;__len__&quot;</span><span class="p">)</span>

        <span class="k">def</span> <span class="fm">__contains__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">key</span><span class="p">):</span>
            <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">forward_magic_method</span><span class="p">(</span><span class="s2">&quot;__contains__&quot;</span><span class="p">,</span> <span class="n">key</span><span class="p">)</span>

        <span class="c1"># dir is defined by the base nn.Module, so instead of throwing if</span>
        <span class="c1"># it is not overridden, we call into the nn.Module __dir__ method</span>
        <span class="k">def</span> <span class="fm">__dir__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
            <span class="n">self_method</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="fm">__dir__</span>
            <span class="k">if</span> <span class="n">self_method</span><span class="o">.</span><span class="vm">__func__</span> <span class="o">==</span> <span class="n">get_function_from_type</span><span class="p">(</span><span class="n">RecursiveScriptModule</span><span class="p">,</span> <span class="s2">&quot;__dir__&quot;</span><span class="p">):</span>
                <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">RecursiveScriptModule</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__dir__</span><span class="p">()</span>
            <span class="k">return</span> <span class="n">self_method</span><span class="p">()</span>

        <span class="c1"># to resolve bool(value), python looks if __bool__ is defined then __iter__</span>
        <span class="c1"># is defined then returns true for classes. because __iter__() on this</span>
        <span class="c1"># class throws if it isn&#39;t overridden, we define __bool__ to preserve default behavior</span>
        <span class="k">def</span> <span class="fm">__bool__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
            <span class="n">self_method</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="fm">__bool__</span>
            <span class="k">if</span> <span class="n">self_method</span><span class="o">.</span><span class="vm">__func__</span> <span class="o">==</span> <span class="n">get_function_from_type</span><span class="p">(</span><span class="n">RecursiveScriptModule</span><span class="p">,</span> <span class="s2">&quot;__bool__&quot;</span><span class="p">):</span>
                <span class="k">return</span> <span class="kc">True</span>
            <span class="k">return</span> <span class="n">self_method</span><span class="p">()</span>

    <span class="c1"># Need to copy all RecursiveScriptModule methods to ScriptModule.</span>
    <span class="c1">#</span>
    <span class="c1"># This is because `super(MyScriptModule, self).foo()` does not use</span>
    <span class="c1"># `__getattr__` to look up `foo`. So we need to make each method available on</span>
    <span class="c1"># the ScriptModule manually.</span>
    <span class="k">for</span> <span class="n">name</span><span class="p">,</span> <span class="n">item</span> <span class="ow">in</span> <span class="n">RecursiveScriptModule</span><span class="o">.</span><span class="vm">__dict__</span><span class="o">.</span><span class="n">items</span><span class="p">():</span>
        <span class="k">if</span> <span class="ow">not</span> <span class="n">callable</span><span class="p">(</span><span class="n">item</span><span class="p">)</span> <span class="ow">and</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">item</span><span class="p">,</span> <span class="nb">property</span><span class="p">):</span>
            <span class="k">continue</span>
        <span class="k">if</span> <span class="n">name</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s1">&#39;__&#39;</span><span class="p">)</span> <span class="ow">or</span> <span class="nb">hasattr</span><span class="p">(</span><span class="n">ScriptModule</span><span class="p">,</span> <span class="n">name</span><span class="p">):</span>
            <span class="k">continue</span>
        <span class="c1"># We can copy over the implementation wholesale because besides the</span>
        <span class="c1"># `super()` thing above, ScriptModule behaves exactly like</span>
        <span class="c1"># RecursiveScriptModule</span>
        <span class="nb">setattr</span><span class="p">(</span><span class="n">ScriptModule</span><span class="p">,</span> <span class="n">name</span><span class="p">,</span> <span class="n">item</span><span class="p">)</span>

    <span class="k">def</span> <span class="nf">_get_methods</span><span class="p">(</span><span class="bp">cls</span><span class="p">):</span>
        <span class="kn">import</span> <span class="nn">inspect</span>
        <span class="c1"># In Python 3 unbound methods are functions, but in Python 2 they are methods</span>
        <span class="k">return</span> <span class="n">inspect</span><span class="o">.</span><span class="n">getmembers</span><span class="p">(</span><span class="bp">cls</span><span class="p">,</span> <span class="n">predicate</span><span class="o">=</span><span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="n">inspect</span><span class="o">.</span><span class="n">isfunction</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> <span class="ow">or</span> <span class="n">inspect</span><span class="o">.</span><span class="n">ismethod</span><span class="p">(</span><span class="n">x</span><span class="p">))</span>


    <span class="n">_compiled_methods_whitelist</span> <span class="o">=</span> <span class="p">{</span>
        <span class="s1">&#39;forward&#39;</span><span class="p">,</span> <span class="s1">&#39;register_buffer&#39;</span><span class="p">,</span> <span class="s1">&#39;register_parameter&#39;</span><span class="p">,</span> <span class="s1">&#39;add_module&#39;</span><span class="p">,</span>
        <span class="s1">&#39;_apply&#39;</span><span class="p">,</span> <span class="s1">&#39;apply&#39;</span><span class="p">,</span> <span class="s1">&#39;cuda&#39;</span><span class="p">,</span> <span class="s1">&#39;cpu&#39;</span><span class="p">,</span> <span class="s1">&#39;to&#39;</span><span class="p">,</span> <span class="s1">&#39;type&#39;</span><span class="p">,</span> <span class="s1">&#39;float&#39;</span><span class="p">,</span> <span class="s1">&#39;double&#39;</span><span class="p">,</span> <span class="s1">&#39;half&#39;</span><span class="p">,</span>
        <span class="s1">&#39;state_dict&#39;</span><span class="p">,</span> <span class="s1">&#39;_save_to_state_dict&#39;</span><span class="p">,</span> <span class="s1">&#39;load_state_dict&#39;</span><span class="p">,</span>
        <span class="s1">&#39;_load_from_state_dict&#39;</span><span class="p">,</span> <span class="s1">&#39;_named_members&#39;</span><span class="p">,</span> <span class="s1">&#39;parameters&#39;</span><span class="p">,</span> <span class="s1">&#39;named_parameters&#39;</span><span class="p">,</span>
        <span class="s1">&#39;buffers&#39;</span><span class="p">,</span> <span class="s1">&#39;named_buffers&#39;</span><span class="p">,</span> <span class="s1">&#39;children&#39;</span><span class="p">,</span> <span class="s1">&#39;named_children&#39;</span><span class="p">,</span> <span class="s1">&#39;modules&#39;</span><span class="p">,</span>
        <span class="s1">&#39;named_modules&#39;</span><span class="p">,</span> <span class="s1">&#39;zero_grad&#39;</span><span class="p">,</span> <span class="s1">&#39;share_memory&#39;</span><span class="p">,</span> <span class="s1">&#39;_get_name&#39;</span><span class="p">,</span> <span class="s1">&#39;extra_repr&#39;</span><span class="p">,</span>
        <span class="s1">&#39;_slow_forward&#39;</span><span class="p">,</span> <span class="s1">&#39;_tracing_name&#39;</span><span class="p">,</span> <span class="s1">&#39;eval&#39;</span><span class="p">,</span> <span class="s1">&#39;train&#39;</span><span class="p">,</span>
    <span class="p">}</span>


    <span class="k">def</span> <span class="nf">_make_fail</span><span class="p">(</span><span class="n">name</span><span class="p">):</span>
        <span class="k">def</span> <span class="nf">fail</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
            <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="n">name</span> <span class="o">+</span> <span class="s2">&quot; is not supported on ScriptModules&quot;</span><span class="p">)</span>
        <span class="k">return</span> <span class="n">fail</span>

    <span class="k">for</span> <span class="n">name</span><span class="p">,</span> <span class="n">method</span> <span class="ow">in</span> <span class="n">_get_methods</span><span class="p">(</span><span class="n">torch</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">):</span>
        <span class="k">if</span> <span class="n">name</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s1">&#39;__&#39;</span><span class="p">):</span>
            <span class="k">continue</span>
        <span class="k">if</span> <span class="n">name</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">RecursiveScriptModule</span><span class="o">.</span><span class="vm">__dict__</span> <span class="ow">and</span> <span class="n">name</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">_compiled_methods_whitelist</span><span class="p">:</span>
            <span class="nb">setattr</span><span class="p">(</span><span class="n">RecursiveScriptModule</span><span class="p">,</span> <span class="n">method</span><span class="o">.</span><span class="vm">__name__</span><span class="p">,</span> <span class="n">_make_fail</span><span class="p">(</span><span class="n">name</span><span class="p">))</span>

<span class="k">else</span><span class="p">:</span>
    <span class="c1"># TODO MAKE SURE THAT DISABLING WORKS</span>
<div class="viewcode-block" id="ScriptModule"><a class="viewcode-back" href="../../jit.html#torch.jit.ScriptModule">[docs]</a>    <span class="k">class</span> <span class="nc">ScriptModule</span><span class="p">(</span><span class="n">torch</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">):</span>
        <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
            <span class="nb">super</span><span class="p">(</span><span class="n">ScriptModule</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">()</span></div>


<span class="k">class</span> <span class="nc">TracedModule</span><span class="p">(</span><span class="n">ScriptModule</span><span class="p">):</span>
    <span class="n">_disable_script_meta</span> <span class="o">=</span> <span class="kc">True</span>

    <span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">orig</span><span class="p">,</span> <span class="n">id_set</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">_compilation_unit</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
        <span class="c1"># XXX: orig can be a nn.Module or a function!</span>
        <span class="nb">super</span><span class="p">(</span><span class="n">TracedModule</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">()</span>
        <span class="k">assert</span><span class="p">(</span><span class="nb">isinstance</span><span class="p">(</span><span class="n">orig</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">))</span>

        <span class="c1"># Copy a subset of `orig` to a temporary nn.Module.</span>
        <span class="c1"># This is a way to customize what will actually get compiled by create_script_module</span>
        <span class="n">id_set</span> <span class="o">=</span> <span class="nb">set</span><span class="p">()</span>

        <span class="c1"># This allows us to preserve the original module&#39;s qualified name by defining a new</span>
        <span class="c1"># type with the attribute _jit_override_qualname. In torch._jit_internal._qualified_name</span>
        <span class="c1"># we have a special case that will look up this attribute to override whatever qualname</span>
        <span class="c1"># we would get from the python type system</span>
        <span class="k">class</span> <span class="nc">QualnameWrapper</span><span class="p">(</span><span class="n">torch</span><span class="o">.</span><span class="n">nn</span><span class="o">.</span><span class="n">Module</span><span class="p">):</span>
            <span class="k">pass</span>
        <span class="n">QualnameWrapper</span><span class="o">.</span><span class="n">_jit_override_qualname</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_jit_internal</span><span class="o">.</span><span class="n">_qualified_name</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">orig</span><span class="p">))</span>

        <span class="n">tmp_module</span> <span class="o">=</span> <span class="n">QualnameWrapper</span><span class="p">()</span>

        <span class="k">def</span> <span class="nf">check_unique</span><span class="p">(</span><span class="n">param</span><span class="p">):</span>
            <span class="k">if</span> <span class="n">param</span> <span class="ow">in</span> <span class="n">id_set</span><span class="p">:</span>
                <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;TracedModules don&#39;t support parameter sharing between modules&quot;</span><span class="p">)</span>
            <span class="n">id_set</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">param</span><span class="p">)</span>

        <span class="n">tmp_module</span><span class="o">.</span><span class="n">training</span> <span class="o">=</span> <span class="n">orig</span><span class="o">.</span><span class="n">training</span>

        <span class="k">for</span> <span class="n">name</span><span class="p">,</span> <span class="n">param</span> <span class="ow">in</span> <span class="n">orig</span><span class="o">.</span><span class="n">_parameters</span><span class="o">.</span><span class="n">items</span><span class="p">():</span>
            <span class="k">if</span> <span class="n">param</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
                <span class="n">tmp_module</span><span class="o">.</span><span class="n">_parameters</span><span class="p">[</span><span class="n">name</span><span class="p">]</span> <span class="o">=</span> <span class="n">param</span>
                <span class="n">check_unique</span><span class="p">(</span><span class="n">param</span><span class="p">)</span>
        <span class="k">for</span> <span class="n">name</span><span class="p">,</span> <span class="n">buf</span> <span class="ow">in</span> <span class="n">orig</span><span class="o">.</span><span class="n">_buffers</span><span class="o">.</span><span class="n">items</span><span class="p">():</span>
            <span class="k">if</span> <span class="n">buf</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
                <span class="n">tmp_module</span><span class="o">.</span><span class="n">_buffers</span><span class="p">[</span><span class="n">name</span><span class="p">]</span> <span class="o">=</span> <span class="n">buf</span>
                <span class="n">check_unique</span><span class="p">(</span><span class="n">buf</span><span class="p">)</span>
        <span class="k">for</span> <span class="n">name</span><span class="p">,</span> <span class="n">val</span> <span class="ow">in</span> <span class="n">orig</span><span class="o">.</span><span class="vm">__dict__</span><span class="o">.</span><span class="n">items</span><span class="p">():</span>
            <span class="k">if</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_is_script_object</span><span class="p">(</span><span class="n">val</span><span class="p">)</span> <span class="ow">and</span> <span class="n">name</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">orig</span><span class="o">.</span><span class="n">_parameters</span> <span class="ow">and</span> <span class="n">name</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">orig</span><span class="o">.</span><span class="n">_buffers</span><span class="p">:</span>
                <span class="nb">setattr</span><span class="p">(</span><span class="n">tmp_module</span><span class="p">,</span> <span class="n">name</span><span class="p">,</span> <span class="n">val</span><span class="p">)</span>

        <span class="k">if</span> <span class="n">orig</span><span class="o">.</span><span class="n">_backward_hooks</span><span class="p">:</span>
            <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;Modules that have backward hooks assigned can&#39;t be compiled: &quot;</span> <span class="o">+</span> <span class="nb">str</span><span class="p">(</span><span class="n">orig</span><span class="p">))</span>

        <span class="k">for</span> <span class="n">name</span><span class="p">,</span> <span class="n">submodule</span> <span class="ow">in</span> <span class="n">orig</span><span class="o">.</span><span class="n">_modules</span><span class="o">.</span><span class="n">items</span><span class="p">():</span>
            <span class="n">tmp_module</span><span class="o">.</span><span class="n">_modules</span><span class="p">[</span><span class="n">name</span><span class="p">]</span> <span class="o">=</span> <span class="n">make_module</span><span class="p">(</span><span class="n">submodule</span><span class="p">,</span> <span class="n">TracedModule</span><span class="p">,</span> <span class="n">_compilation_unit</span><span class="o">=</span><span class="kc">None</span><span class="p">)</span>

        <span class="n">script_module</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">_recursive</span><span class="o">.</span><span class="n">create_script_module</span><span class="p">(</span><span class="n">tmp_module</span><span class="p">,</span> <span class="k">lambda</span> <span class="n">module</span><span class="p">:</span> <span class="p">(),</span> <span class="n">share_types</span><span class="o">=</span><span class="kc">False</span><span class="p">)</span>

        <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">[</span><span class="s1">&#39;_name&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="nb">type</span><span class="p">(</span><span class="n">orig</span><span class="p">)</span><span class="o">.</span><span class="vm">__name__</span>
        <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">[</span><span class="s1">&#39;_actual_script_module&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">script_module</span>
        <span class="k">for</span> <span class="n">name</span> <span class="ow">in</span> <span class="p">(</span><span class="s2">&quot;_parameters&quot;</span><span class="p">,</span> <span class="s2">&quot;_buffers&quot;</span><span class="p">,</span> <span class="s2">&quot;_modules&quot;</span><span class="p">):</span>
            <span class="nb">delattr</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="p">)</span>

    <span class="k">def</span> <span class="nf">forward</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
        <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s1">&#39;Trace submodules cannot be called.&#39;</span><span class="p">)</span>

    <span class="k">def</span> <span class="fm">__getattr__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">attr</span><span class="p">):</span>
        <span class="k">if</span> <span class="s2">&quot;_actual_script_module&quot;</span> <span class="ow">not</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">:</span>
            <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">TracedModule</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__getattr__</span><span class="p">(</span><span class="n">attr</span><span class="p">)</span>
        <span class="k">return</span> <span class="nb">getattr</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_actual_script_module</span><span class="p">,</span> <span class="n">attr</span><span class="p">)</span>

    <span class="k">def</span> <span class="fm">__setattr__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">attr</span><span class="p">,</span> <span class="n">value</span><span class="p">):</span>
        <span class="k">if</span> <span class="s2">&quot;_actual_script_module&quot;</span> <span class="ow">not</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">:</span>
            <span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">TracedModule</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__setattr__</span><span class="p">(</span><span class="n">attr</span><span class="p">,</span> <span class="n">value</span><span class="p">)</span>
        <span class="nb">setattr</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_actual_script_module</span><span class="p">,</span> <span class="n">attr</span><span class="p">,</span> <span class="n">value</span><span class="p">)</span>

    <span class="k">def</span> <span class="nf">_get_name</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
        <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_name</span>

    <span class="k">def</span> <span class="nf">extra_repr</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
        <span class="k">return</span> <span class="s1">&#39;original_name=</span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_name</span><span class="p">)</span>


<span class="k">if</span> <span class="n">_enabled</span><span class="p">:</span>
    <span class="k">class</span> <span class="nc">TopLevelTracedModule</span><span class="p">(</span><span class="n">TracedModule</span><span class="p">):</span>
        <span class="n">forward</span> <span class="o">=</span> <span class="n">_CachedForward</span><span class="p">()</span>

<span class="k">def</span> <span class="nf">is_scripting</span><span class="p">():</span>
    <span class="sa">r</span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd">    Function that returns True when in compilation and False otherwise. This</span>
<span class="sd">    is useful especially with the @unused decorator to leave code in your</span>
<span class="sd">    model that is not yet TorchScript compatible.</span>
<span class="sd">    .. testcode::</span>

<span class="sd">        import torch</span>

<span class="sd">        @torch.jit.unused</span>
<span class="sd">        def unsupported_linear_op(x):</span>
<span class="sd">            return x</span>

<span class="sd">        def linear(x):</span>
<span class="sd">           if not torch.jit.is_scripting():</span>
<span class="sd">              return torch.linear(x)</span>
<span class="sd">           else:</span>
<span class="sd">              return unsupported_linear_op(x)</span>
<span class="sd">    &quot;&quot;&quot;</span>
    <span class="k">return</span> <span class="kc">False</span>

<span class="k">def</span> <span class="nf">_unwrap_optional</span><span class="p">(</span><span class="n">x</span><span class="p">):</span>
    <span class="k">assert</span> <span class="n">x</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">,</span> <span class="s2">&quot;Unwrapping null optional&quot;</span>
    <span class="k">return</span> <span class="n">x</span>

<span class="n">_register_builtin</span><span class="p">(</span><span class="n">_unwrap_optional</span><span class="p">,</span> <span class="s1">&#39;aten::_unwrap_optional&#39;</span><span class="p">)</span>
<span class="n">_register_builtin</span><span class="p">(</span><span class="n">_wait</span><span class="p">,</span> <span class="s1">&#39;aten::wait&#39;</span><span class="p">)</span>
<span class="n">_register_builtin</span><span class="p">(</span><span class="n">is_scripting</span><span class="p">,</span> <span class="s1">&#39;aten::is_scripting&#39;</span><span class="p">)</span>


<span class="c1"># Caching: we currently cache compilation of free functions and overloaded functions.</span>
<span class="c1"># To cache free functions we hold a weak ref to the function object and</span>
<span class="c1"># map to the compiled fn&#39;s qualified name.</span>
<span class="c1"># To cache overloaded functions we hold a weak ref to the function obj and</span>
<span class="c1"># map to all of its overloaded compiled fns.</span>
<span class="c1"># In the future we could consider caching more types of objects so that</span>
<span class="c1"># aliasing is preserved across separate compilations of the same object.</span>

<span class="n">_jit_caching_layer</span> <span class="o">=</span> <span class="n">weakref</span><span class="o">.</span><span class="n">WeakKeyDictionary</span><span class="p">()</span>
<span class="n">_jit_function_overload_caching</span> <span class="o">=</span> <span class="n">weakref</span><span class="o">.</span><span class="n">WeakKeyDictionary</span><span class="p">()</span>

<span class="k">def</span> <span class="nf">_try_get_jit_cached_overloads</span><span class="p">(</span><span class="n">key</span><span class="p">):</span>
    <span class="n">qual_names</span> <span class="o">=</span> <span class="n">_jit_function_overload_caching</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="kc">None</span><span class="p">)</span>
    <span class="k">if</span> <span class="n">qual_names</span><span class="p">:</span>
        <span class="k">return</span> <span class="p">[</span><span class="n">_python_cu</span><span class="o">.</span><span class="n">find_function</span><span class="p">(</span><span class="n">qual_name</span><span class="p">)</span> <span class="k">for</span> <span class="n">qual_name</span> <span class="ow">in</span> <span class="n">qual_names</span><span class="p">]</span>
    <span class="k">else</span><span class="p">:</span>
        <span class="k">return</span> <span class="kc">None</span>

<span class="k">def</span> <span class="nf">_set_jit_overload_cache</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="n">compiled_fns</span><span class="p">):</span>
    <span class="n">_jit_function_overload_caching</span><span class="p">[</span><span class="n">key</span><span class="p">]</span> <span class="o">=</span> <span class="p">[</span><span class="n">fn</span><span class="o">.</span><span class="n">qualified_name</span> <span class="k">for</span> <span class="n">fn</span> <span class="ow">in</span> <span class="n">compiled_fns</span><span class="p">]</span>

<span class="k">def</span> <span class="nf">_try_get_jit_cached_function</span><span class="p">(</span><span class="n">key</span><span class="p">):</span>
    <span class="n">qual_name</span> <span class="o">=</span> <span class="n">_jit_caching_layer</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="kc">None</span><span class="p">)</span>
    <span class="k">if</span> <span class="n">qual_name</span><span class="p">:</span>
        <span class="k">return</span> <span class="n">_python_cu</span><span class="o">.</span><span class="n">find_function</span><span class="p">(</span><span class="n">qual_name</span><span class="p">)</span>
    <span class="k">else</span><span class="p">:</span>
        <span class="k">return</span> <span class="kc">None</span>

<span class="k">def</span> <span class="nf">_set_jit_function_cache</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="n">value</span><span class="p">):</span>
    <span class="c1"># only free functions currently supported</span>
    <span class="k">assert</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">ScriptFunction</span><span class="p">)</span>
    <span class="n">_jit_caching_layer</span><span class="p">[</span><span class="n">key</span><span class="p">]</span> <span class="o">=</span> <span class="n">value</span><span class="o">.</span><span class="n">qualified_name</span>


<span class="c1"># qualified_name =&gt; ScriptClass mapping</span>
<span class="n">_script_classes</span> <span class="o">=</span> <span class="p">{}</span>


<span class="k">def</span> <span class="nf">_add_script_class</span><span class="p">(</span><span class="bp">cls</span><span class="p">,</span> <span class="n">name</span><span class="p">):</span>
    <span class="bp">cls</span><span class="o">.</span><span class="n">__torch_script_class__</span> <span class="o">=</span> <span class="kc">True</span>
    <span class="k">global</span> <span class="n">_script_classes</span>
    <span class="n">_script_classes</span><span class="p">[</span><span class="n">name</span><span class="p">]</span> <span class="o">=</span> <span class="bp">cls</span>


<span class="k">def</span> <span class="nf">_get_script_class</span><span class="p">(</span><span class="n">name</span><span class="p">):</span>
    <span class="k">global</span> <span class="n">_script_classes</span>
    <span class="k">if</span> <span class="n">name</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">_script_classes</span><span class="p">:</span>
        <span class="k">return</span> <span class="kc">None</span>
    <span class="k">return</span> <span class="n">_script_classes</span><span class="p">[</span><span class="n">name</span><span class="p">]</span>

<span class="c1"># overloads are registered in _jit_internal and compiled here so that _overload</span>
<span class="c1"># can be used in nn/functional.py without an import cycle</span>

<span class="k">def</span> <span class="nf">_check_overload_defaults</span><span class="p">(</span><span class="n">impl_defaults</span><span class="p">,</span> <span class="n">overload_defaults</span><span class="p">,</span> <span class="n">loc</span><span class="p">):</span>
    <span class="k">for</span> <span class="n">name</span><span class="p">,</span> <span class="n">overload_value</span> <span class="ow">in</span> <span class="n">overload_defaults</span><span class="o">.</span><span class="n">items</span><span class="p">():</span>
        <span class="k">if</span> <span class="n">name</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">impl_defaults</span> <span class="ow">or</span> <span class="n">impl_defaults</span><span class="p">[</span><span class="n">name</span><span class="p">]</span> <span class="o">!=</span> <span class="n">overload_value</span><span class="p">:</span>
            <span class="k">raise</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">frontend</span><span class="o">.</span><span class="n">FrontendError</span><span class="p">(</span>
                <span class="n">loc</span><span class="p">,</span> <span class="s2">&quot;Default parameters on overloads do not affect the runtime so they &quot;</span>
                <span class="s2">&quot;must equal to the default parameter on the implementation function. Found on &quot;</span>
                <span class="s2">&quot;parameter </span><span class="si">{name}</span><span class="s2">&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">name</span><span class="o">=</span><span class="n">name</span><span class="p">))</span>

<span class="k">def</span> <span class="nf">_compile_function_with_overload</span><span class="p">(</span><span class="n">overload_fn</span><span class="p">,</span> <span class="n">qual_name</span><span class="p">,</span> <span class="n">impl_fn</span><span class="p">):</span>
    <span class="n">overload_decl</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">get_jit_def</span><span class="p">(</span><span class="n">overload_fn</span><span class="p">)</span><span class="o">.</span><span class="n">decl</span><span class="p">()</span>
    <span class="n">overload_signature</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">annotations</span><span class="o">.</span><span class="n">get_signature</span><span class="p">(</span><span class="n">overload_fn</span><span class="p">,</span> <span class="kc">None</span><span class="p">,</span> <span class="kc">None</span><span class="p">,</span> <span class="n">inspect</span><span class="o">.</span><span class="n">ismethod</span><span class="p">(</span><span class="n">overload_fn</span><span class="p">))</span>
    <span class="n">impl_ast</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">get_jit_def</span><span class="p">(</span><span class="n">impl_fn</span><span class="p">)</span>
    <span class="n">overload_defaults</span> <span class="o">=</span> <span class="n">get_default_args</span><span class="p">(</span><span class="n">overload_fn</span><span class="p">)</span>
    <span class="n">implementation_defaults</span> <span class="o">=</span> <span class="n">get_default_args</span><span class="p">(</span><span class="n">impl_fn</span><span class="p">)</span>
    <span class="n">_rcb</span> <span class="o">=</span> <span class="n">_jit_internal</span><span class="o">.</span><span class="n">createResolutionCallbackFromClosure</span><span class="p">(</span><span class="n">impl_fn</span><span class="p">)</span>
    <span class="n">_check_overload_defaults</span><span class="p">(</span><span class="n">implementation_defaults</span><span class="p">,</span> <span class="n">overload_defaults</span><span class="p">,</span> <span class="n">overload_decl</span><span class="o">.</span><span class="n">range</span><span class="p">())</span>
    <span class="n">fn</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_script_compile_overload</span><span class="p">(</span><span class="n">qual_name</span><span class="p">,</span> <span class="n">overload_decl</span><span class="p">,</span> <span class="n">impl_ast</span><span class="p">,</span> <span class="n">_rcb</span><span class="p">,</span>
                                               <span class="n">implementation_defaults</span><span class="p">,</span> <span class="n">overload_signature</span><span class="p">)</span>
    <span class="k">return</span> <span class="n">fn</span>

<span class="k">def</span> <span class="nf">_get_overloads</span><span class="p">(</span><span class="n">obj</span><span class="p">):</span>
    <span class="c1"># check for cached compiled fns</span>
    <span class="n">existing_compiled_fns</span> <span class="o">=</span> <span class="n">_try_get_jit_cached_overloads</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span>
    <span class="n">qual_name</span> <span class="o">=</span> <span class="n">_qualified_name</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span>
    <span class="n">uncompiled_overloads</span> <span class="o">=</span> <span class="n">_jit_internal</span><span class="o">.</span><span class="n">_get_fn_overloads</span><span class="p">(</span><span class="n">qual_name</span><span class="p">)</span>
    <span class="k">if</span> <span class="n">uncompiled_overloads</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
        <span class="k">return</span> <span class="n">existing_compiled_fns</span>

    <span class="n">compiled_fns</span> <span class="o">=</span> <span class="p">[]</span>
    <span class="k">for</span> <span class="n">overload_fn</span> <span class="ow">in</span> <span class="n">uncompiled_overloads</span><span class="p">:</span>
        <span class="n">compiled_fns</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">_compile_function_with_overload</span><span class="p">(</span><span class="n">overload_fn</span><span class="p">,</span> <span class="n">qual_name</span><span class="p">,</span> <span class="n">obj</span><span class="p">))</span>

    <span class="k">if</span> <span class="n">existing_compiled_fns</span><span class="p">:</span>
        <span class="n">compiled_fns</span> <span class="o">=</span> <span class="n">existing_compiled_fns</span> <span class="o">+</span> <span class="n">compiled_fns</span>

    <span class="c1"># cache compilation, remove information stored to do compilation</span>
    <span class="n">_set_jit_overload_cache</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="n">compiled_fns</span><span class="p">)</span>
    <span class="n">_jit_internal</span><span class="o">.</span><span class="n">_clear_fn_overloads</span><span class="p">(</span><span class="n">qual_name</span><span class="p">)</span>
    <span class="k">return</span> <span class="n">compiled_fns</span>

<span class="k">def</span> <span class="nf">_check_directly_compile_overloaded</span><span class="p">(</span><span class="n">obj</span><span class="p">):</span>
    <span class="n">qual_name</span> <span class="o">=</span> <span class="n">_qualified_name</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span>
    <span class="k">if</span> <span class="n">_jit_internal</span><span class="o">.</span><span class="n">_get_fn_overloads</span><span class="p">(</span><span class="n">qual_name</span><span class="p">)</span> <span class="ow">or</span> <span class="n">_try_get_jit_cached_overloads</span><span class="p">(</span><span class="n">obj</span><span class="p">):</span>
        <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s2">&quot;Function </span><span class="si">{}</span><span class="s2"> cannot be directly compiled because it&quot;</span>
                           <span class="s2">&quot; is overloaded. It must be used in a context of a function&quot;</span>
                           <span class="s2">&quot; where its inputs can determine which overload to call.&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">qual_name</span><span class="p">))</span>

<span class="c1"># torch.jit.Error</span>
<span class="n">Error</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">JITException</span>
<span class="n">set_module</span><span class="p">(</span><span class="n">Error</span><span class="p">,</span> <span class="s2">&quot;torch.jit&quot;</span><span class="p">)</span>
<span class="c1"># This is not perfect but works in common cases</span>
<span class="n">Error</span><span class="o">.</span><span class="vm">__name__</span> <span class="o">=</span> <span class="s2">&quot;Error&quot;</span>
<span class="n">Error</span><span class="o">.</span><span class="vm">__qualname__</span> <span class="o">=</span> <span class="s2">&quot;Error&quot;</span>

<span class="k">def</span> <span class="nf">_get_named_tuple_properties</span><span class="p">(</span><span class="n">obj</span><span class="p">):</span>
    <span class="k">assert</span> <span class="nb">issubclass</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">)</span> <span class="ow">and</span> <span class="nb">hasattr</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="s1">&#39;_fields&#39;</span><span class="p">)</span>
    <span class="n">fields</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">obj</span><span class="o">.</span><span class="n">_fields</span><span class="p">)</span>
    <span class="n">annotations</span> <span class="o">=</span> <span class="p">[]</span>
    <span class="n">has_annotations</span> <span class="o">=</span> <span class="nb">hasattr</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="s1">&#39;__annotations__&#39;</span><span class="p">)</span>
    <span class="k">for</span> <span class="n">field</span> <span class="ow">in</span> <span class="n">fields</span><span class="p">:</span>
        <span class="k">if</span> <span class="n">has_annotations</span> <span class="ow">and</span> <span class="n">field</span> <span class="ow">in</span> <span class="n">obj</span><span class="o">.</span><span class="vm">__annotations__</span><span class="p">:</span>
            <span class="n">the_type</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">jit</span><span class="o">.</span><span class="n">annotations</span><span class="o">.</span><span class="n">ann_to_type</span><span class="p">(</span><span class="n">obj</span><span class="o">.</span><span class="vm">__annotations__</span><span class="p">[</span><span class="n">field</span><span class="p">],</span> <span class="n">_jit_internal</span><span class="o">.</span><span class="n">fake_range</span><span class="p">())</span>
            <span class="n">annotations</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">the_type</span><span class="p">)</span>
        <span class="k">else</span><span class="p">:</span>
            <span class="n">annotations</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">TensorType</span><span class="o">.</span><span class="n">get</span><span class="p">())</span>
    <span class="k">return</span> <span class="nb">type</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span><span class="o">.</span><span class="vm">__name__</span><span class="p">,</span> <span class="n">fields</span><span class="p">,</span> <span class="n">annotations</span>

<span class="k">def</span> <span class="nf">_create_named_tuple</span><span class="p">(</span><span class="n">t</span><span class="p">,</span> <span class="n">unqual_name</span><span class="p">,</span> <span class="n">field_names</span><span class="p">):</span>
    <span class="n">TupleType</span> <span class="o">=</span> <span class="n">collections</span><span class="o">.</span><span class="n">namedtuple</span><span class="p">(</span><span class="n">unqual_name</span><span class="p">,</span> <span class="n">field_names</span><span class="p">)</span>
    <span class="k">return</span> <span class="n">TupleType</span><span class="p">(</span><span class="o">*</span><span class="n">t</span><span class="p">)</span>

<span class="k">class</span> <span class="nc">_disable_tracing</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
    <span class="k">def</span> <span class="fm">__enter__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">state</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_get_tracing_state</span><span class="p">()</span>
        <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_set_tracing_state</span><span class="p">(</span><span class="kc">None</span><span class="p">)</span>

    <span class="k">def</span> <span class="fm">__exit__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">):</span>
        <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_set_tracing_state</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">state</span><span class="p">)</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">state</span> <span class="o">=</span> <span class="kc">None</span>


<span class="c1"># for use in python if using annotate</span>
<span class="k">def</span> <span class="nf">annotate</span><span class="p">(</span><span class="n">the_type</span><span class="p">,</span> <span class="n">the_value</span><span class="p">):</span>
    <span class="c1"># noop in python</span>
    <span class="k">return</span> <span class="n">the_value</span>

<span class="n">last_executed_optimized_graph</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_last_executed_optimized_graph</span>


<span class="k">def</span> <span class="nf">_graph_for</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
    <span class="bp">self</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
    <span class="k">return</span> <span class="n">last_executed_optimized_graph</span><span class="p">()</span>

<span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">ScriptMethod</span><span class="o">.</span><span class="n">graph_for</span> <span class="o">=</span> <span class="n">_graph_for</span>
<span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">ScriptFunction</span><span class="o">.</span><span class="n">graph_for</span> <span class="o">=</span> <span class="n">_graph_for</span>
<span class="n">ScriptFunction</span> <span class="o">=</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">ScriptFunction</span>
<span class="n">ScriptFunction</span><span class="o">.</span><span class="vm">__doc__</span> <span class="o">=</span> <span class="s2">&quot;&quot;&quot;</span>
<span class="s2">Functionally equivalent to a :class:`ScriptModule`, but represents a single</span>
<span class="s2">function and does not have any attributes or Parameters.</span>
<span class="s2">&quot;&quot;&quot;</span>
<span class="n">set_module</span><span class="p">(</span><span class="n">ScriptFunction</span><span class="p">,</span> <span class="s2">&quot;torch.jit&quot;</span><span class="p">)</span>

<span class="k">if</span> <span class="ow">not</span> <span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_jit_init</span><span class="p">():</span>
    <span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s2">&quot;JIT initialization failed&quot;</span><span class="p">)</span>
</pre></div>

             </article>
             
            </div>
            <footer>
  

  

    <hr>

  

  <div role="contentinfo">
    <p>
        &copy; Copyright 2019, Torch Contributors.

    </p>
  </div>
    
      <div>
        Built with <a href="https://www.sphinx-doc.org/">Sphinx</a> using a <a href="https://github.com/rtfd/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
      </div>
     

</footer>

          </div>
        </div>

        <div class="pytorch-content-right" id="pytorch-content-right">
          <div class="pytorch-right-menu" id="pytorch-right-menu">
            <div class="pytorch-side-scroll" id="pytorch-side-scroll-right">
              
            </div>
          </div>
        </div>
      </section>
    </div>

  


  

     
       <script type="text/javascript" id="documentation_options" data-url_root="../../" src="../../_static/documentation_options.js"></script>
         <script src="../../_static/jquery.js"></script>
         <script src="../../_static/underscore.js"></script>
         <script src="../../_static/doctools.js"></script>
         <script src="../../_static/language_data.js"></script>
     

  

  <script type="text/javascript" src="../../_static/js/vendor/popper.min.js"></script>
  <script type="text/javascript" src="../../_static/js/vendor/bootstrap.min.js"></script>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/list.js/1.5.0/list.min.js"></script>
  <script type="text/javascript" src="../../_static/js/theme.js"></script>

  <!-- Initialize the Read the Docs theme's sidebar navigation once the DOM is ready. -->
  <script type="text/javascript">
      jQuery(function () {
          // `true` presumably enables the sticky/scroll-following sidebar mode —
          // behavior is defined in _static/js/theme.js (loaded above).
          SphinxRtdTheme.Navigation.enable(true);
      });
  </script>
 
<!-- Standard Google Analytics (analytics.js) bootstrap snippet: injects the
     analytics.js <script> tag asynchronously and queues calls on window.ga
     until the library loads. Do not edit — this is vendor-provided code. -->
<script>
  (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
  (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
  m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
  })(window,document,'script','https://www.google-analytics.com/analytics.js','ga');

  // Track a pageview against the pytorch.org docs property.
  ga('create', 'UA-90545585-1', 'auto');
  ga('send', 'pageview');

</script>

<!-- Standard Google gtag.js snippet: loads the tag manager library
     asynchronously for a second analytics property. -->
<script async src="https://www.googletagmanager.com/gtag/js?id=UA-117752657-2"></script>

<script>
  // Queue commands on dataLayer until gtag.js finishes loading.
  window.dataLayer = window.dataLayer || [];

  function gtag(){dataLayer.push(arguments);}

  gtag('js', new Date());
  gtag('config', 'UA-117752657-2');
</script>

<img height="1" width="1" style="border-style:none;" alt="" src="https://www.googleadservices.com/pagead/conversion/795629140/?label=txkmCPmdtosBENSssfsC&amp;guid=ON&amp;script=0"/>


  <!-- Begin Footer -->

  <div class="container-fluid docs-tutorials-resources" id="docs-tutorials-resources">
    <div class="container">
      <div class="row">
        <div class="col-md-4 text-center">
          <h2>Docs</h2>
          <p>Access comprehensive developer documentation for PyTorch</p>
          <a class="with-right-arrow" href="https://pytorch.org/docs/stable/index.html">View Docs</a>
        </div>

        <div class="col-md-4 text-center">
          <h2>Tutorials</h2>
          <p>Get in-depth tutorials for beginners and advanced developers</p>
          <a class="with-right-arrow" href="https://pytorch.org/tutorials">View Tutorials</a>
        </div>

        <div class="col-md-4 text-center">
          <h2>Resources</h2>
          <p>Find development resources and get your questions answered</p>
          <a class="with-right-arrow" href="https://pytorch.org/resources">View Resources</a>
        </div>
      </div>
    </div>
  </div>

  <footer class="site-footer">
    <div class="container footer-container">
      <div class="footer-logo-wrapper">
        <a href="https://pytorch.org/" class="footer-logo" aria-label="PyTorch home"></a>
      </div>

      <div class="footer-links-wrapper">
        <div class="footer-links-col">
          <ul>
            <li class="list-title"><a href="https://pytorch.org/">PyTorch</a></li>
            <li><a href="https://pytorch.org/get-started">Get Started</a></li>
            <li><a href="https://pytorch.org/features">Features</a></li>
            <li><a href="https://pytorch.org/ecosystem">Ecosystem</a></li>
            <li><a href="https://pytorch.org/blog/">Blog</a></li>
            <li><a href="https://github.com/pytorch/pytorch/blob/master/CONTRIBUTING.md">Contributing</a></li>
          </ul>
        </div>

        <div class="footer-links-col">
          <ul>
            <li class="list-title"><a href="https://pytorch.org/resources">Resources</a></li>
            <li><a href="https://pytorch.org/tutorials">Tutorials</a></li>
            <li><a href="https://pytorch.org/docs/stable/index.html">Docs</a></li>
            <li><a href="https://discuss.pytorch.org" target="_blank" rel="noopener noreferrer">Discuss</a></li>
            <li><a href="https://github.com/pytorch/pytorch/issues" target="_blank" rel="noopener noreferrer">GitHub Issues</a></li>
            <li><a href="https://pytorch.org/assets/brand-guidelines/PyTorch-Brand-Guidelines.pdf" target="_blank" rel="noopener noreferrer">Brand Guidelines</a></li>
          </ul>
        </div>

        <div class="footer-links-col follow-us-col">
          <ul>
            <li class="list-title">Stay Connected</li>
            <li>
              <div id="mc_embed_signup">
                <form
                  action="https://twitter.us14.list-manage.com/subscribe/post?u=75419c71fe0a935e53dfa4a3f&id=91d0dccd39"
                  method="post"
                  id="mc-embedded-subscribe-form"
                  name="mc-embedded-subscribe-form"
                  class="email-subscribe-form validate"
                  target="_blank"
                  novalidate>
                  <div id="mc_embed_signup_scroll" class="email-subscribe-form-fields-wrapper">
                    <div class="mc-field-group">
                      <label for="mce-EMAIL" style="display:none;">Email Address</label>
                      <input type="email" value="" name="EMAIL" class="required email" id="mce-EMAIL" placeholder="Email Address">
                    </div>

                    <div id="mce-responses" class="clear">
                      <div class="response" id="mce-error-response" style="display:none"></div>
                      <div class="response" id="mce-success-response" style="display:none"></div>
                    </div>    <!-- real people should not fill this in and expect good things - do not remove this or risk form bot signups-->

                    <div style="position: absolute; left: -5000px;" aria-hidden="true"><input type="text" name="b_75419c71fe0a935e53dfa4a3f_91d0dccd39" tabindex="-1" value=""></div>

                    <div class="clear">
                      <input type="submit" value="" name="subscribe" id="mc-embedded-subscribe" class="button email-subscribe-button">
                    </div>
                  </div>
                </form>
              </div>

            </li>
          </ul>

          <div class="footer-social-icons">
            <a href="https://www.facebook.com/pytorch" target="_blank" rel="noopener noreferrer" class="facebook" aria-label="PyTorch on Facebook"></a>
            <a href="https://twitter.com/pytorch" target="_blank" rel="noopener noreferrer" class="twitter" aria-label="PyTorch on Twitter"></a>
            <a href="https://www.youtube.com/pytorch" target="_blank" rel="noopener noreferrer" class="youtube" aria-label="PyTorch on YouTube"></a>
          </div>
        </div>
      </div>
    </div>
  </footer>

  <div class="cookie-banner-wrapper">
  <div class="container">
    <p class="gdpr-notice">To analyze traffic and optimize your experience, we serve cookies on this site. By clicking or navigating, you agree to allow our usage of cookies. As the current maintainers of this site, Facebook’s Cookies Policy applies. Learn more, including about available controls: <a href="https://www.facebook.com/policies/cookies/">Cookies Policy</a>.</p>
    <img class="close-button" src="../../_static/images/pytorch-x.svg" alt="Dismiss cookie notice">
  </div>
</div>

  <!-- End Footer -->

  <!-- Begin Mobile Menu -->

  <div class="mobile-main-menu">
    <div class="container-fluid">
      <div class="container">
        <div class="mobile-main-menu-header-container">
          <a class="header-logo" href="https://pytorch.org/" aria-label="PyTorch"></a>
          <a class="main-menu-close-button" href="#" data-behavior="close-mobile-menu"></a>
        </div>
      </div>
    </div>

    <div class="mobile-main-menu-links-container">
      <div class="main-menu">
        <ul>
          <li>
            <a href="https://pytorch.org/get-started">Get Started</a>
          </li>

          <li>
            <a href="https://pytorch.org/features">Features</a>
          </li>

          <li>
            <a href="https://pytorch.org/ecosystem">Ecosystem</a>
          </li>

          <li>
            <a href="https://pytorch.org/mobile">Mobile</a>
          </li>

          <li>
            <a href="https://pytorch.org/hub">PyTorch Hub</a>
          </li>

          <li>
            <a href="https://pytorch.org/blog/">Blog</a>
          </li>

          <li>
            <a href="https://pytorch.org/tutorials">Tutorials</a>
          </li>

          <li class="active">
            <a href="https://pytorch.org/docs/stable/index.html">Docs</a>
          </li>

          <li>
            <a href="https://pytorch.org/resources">Resources</a>
          </li>

          <li>
            <a href="https://github.com/pytorch/pytorch">Github</a>
          </li>
        </ul>
      </div>
    </div>
  </div>

  <!-- End Mobile Menu -->

  <script type="text/javascript" src="../../_static/js/vendor/anchor.min.js"></script>

  <!-- Wire up the PyTorch docs theme UI on DOM ready. The bound objects
       (mobileMenu, sideMenus, etc.) are globals defined elsewhere —
       presumably in _static/js/theme.js loaded above; verify there. -->
  <script type="text/javascript">
    $(document).ready(function() {
      // Attach event handlers for the theme's interactive widgets.
      mobileMenu.bind();
      mobileTOC.bind();
      pytorchAnchors.bind();
      sideMenus.bind();
      scrollToAnchor.bind();
      highlightNavigation.bind();
      mainMenuDropdown.bind();
      filterTags.bind();

      // Remove any empty p tags that Sphinx adds
      $("[data-tags='null']").remove();

      // Add class to links that have code blocks, since we cannot create links in code blocks
      $("article.pytorch-article a span.pre").each(function(e) {
        $(this).closest("a").addClass("has-code");
      });
    })
  </script>
</body>
</html>