

<!DOCTYPE html>
<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
<head>
  <meta charset="utf-8">
  
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <meta name="description" content="scikit-learn: machine learning in Python">

  
  <title>sklearn.neighbors.KDTree &mdash; scikit-learn 0.22 documentation</title>
  
  <link rel="canonical" href="https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.KDTree.html" />

  
  <link rel="shortcut icon" href="../../_static/favicon.ico"/>
  

  <link rel="stylesheet" href="../../_static/css/vendor/bootstrap.min.css" type="text/css" />
  <link rel="stylesheet" href="../../_static/gallery.css" type="text/css" />
  <link rel="stylesheet" href="../../_static/css/theme.css" type="text/css" />
<script id="documentation_options" data-url_root="../../" src="../../_static/documentation_options.js"></script>
<script src="../../_static/jquery.js"></script> 
</head>
<body>
<nav id="navbar" class="sk-docs-navbar navbar navbar-expand-md navbar-light bg-light py-0">
  <div class="container-fluid sk-docs-container px-0">
      <a class="navbar-brand py-0" href="../../index.html">
        <img
          class="sk-brand-img"
          src="../../_static/scikit-learn-logo-small.png"
          alt="logo"/>
      </a>
    <button
      id="sk-navbar-toggler"
      class="navbar-toggler"
      type="button"
      data-toggle="collapse"
      data-target="#navbarSupportedContent"
      aria-controls="navbarSupportedContent"
      aria-expanded="false"
      aria-label="Toggle navigation"
    >
      <span class="navbar-toggler-icon"></span>
    </button>

    <div class="sk-navbar-collapse collapse navbar-collapse" id="navbarSupportedContent">
      <ul class="navbar-nav mr-auto">
        <li class="nav-item">
          <a class="sk-nav-link nav-link" href="../../install.html">Install</a>
        </li>
        <li class="nav-item">
          <a class="sk-nav-link nav-link" href="../../user_guide.html">User Guide</a>
        </li>
        <li class="nav-item">
          <a class="sk-nav-link nav-link" href="../classes.html">API</a>
        </li>
        <li class="nav-item">
          <a class="sk-nav-link nav-link" href="../../auto_examples/index.html">Examples</a>
        </li>
        <li class="nav-item">
          <a class="sk-nav-link nav-link nav-more-item-mobile-items" href="../../getting_started.html">Getting Started</a>
        </li>
        <li class="nav-item">
          <a class="sk-nav-link nav-link nav-more-item-mobile-items" href="../../tutorial/index.html">Tutorial</a>
        </li>
        <li class="nav-item">
          <a class="sk-nav-link nav-link nav-more-item-mobile-items" href="../../glossary.html">Glossary</a>
        </li>
        <li class="nav-item">
          <a class="sk-nav-link nav-link nav-more-item-mobile-items" href="../../developers/index.html">Development</a>
        </li>
        <li class="nav-item">
          <a class="sk-nav-link nav-link nav-more-item-mobile-items" href="../../faq.html">FAQ</a>
        </li>
        <li class="nav-item">
          <a class="sk-nav-link nav-link nav-more-item-mobile-items" href="../../related_projects.html">Related packages</a>
        </li>
        <li class="nav-item">
          <a class="sk-nav-link nav-link nav-more-item-mobile-items" href="../../roadmap.html">Roadmap</a>
        </li>
        <li class="nav-item">
          <a class="sk-nav-link nav-link nav-more-item-mobile-items" href="../../about.html">About us</a>
        </li>
        <li class="nav-item">
          <a class="sk-nav-link nav-link nav-more-item-mobile-items" href="https://github.com/scikit-learn/scikit-learn">GitHub</a>
        </li>
        <li class="nav-item">
          <a class="sk-nav-link nav-link nav-more-item-mobile-items" href="https://scikit-learn.org/dev/versions.html">Other Versions</a>
        </li>
        <li class="nav-item dropdown nav-more-item-dropdown">
          <a class="sk-nav-link nav-link dropdown-toggle" href="#" id="navbarDropdown" role="button" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">More</a>
          <div class="dropdown-menu" aria-labelledby="navbarDropdown">
              <a class="sk-nav-dropdown-item dropdown-item" href="../../getting_started.html">Getting Started</a>
              <a class="sk-nav-dropdown-item dropdown-item" href="../../tutorial/index.html">Tutorial</a>
              <a class="sk-nav-dropdown-item dropdown-item" href="../../glossary.html">Glossary</a>
              <a class="sk-nav-dropdown-item dropdown-item" href="../../developers/index.html">Development</a>
              <a class="sk-nav-dropdown-item dropdown-item" href="../../faq.html">FAQ</a>
              <a class="sk-nav-dropdown-item dropdown-item" href="../../related_projects.html">Related packages</a>
              <a class="sk-nav-dropdown-item dropdown-item" href="../../roadmap.html">Roadmap</a>
              <a class="sk-nav-dropdown-item dropdown-item" href="../../about.html">About us</a>
              <a class="sk-nav-dropdown-item dropdown-item" href="https://github.com/scikit-learn/scikit-learn">GitHub</a>
              <a class="sk-nav-dropdown-item dropdown-item" href="https://scikit-learn.org/dev/versions.html">Other Versions</a>
          </div>
        </li>
      </ul>
      <div id="searchbox" role="search">
          <div class="searchformwrapper">
          <form class="search" action="../../search.html" method="get">
            <input class="sk-search-text-input" type="text" name="q" aria-label="Search" />
            <input class="sk-search-text-btn" type="submit" value="Go" />
          </form>
          </div>
      </div>
    </div>
  </div>
</nav>
<div class="d-flex" id="sk-doc-wrapper">
    <input type="checkbox" name="sk-toggle-checkbox" id="sk-toggle-checkbox">
    <label id="sk-sidemenu-toggle" class="sk-btn-toggle-toc btn sk-btn-primary" for="sk-toggle-checkbox">Toggle Menu</label>
    <div id="sk-sidebar-wrapper" class="border-right">
      <div class="sk-sidebar-toc-wrapper">
        <div class="sk-sidebar-toc-logo">
          <a href="../../index.html">
            <img
              class="sk-brand-img"
              src="../../_static/scikit-learn-logo-small.png"
              alt="logo"/>
          </a>
        </div>
        <div class="btn-group w-100 mb-2" role="group" aria-label="rellinks">
            <a href="sklearn.neighbors.DistanceMetric.html" role="button" class="btn sk-btn-rellink py-1" sk-rellink-tooltip="sklearn.neighbors.DistanceMetric">Prev</a><a href="../classes.html" role="button" class="btn sk-btn-rellink py-1" sk-rellink-tooltip="API Reference">Up</a>
            <a href="sklearn.neighbors.KernelDensity.html" role="button" class="btn sk-btn-rellink py-1" sk-rellink-tooltip="sklearn.neighbors.KernelDensity">Next</a>
        </div>
        <div class="alert alert-danger p-1 mb-2" role="alert">
          <p class="text-center mb-0">
          <strong>scikit-learn 0.22</strong><br/>
          <a href="https://scikit-learn.org/dev/versions.html">Other versions</a>
          </p>
        </div>
        <div class="alert alert-warning p-1 mb-2" role="alert">
          <p class="text-center mb-0">
            Please <a class="font-weight-bold" href="../../about.html#citing-scikit-learn"><strong>cite us</strong></a> if you use the software.
          </p>
        </div>
          <div class="sk-sidebar-toc">
            <ul>
<li><a class="reference internal" href="#"><code class="xref py py-mod docutils literal notranslate"><span class="pre">sklearn.neighbors</span></code>.KDTree</a></li>
</ul>

          </div>
      </div>
    </div>
    <div id="sk-page-content-wrapper">
      <div class="sk-page-content container-fluid body px-md-3" role="main">
        
  <div class="section" id="sklearn-neighbors-kdtree">
<h1><a class="reference internal" href="../classes.html#module-sklearn.neighbors" title="sklearn.neighbors"><code class="xref py py-mod docutils literal notranslate"><span class="pre">sklearn.neighbors</span></code></a>.KDTree<a class="headerlink" href="#sklearn-neighbors-kdtree" title="Permalink to this headline">¶</a></h1>
<dl class="class">
<dt id="sklearn.neighbors.KDTree">
<em class="property">class </em><code class="sig-prename descclassname">sklearn.neighbors.</code><code class="sig-name descname">KDTree</code><a class="headerlink" href="#sklearn.neighbors.KDTree" title="Permalink to this definition">¶</a></dt>
<dd><p>KDTree for fast generalized N-point problems</p>
<p>KDTree(X, leaf_size=40, metric=’minkowski’, **kwargs)</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><dl class="simple">
<dt><strong>X</strong><span class="classifier">array-like of shape (n_samples, n_features)</span></dt><dd><p>n_samples is the number of points in the data set, and
n_features is the dimension of the parameter space.
Note: if X is a C-contiguous array of doubles then data will
not be copied. Otherwise, an internal copy will be made.</p>
</dd>
<dt><strong>leaf_size</strong><span class="classifier">positive integer (default = 40)</span></dt><dd><p>Number of points at which to switch to brute-force. Changing
leaf_size will not affect the results of a query, but can
significantly impact the speed of a query and the memory required
to store the constructed tree.  The amount of memory needed to
store the tree scales as approximately n_samples / leaf_size.
For a specified <code class="docutils literal notranslate"><span class="pre">leaf_size</span></code>, a leaf node is guaranteed to
satisfy <code class="docutils literal notranslate"><span class="pre">leaf_size</span> <span class="pre">&lt;=</span> <span class="pre">n_points</span> <span class="pre">&lt;=</span> <span class="pre">2</span> <span class="pre">*</span> <span class="pre">leaf_size</span></code>, except in
the case that <code class="docutils literal notranslate"><span class="pre">n_samples</span> <span class="pre">&lt;</span> <span class="pre">leaf_size</span></code>.</p>
</dd>
<dt><strong>metric</strong><span class="classifier">string or DistanceMetric object</span></dt><dd><p>the distance metric to use for the tree.  Default=’minkowski’
with p=2 (that is, a euclidean metric). See the documentation
of the DistanceMetric class for a list of available metrics.
kd_tree.valid_metrics gives a list of the metrics which
are valid for KDTree.</p>
</dd>
<dt><strong>Additional keywords are passed to the distance metric class.</strong></dt><dd></dd>
<dt><strong>Note: Callable functions in the metric parameter are NOT supported for KDTree</strong></dt><dd></dd>
<dt><strong>and Ball Tree. Function call overhead will result in very poor performance.</strong></dt><dd></dd>
</dl>
</dd>
<dt class="field-even">Attributes</dt>
<dd class="field-even"><dl class="simple">
<dt><strong>data</strong><span class="classifier">memory view</span></dt><dd><p>The training data</p>
</dd>
</dl>
</dd>
</dl>
<p class="rubric">Examples</p>
<p>Query for k-nearest neighbors</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">rng</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">random</span><span class="o">.</span><span class="n">RandomState</span><span class="p">(</span><span class="mi">0</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">X</span> <span class="o">=</span> <span class="n">rng</span><span class="o">.</span><span class="n">random_sample</span><span class="p">((</span><span class="mi">10</span><span class="p">,</span> <span class="mi">3</span><span class="p">))</span>  <span class="c1"># 10 points in 3 dimensions</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">tree</span> <span class="o">=</span> <span class="n">KDTree</span><span class="p">(</span><span class="n">X</span><span class="p">,</span> <span class="n">leaf_size</span><span class="o">=</span><span class="mi">2</span><span class="p">)</span>              <span class="c1"># doctest: +SKIP</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">dist</span><span class="p">,</span> <span class="n">ind</span> <span class="o">=</span> <span class="n">tree</span><span class="o">.</span><span class="n">query</span><span class="p">(</span><span class="n">X</span><span class="p">[:</span><span class="mi">1</span><span class="p">],</span> <span class="n">k</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>                <span class="c1"># doctest: +SKIP</span>
<span class="gp">&gt;&gt;&gt; </span><span class="nb">print</span><span class="p">(</span><span class="n">ind</span><span class="p">)</span>  <span class="c1"># indices of 3 closest neighbors</span>
<span class="go">[0 3 1]</span>
<span class="gp">&gt;&gt;&gt; </span><span class="nb">print</span><span class="p">(</span><span class="n">dist</span><span class="p">)</span>  <span class="c1"># distances to 3 closest neighbors</span>
<span class="go">[ 0.          0.19662693  0.29473397]</span>
</pre></div>
</div>
<p>Pickle and Unpickle a tree.  Note that the state of the tree is saved in the
pickle operation: the tree needs not be rebuilt upon unpickling.</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">pickle</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">rng</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">random</span><span class="o">.</span><span class="n">RandomState</span><span class="p">(</span><span class="mi">0</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">X</span> <span class="o">=</span> <span class="n">rng</span><span class="o">.</span><span class="n">random_sample</span><span class="p">((</span><span class="mi">10</span><span class="p">,</span> <span class="mi">3</span><span class="p">))</span>  <span class="c1"># 10 points in 3 dimensions</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">tree</span> <span class="o">=</span> <span class="n">KDTree</span><span class="p">(</span><span class="n">X</span><span class="p">,</span> <span class="n">leaf_size</span><span class="o">=</span><span class="mi">2</span><span class="p">)</span>        <span class="c1"># doctest: +SKIP</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">s</span> <span class="o">=</span> <span class="n">pickle</span><span class="o">.</span><span class="n">dumps</span><span class="p">(</span><span class="n">tree</span><span class="p">)</span>                     <span class="c1"># doctest: +SKIP</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">tree_copy</span> <span class="o">=</span> <span class="n">pickle</span><span class="o">.</span><span class="n">loads</span><span class="p">(</span><span class="n">s</span><span class="p">)</span>                <span class="c1"># doctest: +SKIP</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">dist</span><span class="p">,</span> <span class="n">ind</span> <span class="o">=</span> <span class="n">tree_copy</span><span class="o">.</span><span class="n">query</span><span class="p">(</span><span class="n">X</span><span class="p">[:</span><span class="mi">1</span><span class="p">],</span> <span class="n">k</span><span class="o">=</span><span class="mi">3</span><span class="p">)</span>     <span class="c1"># doctest: +SKIP</span>
<span class="gp">&gt;&gt;&gt; </span><span class="nb">print</span><span class="p">(</span><span class="n">ind</span><span class="p">)</span>  <span class="c1"># indices of 3 closest neighbors</span>
<span class="go">[0 3 1]</span>
<span class="gp">&gt;&gt;&gt; </span><span class="nb">print</span><span class="p">(</span><span class="n">dist</span><span class="p">)</span>  <span class="c1"># distances to 3 closest neighbors</span>
<span class="go">[ 0.          0.19662693  0.29473397]</span>
</pre></div>
</div>
<p>Query for neighbors within a given radius</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">rng</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">random</span><span class="o">.</span><span class="n">RandomState</span><span class="p">(</span><span class="mi">0</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">X</span> <span class="o">=</span> <span class="n">rng</span><span class="o">.</span><span class="n">random_sample</span><span class="p">((</span><span class="mi">10</span><span class="p">,</span> <span class="mi">3</span><span class="p">))</span>  <span class="c1"># 10 points in 3 dimensions</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">tree</span> <span class="o">=</span> <span class="n">KDTree</span><span class="p">(</span><span class="n">X</span><span class="p">,</span> <span class="n">leaf_size</span><span class="o">=</span><span class="mi">2</span><span class="p">)</span>     <span class="c1"># doctest: +SKIP</span>
<span class="gp">&gt;&gt;&gt; </span><span class="nb">print</span><span class="p">(</span><span class="n">tree</span><span class="o">.</span><span class="n">query_radius</span><span class="p">(</span><span class="n">X</span><span class="p">[:</span><span class="mi">1</span><span class="p">],</span> <span class="n">r</span><span class="o">=</span><span class="mf">0.3</span><span class="p">,</span> <span class="n">count_only</span><span class="o">=</span><span class="kc">True</span><span class="p">))</span>
<span class="go">3</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">ind</span> <span class="o">=</span> <span class="n">tree</span><span class="o">.</span><span class="n">query_radius</span><span class="p">(</span><span class="n">X</span><span class="p">[:</span><span class="mi">1</span><span class="p">],</span> <span class="n">r</span><span class="o">=</span><span class="mf">0.3</span><span class="p">)</span>  <span class="c1"># doctest: +SKIP</span>
<span class="gp">&gt;&gt;&gt; </span><span class="nb">print</span><span class="p">(</span><span class="n">ind</span><span class="p">)</span>  <span class="c1"># indices of neighbors within distance 0.3</span>
<span class="go">[3 0 1]</span>
</pre></div>
</div>
<p>Compute a gaussian kernel density estimate:</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">rng</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">random</span><span class="o">.</span><span class="n">RandomState</span><span class="p">(</span><span class="mi">42</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">X</span> <span class="o">=</span> <span class="n">rng</span><span class="o">.</span><span class="n">random_sample</span><span class="p">((</span><span class="mi">100</span><span class="p">,</span> <span class="mi">3</span><span class="p">))</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">tree</span> <span class="o">=</span> <span class="n">KDTree</span><span class="p">(</span><span class="n">X</span><span class="p">)</span>                <span class="c1"># doctest: +SKIP</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">tree</span><span class="o">.</span><span class="n">kernel_density</span><span class="p">(</span><span class="n">X</span><span class="p">[:</span><span class="mi">3</span><span class="p">],</span> <span class="n">h</span><span class="o">=</span><span class="mf">0.1</span><span class="p">,</span> <span class="n">kernel</span><span class="o">=</span><span class="s1">&#39;gaussian&#39;</span><span class="p">)</span>
<span class="go">array([ 6.94114649,  7.83281226,  7.2071716 ])</span>
</pre></div>
</div>
<p>Compute a two-point auto-correlation function</p>
<div class="highlight-default notranslate"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">rng</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">random</span><span class="o">.</span><span class="n">RandomState</span><span class="p">(</span><span class="mi">0</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">X</span> <span class="o">=</span> <span class="n">rng</span><span class="o">.</span><span class="n">random_sample</span><span class="p">((</span><span class="mi">30</span><span class="p">,</span> <span class="mi">3</span><span class="p">))</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">r</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">linspace</span><span class="p">(</span><span class="mi">0</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="mi">5</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">tree</span> <span class="o">=</span> <span class="n">KDTree</span><span class="p">(</span><span class="n">X</span><span class="p">)</span>                <span class="c1"># doctest: +SKIP</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">tree</span><span class="o">.</span><span class="n">two_point_correlation</span><span class="p">(</span><span class="n">X</span><span class="p">,</span> <span class="n">r</span><span class="p">)</span>
<span class="go">array([ 30,  62, 278, 580, 820])</span>
</pre></div>
</div>
<p class="rubric">Methods</p>
<table class="longtable docutils align-default">
<colgroup>
<col style="width: 10%" />
<col style="width: 90%" />
</colgroup>
<tbody>
<tr class="row-odd"><td><p><a class="reference internal" href="#sklearn.neighbors.KDTree.kernel_density" title="sklearn.neighbors.KDTree.kernel_density"><code class="xref py py-obj docutils literal notranslate"><span class="pre">kernel_density</span></code></a>(self, X, h[, kernel, atol, …])</p></td>
<td><p>Compute the kernel density estimate at points X with the given kernel, using the distance metric specified at tree creation.</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="#sklearn.neighbors.KDTree.query" title="sklearn.neighbors.KDTree.query"><code class="xref py py-obj docutils literal notranslate"><span class="pre">query</span></code></a>(X[, k, return_distance, dualtree, …])</p></td>
<td><p>query the tree for the k nearest neighbors</p></td>
</tr>
<tr class="row-odd"><td><p><a class="reference internal" href="#sklearn.neighbors.KDTree.query_radius" title="sklearn.neighbors.KDTree.query_radius"><code class="xref py py-obj docutils literal notranslate"><span class="pre">query_radius</span></code></a>()</p></td>
<td><p>query_radius(self, X, r, count_only = False):</p></td>
</tr>
<tr class="row-even"><td><p><a class="reference internal" href="#sklearn.neighbors.KDTree.two_point_correlation" title="sklearn.neighbors.KDTree.two_point_correlation"><code class="xref py py-obj docutils literal notranslate"><span class="pre">two_point_correlation</span></code></a>()</p></td>
<td><p>Compute the two-point correlation function</p></td>
</tr>
</tbody>
</table>
<table class="docutils align-default">
<colgroup>
<col style="width: 64%" />
<col style="width: 36%" />
</colgroup>
<tbody>
<tr class="row-odd"><td><p><strong>get_arrays</strong></p></td>
<td></td>
</tr>
<tr class="row-even"><td><p><strong>get_n_calls</strong></p></td>
<td></td>
</tr>
<tr class="row-odd"><td><p><strong>get_tree_stats</strong></p></td>
<td></td>
</tr>
<tr class="row-even"><td><p><strong>reset_n_calls</strong></p></td>
<td></td>
</tr>
</tbody>
</table>
<dl class="method">
<dt id="sklearn.neighbors.KDTree.__init__">
<code class="sig-name descname">__init__</code><span class="sig-paren">(</span><em class="sig-param">self</em>, <em class="sig-param">/</em>, <em class="sig-param">*args</em>, <em class="sig-param">**kwargs</em><span class="sig-paren">)</span><a class="headerlink" href="#sklearn.neighbors.KDTree.__init__" title="Permalink to this definition">¶</a></dt>
<dd><p>Initialize self.  See help(type(self)) for accurate signature.</p>
</dd></dl>

<dl class="method">
<dt id="sklearn.neighbors.KDTree.kernel_density">
<code class="sig-name descname">kernel_density</code><span class="sig-paren">(</span><em class="sig-param">self</em>, <em class="sig-param">X</em>, <em class="sig-param">h</em>, <em class="sig-param">kernel='gaussian'</em>, <em class="sig-param">atol=0</em>, <em class="sig-param">rtol=1E-8</em>, <em class="sig-param">breadth_first=True</em>, <em class="sig-param">return_log=False</em><span class="sig-paren">)</span><a class="headerlink" href="#sklearn.neighbors.KDTree.kernel_density" title="Permalink to this definition">¶</a></dt>
<dd><p>Compute the kernel density estimate at points X with the given kernel,
using the distance metric specified at tree creation.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><dl class="simple">
<dt><strong>X</strong><span class="classifier">array-like of shape (n_samples, n_features)</span></dt><dd><p>An array of points to query.  Last dimension should match dimension
of training data.</p>
</dd>
<dt><strong>h</strong><span class="classifier">float</span></dt><dd><p>the bandwidth of the kernel</p>
</dd>
<dt><strong>kernel</strong><span class="classifier">string</span></dt><dd><p>specify the kernel to use.  Options are
- ‘gaussian’
- ‘tophat’
- ‘epanechnikov’
- ‘exponential’
- ‘linear’
- ‘cosine’
Default is kernel = ‘gaussian’</p>
</dd>
<dt><strong>atol, rtol</strong><span class="classifier">float (default = 0)</span></dt><dd><p>Specify the desired relative and absolute tolerance of the result.
If the true result is K_true, then the returned result K_ret
satisfies <code class="docutils literal notranslate"><span class="pre">abs(K_true</span> <span class="pre">-</span> <span class="pre">K_ret)</span> <span class="pre">&lt;</span> <span class="pre">atol</span> <span class="pre">+</span> <span class="pre">rtol</span> <span class="pre">*</span> <span class="pre">K_ret</span></code>
The default is zero (i.e. machine precision) for both.</p>
</dd>
<dt><strong>breadth_first</strong><span class="classifier">boolean (default = False)</span></dt><dd><p>if True, use a breadth-first search.  If False (default) use a
depth-first search.  Breadth-first is generally faster for
compact kernels and/or high tolerances.</p>
</dd>
<dt><strong>return_log</strong><span class="classifier">boolean (default = False)</span></dt><dd><p>return the logarithm of the result.  This can be more accurate
than returning the result itself for narrow kernels.</p>
</dd>
</dl>
</dd>
<dt class="field-even">Returns</dt>
<dd class="field-even"><dl class="simple">
<dt><strong>density</strong><span class="classifier">ndarray</span></dt><dd><p>The array of (log)-density evaluations, shape = X.shape[:-1]</p>
</dd>
</dl>
</dd>
</dl>
</dd></dl>

<dl class="method">
<dt id="sklearn.neighbors.KDTree.query">
<code class="sig-name descname">query</code><span class="sig-paren">(</span><em class="sig-param">X</em>, <em class="sig-param">k=1</em>, <em class="sig-param">return_distance=True</em>, <em class="sig-param">dualtree=False</em>, <em class="sig-param">breadth_first=False</em><span class="sig-paren">)</span><a class="headerlink" href="#sklearn.neighbors.KDTree.query" title="Permalink to this definition">¶</a></dt>
<dd><p>query the tree for the k nearest neighbors</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><dl class="simple">
<dt><strong>X</strong><span class="classifier">array-like of shape (n_samples, n_features)</span></dt><dd><p>An array of points to query</p>
</dd>
<dt><strong>k</strong><span class="classifier">integer  (default = 1)</span></dt><dd><p>The number of nearest neighbors to return</p>
</dd>
<dt><strong>return_distance</strong><span class="classifier">boolean (default = True)</span></dt><dd><p>if True, return a tuple (d, i) of distances and indices
if False, return array i</p>
</dd>
<dt><strong>dualtree</strong><span class="classifier">boolean (default = False)</span></dt><dd><p>if True, use the dual tree formalism for the query: a tree is
built for the query points, and the pair of trees is used to
efficiently search this space.  This can lead to better
performance as the number of points grows large.</p>
</dd>
<dt><strong>breadth_first</strong><span class="classifier">boolean (default = False)</span></dt><dd><p>if True, then query the nodes in a breadth-first manner.
Otherwise, query the nodes in a depth-first manner.</p>
</dd>
<dt><strong>sort_results</strong><span class="classifier">boolean (default = True)</span></dt><dd><p>if True, then distances and indices of each point are sorted
on return, so that the first column contains the closest points.
Otherwise, neighbors are returned in an arbitrary order.</p>
</dd>
</dl>
</dd>
<dt class="field-even">Returns</dt>
<dd class="field-even"><dl class="simple">
<dt><strong>i</strong><span class="classifier">if return_distance == False</span></dt><dd></dd>
<dt><strong>(d,i)</strong><span class="classifier">if return_distance == True</span></dt><dd></dd>
<dt><strong>d</strong><span class="classifier">array of doubles - shape: x.shape[:-1] + (k,)</span></dt><dd><p>each entry gives the list of distances to the
neighbors of the corresponding point</p>
</dd>
<dt><strong>i</strong><span class="classifier">array of integers - shape: x.shape[:-1] + (k,)</span></dt><dd><p>each entry gives the list of indices of
neighbors of the corresponding point</p>
</dd>
</dl>
</dd>
</dl>
</dd></dl>

<dl class="method">
<dt id="sklearn.neighbors.KDTree.query_radius">
<code class="sig-name descname">query_radius</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" href="#sklearn.neighbors.KDTree.query_radius" title="Permalink to this definition">¶</a></dt>
<dd><p>query_radius(self, X, r, count_only = False):</p>
<p>query the tree for neighbors within a radius r</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><dl class="simple">
<dt><strong>X</strong><span class="classifier">array-like of shape (n_samples, n_features)</span></dt><dd><p>An array of points to query</p>
</dd>
<dt><strong>r</strong><span class="classifier">distance within which neighbors are returned</span></dt><dd><p>r can be a single value, or an array of values of shape
x.shape[:-1] if different radii are desired for each point.</p>
</dd>
<dt><strong>return_distance</strong><span class="classifier">boolean (default = False)</span></dt><dd><p>if True,  return distances to neighbors of each point
if False, return only neighbors
Note that unlike the query() method, setting return_distance=True
here adds to the computation time.  Not all distances need to be
calculated explicitly for return_distance=False.  Results are
not sorted by default: see <code class="docutils literal notranslate"><span class="pre">sort_results</span></code> keyword.</p>
</dd>
<dt><strong>count_only</strong><span class="classifier">boolean (default = False)</span></dt><dd><p>if True, return only the count of points within distance r;
if False, return the indices of all points within distance r
If return_distance==True, setting count_only=True will
result in an error.</p>
</dd>
<dt><strong>sort_results</strong><span class="classifier">boolean (default = False)</span></dt><dd><p>if True, the distances and indices will be sorted before being
returned.  If False, the results will not be sorted.  If
return_distance == False, setting sort_results = True will
result in an error.</p>
</dd>
</dl>
</dd>
<dt class="field-even">Returns</dt>
<dd class="field-even"><dl class="simple">
<dt><strong>count</strong><span class="classifier">if count_only == True</span></dt><dd></dd>
<dt><strong>ind</strong><span class="classifier">if count_only == False and return_distance == False</span></dt><dd></dd>
<dt><strong>(ind, dist)</strong><span class="classifier">if count_only == False and return_distance == True</span></dt><dd></dd>
<dt><strong>count</strong><span class="classifier">array of integers, shape = X.shape[:-1]</span></dt><dd><p>each entry gives the number of neighbors within
a distance r of the corresponding point.</p>
</dd>
<dt><strong>ind</strong><span class="classifier">array of objects, shape = X.shape[:-1]</span></dt><dd><p>each element is a numpy integer array listing the indices of
neighbors of the corresponding point.  Note that unlike
the results of a k-neighbors query, the returned neighbors
are not sorted by distance by default.</p>
</dd>
<dt><strong>dist</strong><span class="classifier">array of objects, shape = X.shape[:-1]</span></dt><dd><p>each element is a numpy double array
listing the distances corresponding to indices in ind.</p>
</dd>
</dl>
</dd>
</dl>
</dd></dl>

<dl class="method">
<dt id="sklearn.neighbors.KDTree.two_point_correlation">
<code class="sig-name descname">two_point_correlation</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" href="#sklearn.neighbors.KDTree.two_point_correlation" title="Permalink to this definition">¶</a></dt>
<dd><p>Compute the two-point correlation function</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><dl class="simple">
<dt><strong>X</strong><span class="classifier">array-like of shape (n_samples, n_features)</span></dt><dd><p>An array of points to query.  Last dimension should match dimension
of training data.</p>
</dd>
<dt><strong>r</strong><span class="classifier">array_like</span></dt><dd><p>A one-dimensional array of distances</p>
</dd>
<dt><strong>dualtree</strong><span class="classifier">boolean (default = False)</span></dt><dd><p>If true, use a dualtree algorithm.  Otherwise, use a single-tree
algorithm.  Dual tree algorithms can have better scaling for
large N.</p>
</dd>
</dl>
</dd>
<dt class="field-even">Returns</dt>
<dd class="field-even"><dl class="simple">
<dt><strong>counts</strong><span class="classifier">ndarray</span></dt><dd><p>counts[i] contains the number of pairs of points with distance
less than or equal to r[i]</p>
</dd>
</dl>
</dd>
</dl>
</dd></dl>

</dd></dl>

<div class="clearer"></div></div>


      </div>
    <div class="container">
      <footer class="sk-content-footer">
            &copy; 2007 - 2019, scikit-learn developers (BSD License).
          <a href="../../_sources/modules/generated/sklearn.neighbors.KDTree.rst.txt" rel="nofollow">Show this page source</a>
      </footer>
    </div>
  </div>
</div>
<script src="../../_static/js/vendor/bootstrap.min.js"></script>

<script>
    // Google Analytics command-queue stub: until analytics.js (loaded async
    // below) arrives, window.ga just pushes its arguments onto ga.q so the
    // queued commands can be processed once the library loads.
    window.ga=window.ga||function(){(ga.q=ga.q||[]).push(arguments)};ga.l=+new Date;
    ga('create', 'UA-22606712-2', 'auto');
    // anonymizeIp asks GA to anonymize the visitor's IP (privacy measure).
    ga('set', 'anonymizeIp', true);
    ga('send', 'pageview');
</script>
<script async src='https://www.google-analytics.com/analytics.js'></script>


<script>
$(document).ready(function() {
    /* Add a [>>>] button on the top-right corner of code samples to hide
     * the >>> and ... prompts and the output and thus make the code
     * copyable. */
    var div = $('.highlight-python .highlight,' +
                '.highlight-python3 .highlight,' +
                '.highlight-pycon .highlight,' +
                '.highlight-default .highlight');
    var pre = div.find('pre');

    // get the styles from the current theme
    pre.parent().parent().css('position', 'relative');
    var hide_text = 'Hide prompts and outputs';
    var show_text = 'Show prompts and outputs';

    // create and add the button to all the code blocks that contain >>>
    div.each(function(index) {
        var jthis = $(this);
        if (jthis.find('.gp').length > 0) {
            var button = $('<span class="copybutton">&gt;&gt;&gt;</span>');
            button.attr('title', hide_text);
            button.data('hidden', 'false');
            jthis.prepend(button);
        }
        // tracebacks (.gt) contain bare text elements that need to be
        // wrapped in a span to work with .nextUntil() (see later)
        jthis.find('pre:has(.gt)').contents().filter(function() {
            return ((this.nodeType == 3) && (this.data.trim().length > 0));
        }).wrap('<span>');
    });

    // define the behavior of the button when it's clicked
    $('.copybutton').click(function(e) {
        e.preventDefault();
        var button = $(this);
        if (button.data('hidden') === 'false') {
            // hide the code output
            button.parent().find('.go, .gp, .gt').hide();
            button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'hidden');
            button.css('text-decoration', 'line-through');
            button.attr('title', show_text);
            button.data('hidden', 'true');
        } else {
            // show the code output
            button.parent().find('.go, .gp, .gt').show();
            button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'visible');
            button.css('text-decoration', 'none');
            button.attr('title', hide_text);
            button.data('hidden', 'false');
        }
    });

    /*** Add permalink buttons next to glossary terms ***/
    $('dl.glossary > dt[id]').append(function() {
        return ('<a class="headerlink" href="#' +
                this.getAttribute('id') +
                '" title="Permalink to this term">¶</a>');
    });

    /*** Hide navbar when scrolling down ***/
    (function() {
        // NOTE(review): these helpers were previously assigned without `var`,
        // silently creating globals on `window` (a ReferenceError in strict
        // mode). Nothing outside this IIFE references them, so they are now
        // declared locally.

        // Returns true when the element targeted by the URL hash sits at
        // (within 2px of) the top of the viewport.
        var hashTargetOnTop = function() {
            var hash = window.location.hash;
            if (hash.length < 2) { return false; }

            var target = document.getElementById(hash.slice(1));
            if (target === null) { return false; }

            var top = target.getBoundingClientRect().top;
            return (top < 2) && (top > -2);
        };

        var navBar = document.getElementById("navbar");
        var navBarToggler = document.getElementById("sk-navbar-toggler");
        // Negative offset that slides the fixed navbar fully out of view.
        var navBarHeightHidden = "-" + navBar.getBoundingClientRect().height + "px";
        var $window = $(window);

        var hideNavBar = function() {
            navBar.style.top = navBarHeightHidden;
        };

        var showNavBar = function() {
            navBar.style.top = "0";
        };

        // Hide navbar on load if hash target is on top
        if (hashTargetOnTop()) {
            hideNavBar();
        }

        var prevScrollpos = window.pageYOffset;
        // Hide while scrolling down, show while scrolling up; never hide
        // while the mobile menu (toggler) is expanded on narrow screens.
        var hideOnScroll = function(lastScrollTop) {
            if (($window.width() < 768) && (navBarToggler.getAttribute("aria-expanded") === 'true')) {
                return;
            }
            if (lastScrollTop > 2 && (prevScrollpos <= lastScrollTop) || hashTargetOnTop()) {
                hideNavBar();
            } else {
                showNavBar();
            }
            prevScrollpos = lastScrollTop;
        };

        /*** high performance scroll event listener ***/
        // Poll scroll position once per animation frame instead of binding a
        // (potentially very chatty) scroll event handler.
        var raf = window.requestAnimationFrame ||
            window.webkitRequestAnimationFrame ||
            window.mozRequestAnimationFrame ||
            window.msRequestAnimationFrame ||
            window.oRequestAnimationFrame;
        var lastScrollTop = $window.scrollTop();

        if (raf) {
            loop();
        }

        function loop() {
            var scrollTop = $window.scrollTop();
            if (lastScrollTop === scrollTop) {
                raf(loop);
                return;
            } else {
                lastScrollTop = scrollTop;
                hideOnScroll(lastScrollTop);
                raf(loop);
            }
        }
    })();
});

</script>
    
<script id="MathJax-script" async src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-chtml.js"></script>
    
</body>
</html>