<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">


<html xmlns="http://www.w3.org/1999/xhtml">
  <head>
    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
    
    <title>Sampling Methods &#8212; pgmpy 0.1.2 documentation</title>
    
    <link rel="stylesheet" href="_static/sphinxdoc.css" type="text/css" />
    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
    
    <script type="text/javascript">
      var DOCUMENTATION_OPTIONS = {
        URL_ROOT:    './',
        VERSION:     '0.1.2',
        COLLAPSE_INDEX: false,
        FILE_SUFFIX: '.html',
        HAS_SOURCE:  true,
        SOURCELINK_SUFFIX: '.txt'
      };
    </script>
    <script type="text/javascript" src="_static/jquery.js"></script>
    <script type="text/javascript" src="_static/underscore.js"></script>
    <script type="text/javascript" src="_static/doctools.js"></script>
    <link rel="index" title="Index" href="genindex.html" />
    <link rel="search" title="Search" href="search.html" />
    <link rel="next" title="Algorithms for Inference" href="inference.html" />
    <link rel="prev" title="Factor" href="factors.html" /> 
  </head>
  <body role="document">
    <div class="related" role="navigation" aria-label="related navigation">
      <h3>Navigation</h3>
      <ul>
        <li class="right" style="margin-right: 10px">
          <a href="genindex.html" title="General Index"
             accesskey="I">index</a></li>
        <li class="right" >
          <a href="py-modindex.html" title="Python Module Index"
             >modules</a> |</li>
        <li class="right" >
          <a href="inference.html" title="Algorithms for Inference"
             accesskey="N">next</a> |</li>
        <li class="right" >
          <a href="factors.html" title="Factor"
             accesskey="P">previous</a> |</li>
        <li class="nav-item nav-item-0"><a href="index.html">pgmpy 0.1.2 documentation</a> &#187;</li> 
      </ul>
    </div>
      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
        <div class="sphinxsidebarwrapper">
            <p class="logo"><a href="index.html">
              <img class="logo" src="_static/logo.png" alt="Logo"/>
            </a></p>
  <h3><a href="index.html">Table Of Contents</a></h3>
  <ul>
<li><a class="reference internal" href="#">Sampling Methods</a><ul>
<li><a class="reference internal" href="#gibbs-sampler">Gibbs Sampler</a></li>
<li><a class="reference internal" href="#bayesian-model-samplers">Bayesian Model Samplers</a></li>
<li><a class="reference internal" href="#module-pgmpy.sampling.HMC">Hamiltonian Monte Carlo</a></li>
<li><a class="reference internal" href="#module-pgmpy.sampling.NUTS">No U-Turn Sampler</a></li>
</ul>
</li>
</ul>

  <h4>Previous topic</h4>
  <p class="topless"><a href="factors.html"
                        title="previous chapter">Factor</a></p>
  <h4>Next topic</h4>
  <p class="topless"><a href="inference.html"
                        title="next chapter">Algorithms for Inference</a></p>
  <div role="note" aria-label="source link">
    <h3>This Page</h3>
    <ul class="this-page-menu">
      <li><a href="_sources/sampling.rst.txt"
            rel="nofollow">Show Source</a></li>
    </ul>
   </div>
<div id="searchbox" style="display: none" role="search">
  <h3>Quick search</h3>
    <form class="search" action="search.html" method="get">
      <div><input type="text" name="q" /></div>
      <div><input type="submit" value="Go" /></div>
      <input type="hidden" name="check_keywords" value="yes" />
      <input type="hidden" name="area" value="default" />
    </form>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
        </div>
      </div>

    <div class="document">
      <div class="documentwrapper">
        <div class="bodywrapper">
          <div class="body" role="main">
            
  <div class="section" id="sampling-methods">
<h1>Sampling Methods<a class="headerlink" href="#sampling-methods" title="Permalink to this headline">¶</a></h1>
<div class="section" id="gibbs-sampler">
<h2>Gibbs Sampler<a class="headerlink" href="#gibbs-sampler" title="Permalink to this headline">¶</a></h2>
<dl class="class">
<dt id="pgmpy.sampling.Sampling.GibbsSampling">
<em class="property">class </em><code class="descclassname">pgmpy.sampling.Sampling.</code><code class="descname">GibbsSampling</code><span class="sig-paren">(</span><em>model=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/Sampling.html#GibbsSampling"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.Sampling.GibbsSampling" title="Permalink to this definition">¶</a></dt>
<dd><p>Class for performing Gibbs sampling.</p>
<dl class="method">
<dt id="pgmpy.sampling.Sampling.GibbsSampling.generate_sample">
<code class="descname">generate_sample</code><span class="sig-paren">(</span><em>start_state=None</em>, <em>size=1</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/Sampling.html#GibbsSampling.generate_sample"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.Sampling.GibbsSampling.generate_sample" title="Permalink to this definition">¶</a></dt>
<dd><p>Generator version of self.sample</p>
</dd></dl>

<dl class="method">
<dt id="pgmpy.sampling.Sampling.GibbsSampling.sample">
<code class="descname">sample</code><span class="sig-paren">(</span><em>start_state=None</em>, <em>size=1</em>, <em>return_type='dataframe'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/Sampling.html#GibbsSampling.sample"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.Sampling.GibbsSampling.sample" title="Permalink to this definition">¶</a></dt>
<dd><p>Sample from the Markov Chain.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">sampled: A pandas.DataFrame or a numpy.recarray object depending upon return_type argument</p>
<blockquote class="last">
<div><p>the generated samples</p>
</div></blockquote>
</td>
</tr>
</tbody>
</table>
</dd></dl>

</dd></dl>

</div>
<div class="section" id="bayesian-model-samplers">
<h2>Bayesian Model Samplers<a class="headerlink" href="#bayesian-model-samplers" title="Permalink to this headline">¶</a></h2>
<dl class="class">
<dt id="pgmpy.sampling.Sampling.BayesianModelSampling">
<em class="property">class </em><code class="descclassname">pgmpy.sampling.Sampling.</code><code class="descname">BayesianModelSampling</code><span class="sig-paren">(</span><em>model</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/Sampling.html#BayesianModelSampling"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.Sampling.BayesianModelSampling" title="Permalink to this definition">¶</a></dt>
<dd><p>Class for sampling methods specific to Bayesian Models</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><p class="first"><strong>model: instance of BayesianModel</strong></p>
<blockquote class="last">
<div><p>model on which inference queries will be computed</p>
</div></blockquote>
</td>
</tr>
</tbody>
</table>
<dl class="method">
<dt id="pgmpy.sampling.Sampling.BayesianModelSampling.forward_sample">
<code class="descname">forward_sample</code><span class="sig-paren">(</span><em>size=1</em>, <em>return_type='dataframe'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/Sampling.html#BayesianModelSampling.forward_sample"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.Sampling.BayesianModelSampling.forward_sample" title="Permalink to this definition">¶</a></dt>
<dd><p>Generates sample(s) from joint distribution of the Bayesian network.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><p class="first"><strong>size: int</strong></p>
<blockquote>
<div><p>size of sample to be generated</p>
</div></blockquote>
<p><strong>return_type: string (dataframe | recarray)</strong></p>
<blockquote>
<div><p>Return type for samples, either of &#8216;dataframe&#8217; or &#8216;recarray&#8217;.
Defaults to &#8216;dataframe&#8217;</p>
</div></blockquote>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">sampled: A pandas.DataFrame or a numpy.recarray object depending upon return_type argument</p>
<blockquote class="last">
<div><p>the generated samples</p>
</div></blockquote>
</td>
</tr>
</tbody>
</table>
<p class="rubric">Examples</p>
<div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.models.BayesianModel</span> <span class="k">import</span> <span class="n">BayesianModel</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.factors.discrete</span> <span class="k">import</span> <span class="n">TabularCPD</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.sampling</span> <span class="k">import</span> <span class="n">BayesianModelSampling</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">student</span> <span class="o">=</span> <span class="n">BayesianModel</span><span class="p">([(</span><span class="s1">&#39;diff&#39;</span><span class="p">,</span> <span class="s1">&#39;grade&#39;</span><span class="p">),</span> <span class="p">(</span><span class="s1">&#39;intel&#39;</span><span class="p">,</span> <span class="s1">&#39;grade&#39;</span><span class="p">)])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">cpd_d</span> <span class="o">=</span> <span class="n">TabularCPD</span><span class="p">(</span><span class="s1">&#39;diff&#39;</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="p">[[</span><span class="mf">0.6</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.4</span><span class="p">]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">cpd_i</span> <span class="o">=</span> <span class="n">TabularCPD</span><span class="p">(</span><span class="s1">&#39;intel&#39;</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="p">[[</span><span class="mf">0.7</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.3</span><span class="p">]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">cpd_g</span> <span class="o">=</span> <span class="n">TabularCPD</span><span class="p">(</span><span class="s1">&#39;grade&#39;</span><span class="p">,</span> <span class="mi">3</span><span class="p">,</span> <span class="p">[[</span><span class="mf">0.3</span><span class="p">,</span> <span class="mf">0.05</span><span class="p">,</span> <span class="mf">0.9</span><span class="p">,</span> <span class="mf">0.5</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.4</span><span class="p">,</span> <span class="mf">0.25</span><span class="p">,</span>
<span class="gp">... </span>               <span class="mf">0.08</span><span class="p">,</span> <span class="mf">0.3</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.3</span><span class="p">,</span> <span class="mf">0.7</span><span class="p">,</span> <span class="mf">0.02</span><span class="p">,</span> <span class="mf">0.2</span><span class="p">]],</span>
<span class="gp">... </span>               <span class="p">[</span><span class="s1">&#39;intel&#39;</span><span class="p">,</span> <span class="s1">&#39;diff&#39;</span><span class="p">],</span> <span class="p">[</span><span class="mi">2</span><span class="p">,</span> <span class="mi">2</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">student</span><span class="o">.</span><span class="n">add_cpds</span><span class="p">(</span><span class="n">cpd_d</span><span class="p">,</span> <span class="n">cpd_i</span><span class="p">,</span> <span class="n">cpd_g</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">inference</span> <span class="o">=</span> <span class="n">BayesianModelSampling</span><span class="p">(</span><span class="n">student</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">inference</span><span class="o">.</span><span class="n">forward_sample</span><span class="p">(</span><span class="n">size</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">return_type</span><span class="o">=</span><span class="s1">&#39;recarray&#39;</span><span class="p">)</span>
<span class="go">rec.array([(0, 0, 1), (1, 0, 2)], dtype=</span>
<span class="go">          [(&#39;diff&#39;, &#39;&lt;i8&#39;), (&#39;intel&#39;, &#39;&lt;i8&#39;), (&#39;grade&#39;, &#39;&lt;i8&#39;)])</span>
</pre></div>
</div>
</dd></dl>

<dl class="method">
<dt id="pgmpy.sampling.Sampling.BayesianModelSampling.likelihood_weighted_sample">
<code class="descname">likelihood_weighted_sample</code><span class="sig-paren">(</span><em>evidence=None</em>, <em>size=1</em>, <em>return_type='dataframe'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/Sampling.html#BayesianModelSampling.likelihood_weighted_sample"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.Sampling.BayesianModelSampling.likelihood_weighted_sample" title="Permalink to this definition">¶</a></dt>
<dd><p>Generates weighted sample(s) from joint distribution of the Bayesian
network, that comply with the given evidence.
&#8216;Probabilistic Graphical Model Principles and Techniques&#8217;, Koller and
Friedman, Algorithm 12.2 pp 493.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><p class="first"><strong>evidence: list of `pgmpy.factor.State` namedtuples</strong></p>
<blockquote>
<div><p>None if no evidence</p>
</div></blockquote>
<p><strong>size: int</strong></p>
<blockquote>
<div><p>size of sample to be generated</p>
</div></blockquote>
<p><strong>return_type: string (dataframe | recarray)</strong></p>
<blockquote>
<div><p>Return type for samples, either of &#8216;dataframe&#8217; or &#8216;recarray&#8217;.
Defaults to &#8216;dataframe&#8217;</p>
</div></blockquote>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">sampled: A pandas.DataFrame or a numpy.recarray object depending upon return_type argument</p>
<blockquote class="last">
<div><p>the generated samples with corresponding weights</p>
</div></blockquote>
</td>
</tr>
</tbody>
</table>
<p class="rubric">Examples</p>
<div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.factors.discrete</span> <span class="k">import</span> <span class="n">State</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.models.BayesianModel</span> <span class="k">import</span> <span class="n">BayesianModel</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.factors.discrete</span> <span class="k">import</span> <span class="n">TabularCPD</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.sampling</span> <span class="k">import</span> <span class="n">BayesianModelSampling</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">student</span> <span class="o">=</span> <span class="n">BayesianModel</span><span class="p">([(</span><span class="s1">&#39;diff&#39;</span><span class="p">,</span> <span class="s1">&#39;grade&#39;</span><span class="p">),</span> <span class="p">(</span><span class="s1">&#39;intel&#39;</span><span class="p">,</span> <span class="s1">&#39;grade&#39;</span><span class="p">)])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">cpd_d</span> <span class="o">=</span> <span class="n">TabularCPD</span><span class="p">(</span><span class="s1">&#39;diff&#39;</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="p">[[</span><span class="mf">0.6</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.4</span><span class="p">]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">cpd_i</span> <span class="o">=</span> <span class="n">TabularCPD</span><span class="p">(</span><span class="s1">&#39;intel&#39;</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="p">[[</span><span class="mf">0.7</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.3</span><span class="p">]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">cpd_g</span> <span class="o">=</span> <span class="n">TabularCPD</span><span class="p">(</span><span class="s1">&#39;grade&#39;</span><span class="p">,</span> <span class="mi">3</span><span class="p">,</span> <span class="p">[[</span><span class="mf">0.3</span><span class="p">,</span> <span class="mf">0.05</span><span class="p">,</span> <span class="mf">0.9</span><span class="p">,</span> <span class="mf">0.5</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.4</span><span class="p">,</span> <span class="mf">0.25</span><span class="p">,</span>
<span class="gp">... </span>        <span class="mf">0.08</span><span class="p">,</span> <span class="mf">0.3</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.3</span><span class="p">,</span> <span class="mf">0.7</span><span class="p">,</span> <span class="mf">0.02</span><span class="p">,</span> <span class="mf">0.2</span><span class="p">]],</span>
<span class="gp">... </span>        <span class="p">[</span><span class="s1">&#39;intel&#39;</span><span class="p">,</span> <span class="s1">&#39;diff&#39;</span><span class="p">],</span> <span class="p">[</span><span class="mi">2</span><span class="p">,</span> <span class="mi">2</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">student</span><span class="o">.</span><span class="n">add_cpds</span><span class="p">(</span><span class="n">cpd_d</span><span class="p">,</span> <span class="n">cpd_i</span><span class="p">,</span> <span class="n">cpd_g</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">inference</span> <span class="o">=</span> <span class="n">BayesianModelSampling</span><span class="p">(</span><span class="n">student</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">evidence</span> <span class="o">=</span> <span class="p">[</span><span class="n">State</span><span class="p">(</span><span class="s1">&#39;diff&#39;</span><span class="p">,</span> <span class="mi">0</span><span class="p">)]</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">inference</span><span class="o">.</span><span class="n">likelihood_weighted_sample</span><span class="p">(</span><span class="n">evidence</span><span class="o">=</span><span class="n">evidence</span><span class="p">,</span> <span class="n">size</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">return_type</span><span class="o">=</span><span class="s1">&#39;recarray&#39;</span><span class="p">)</span>
<span class="go">rec.array([(0, 0, 1, 0.6), (0, 0, 2, 0.6)], dtype=</span>
<span class="go">          [(&#39;diff&#39;, &#39;&lt;i8&#39;), (&#39;intel&#39;, &#39;&lt;i8&#39;), (&#39;grade&#39;, &#39;&lt;i8&#39;), (&#39;_weight&#39;, &#39;&lt;f8&#39;)])</span>
</pre></div>
</div>
</dd></dl>

<dl class="method">
<dt id="pgmpy.sampling.Sampling.BayesianModelSampling.rejection_sample">
<code class="descname">rejection_sample</code><span class="sig-paren">(</span><em>evidence=None</em>, <em>size=1</em>, <em>return_type='dataframe'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/Sampling.html#BayesianModelSampling.rejection_sample"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.Sampling.BayesianModelSampling.rejection_sample" title="Permalink to this definition">¶</a></dt>
<dd><p>Generates sample(s) from joint distribution of the Bayesian network,
given the evidence.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><p class="first"><strong>evidence: list of `pgmpy.factor.State` namedtuples</strong></p>
<blockquote>
<div><p>None if no evidence</p>
</div></blockquote>
<p><strong>size: int</strong></p>
<blockquote>
<div><p>size of sample to be generated</p>
</div></blockquote>
<p><strong>return_type: string (dataframe | recarray)</strong></p>
<blockquote>
<div><p>Return type for samples, either of &#8216;dataframe&#8217; or &#8216;recarray&#8217;.
Defaults to &#8216;dataframe&#8217;</p>
</div></blockquote>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">sampled: A pandas.DataFrame or a numpy.recarray object depending upon return_type argument</p>
<blockquote class="last">
<div><p>the generated samples</p>
</div></blockquote>
</td>
</tr>
</tbody>
</table>
<p class="rubric">Examples</p>
<div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.models.BayesianModel</span> <span class="k">import</span> <span class="n">BayesianModel</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.factors.discrete</span> <span class="k">import</span> <span class="n">TabularCPD</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.factors.discrete</span> <span class="k">import</span> <span class="n">State</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.sampling</span> <span class="k">import</span> <span class="n">BayesianModelSampling</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">student</span> <span class="o">=</span> <span class="n">BayesianModel</span><span class="p">([(</span><span class="s1">&#39;diff&#39;</span><span class="p">,</span> <span class="s1">&#39;grade&#39;</span><span class="p">),</span> <span class="p">(</span><span class="s1">&#39;intel&#39;</span><span class="p">,</span> <span class="s1">&#39;grade&#39;</span><span class="p">)])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">cpd_d</span> <span class="o">=</span> <span class="n">TabularCPD</span><span class="p">(</span><span class="s1">&#39;diff&#39;</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="p">[[</span><span class="mf">0.6</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.4</span><span class="p">]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">cpd_i</span> <span class="o">=</span> <span class="n">TabularCPD</span><span class="p">(</span><span class="s1">&#39;intel&#39;</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="p">[[</span><span class="mf">0.7</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.3</span><span class="p">]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">cpd_g</span> <span class="o">=</span> <span class="n">TabularCPD</span><span class="p">(</span><span class="s1">&#39;grade&#39;</span><span class="p">,</span> <span class="mi">3</span><span class="p">,</span> <span class="p">[[</span><span class="mf">0.3</span><span class="p">,</span> <span class="mf">0.05</span><span class="p">,</span> <span class="mf">0.9</span><span class="p">,</span> <span class="mf">0.5</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.4</span><span class="p">,</span> <span class="mf">0.25</span><span class="p">,</span>
<span class="gp">... </span>               <span class="mf">0.08</span><span class="p">,</span> <span class="mf">0.3</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.3</span><span class="p">,</span> <span class="mf">0.7</span><span class="p">,</span> <span class="mf">0.02</span><span class="p">,</span> <span class="mf">0.2</span><span class="p">]],</span>
<span class="gp">... </span>               <span class="p">[</span><span class="s1">&#39;intel&#39;</span><span class="p">,</span> <span class="s1">&#39;diff&#39;</span><span class="p">],</span> <span class="p">[</span><span class="mi">2</span><span class="p">,</span> <span class="mi">2</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">student</span><span class="o">.</span><span class="n">add_cpds</span><span class="p">(</span><span class="n">cpd_d</span><span class="p">,</span> <span class="n">cpd_i</span><span class="p">,</span> <span class="n">cpd_g</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">inference</span> <span class="o">=</span> <span class="n">BayesianModelSampling</span><span class="p">(</span><span class="n">student</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">evidence</span> <span class="o">=</span> <span class="p">[</span><span class="n">State</span><span class="p">(</span><span class="n">var</span><span class="o">=</span><span class="s1">&#39;diff&#39;</span><span class="p">,</span> <span class="n">state</span><span class="o">=</span><span class="mi">0</span><span class="p">)]</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">inference</span><span class="o">.</span><span class="n">rejection_sample</span><span class="p">(</span><span class="n">evidence</span><span class="o">=</span><span class="n">evidence</span><span class="p">,</span> <span class="n">size</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">return_type</span><span class="o">=</span><span class="s1">&#39;dataframe&#39;</span><span class="p">)</span>
<span class="go">        intel       diff       grade</span>
<span class="go">0         0          0          1</span>
<span class="go">1         0          0          1</span>
</pre></div>
</div>
</dd></dl>

</dd></dl>

</div>
<div class="section" id="module-pgmpy.sampling.HMC">
<span id="hamiltonian-monte-carlo"></span><h2>Hamiltonian Monte Carlo<a class="headerlink" href="#module-pgmpy.sampling.HMC" title="Permalink to this headline">¶</a></h2>
<p>A collection of methods for sampling from continuous models in pgmpy</p>
<dl class="class">
<dt id="pgmpy.sampling.HMC.HamiltonianMC">
<em class="property">class </em><code class="descclassname">pgmpy.sampling.HMC.</code><code class="descname">HamiltonianMC</code><span class="sig-paren">(</span><em>model</em>, <em>grad_log_pdf</em>, <em>simulate_dynamics=&lt;class 'pgmpy.sampling.base.LeapFrog'&gt;</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/HMC.html#HamiltonianMC"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.HMC.HamiltonianMC" title="Permalink to this definition">¶</a></dt>
<dd><p>Class for performing sampling using simple
Hamiltonian Monte Carlo</p>
<p class="rubric">References</p>
<p>R.Neal. Handbook of Markov Chain Monte Carlo,
chapter 5: MCMC Using Hamiltonian Dynamics.
CRC Press, 2011.</p>
<dl class="method">
<dt id="pgmpy.sampling.HMC.HamiltonianMC.generate_sample">
<code class="descname">generate_sample</code><span class="sig-paren">(</span><em>initial_pos</em>, <em>num_samples</em>, <em>trajectory_length</em>, <em>stepsize=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/HMC.html#HamiltonianMC.generate_sample"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.HMC.HamiltonianMC.generate_sample" title="Permalink to this definition">¶</a></dt>
<dd><p>Method returns a generator type object whose each iteration yields a sample
using Hamiltonian Monte Carlo</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><p class="first"><strong>initial_pos: A 1d array like object</strong></p>
<blockquote>
<div><p>Vector representing values of parameter position, the starting
state in the Markov chain.</p>
</div></blockquote>
<p><strong>num_samples: int</strong></p>
<blockquote>
<div><p>Number of samples to be generated</p>
</div></blockquote>
<p><strong>trajectory_length: int or float</strong></p>
<blockquote>
<div><p>Target trajectory length, stepsize * number of steps(L),
where L is the number of steps taken per HMC iteration,
and stepsize is step size for splitting time method.</p>
</div></blockquote>
<p><strong>stepsize: float , defaults to None</strong></p>
<blockquote>
<div><p>The stepsize for proposing new values of position and momentum in simulate_dynamics
If None, then will be chosen suitably</p>
</div></blockquote>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first last">generator: yielding a 1d numpy.array type object for a sample</p>
</td>
</tr>
</tbody>
</table>
<p class="rubric">Examples</p>
<div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.sampling</span> <span class="k">import</span> <span class="n">HamiltonianMC</span> <span class="k">as</span> <span class="n">HMC</span><span class="p">,</span> <span class="n">GradLogPDFGaussian</span> <span class="k">as</span> <span class="n">GLPG</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.factors</span> <span class="k">import</span> <span class="n">GaussianDistribution</span> <span class="k">as</span> <span class="n">JGD</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">mean</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">4</span><span class="p">,</span> <span class="o">-</span><span class="mi">1</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">covariance</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="mi">3</span><span class="p">,</span> <span class="mf">0.4</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.4</span><span class="p">,</span> <span class="mi">3</span><span class="p">]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">model</span> <span class="o">=</span> <span class="n">JGD</span><span class="p">([</span><span class="s1">&#39;x&#39;</span><span class="p">,</span> <span class="s1">&#39;y&#39;</span><span class="p">],</span> <span class="n">mean</span><span class="p">,</span> <span class="n">covariance</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">sampler</span> <span class="o">=</span> <span class="n">HMC</span><span class="p">(</span><span class="n">model</span><span class="o">=</span><span class="n">model</span><span class="p">,</span> <span class="n">grad_log_pdf</span><span class="o">=</span><span class="n">GLPG</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">gen_samples</span> <span class="o">=</span> <span class="n">sampler</span><span class="o">.</span><span class="n">generate_sample</span><span class="p">(</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="o">-</span><span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">]),</span> <span class="n">num_samples</span> <span class="o">=</span> <span class="mi">10000</span><span class="p">,</span>
<span class="gp">... </span>                                      <span class="n">trajectory_length</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">stepsize</span><span class="o">=</span><span class="mf">0.25</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples_array</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="n">sample</span> <span class="k">for</span> <span class="n">sample</span> <span class="ow">in</span> <span class="n">gen_samples</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples_array</span>
<span class="go">array([[ 0.1467264 ,  0.27143857],</span>
<span class="go">       [ 4.0371448 ,  0.15871274],</span>
<span class="go">       [ 3.24656208, -1.03742621],</span>
<span class="go">       ...,</span>
<span class="go">       [ 6.45975905,  1.97941306],</span>
<span class="go">       [ 4.89007171,  0.15413156],</span>
<span class="go">       [ 5.9528083 ,  1.92983158]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">np</span><span class="o">.</span><span class="n">cov</span><span class="p">(</span><span class="n">samples_array</span><span class="o">.</span><span class="n">T</span><span class="p">)</span>
<span class="go">array([[ 2.95692642,  0.4379419 ],</span>
<span class="go">       [ 0.4379419 ,  3.00939434]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">sampler</span><span class="o">.</span><span class="n">acceptance_rate</span>
<span class="go">0.9969</span>
</pre></div>
</div>
</dd></dl>

<dl class="method">
<dt id="pgmpy.sampling.HMC.HamiltonianMC.sample">
<code class="descname">sample</code><span class="sig-paren">(</span><em>initial_pos</em>, <em>num_samples</em>, <em>trajectory_length</em>, <em>stepsize=None</em>, <em>return_type='dataframe'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/HMC.html#HamiltonianMC.sample"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.HMC.HamiltonianMC.sample" title="Permalink to this definition">¶</a></dt>
<dd><p>Method to return samples using Hamiltonian Monte Carlo</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><p class="first"><strong>initial_pos: A 1d array like object</strong></p>
<blockquote>
<div><p>Vector representing values of parameter position, the starting
state in the Markov chain.</p>
</div></blockquote>
<p><strong>num_samples: int</strong></p>
<blockquote>
<div><p>Number of samples to be generated</p>
</div></blockquote>
<p><strong>trajectory_length: int or float</strong></p>
<blockquote>
<div><p>Target trajectory length, stepsize * number of steps(L),
where L is the number of steps taken per HMC iteration,
and stepsize is step size for splitting time method.</p>
</div></blockquote>
<p><strong>stepsize: float , defaults to None</strong></p>
<blockquote>
<div><p>The stepsize for proposing new values of position and momentum in simulate_dynamics
If None, then will be chosen suitably</p>
</div></blockquote>
<p><strong>return_type: string (dataframe | recarray)</strong></p>
<blockquote>
<div><p>Return type for samples, either of &#8216;dataframe&#8217; or &#8216;recarray&#8217;.
Defaults to &#8216;dataframe&#8217;</p>
</div></blockquote>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first last">sampled: A pandas.DataFrame or a numpy.recarray object depending upon return_type argument</p>
</td>
</tr>
</tbody>
</table>
<p class="rubric">Examples</p>
<div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.sampling</span> <span class="k">import</span> <span class="n">HamiltonianMC</span> <span class="k">as</span> <span class="n">HMC</span><span class="p">,</span> <span class="n">GradLogPDFGaussian</span><span class="p">,</span> <span class="n">ModifiedEuler</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.factors.continuous</span> <span class="k">import</span> <span class="n">GaussianDistribution</span> <span class="k">as</span> <span class="n">JGD</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">mean</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="o">-</span><span class="mi">1</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">covariance</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="mi">1</span><span class="p">,</span> <span class="mf">0.2</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.2</span><span class="p">,</span> <span class="mi">1</span><span class="p">]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">model</span> <span class="o">=</span> <span class="n">JGD</span><span class="p">([</span><span class="s1">&#39;x&#39;</span><span class="p">,</span> <span class="s1">&#39;y&#39;</span><span class="p">],</span> <span class="n">mean</span><span class="p">,</span> <span class="n">covariance</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">sampler</span> <span class="o">=</span> <span class="n">HMC</span><span class="p">(</span><span class="n">model</span><span class="o">=</span><span class="n">model</span><span class="p">,</span> <span class="n">grad_log_pdf</span><span class="o">=</span><span class="n">GradLogPDFGaussian</span><span class="p">,</span> <span class="n">simulate_dynamics</span><span class="o">=</span><span class="n">ModifiedEuler</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples</span> <span class="o">=</span> <span class="n">sampler</span><span class="o">.</span><span class="n">sample</span><span class="p">(</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">]),</span> <span class="n">num_samples</span> <span class="o">=</span> <span class="mi">5</span><span class="p">,</span>
<span class="gp">... </span>                         <span class="n">trajectory_length</span><span class="o">=</span><span class="mi">6</span><span class="p">,</span> <span class="n">stepsize</span><span class="o">=</span><span class="mf">0.25</span><span class="p">,</span> <span class="n">return_type</span><span class="o">=</span><span class="s1">&#39;dataframe&#39;</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples</span>
<span class="go">               x              y</span>
<span class="go">0   1.000000e+00   1.000000e+00</span>
<span class="go">1   1.592133e+00   1.152911e+00</span>
<span class="go">2   1.608700e+00   1.315349e+00</span>
<span class="go">3   1.608700e+00   1.315349e+00</span>
<span class="go">4   6.843856e-01   6.237043e-01</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">mean</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">4</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="o">-</span><span class="mi">1</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">covariance</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="mi">1</span><span class="p">,</span> <span class="mf">0.7</span> <span class="p">,</span> <span class="mf">0.8</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.7</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="mf">0.2</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.8</span><span class="p">,</span> <span class="mf">0.2</span><span class="p">,</span> <span class="mi">1</span><span class="p">]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">model</span> <span class="o">=</span> <span class="n">JGD</span><span class="p">([</span><span class="s1">&#39;x&#39;</span><span class="p">,</span> <span class="s1">&#39;y&#39;</span><span class="p">,</span> <span class="s1">&#39;z&#39;</span><span class="p">],</span> <span class="n">mean</span><span class="p">,</span> <span class="n">covariance</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">sampler</span> <span class="o">=</span> <span class="n">HMC</span><span class="p">(</span><span class="n">model</span><span class="o">=</span><span class="n">model</span><span class="p">,</span> <span class="n">grad_log_pdf</span><span class="o">=</span><span class="n">GLPG</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples</span> <span class="o">=</span> <span class="n">sampler</span><span class="o">.</span><span class="n">sample</span><span class="p">(</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">]),</span> <span class="n">num_samples</span> <span class="o">=</span> <span class="mi">10000</span><span class="p">,</span>
<span class="gp">... </span>                         <span class="n">trajectory_length</span><span class="o">=</span><span class="mi">6</span><span class="p">,</span> <span class="n">stepsize</span><span class="o">=</span><span class="mf">0.25</span><span class="p">,</span> <span class="n">return_type</span><span class="o">=</span><span class="s1">&#39;dataframe&#39;</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">np</span><span class="o">.</span><span class="n">cov</span><span class="p">(</span><span class="n">samples</span><span class="o">.</span><span class="n">values</span><span class="o">.</span><span class="n">T</span><span class="p">)</span>
<span class="go">array([[ 1.00795398,  0.71384233,  0.79802097],</span>
<span class="go">       [ 0.71384233,  1.00633524,  0.21313767],</span>
<span class="go">       [ 0.79802097,  0.21313767,  0.98519017]])</span>
</pre></div>
</div>
</dd></dl>

</dd></dl>

<dl class="class">
<dt id="pgmpy.sampling.HMC.HamiltonianMCDA">
<em class="property">class </em><code class="descclassname">pgmpy.sampling.HMC.</code><code class="descname">HamiltonianMCDA</code><span class="sig-paren">(</span><em>model</em>, <em>grad_log_pdf=None</em>, <em>simulate_dynamics=&lt;class 'pgmpy.sampling.base.LeapFrog'&gt;</em>, <em>delta=0.65</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/HMC.html#HamiltonianMCDA"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.HMC.HamiltonianMCDA" title="Permalink to this definition">¶</a></dt>
<dd><p>Class for performing sampling in Continuous model
using Hamiltonian Monte Carlo with dual averaging for
adaptation of parameter stepsize.</p>
<p class="rubric">References</p>
<p>Matthew D. Hoffman, Andrew Gelman, The No-U-Turn Sampler: Adaptively
Setting Path Lengths in Hamiltonian Monte Carlo. Journal of
Machine Learning Research 15 (2014) 1351-1381
Algorithm 5 : Hamiltonian Monte Carlo with dual averaging</p>
<dl class="method">
<dt id="pgmpy.sampling.HMC.HamiltonianMCDA.generate_sample">
<code class="descname">generate_sample</code><span class="sig-paren">(</span><em>initial_pos</em>, <em>num_adapt</em>, <em>num_samples</em>, <em>trajectory_length</em>, <em>stepsize=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/HMC.html#HamiltonianMCDA.generate_sample"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.HMC.HamiltonianMCDA.generate_sample" title="Permalink to this definition">¶</a></dt>
<dd><p>Method returns a generator type object whose each iteration yields a sample
using Hamiltonian Monte Carlo</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><p class="first"><strong>initial_pos: A 1d array like object</strong></p>
<blockquote>
<div><p>Vector representing values of parameter position, the starting
state in the Markov chain.</p>
</div></blockquote>
<p><strong>num_adapt: int</strong></p>
<blockquote>
<div><p>The number of iterations to run the adaptation of stepsize</p>
</div></blockquote>
<p><strong>num_samples: int</strong></p>
<blockquote>
<div><p>Number of samples to be generated</p>
</div></blockquote>
<p><strong>trajectory_length: int or float</strong></p>
<blockquote>
<div><p>Target trajectory length, stepsize * number of steps(L),
where L is the number of steps taken to propose new values of position and momentum
per HMC iteration and stepsize is step size.</p>
</div></blockquote>
<p><strong>stepsize: float , defaults to None</strong></p>
<blockquote>
<div><p>The stepsize for proposing new values of position and momentum in simulate_dynamics
If None, then will be chosen suitably</p>
</div></blockquote>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first last">generator: yielding a numpy.array type object for a sample</p>
</td>
</tr>
</tbody>
</table>
<p class="rubric">Examples</p>
<div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.sampling</span> <span class="k">import</span> <span class="n">HamiltonianMCDA</span> <span class="k">as</span> <span class="n">HMCda</span><span class="p">,</span> <span class="n">GradLogPDFGaussian</span> <span class="k">as</span> <span class="n">GLPG</span><span class="p">,</span> <span class="n">LeapFrog</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.factors.continuous</span> <span class="k">import</span> <span class="n">GaussianDistribution</span> <span class="k">as</span> <span class="n">JGD</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">mean</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">covariance</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="mi">1</span><span class="p">,</span> <span class="mf">0.7</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.7</span><span class="p">,</span> <span class="mi">3</span><span class="p">]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">model</span> <span class="o">=</span> <span class="n">JGD</span><span class="p">([</span><span class="s1">&#39;x&#39;</span><span class="p">,</span> <span class="s1">&#39;y&#39;</span><span class="p">],</span> <span class="n">mean</span><span class="p">,</span> <span class="n">covariance</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">sampler</span> <span class="o">=</span> <span class="n">HMCda</span><span class="p">(</span><span class="n">model</span><span class="o">=</span><span class="n">model</span><span class="p">,</span> <span class="n">grad_log_pdf</span><span class="o">=</span><span class="n">GLPG</span><span class="p">,</span> <span class="n">simulate_dynamics</span><span class="o">=</span><span class="n">LeapFrog</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">gen_samples</span> <span class="o">=</span> <span class="n">sampler</span><span class="o">.</span><span class="n">generate_sample</span><span class="p">(</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">]),</span> <span class="n">num_adapt</span><span class="o">=</span><span class="mi">10000</span><span class="p">,</span>
<span class="gp">... </span>                                      <span class="n">num_samples</span> <span class="o">=</span> <span class="mi">10000</span><span class="p">,</span> <span class="n">trajectory_length</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">stepsize</span><span class="o">=</span><span class="kc">None</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples_array</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="n">sample</span> <span class="k">for</span> <span class="n">sample</span> <span class="ow">in</span> <span class="n">gen_samples</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">np</span><span class="o">.</span><span class="n">cov</span><span class="p">(</span><span class="n">samples_array</span><span class="o">.</span><span class="n">T</span><span class="p">)</span>
<span class="go">array([[ 0.98432155,  0.69517394],</span>
<span class="go">       [ 0.69517394,  2.95449533]])</span>
</pre></div>
</div>
</dd></dl>

<dl class="method">
<dt id="pgmpy.sampling.HMC.HamiltonianMCDA.sample">
<code class="descname">sample</code><span class="sig-paren">(</span><em>initial_pos</em>, <em>num_adapt</em>, <em>num_samples</em>, <em>trajectory_length</em>, <em>stepsize=None</em>, <em>return_type='dataframe'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/HMC.html#HamiltonianMCDA.sample"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.HMC.HamiltonianMCDA.sample" title="Permalink to this definition">¶</a></dt>
<dd><p>Method to return samples using Hamiltonian Monte Carlo</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><p class="first"><strong>initial_pos: A 1d array like object</strong></p>
<blockquote>
<div><p>Vector representing values of parameter position, the starting
state in the Markov chain.</p>
</div></blockquote>
<p><strong>num_adapt: int</strong></p>
<blockquote>
<div><p>The number of iterations to run the adaptation of stepsize</p>
</div></blockquote>
<p><strong>num_samples: int</strong></p>
<blockquote>
<div><p>Number of samples to be generated</p>
</div></blockquote>
<p><strong>trajectory_length: int or float</strong></p>
<blockquote>
<div><p>Target trajectory length, stepsize * number of steps(L),
where L is the number of steps taken per HMC iteration,
and stepsize is step size for splitting time method.</p>
</div></blockquote>
<p><strong>stepsize: float , defaults to None</strong></p>
<blockquote>
<div><p>The stepsize for proposing new values of position and momentum in simulate_dynamics
If None, then will be chosen suitably</p>
</div></blockquote>
<p><strong>return_type: string (dataframe | recarray)</strong></p>
<blockquote>
<div><p>Return type for samples, either of &#8216;dataframe&#8217; or &#8216;recarray&#8217;.
Defaults to &#8216;dataframe&#8217;</p>
</div></blockquote>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first last">sampled: A pandas.DataFrame or a numpy.recarray object depending upon return_type argument</p>
</td>
</tr>
</tbody>
</table>
<p class="rubric">Examples</p>
<div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.sampling</span> <span class="k">import</span> <span class="n">HamiltonianMCDA</span> <span class="k">as</span> <span class="n">HMCda</span><span class="p">,</span> <span class="n">GradLogPDFGaussian</span> <span class="k">as</span> <span class="n">GLPG</span><span class="p">,</span> <span class="n">LeapFrog</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.factors.continuous</span> <span class="k">import</span> <span class="n">GaussianDistribution</span> <span class="k">as</span> <span class="n">JGD</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">mean</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">covariance</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="mi">1</span><span class="p">,</span> <span class="mf">0.7</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.7</span><span class="p">,</span> <span class="mi">3</span><span class="p">]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">model</span> <span class="o">=</span> <span class="n">JGD</span><span class="p">([</span><span class="s1">&#39;x&#39;</span><span class="p">,</span> <span class="s1">&#39;y&#39;</span><span class="p">],</span> <span class="n">mean</span><span class="p">,</span> <span class="n">covariance</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">sampler</span> <span class="o">=</span> <span class="n">HMCda</span><span class="p">(</span><span class="n">model</span><span class="o">=</span><span class="n">model</span><span class="p">,</span> <span class="n">grad_log_pdf</span><span class="o">=</span><span class="n">GLPG</span><span class="p">,</span> <span class="n">simulate_dynamics</span><span class="o">=</span><span class="n">LeapFrog</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples</span> <span class="o">=</span> <span class="n">sampler</span><span class="o">.</span><span class="n">sample</span><span class="p">(</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">]),</span> <span class="n">num_adapt</span><span class="o">=</span><span class="mi">10000</span><span class="p">,</span> <span class="n">num_samples</span> <span class="o">=</span> <span class="mi">10000</span><span class="p">,</span>
<span class="gp">... </span>                         <span class="n">trajectory_length</span><span class="o">=</span><span class="mi">2</span><span class="p">,</span> <span class="n">stepsize</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">return_type</span><span class="o">=</span><span class="s1">&#39;recarray&#39;</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples_array</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="n">samples</span><span class="p">[</span><span class="n">var_name</span><span class="p">]</span> <span class="k">for</span> <span class="n">var_name</span> <span class="ow">in</span> <span class="n">model</span><span class="o">.</span><span class="n">variables</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">np</span><span class="o">.</span><span class="n">cov</span><span class="p">(</span><span class="n">samples_array</span><span class="p">)</span>
<span class="go">array([[ 0.98432155,  0.66517394],</span>
<span class="go">       [ 0.66517394,  2.95449533]])</span>
</pre></div>
</div>
</dd></dl>

</dd></dl>

</div>
<div class="section" id="module-pgmpy.sampling.NUTS">
<span id="no-u-turn-sampler"></span><h2>No U-Turn Sampler<a class="headerlink" href="#module-pgmpy.sampling.NUTS" title="Permalink to this headline">¶</a></h2>
<dl class="class">
<dt id="pgmpy.sampling.NUTS.NoUTurnSampler">
<em class="property">class </em><code class="descclassname">pgmpy.sampling.NUTS.</code><code class="descname">NoUTurnSampler</code><span class="sig-paren">(</span><em>model</em>, <em>grad_log_pdf</em>, <em>simulate_dynamics=&lt;class 'pgmpy.sampling.base.LeapFrog'&gt;</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/NUTS.html#NoUTurnSampler"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.NUTS.NoUTurnSampler" title="Permalink to this definition">¶</a></dt>
<dd><p>Class for performing sampling in Continuous model
using No U Turn Sampler (a variant of Hamiltonian Monte Carlo)</p>
<p class="rubric">References</p>
<p>Matthew D. Hoffman, Andrew Gelman, The No-U-Turn Sampler: Adaptively
Setting Path Lengths in Hamiltonian Monte Carlo. Journal of
Machine Learning Research 15 (2014) 1351-1381
Algorithm 3 : Efficient No-U-Turn Sampler</p>
<dl class="method">
<dt id="pgmpy.sampling.NUTS.NoUTurnSampler.generate_sample">
<code class="descname">generate_sample</code><span class="sig-paren">(</span><em>initial_pos</em>, <em>num_samples</em>, <em>stepsize=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/NUTS.html#NoUTurnSampler.generate_sample"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.NUTS.NoUTurnSampler.generate_sample" title="Permalink to this definition">¶</a></dt>
<dd><p>Returns a generator-type object, each iteration of which yields a sample</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><p class="first"><strong>initial_pos: A 1d array like object</strong></p>
<blockquote>
<div><p>Vector representing values of parameter position, the starting
state in the Markov chain.</p>
</div></blockquote>
<p><strong>num_samples: int</strong></p>
<blockquote>
<div><p>Number of samples to be generated</p>
</div></blockquote>
<p><strong>stepsize: float , defaults to None</strong></p>
<blockquote>
<div><p>The stepsize for proposing new values of position and momentum in simulate_dynamics
If None, then will be chosen suitably</p>
</div></blockquote>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first last">generator: yielding a numpy.array type object for a sample</p>
</td>
</tr>
</tbody>
</table>
<p class="rubric">Examples</p>
<div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.sampling</span> <span class="k">import</span> <span class="n">NoUTurnSampler</span> <span class="k">as</span> <span class="n">NUTS</span><span class="p">,</span> <span class="n">GradLogPDFGaussian</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.factors.continuous</span> <span class="k">import</span> <span class="n">GaussianDistribution</span> <span class="k">as</span> <span class="n">JGD</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">mean</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">11</span><span class="p">,</span> <span class="o">-</span><span class="mi">6</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">covariance</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="mf">0.7</span><span class="p">,</span> <span class="mf">0.2</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.2</span><span class="p">,</span> <span class="mi">14</span><span class="p">]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">model</span> <span class="o">=</span> <span class="n">JGD</span><span class="p">([</span><span class="s1">&#39;x&#39;</span><span class="p">,</span> <span class="s1">&#39;y&#39;</span><span class="p">],</span> <span class="n">mean</span><span class="p">,</span> <span class="n">covariance</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">sampler</span> <span class="o">=</span> <span class="n">NUTS</span><span class="p">(</span><span class="n">model</span><span class="o">=</span><span class="n">model</span><span class="p">,</span> <span class="n">grad_log_pdf</span><span class="o">=</span><span class="n">GradLogPDFGaussian</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples</span> <span class="o">=</span> <span class="n">sampler</span><span class="o">.</span><span class="n">generate_sample</span><span class="p">(</span><span class="n">initial_pos</span><span class="o">=</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">]),</span> <span class="n">num_samples</span><span class="o">=</span><span class="mi">10</span><span class="p">,</span> <span class="n">stepsize</span><span class="o">=</span><span class="mf">0.4</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="n">sample</span> <span class="k">for</span> <span class="n">sample</span> <span class="ow">in</span> <span class="n">samples</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples</span>
<span class="go">array([[ 10.26357538,   0.10062725],</span>
<span class="go">       [ 12.70600336,   0.63392499],</span>
<span class="go">       [ 10.95523217,  -0.62079273],</span>
<span class="go">       [ 10.66263031,  -4.08135962],</span>
<span class="go">       [ 10.59255762,  -8.48085076],</span>
<span class="go">       [  9.99860242,  -9.47096032],</span>
<span class="go">       [ 10.5733564 ,  -9.83504745],</span>
<span class="go">       [ 11.51302059,  -9.49919523],</span>
<span class="go">       [ 11.31892143,  -8.5873259 ],</span>
<span class="go">       [ 11.29008667,  -0.43809674]])</span>
</pre></div>
</div>
</dd></dl>

<dl class="method">
<dt id="pgmpy.sampling.NUTS.NoUTurnSampler.sample">
<code class="descname">sample</code><span class="sig-paren">(</span><em>initial_pos</em>, <em>num_samples</em>, <em>stepsize=None</em>, <em>return_type='dataframe'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/NUTS.html#NoUTurnSampler.sample"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.NUTS.NoUTurnSampler.sample" title="Permalink to this definition">¶</a></dt>
<dd><p>Method to return samples using No U Turn Sampler</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><p class="first"><strong>initial_pos: A 1d array like object</strong></p>
<blockquote>
<div><p>Vector representing values of parameter position, the starting
state in the Markov chain.</p>
</div></blockquote>
<p><strong>num_samples: int</strong></p>
<blockquote>
<div><p>Number of samples to be generated</p>
</div></blockquote>
<p><strong>stepsize: float , defaults to None</strong></p>
<blockquote>
<div><p>The stepsize for proposing new values of position and momentum in simulate_dynamics
If None, then will be chosen suitably</p>
</div></blockquote>
<p><strong>return_type: string (dataframe | recarray)</strong></p>
<blockquote>
<div><p>Return type for samples, either of &#8216;dataframe&#8217; or &#8216;recarray&#8217;.
Defaults to &#8216;dataframe&#8217;</p>
</div></blockquote>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first last">sampled: A pandas.DataFrame or a numpy.recarray object depending upon return_type argument</p>
</td>
</tr>
</tbody>
</table>
<p class="rubric">Examples</p>
<div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.sampling</span> <span class="k">import</span> <span class="n">NoUTurnSampler</span> <span class="k">as</span> <span class="n">NUTS</span><span class="p">,</span> <span class="n">GradLogPDFGaussian</span><span class="p">,</span> <span class="n">LeapFrog</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.factors.continuous</span> <span class="k">import</span> <span class="n">GaussianDistribution</span> <span class="k">as</span> <span class="n">JGD</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">mean</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">0</span><span class="p">,</span> <span class="mi">0</span><span class="p">,</span> <span class="mi">0</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">covariance</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="mi">6</span><span class="p">,</span> <span class="mf">0.7</span><span class="p">,</span> <span class="mf">0.2</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.7</span><span class="p">,</span> <span class="mi">3</span><span class="p">,</span> <span class="mf">0.9</span><span class="p">],</span> <span class="p">[</span><span class="mf">0.2</span><span class="p">,</span> <span class="mf">0.9</span><span class="p">,</span> <span class="mi">1</span><span class="p">]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">model</span> <span class="o">=</span> <span class="n">JGD</span><span class="p">([</span><span class="s1">&#39;x&#39;</span><span class="p">,</span> <span class="s1">&#39;y&#39;</span><span class="p">,</span> <span class="s1">&#39;z&#39;</span><span class="p">],</span> <span class="n">mean</span><span class="p">,</span> <span class="n">covariance</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">sampler</span> <span class="o">=</span> <span class="n">NUTS</span><span class="p">(</span><span class="n">model</span><span class="o">=</span><span class="n">model</span><span class="p">,</span> <span class="n">grad_log_pdf</span><span class="o">=</span><span class="n">GradLogPDFGaussian</span><span class="p">,</span> <span class="n">simulate_dynamics</span><span class="o">=</span><span class="n">LeapFrog</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples</span> <span class="o">=</span> <span class="n">sampler</span><span class="o">.</span><span class="n">sample</span><span class="p">(</span><span class="n">initial_pos</span><span class="o">=</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="mi">1</span><span class="p">]),</span> <span class="n">num_samples</span><span class="o">=</span><span class="mi">10</span><span class="p">,</span>
<span class="gp">... </span>                         <span class="n">stepsize</span><span class="o">=</span><span class="mf">0.4</span><span class="p">,</span> <span class="n">return_type</span><span class="o">=</span><span class="s1">&#39;dataframe&#39;</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples</span>
<span class="go">          x         y         z</span>
<span class="go">0  1.000000  1.000000  1.000000</span>
<span class="go">1  1.760756  0.271543 -0.613309</span>
<span class="go">2  1.883387  0.990745 -0.611720</span>
<span class="go">3  0.980812  0.340336 -0.916283</span>
<span class="go">4  0.781338  0.647220 -0.948640</span>
<span class="go">5  0.040308 -1.391406  0.412201</span>
<span class="go">6  1.179549 -1.450552  1.105216</span>
<span class="go">7  1.100320 -1.313926  1.207815</span>
<span class="go">8  1.484520 -1.349247  0.768599</span>
<span class="go">9  0.934942 -1.894589  0.471772</span>
</pre></div>
</div>
</dd></dl>

</dd></dl>

<dl class="class">
<dt id="pgmpy.sampling.NUTS.NoUTurnSamplerDA">
<em class="property">class </em><code class="descclassname">pgmpy.sampling.NUTS.</code><code class="descname">NoUTurnSamplerDA</code><span class="sig-paren">(</span><em>model</em>, <em>grad_log_pdf</em>, <em>simulate_dynamics=&lt;class 'pgmpy.sampling.base.LeapFrog'&gt;</em>, <em>delta=0.65</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/NUTS.html#NoUTurnSamplerDA"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.NUTS.NoUTurnSamplerDA" title="Permalink to this definition">¶</a></dt>
<dd><p>Class for performing sampling in Continuous model
using No U Turn sampler with dual averaging for
adaptation of parameter stepsize.</p>
<p class="rubric">References</p>
<p>Matthew D. Hoffman, Andrew Gelman, The No-U-Turn Sampler: Adaptively
Setting Path Lengths in Hamiltonian Monte Carlo. Journal of
Machine Learning Research 15 (2014) 1351-1381
Algorithm 6 : No-U-Turn Sampler with Dual Averaging</p>
<dl class="method">
<dt id="pgmpy.sampling.NUTS.NoUTurnSamplerDA.generate_sample">
<code class="descname">generate_sample</code><span class="sig-paren">(</span><em>initial_pos</em>, <em>num_adapt</em>, <em>num_samples</em>, <em>stepsize=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/NUTS.html#NoUTurnSamplerDA.generate_sample"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.NUTS.NoUTurnSamplerDA.generate_sample" title="Permalink to this definition">¶</a></dt>
<dd><p>Returns a generator-type object, each iteration of which yields a sample</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><p class="first"><strong>initial_pos: A 1d array like object</strong></p>
<blockquote>
<div><p>Vector representing values of parameter position, the starting
state in the Markov chain.</p>
</div></blockquote>
<p><strong>num_adapt: int</strong></p>
<blockquote>
<div><p>The number of iterations to run the adaptation of stepsize</p>
</div></blockquote>
<p><strong>num_samples: int</strong></p>
<blockquote>
<div><p>Number of samples to be generated</p>
</div></blockquote>
<p><strong>stepsize: float , defaults to None</strong></p>
<blockquote>
<div><p>The stepsize for proposing new values of position and momentum in simulate_dynamics
If None, then will be chosen suitably</p>
</div></blockquote>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first last">generator: yielding a numpy.array type object for a sample</p>
</td>
</tr>
</tbody>
</table>
<p class="rubric">Examples</p>
<div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.sampling</span> <span class="k">import</span> <span class="n">NoUTurnSamplerDA</span> <span class="k">as</span> <span class="n">NUTSda</span><span class="p">,</span> <span class="n">GradLogPDFGaussian</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.factors.continuous</span> <span class="k">import</span> <span class="n">GaussianDistribution</span> <span class="k">as</span> <span class="n">JGD</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">mean</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="o">-</span><span class="mi">100</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">covariance</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="o">-</span><span class="mi">12</span><span class="p">,</span> <span class="mi">45</span><span class="p">],</span> <span class="p">[</span><span class="mi">45</span><span class="p">,</span> <span class="o">-</span><span class="mi">10</span><span class="p">]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">model</span> <span class="o">=</span> <span class="n">JGD</span><span class="p">([</span><span class="s1">&#39;a&#39;</span><span class="p">,</span> <span class="s1">&#39;b&#39;</span><span class="p">],</span> <span class="n">mean</span><span class="p">,</span> <span class="n">covariance</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">sampler</span> <span class="o">=</span> <span class="n">NUTSda</span><span class="p">(</span><span class="n">model</span><span class="o">=</span><span class="n">model</span><span class="p">,</span> <span class="n">grad_log_pdf</span><span class="o">=</span><span class="n">GradLogPDFGaussian</span><span class="p">,</span> <span class="n">simulate_dynamics</span><span class="o">=</span><span class="n">LeapFrog</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples</span> <span class="o">=</span> <span class="n">sampler</span><span class="o">.</span><span class="n">generate_sample</span><span class="p">(</span><span class="n">initial_pos</span><span class="o">=</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">12</span><span class="p">,</span> <span class="o">-</span><span class="mi">4</span><span class="p">]),</span> <span class="n">num_adapt</span><span class="o">=</span><span class="mi">10</span><span class="p">,</span>
<span class="gp">... </span>                                  <span class="n">num_samples</span><span class="o">=</span><span class="mi">10</span><span class="p">,</span> <span class="n">stepsize</span><span class="o">=</span><span class="mf">0.1</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples</span>
<span class="go">&lt;generator object NoUTurnSamplerDA.generate_sample at 0x7f4fed46a4c0&gt;</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples_array</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="n">sample</span> <span class="k">for</span> <span class="n">sample</span> <span class="ow">in</span> <span class="n">samples</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples_array</span>
<span class="go">array([[ 11.89963386,  -4.06572636],</span>
<span class="go">       [ 10.3453755 ,  -7.5700289 ],</span>
<span class="go">       [-26.56899659, -15.3920684 ],</span>
<span class="go">       [-29.97143077, -12.0801625 ],</span>
<span class="go">       [-29.97143077, -12.0801625 ],</span>
<span class="go">       [-33.07960829,  -8.90440347],</span>
<span class="go">       [-55.28263496, -17.31718524],</span>
<span class="go">       [-55.28263496, -17.31718524],</span>
<span class="go">       [-56.63440044, -16.03309364],</span>
<span class="go">       [-63.880094  , -19.19981944]])</span>
</pre></div>
</div>
</dd></dl>

<dl class="method">
<dt id="pgmpy.sampling.NUTS.NoUTurnSamplerDA.sample">
<code class="descname">sample</code><span class="sig-paren">(</span><em>initial_pos</em>, <em>num_adapt</em>, <em>num_samples</em>, <em>stepsize=None</em>, <em>return_type='dataframe'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/pgmpy/sampling/NUTS.html#NoUTurnSamplerDA.sample"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#pgmpy.sampling.NUTS.NoUTurnSamplerDA.sample" title="Permalink to this definition">¶</a></dt>
<dd><p>Returns samples using No U Turn Sampler with dual averaging</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><p class="first"><strong>initial_pos: A 1d array like object</strong></p>
<blockquote>
<div><p>Vector representing values of parameter position, the starting
state in the Markov chain.</p>
</div></blockquote>
<p><strong>num_adapt: int</strong></p>
<blockquote>
<div><p>The number of iterations to run the adaptation of stepsize</p>
</div></blockquote>
<p><strong>num_samples: int</strong></p>
<blockquote>
<div><p>Number of samples to be generated</p>
</div></blockquote>
<p><strong>stepsize: float , defaults to None</strong></p>
<blockquote>
<div><p>The stepsize for proposing new values of position and momentum in simulate_dynamics
If None, then will be chosen suitably</p>
</div></blockquote>
<p><strong>return_type: string (dataframe | recarray)</strong></p>
<blockquote>
<div><p>Return type for samples, either of &#8216;dataframe&#8217; or &#8216;recarray&#8217;.
Defaults to &#8216;dataframe&#8217;</p>
</div></blockquote>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first last">sampled: A pandas.DataFrame or a numpy.recarray object depending upon return_type argument</p>
</td>
</tr>
</tbody>
</table>
<p class="rubric">Examples</p>
<div class="highlight-default"><div class="highlight"><pre><span></span><span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.sampling</span> <span class="k">import</span> <span class="n">NoUTurnSamplerDA</span> <span class="k">as</span> <span class="n">NUTSda</span><span class="p">,</span> <span class="n">GradLogPDFGaussian</span><span class="p">,</span> <span class="n">LeapFrog</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">from</span> <span class="nn">pgmpy.factors.continuous</span> <span class="k">import</span> <span class="n">GaussianDistribution</span> <span class="k">as</span> <span class="n">JGD</span>
<span class="gp">&gt;&gt;&gt; </span><span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">mean</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">10</span><span class="p">,</span> <span class="o">-</span><span class="mi">13</span><span class="p">])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">covariance</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="mi">16</span><span class="p">,</span> <span class="o">-</span><span class="mi">3</span><span class="p">],</span> <span class="p">[</span><span class="o">-</span><span class="mi">3</span><span class="p">,</span> <span class="mi">13</span><span class="p">]])</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">model</span> <span class="o">=</span> <span class="n">JGD</span><span class="p">([</span><span class="s1">&#39;x&#39;</span><span class="p">,</span> <span class="s1">&#39;y&#39;</span><span class="p">],</span> <span class="n">mean</span><span class="p">,</span> <span class="n">covariance</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">sampler</span> <span class="o">=</span> <span class="n">NUTSda</span><span class="p">(</span><span class="n">model</span><span class="o">=</span><span class="n">model</span><span class="p">,</span> <span class="n">grad_log_pdf</span><span class="o">=</span><span class="n">GradLogPDFGaussian</span><span class="p">,</span> <span class="n">simulate_dynamics</span><span class="o">=</span><span class="n">LeapFrog</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples</span> <span class="o">=</span> <span class="n">sampler</span><span class="o">.</span><span class="n">sample</span><span class="p">(</span><span class="n">initial_pos</span><span class="o">=</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">12</span><span class="p">,</span> <span class="o">-</span><span class="mi">4</span><span class="p">]),</span> <span class="n">num_adapt</span><span class="o">=</span><span class="mi">10</span><span class="p">,</span> <span class="n">num_samples</span><span class="o">=</span><span class="mi">10</span><span class="p">,</span>
<span class="gp">... </span>                         <span class="n">stepsize</span><span class="o">=</span><span class="mf">0.1</span><span class="p">,</span> <span class="n">return_type</span><span class="o">=</span><span class="s1">&#39;dataframe&#39;</span><span class="p">)</span>
<span class="gp">&gt;&gt;&gt; </span><span class="n">samples</span>
<span class="go">           x          y</span>
<span class="go">0  12.000000  -4.000000</span>
<span class="go">1  11.864821  -3.696109</span>
<span class="go">2  10.546986  -4.892169</span>
<span class="go">3   8.526596 -21.555793</span>
<span class="go">4   8.526596 -21.555793</span>
<span class="go">5  11.343194  -6.353789</span>
<span class="go">6  -1.583269 -12.802931</span>
<span class="go">7  12.411957 -11.704859</span>
<span class="go">8  13.253336 -20.169492</span>
<span class="go">9  11.295901  -7.665058</span>
</pre></div>
</div>
</dd></dl>

</dd></dl>

</div>
</div>


          </div>
        </div>
      </div>
      <div class="clearer"></div>
    </div>
    <div class="related" role="navigation" aria-label="related navigation">
      <h3>Navigation</h3>
      <ul>
        <li class="right" style="margin-right: 10px">
          <a href="genindex.html" title="General Index"
             >index</a></li>
        <li class="right" >
          <a href="py-modindex.html" title="Python Module Index"
             >modules</a> |</li>
        <li class="right" >
          <a href="inference.html" title="Algorithms for Inference"
             >next</a> |</li>
        <li class="right" >
          <a href="factors.html" title="Factor"
             >previous</a> |</li>
        <li class="nav-item nav-item-0"><a href="index.html">pgmpy 0.1.2 documentation</a> &#187;</li> 
      </ul>
    </div>
    <div class="footer" role="contentinfo">
        &#169; Copyright 2016, Ankur Ankan.
      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.5.1.
    </div>
  </body>
</html>