<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc 0.10.0">
<title>silk.config.optimizer API documentation</title>
<meta name="description" content="">
<link rel="preload stylesheet" as="style" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/11.0.1/sanitize.min.css" integrity="sha256-PK9q560IAAa6WVRRh76LtCaI8pjTJ2z11v0miyNNjrs=" crossorigin>
<link rel="preload stylesheet" as="style" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/11.0.1/typography.min.css" integrity="sha256-7l/o7C8jubJiy74VsKTidCy1yBkRtiUGbVkYBylBqUg=" crossorigin>
<!-- NOTE(review): unlike the two links above, this cross-origin stylesheet has
     no `integrity` hash, so it is not SRI-pinned. Add the published SRI hash
     for highlight.js 10.1.1 styles/github.min.css (cdnjs lists it) so all
     third-party CSS is integrity-checked. -->
<link rel="preload stylesheet" as="style" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/10.1.1/styles/github.min.css" crossorigin>
<style>:root{--highlight-color:#fe9}.flex{display:flex !important}body{line-height:1.5em}#content{padding:20px}#sidebar{padding:30px;overflow:hidden}#sidebar > *:last-child{margin-bottom:2cm}.http-server-breadcrumbs{font-size:130%;margin:0 0 15px 0}#footer{font-size:.75em;padding:5px 30px;border-top:1px solid #ddd;text-align:right}#footer p{margin:0 0 0 1em;display:inline-block}#footer p:last-child{margin-right:30px}h1,h2,h3,h4,h5{font-weight:300}h1{font-size:2.5em;line-height:1.1em}h2{font-size:1.75em;margin:1em 0 .50em 0}h3{font-size:1.4em;margin:25px 0 10px 0}h4{margin:0;font-size:105%}h1:target,h2:target,h3:target,h4:target,h5:target,h6:target{background:var(--highlight-color);padding:.2em 0}a{color:#058;text-decoration:none;transition:color .3s ease-in-out}a:hover{color:#e82}.title code{font-weight:bold}h2[id^="header-"]{margin-top:2em}.ident{color:#900}pre code{background:#f8f8f8;font-size:.8em;line-height:1.4em}code{background:#f2f2f1;padding:1px 4px;overflow-wrap:break-word}h1 code{background:transparent}pre{background:#f8f8f8;border:0;border-top:1px solid #ccc;border-bottom:1px solid #ccc;margin:1em 0;padding:1ex}#http-server-module-list{display:flex;flex-flow:column}#http-server-module-list div{display:flex}#http-server-module-list dt{min-width:10%}#http-server-module-list p{margin-top:0}.toc ul,#index{list-style-type:none;margin:0;padding:0}#index code{background:transparent}#index h3{border-bottom:1px solid #ddd}#index ul{padding:0}#index h4{margin-top:.6em;font-weight:bold}@media (min-width:200ex){#index .two-column{column-count:2}}@media (min-width:300ex){#index .two-column{column-count:3}}dl{margin-bottom:2em}dl dl:last-child{margin-bottom:4em}dd{margin:0 0 1em 3em}#header-classes + dl > dd{margin-bottom:3em}dd dd{margin-left:2em}dd p{margin:10px 0}.name{background:#eee;font-weight:bold;font-size:.85em;padding:5px 10px;display:inline-block;min-width:40%}.name:hover{background:#e0e0e0}dt:target .name{background:var(--highlight-color)}.name > 
span:first-child{white-space:nowrap}.name.class > span:nth-child(2){margin-left:.4em}.inherited{color:#999;border-left:5px solid #eee;padding-left:1em}.inheritance em{font-style:normal;font-weight:bold}.desc h2{font-weight:400;font-size:1.25em}.desc h3{font-size:1em}.desc dt code{background:inherit}.source summary,.git-link-div{color:#666;text-align:right;font-weight:400;font-size:.8em;text-transform:uppercase}.source summary > *{white-space:nowrap;cursor:pointer}.git-link{color:inherit;margin-left:1em}.source pre{max-height:500px;overflow:auto;margin:0}.source pre code{font-size:12px;overflow:visible}.hlist{list-style:none}.hlist li{display:inline}.hlist li:after{content:',\2002'}.hlist li:last-child:after{content:none}.hlist .hlist{display:inline;padding-left:1em}img{max-width:100%}td{padding:0 .5em}.admonition{padding:.1em .5em;margin-bottom:1em}.admonition-title{font-weight:bold}.admonition.note,.admonition.info,.admonition.important{background:#aef}.admonition.todo,.admonition.versionadded,.admonition.tip,.admonition.hint{background:#dfd}.admonition.warning,.admonition.versionchanged,.admonition.deprecated{background:#fd4}.admonition.error,.admonition.danger,.admonition.caution{background:lightpink}</style>
<style media="screen and (min-width: 700px)">@media screen and (min-width:700px){#sidebar{width:30%;height:100vh;overflow:auto;position:sticky;top:0}#content{width:70%;max-width:100ch;padding:3em 4em;border-left:1px solid #ddd}pre code{font-size:1em}.item .name{font-size:1em}main{display:flex;flex-direction:row-reverse;justify-content:flex-end}.toc ul ul,#index ul{padding-left:1.5em}.toc > ul > li{margin-top:.5em}}</style>
<style media="print">@media print{#sidebar h1{page-break-before:always}.source{display:none}}@media print{*{background:transparent !important;color:#000 !important;box-shadow:none !important;text-shadow:none !important}a[href]:after{content:" (" attr(href) ")";font-size:90%}a[href][title]:after{content:none}abbr[title]:after{content:" (" attr(title) ")"}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100% !important}@page{margin:0.5cm}p,h2,h3{orphans:3;widows:3}h1,h2,h3,h4,h5,h6{page-break-after:avoid}}</style>
<script defer src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/10.1.1/highlight.min.js" integrity="sha256-Uv3H6lx7dJmRfRvH8TH6kJD1TSK1aFcwgx+mdg3epi8=" crossorigin></script>
<!-- Per the HTML spec, deferred scripts execute (in order) before
     DOMContentLoaded fires, so `hljs` is defined by the time this handler
     runs. `initHighlighting()` is the correct entry point for the
     highlight.js 10.x loaded above (`highlightAll()` only exists in v11+). -->
<script>window.addEventListener('DOMContentLoaded', () => hljs.initHighlighting())</script>
</head>
<body>
<main>
<article id="content">
<header>
<h1 class="title">Module <code>silk.config.optimizer</code></h1>
</header>
<section id="section-intro">
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python"># Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.

# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

from __future__ import annotations

from typing import Any, Iterable, Union, Dict

import torch
from silk.config.core import find_and_ensure_is_subclass
from silk.optimizers.multiple import MultiOptimizer


class Spec:
    &#34;&#34;&#34;Optimizer Specification = (Optimizer Type + Optimizer Arguments - Model Parameters)

    This class is mostly used for creating PyTorch optimizers only using model parameters as arguments.
    This makes the configuration of optimizers easier since it decouples the optimizer&#39;s parameters from the model&#39;s parameters.

    Examples
    --------

    ```python
    # create the optimizer specification
    optim_spec = Spec(torch.optim.Adam, lr=0.001, eps=1e-9, weight_decay=0.01)

    # create the optimizer object and link it to the model&#39;s parameters
    optim = optim_spec(model.parameters())
    ```

    &#34;&#34;&#34;

    ParametersType = Union[torch.nn.Module, Iterable[torch.nn.parameter.Parameter]]

    def __init__(
        self, optimizer_class: Union[str, type], **default_kwargs: Dict[str, Any]
    ) -&gt; None:
        &#34;&#34;&#34;

        Parameters
        ----------
        optimizer_class : Union[str, type]
            Optimizer class or module path to an optimizer class.

        default_kwargs : Dict[str, Any]
            Default arguments to pass to the optimizer during creation.
        &#34;&#34;&#34;
        self._optimizer_class = find_and_ensure_is_subclass(
            optimizer_class, torch.optim.Optimizer
        )
        self._default_kwargs = default_kwargs

    def __call__(
        self, parameters: Spec.ParametersType, **override_kwargs
    ) -&gt; torch.optim.Optimizer:
        &#34;&#34;&#34;Create optimizer object and link it to a model&#39;s parameters.

        Parameters
        ----------
        parameters : ParametersType
            Parameters of the model to optimize (usually gotten using the `nn.Module.parameters()` method).

        Returns
        -------
        torch.optim.Optimizer
            Instantiated optimizer linked to specific model parameters.
        &#34;&#34;&#34;
        kwargs = {**self._default_kwargs, **override_kwargs}
        parameters = (
            parameters.parameters()
            if isinstance(parameters, torch.nn.Module)
            else parameters
        )
        return self._optimizer_class(parameters, **kwargs)


class MultiSpec:
    &#34;&#34;&#34;MultiSpec is a container of multiple Specs, generating one MultiOptimizer optimizer during training.&#34;&#34;&#34;

    def __init__(self, *specs) -&gt; None:
        self._specs = specs

    def __call__(
        self,
        *parameters: Iterable[Spec.ParametersType],
    ) -&gt; torch.optim.Optimizer:
        if len(parameters) != len(self._specs):
            raise RuntimeError(
                f&#34;the number of provided parameters ({len(parameters)}) should match the number of optimizer specs ({len(self._specs)})&#34;
            )
        optimizers = (spec(params) for spec, params in zip(self._specs, parameters))
        return MultiOptimizer(*optimizers)</code></pre>
</details>
</section>
<section>
</section>
<section>
</section>
<section>
</section>
<section>
<h2 class="section-title" id="header-classes">Classes</h2>
<dl>
<dt id="silk.config.optimizer.MultiSpec"><code class="flex name class">
<span>class <span class="ident">MultiSpec</span></span>
<span>(</span><span>*specs)</span>
</code></dt>
<dd>
<div class="desc"><p>MultiSpec is a container of multiple Specs, generating one MultiOptimizer optimizer during training.</p></div>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">class MultiSpec:
    &#34;&#34;&#34;MultiSpec is a container of multiple Specs, generating one MultiOptimizer optimizer during training.&#34;&#34;&#34;

    def __init__(self, *specs) -&gt; None:
        self._specs = specs

    def __call__(
        self,
        *parameters: Iterable[Spec.ParametersType],
    ) -&gt; torch.optim.Optimizer:
        if len(parameters) != len(self._specs):
            raise RuntimeError(
                f&#34;the number of provided parameters ({len(parameters)}) should match the number of optimizer specs ({len(self._specs)})&#34;
            )
        optimizers = (spec(params) for spec, params in zip(self._specs, parameters))
        return MultiOptimizer(*optimizers)</code></pre>
</details>
</dd>
<dt id="silk.config.optimizer.Spec"><code class="flex name class">
<span>class <span class="ident">Spec</span></span>
<span>(</span><span>optimizer_class: Union[str, type], **default_kwargs: Dict[str, Any])</span>
</code></dt>
<dd>
<div class="desc"><p>Optimizer Specification = (Optimizer Type + Optimizer Arguments - Model Parameters)</p>
<p>This class is mostly used for creating PyTorch optimizers only using model parameters as arguments.
This makes the configuration of optimizers easier since it decouples the optimizer's parameters from the model's parameters.</p>
<h2 id="examples">Examples</h2>
<pre><code class="language-python"># create the optimizer specification
optim_spec = Spec(torch.optim.Adam, lr=0.001, eps=1e-9, weight_decay=0.01)

# create the optimizer object and link it to the model's parameters
optim = optim_spec(model.parameters())
</code></pre>
<h2 id="parameters">Parameters</h2>
<dl>
<dt><strong><code>optimizer_class</code></strong> :&ensp;<code>Union[str, type]</code></dt>
<dd>Optimizer class or module path to an optimizer class.</dd>
<dt><strong><code>default_kwargs</code></strong> :&ensp;<code>Dict[str, Any]</code></dt>
<dd>Default arguments to pass to the optimizer during creation.</dd>
</dl></div>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">class Spec:
    &#34;&#34;&#34;Optimizer Specification = (Optimizer Type + Optimizer Arguments - Model Parameters)

    This class is mostly used for creating PyTorch optimizers only using model parameters as arguments.
    This makes the configuration of optimizers easier since it decouples the optimizer&#39;s parameters from the model&#39;s parameters.

    Examples
    --------

    ```python
    # create the optimizer specification
    optim_spec = Spec(torch.optim.Adam, lr=0.001, eps=1e-9, weight_decay=0.01)

    # create the optimizer object and link it to the model&#39;s parameters
    optim = optim_spec(model.parameters())
    ```

    &#34;&#34;&#34;

    ParametersType = Union[torch.nn.Module, Iterable[torch.nn.parameter.Parameter]]

    def __init__(
        self, optimizer_class: Union[str, type], **default_kwargs: Dict[str, Any]
    ) -&gt; None:
        &#34;&#34;&#34;

        Parameters
        ----------
        optimizer_class : Union[str, type]
            Optimizer class or module path to an optimizer class.

        default_kwargs : Dict[str, Any]
            Default arguments to pass to the optimizer during creation.
        &#34;&#34;&#34;
        self._optimizer_class = find_and_ensure_is_subclass(
            optimizer_class, torch.optim.Optimizer
        )
        self._default_kwargs = default_kwargs

    def __call__(
        self, parameters: Spec.ParametersType, **override_kwargs
    ) -&gt; torch.optim.Optimizer:
        &#34;&#34;&#34;Create optimizer object and link it to a model&#39;s parameters.

        Parameters
        ----------
        parameters : ParametersType
            Parameters of the model to optimize (usually gotten using the `nn.Module.parameters()` method).

        Returns
        -------
        torch.optim.Optimizer
            Instantiated optimizer linked to specific model parameters.
        &#34;&#34;&#34;
        kwargs = {**self._default_kwargs, **override_kwargs}
        parameters = (
            parameters.parameters()
            if isinstance(parameters, torch.nn.Module)
            else parameters
        )
        return self._optimizer_class(parameters, **kwargs)</code></pre>
</details>
<h3>Class variables</h3>
<dl>
<dt id="silk.config.optimizer.Spec.ParametersType"><code class="name">var <span class="ident">ParametersType</span></code></dt>
<dd>
<div class="desc"></div>
</dd>
</dl>
</dd>
</dl>
</section>
</article>
<nav id="sidebar">
<h1>Index</h1>
<div class="toc">
<ul></ul>
</div>
<ul id="index">
<li><h3>Super-module</h3>
<ul>
<li><code><a title="silk.config" href="index.html">silk.config</a></code></li>
</ul>
</li>
<li><h3><a href="#header-classes">Classes</a></h3>
<ul>
<li>
<h4><code><a title="silk.config.optimizer.MultiSpec" href="#silk.config.optimizer.MultiSpec">MultiSpec</a></code></h4>
</li>
<li>
<h4><code><a title="silk.config.optimizer.Spec" href="#silk.config.optimizer.Spec">Spec</a></code></h4>
<ul class="">
<li><code><a title="silk.config.optimizer.Spec.ParametersType" href="#silk.config.optimizer.Spec.ParametersType">ParametersType</a></code></li>
</ul>
</li>
</ul>
</li>
</ul>
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.10.0</a>.</p>
</footer>
</body>
</html>